From d967bcdba2cf8d321c34837eceba2b48a62aa42b Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Mon, 2 Oct 2023 17:01:36 +0000 Subject: [PATCH 01/87] Update cromwell version from 86 to 87 --- project/Version.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Version.scala b/project/Version.scala index cfdd3352dc2..4de639d9d41 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -6,7 +6,7 @@ import com.github.sbt.git.SbtGit object Version { // Upcoming release, or current if we're on a master / hotfix branch - val cromwellVersion = "86" + val cromwellVersion = "87" /** * Returns true if this project should be considered a snapshot. From c7b043b7b534454d49561f3e2a2b9cf99a3c661b Mon Sep 17 00:00:00 2001 From: Justin Variath Thomas Date: Wed, 4 Oct 2023 11:48:35 -0400 Subject: [PATCH 02/87] WX-1282 Update failedJobs Query to use `lo_get` instead of INNER JOIN against pg_largeobject (#7228) --- .../slick/tables/MetadataEntryComponent.scala | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala index 1c1225c195d..8c3c0aa27c9 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala @@ -319,22 +319,14 @@ trait MetadataEntryComponent { if(isPostgres) s"${'"'}$identifier${'"'}" else identifier } - def dbMetadataValueColCheckName(isPostgres: Boolean): String = { - if(isPostgres) "obj.data" else "METADATA_VALUE" + def evaluateMetadataValue(isPostgres: Boolean, colName: String): String = { + if(isPostgres) s"convert_from(lo_get(${colName}::oid), 'UTF8')" else colName } def attemptAndIndexSelectStatement(callFqn: String, scatterIndex: String, retryAttempt: String, variablePrefix: String): String = { s"SELECT ${callFqn}, MAX(COALESCE(${scatterIndex}, 0)) as ${variablePrefix}Scatter, MAX(COALESCE(${retryAttempt}, 0)) AS ${variablePrefix}Retry" } - def pgObjectInnerJoinStatement(isPostgres: Boolean, metadataValColName: String): String = { - if(isPostgres) s"INNER JOIN pg_largeobject obj ON me.${metadataValColName} = cast(obj.loid as text)" else "" - } - - def failedTaskGroupByClause(metadataValue: String, callFqn: String): String = { - return s"GROUP BY ${callFqn}, ${metadataValue}" - } - val workflowUuid = dbIdentifierWrapper("WORKFLOW_EXECUTION_UUID", isPostgres) val callFqn = dbIdentifierWrapper("CALL_FQN", isPostgres) val scatterIndex = dbIdentifierWrapper("JOB_SCATTER_INDEX", isPostgres) @@ -358,11 +350,10 @@ trait MetadataEntryComponent { FROM #${metadataEntry} me INNER JOIN #${wmse} wmse ON wmse.#${workflowUuid} = me.#${workflowUuid} - #${pgObjectInnerJoinStatement(isPostgres, metadataValue)} WHERE (wmse.#${rootUuid} = $rootWorkflowId OR wmse.#${workflowUuid} = $rootWorkflowId) - AND (me.#${metadataKey} in ('executionStatus', 'backendStatus') AND #${dbMetadataValueColCheckName(isPostgres)} = 'Failed') - #${failedTaskGroupByClause(dbMetadataValueColCheckName(isPostgres), callFqn)} - HAVING #${dbMetadataValueColCheckName(isPostgres)} = 'Failed' + AND (me.#${metadataKey} in ('executionStatus', 'backendStatus') AND #${evaluateMetadataValue(isPostgres, metadataValue)} = 'Failed') + GROUP BY #${callFqn}, #${metadataValue} + HAVING #${evaluateMetadataValue(isPostgres, metadataValue)} = 'Failed' ) AS failedCalls ON 
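-- lo_get/convert_from above inline the Postgres large-object lookup that previously
-- required an INNER JOIN against pg_largeobject; on non-Postgres databases the
-- METADATA_VALUE column is compared directly.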
me.#${callFqn} = failedCalls.#${callFqn} INNER JOIN ( From 2634f2f34023075f8e0dda3794e3a3ec81f2c096 Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Wed, 4 Oct 2023 14:12:04 -0400 Subject: [PATCH 03/87] [WX-1234] Update Release Process with Docker Instructions (#7231) Co-authored-by: Adam Nichols --- processes/release_processes/README.MD | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/processes/release_processes/README.MD b/processes/release_processes/README.MD index 146c2479620..0d2f4e9475e 100644 --- a/processes/release_processes/README.MD +++ b/processes/release_processes/README.MD @@ -90,6 +90,13 @@ The workflow outputs its status to the console. * Announce release in `#dsp-workflows`, set expectations about when the new version will be available in Terra. * **One business day later,** confirm that [the Homebrew package](https://formulae.brew.sh/formula/cromwell) has the latest version. If it doesn't, start investigation by looking at [Homebrew PR's](https://github.com/Homebrew/homebrew-core/pulls?q=is%3Apr+cromwell). +### Publish Docker Image +* If the release workflow went well, it's time to also publish Docker images for this release. +* `git checkout` the Cromwell hash that was just published (i.e. the one directly BEFORE the "Update Cromwell version from x to x+1" commit that the publish WDL makes). It's important that the image being built uses the exact same code as the .jar files published to github. +* Run `sbt -Dproject.isSnapshot=false -Dproject.isRelease=true dockerBuildAndPush` from your local Cromwell directory. +* Grab a cup of coffee, and verify that all of the new images were pushed successfully. For example, you should now be able to do `docker pull broadinstitute/cromwell:{new version #}` + * The list of images is `cromwell`, `cromiam`, `cromwell-drs-localizer`, and `womtool` + ### How to Deploy Cromwell in CaaS staging and CaaS prod CaaS is "Cromwell as a Service". It is used by a couple of Broad teams (Pipelines and Epigenomics), though the long-term plan is for those teams to migrate to using Terra. From 4289ec1e78a6134b69c9502a282186a37f006886 Mon Sep 17 00:00:00 2001 From: Chris Llanwarne Date: Thu, 5 Oct 2023 17:24:36 -0400 Subject: [PATCH 04/87] WM-2294: Allow role-setter action to run on change (#7233) --- .../src/main/resources/metadata_changesets/set_table_role.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database/migration/src/main/resources/metadata_changesets/set_table_role.xml b/database/migration/src/main/resources/metadata_changesets/set_table_role.xml index 48a56bb6091..67219feeca3 100644 --- a/database/migration/src/main/resources/metadata_changesets/set_table_role.xml +++ b/database/migration/src/main/resources/metadata_changesets/set_table_role.xml @@ -11,7 +11,7 @@ This changeset will be applied whenever the 'sharedCromwellDbRole' property is set. It runs every time to ensure the role is set correctly after all other changesets. 
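        For reference, Liquibase changesets opt into re-running when their body changes via
        the runOnChange attribute, e.g. a declaration shaped like
        <changeSet id="set_table_role" author="..." runOnChange="true"> (illustrative; the
        literal attributes are on the element that follows this comment).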
--> - + From c4e7566ab65daf87b0a2982f0a3fe28d4f6a0846 Mon Sep 17 00:00:00 2001 From: Chris Llanwarne Date: Fri, 6 Oct 2023 10:28:42 -0400 Subject: [PATCH 05/87] WM-2296: Callback should supply fully qualified output names (#7234) --- .../finalization/WorkflowCallbackActor.scala | 2 +- .../WorkflowCallbackActorSpec.scala | 22 +++++++++++-------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala index 516184d023a..c8eb1790365 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala @@ -155,7 +155,7 @@ class WorkflowCallbackActor(serviceRegistryActor: ActorRef, } private def performCallback(workflowId: WorkflowId, callbackUri: URI, terminalState: WorkflowState, outputs: CallOutputs, failures: List[String]): Future[Done] = { - val callbackPostBody = CallbackMessage(workflowId.toString, terminalState.toString, outputs.outputs.map(entry => (entry._1.name, entry._2)), failures) + val callbackPostBody = CallbackMessage(workflowId.toString, terminalState.toString, outputs.outputs.map(entry => (entry._1.identifier.fullyQualifiedName.value, entry._2)), failures) for { entity <- Marshal(callbackPostBody).to[RequestEntity] headers <- makeHeaders diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala index 97479d348ae..383e75458ff 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala @@ -1,27 +1,29 @@ package cromwell.engine.workflow.lifecycle.finalization -import akka.testkit._ import akka.http.scaladsl.client.RequestBuilding.Post import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import akka.http.scaladsl.model.{HttpResponse, StatusCodes} -import akka.testkit.TestProbe +import akka.testkit.{TestProbe, _} import common.mock.MockSugar import cromwell.core.retry.SimpleExponentialBackoff -import org.mockito.Mockito._ -import cromwell.core.{CallOutputs, TestKitSuite, WorkflowFailed, WorkflowId, WorkflowSucceeded} +import cromwell.core._ import cromwell.engine.workflow.lifecycle.finalization.WorkflowCallbackActor.PerformCallbackCommand import cromwell.engine.workflow.lifecycle.finalization.WorkflowCallbackJsonSupport._ import cromwell.services.metadata.MetadataService.PutMetadataAction import cromwell.services.metadata.{MetadataEvent, MetadataKey, MetadataValue} -import cromwell.util.{GracefulShutdownHelper, WomMocks} +import cromwell.util.GracefulShutdownHelper +import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers +import wom.graph.GraphNodePort.GraphNodeOutputPort +import wom.graph.WomIdentifier +import wom.types.WomStringType import wom.values.WomString import java.net.URI import java.time.Instant -import scala.concurrent.duration._ import scala.concurrent.Future +import scala.concurrent.duration._ class WorkflowCallbackActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar { @@ -36,7 +38,9 @@ class 
WorkflowCallbackActorSpec private val deathWatch = TestProbe("deathWatch") private val mockUri = new URI("http://example.com") private val basicConfig = WorkflowCallbackConfig.empty.copy(enabled = true).copy(retryBackoff = SimpleExponentialBackoff(100.millis, 200.millis, 1.1)) - private val basicOutputs = WomMocks.mockOutputExpectations(List("foo" -> WomString("bar")).toMap) + private val basicOutputs = CallOutputs(Map( + GraphNodeOutputPort(WomIdentifier("foo", "wf.foo"), WomStringType, null) -> WomString("bar") + )) private val httpSuccess = Future.successful(HttpResponse.apply(StatusCodes.OK)) private val httpFailure = Future.successful(HttpResponse.apply(StatusCodes.GatewayTimeout)) @@ -64,7 +68,7 @@ class WorkflowCallbackActorSpec val expectedPostBody = CallbackMessage( workflowId.toString, WorkflowSucceeded.toString, - basicOutputs.outputs.map(entry => (entry._1.name, entry._2)), + Map(("wf.foo", WomString("bar"))), List.empty ) val expectedRequest = Post(mockUri.toString, expectedPostBody) @@ -114,7 +118,7 @@ class WorkflowCallbackActorSpec val expectedPostBody = CallbackMessage( workflowId.toString, WorkflowSucceeded.toString, - basicOutputs.outputs.map(entry => (entry._1.name, entry._2)), + Map(("wf.foo", WomString("bar"))), List.empty ) val expectedRequest = Post(mockUri.toString, expectedPostBody) From b6aae148052fdc4773cba679f564eb5947cd8975 Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Wed, 11 Oct 2023 08:59:51 -0400 Subject: [PATCH 06/87] [WX-499] DRS Parallel Downloads Follow-up (#7229) --- .../downloaders/BulkAccessUrlDownloader.scala | 55 ++++++++----------- .../BulkAccessUrlDownloaderSpec.scala | 43 ++++++--------- project/Dependencies.scala | 1 + 3 files changed, 39 insertions(+), 60 deletions(-) diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala index 4668c5072ed..f47265d7730 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala @@ -1,7 +1,7 @@ package drs.localizer.downloaders import cats.effect.{ExitCode, IO} -import cloud.nio.impl.drs.{AccessUrl, DrsResolverResponse} +import cloud.nio.impl.drs.{AccessUrl} import com.typesafe.scalalogging.StrictLogging import java.nio.charset.StandardCharsets @@ -9,6 +9,9 @@ import java.nio.file.{Files, Path, Paths} import scala.sys.process.{Process, ProcessLogger} import scala.util.matching.Regex import drs.localizer.ResolvedDrsUrl +import spray.json.DefaultJsonProtocol.{StringJsonFormat, listFormat, mapFormat} +import spray.json._ + case class GetmResult(returnCode: Int, stderr: String) /** * Getm is a python tool that is used to download resolved DRS uris quickly and in parallel. @@ -40,39 +43,25 @@ case class BulkAccessUrlDownloader(resolvedUrls : List[ResolvedDrsUrl]) extends * @return Filepath of a getm-manifest.json that Getm can use to download multiple files in parallel. 
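   * The manifest written here is a JSON array with one object per download, carrying
   * "url" and "filepath" keys plus "checksum" / "checksum-algorithm" when the DRS
   * response included hashes. An illustrative entry (values are placeholders):
   * {{{
   * [{
   *   "url": "https://my.fake/url123",
   *   "filepath": "path/to/local/download/dest",
   *   "checksum": "abc123",
   *   "checksum-algorithm": "md5"
   * }]
   * }}}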
*/ def generateJsonManifest(resolvedUrls : List[ResolvedDrsUrl]): IO[Path] = { - def toJsonString(drsResponse: DrsResolverResponse, destinationFilepath: String): String = { - //NB: trailing comma is being removed in generateJsonManifest - val accessUrl: AccessUrl = drsResponse.accessUrl.getOrElse(AccessUrl("missing", None)) - drsResponse.hashes.map(_ => { - val checksum = GetmChecksum(drsResponse.hashes, accessUrl).value.getOrElse("error_calculating_checksum") - val checksumAlgorithm = GetmChecksum(drsResponse.hashes, accessUrl).getmAlgorithm - s""" { - | "url" : "${accessUrl.url}", - | "filepath" : "$destinationFilepath", - | "checksum" : "$checksum", - | "checksum-algorithm" : "$checksumAlgorithm" - | }, - |""".stripMargin - }).getOrElse( - s""" { - | "url" : "${accessUrl.url}", - | "filepath" : "$destinationFilepath" - | }, - |""".stripMargin - ) - } - IO { - var jsonString: String = "[\n" - for (resolvedUrl <- resolvedUrls) { - jsonString += toJsonString(resolvedUrl.drsResponse, resolvedUrl.downloadDestinationPath) - } - if(jsonString.contains(',')) { - //remove trailing comma from array elements, but don't crash on empty list. - jsonString = jsonString.substring(0, jsonString.lastIndexOf(",")) - } - jsonString += "\n]" - Files.write(getmManifestPath, jsonString.getBytes(StandardCharsets.UTF_8)) + def resolvedUrlToJsonMap(resolvedUrl: ResolvedDrsUrl): Map[String,String] = { + val accessUrl: AccessUrl = resolvedUrl.drsResponse.accessUrl.getOrElse(AccessUrl("missing", None)) + resolvedUrl.drsResponse.hashes.map{_ => + val checksum = GetmChecksum(resolvedUrl.drsResponse.hashes, accessUrl).value.getOrElse("error_calculating_checksum") + val checksumAlgorithm = GetmChecksum(resolvedUrl.drsResponse.hashes, accessUrl).getmAlgorithm + Map( + ("url", accessUrl.url), + ("filepath", resolvedUrl.downloadDestinationPath), + ("checksum", checksum), + ("checksum-algorithm", checksumAlgorithm) + ) + }.getOrElse(Map( + ("url", accessUrl.url), + ("filepath", resolvedUrl.downloadDestinationPath) + )) } + + val jsonArray: String = resolvedUrls.map(resolved => resolvedUrlToJsonMap(resolved)).toJson.prettyPrint + IO(Files.write(getmManifestPath, jsonArray.getBytes(StandardCharsets.UTF_8))) } def deleteJsonManifest() = { diff --git a/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala b/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala index 7b96ece8d0a..0e0714febb8 100644 --- a/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala +++ b/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala @@ -20,22 +20,17 @@ class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec w val threeElements: List[ResolvedDrsUrl] = List(ex1, ex2, ex3) it should "correctly parse a collection of Access Urls into a manifest.json" in { - val expected: String = - s"""|[ - | { - | "url" : "https://my.fake/url123", - | "filepath" : "path/to/local/download/dest" - | }, - | { - | "url" : "https://my.fake/url1234", - | "filepath" : "path/to/local/download/dest2" - | }, - | { - | "url" : "https://my.fake/url1235", - | "filepath" : "path/to/local/download/dest3" - | } - |]""".stripMargin - + val expected = + """[{ + | "url": "https://my.fake/url123", + | "filepath": "path/to/local/download/dest" + |}, { + | "url": "https://my.fake/url1234", + | "filepath": "path/to/local/download/dest2" + |}, { + | "url": "https://my.fake/url1235", + | 
"filepath": "path/to/local/download/dest3" + |}]""".stripMargin val downloader = BulkAccessUrlDownloader(threeElements) val filepath: IO[Path] = downloader.generateJsonManifest(threeElements) @@ -45,11 +40,7 @@ class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec w } it should "properly construct empty JSON array from empty list." in { - val expected: String = - s"""|[ - | - |]""".stripMargin - + val expected: String = "[]" val downloader = BulkAccessUrlDownloader(emptyList) val filepath: IO[Path] = downloader.generateJsonManifest(emptyList) val source = scala.io.Source.fromFile(filepath.unsafeRunSync().toString) @@ -59,12 +50,10 @@ class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec w it should "properly construct JSON array from single element list." in { val expected: String = - s"""|[ - | { - | "url" : "https://my.fake/url123", - | "filepath" : "path/to/local/download/dest" - | } - |]""".stripMargin + s"""|[{ + | "url": "https://my.fake/url123", + | "filepath": "path/to/local/download/dest" + |}]""".stripMargin val downloader = BulkAccessUrlDownloader(oneElement) val filepath: IO[Path] = downloader.generateJsonManifest(oneElement) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index b2e94cc7470..2bc4ec34f00 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -652,6 +652,7 @@ object Dependencies { "com.softwaremill.sttp" %% "circe" % sttpV, "com.github.scopt" %% "scopt" % scoptV, "org.apache.commons" % "commons-csv" % commonsCsvV, + "io.spray" %% "spray-json" % sprayJsonV, ) ++ circeDependencies ++ catsDependencies ++ slf4jBindingDependencies ++ languageFactoryDependencies ++ azureDependencies val allProjectDependencies: List[ModuleID] = From aeacb3a41426c7971e5b5675d03bec4727d7bb12 Mon Sep 17 00:00:00 2001 From: dspeck1 Date: Fri, 13 Oct 2023 12:29:30 -0500 Subject: [PATCH 07/87] WX-1318 gcp batch: Add GPU driver install (#7235) Co-authored-by: Adam Nichols --- ...cpBatchAsyncBackendJobExecutionActor.scala | 5 -- .../api/GcpBatchRequestFactoryImpl.scala | 53 +++++++++---------- .../models/GcpBatchRuntimeAttributes.scala | 9 +++- .../google/batch/runnable/UserRunnable.scala | 3 -- 4 files changed, 33 insertions(+), 37 deletions(-) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala index b2b10d92afb..ae228ad503b 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala @@ -829,12 +829,7 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar _ <- evaluateRuntimeAttributes _ <- uploadScriptFile() customLabels <- Future.fromTry(GcpLabel.fromWorkflowOptions(workflowDescriptor.workflowOptions)) - _ = customLabels.foreach(x => println(s"ZZZ Custom Labels - $x")) batchParameters <- generateInputOutputParameters - _ = batchParameters.fileInputParameters.foreach(x => println(s"ZZZ File InputParameters - $x")) - _ = batchParameters.jobInputParameters.foreach(x => println(s"ZZZ InputParameters - $x")) - _ = batchParameters.fileOutputParameters.foreach(x => println(s"ZZZ File OutputParameters - $x")) - _ = 
batchParameters.jobOutputParameters.foreach(x => println(s"ZZZ OutputParameters - $x")) createParameters = createBatchParameters(batchParameters, customLabels) drsLocalizationManifestCloudPath = jobPaths.callExecutionRoot / GcpBatchJobPaths.DrsLocalizationManifestName _ <- uploadDrsLocalizationManifest(createParameters, drsLocalizationManifestCloudPath) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala index 435992b8cd8..9c482d852d1 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala @@ -1,17 +1,14 @@ package cromwell.backend.google.batch.api -import com.google.cloud.batch.v1.AllocationPolicy.Accelerator -import com.google.cloud.batch.v1.{DeleteJobRequest, GetJobRequest, JobName} -import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.GcsTransferConfiguration -import cromwell.backend.google.batch.models.GcpBatchRequest -import cromwell.backend.google.batch.runnable._ -import cromwell.backend.google.batch.util.BatchUtilityConversions -import com.google.cloud.batch.v1.AllocationPolicy.{AttachedDisk, InstancePolicy, InstancePolicyOrTemplate, LocationPolicy, NetworkInterface, NetworkPolicy, ProvisioningModel} +import com.google.cloud.batch.v1.AllocationPolicy._ import com.google.cloud.batch.v1.LogsPolicy.Destination -import com.google.cloud.batch.v1.{AllocationPolicy, ComputeResource, CreateJobRequest, Job, LogsPolicy, Runnable, ServiceAccount, TaskGroup, TaskSpec, Volume} +import com.google.cloud.batch.v1.{AllocationPolicy, ComputeResource, CreateJobRequest, DeleteJobRequest, GetJobRequest, Job, JobName, LogsPolicy, Runnable, ServiceAccount, TaskGroup, TaskSpec, Volume} import com.google.protobuf.Duration import cromwell.backend.google.batch.io.GcpBatchAttachedDisk -import cromwell.backend.google.batch.models.VpcAndSubnetworkProjectLabelValues +import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.GcsTransferConfiguration +import cromwell.backend.google.batch.models.{GcpBatchRequest, VpcAndSubnetworkProjectLabelValues} +import cromwell.backend.google.batch.runnable._ +import cromwell.backend.google.batch.util.BatchUtilityConversions import scala.jdk.CollectionConverters._ @@ -61,10 +58,11 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe .build } - private def createInstancePolicy(cpuPlatform: String, spotModel: ProvisioningModel, accelerators: Option[Accelerator.Builder], attachedDisks: List[AttachedDisk]) = { + private def createInstancePolicy(cpuPlatform: String, spotModel: ProvisioningModel, accelerators: Option[Accelerator.Builder], attachedDisks: List[AttachedDisk]): InstancePolicy.Builder = { //set GPU count to 0 if not included in workflow - val gpuAccelerators = accelerators.getOrElse(Accelerator.newBuilder.setCount(0).setType("")) + val gpuAccelerators = accelerators.getOrElse(Accelerator.newBuilder.setCount(0).setType("")) // TODO: Driver version + val instancePolicy = InstancePolicy .newBuilder .setProvisioningModel(spotModel) @@ -83,7 +81,6 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe } - private def createNetworkPolicy(networkInterface: NetworkInterface): NetworkPolicy = { NetworkPolicy 
.newBuilder @@ -113,8 +110,9 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe } - private def createAllocationPolicy(data: GcpBatchRequest, locationPolicy: LocationPolicy, instancePolicy: InstancePolicy, networkPolicy: NetworkPolicy, serviceAccount: ServiceAccount) = { - AllocationPolicy + private def createAllocationPolicy(data: GcpBatchRequest, locationPolicy: LocationPolicy, instancePolicy: InstancePolicy, networkPolicy: NetworkPolicy, serviceAccount: ServiceAccount, accelerators: Option[Accelerator.Builder]) = { + + val allocationPolicy = AllocationPolicy .newBuilder .setLocation(locationPolicy) .setNetwork(networkPolicy) @@ -122,13 +120,19 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe .putLabels("goog-batch-worker", "true") .putAllLabels((data.createParameters.googleLabels.map(label => label.key -> label.value).toMap.asJava)) .setServiceAccount(serviceAccount) - .addInstances(InstancePolicyOrTemplate - .newBuilder - .setPolicy(instancePolicy) - .build) - .build + .buildPartial() + + val gpuAccelerators = accelerators.getOrElse(Accelerator.newBuilder.setCount(0).setType("")) + + //add GPUs if GPU count is greater than or equal to 1 + if (gpuAccelerators.getCount >= 1) { + allocationPolicy.toBuilder.addInstances(InstancePolicyOrTemplate.newBuilder.setPolicy(instancePolicy).setInstallGpuDrivers(true).build) + } else { + allocationPolicy.toBuilder.addInstances(InstancePolicyOrTemplate.newBuilder.setPolicy(instancePolicy).build) + } } + override def submitRequest(data: GcpBatchRequest): CreateJobRequest = { val batchAttributes = data.gcpBatchParameters.batchAttributes @@ -160,10 +164,6 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe // Batch defaults to 1 task val taskCount: Long = 1 - println(f"command script container path ${data.createParameters.commandScriptContainerPath}") - println(f"cloud workflow root ${data.createParameters.cloudWorkflowRoot}") - println(f"all parameters:\n ${data.createParameters.allParameters.mkString("\n")}") - // parse preemption value and set value for Spot. 
Spot is replacement for preemptible val spotModel = toProvisioningModel(runtimeAttributes.preemptible) @@ -205,11 +205,11 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe val taskGroup: TaskGroup = createTaskGroup(taskCount, taskSpec) val instancePolicy = createInstancePolicy(cpuPlatform, spotModel, accelerators, allDisks) val locationPolicy = LocationPolicy.newBuilder.addAllowedLocations(zones).build - val allocationPolicy = createAllocationPolicy(data, locationPolicy, instancePolicy.build, networkPolicy, gcpSa) + val allocationPolicy = createAllocationPolicy(data, locationPolicy, instancePolicy.build, networkPolicy, gcpSa, accelerators) val job = Job .newBuilder .addTaskGroups(taskGroup) - .setAllocationPolicy(allocationPolicy) + .setAllocationPolicy(allocationPolicy.build()) .putLabels("submitter", "cromwell") // label to signify job submitted by cromwell for larger tracking purposes within GCP batch .putLabels("goog-batch-worker", "true") .putAllLabels((data.createParameters.googleLabels.map(label => label.key -> label.value).toMap.asJava)) @@ -218,9 +218,6 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe .setDestination(Destination.CLOUD_LOGGING) .build) - println(f"job shell ${data.createParameters.jobShell}") - println(f"script container path ${data.createParameters.commandScriptContainerPath}") - println(f"labels ${data.createParameters.googleLabels}") CreateJobRequest .newBuilder diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala index 9fe3327f35d..c9dc62b0cc7 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala @@ -19,6 +19,8 @@ import wom.values.{WomArray, WomBoolean, WomInteger, WomString, WomValue} object GpuResource { + val DefaultNvidiaDriverVersion = "418.87.00" + final case class GpuType(name: String) { override def toString: String = name } @@ -99,6 +101,9 @@ object GcpBatchRuntimeAttributes { private def cpuPlatformValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = cpuPlatformValidationInstance private def gpuTypeValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[GpuType] = GpuTypeValidation.optional + val GpuDriverVersionKey = "nvidiaDriverVersion" + private def gpuDriverValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = new StringRuntimeAttributesValidation(GpuDriverVersionKey).optional + private def gpuCountValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optional private def gpuMinValidation(runtimeConfig: Option[Config]):OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optionalMin @@ -159,6 +164,7 @@ object GcpBatchRuntimeAttributes { StandardValidatedRuntimeAttributesBuilder.default(runtimeConfig).withValidation( gpuCountValidation(runtimeConfig), gpuTypeValidation(runtimeConfig), + gpuDriverValidation(runtimeConfig), cpuValidation(runtimeConfig), cpuPlatformValidation(runtimeConfig), cpuMinValidation(runtimeConfig), @@ -189,8 +195,9 @@ object GcpBatchRuntimeAttributes { 
.extractOption(gpuTypeValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) lazy val gpuCount: Option[Int Refined Positive] = RuntimeAttributesValidation .extractOption(gpuCountValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) + lazy val gpuDriver: Option[String] = RuntimeAttributesValidation.extractOption(gpuDriverValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) - val gpuResource: Option[GpuResource] = if (gpuType.isDefined || gpuCount.isDefined) { + val gpuResource: Option[GpuResource] = if (gpuType.isDefined || gpuCount.isDefined || gpuDriver.isDefined) { Option(GpuResource(gpuType.getOrElse(GpuType.DefaultGpuType), gpuCount .getOrElse(GpuType.DefaultGpuCount))) } else { diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala index b7f5a026476..d2d499b3127 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala @@ -8,9 +8,6 @@ trait UserRunnable { def userRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume]): List[Runnable] = { - println(f"job shell ${createParameters.jobShell}") - println(f"script container path ${createParameters.commandScriptContainerPath}") - val userRunnable = RunnableBuilder.userRunnable( docker = createParameters.dockerImage, scriptContainerPath = createParameters.commandScriptContainerPath.pathAsString, From c9d4ce4287a5da9bff097244b664135171021f7e Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Fri, 13 Oct 2023 16:02:33 -0400 Subject: [PATCH 08/87] WX-1232 Include useful workflow ids in TES tags (#7221) --- .../main/scala/cromwell/backend/backend.scala | 1 + .../cromwell/backend/impl/tes/TesTask.scala | 19 +++-- .../backend/impl/tes/TesTaskSpec.scala | 70 ++++++++++++++++++- 3 files changed, 83 insertions(+), 7 deletions(-) diff --git a/backend/src/main/scala/cromwell/backend/backend.scala b/backend/src/main/scala/cromwell/backend/backend.scala index ea413c10367..33d16f34b5a 100644 --- a/backend/src/main/scala/cromwell/backend/backend.scala +++ b/backend/src/main/scala/cromwell/backend/backend.scala @@ -84,6 +84,7 @@ case class BackendWorkflowDescriptor(id: WorkflowId, val rootWorkflow = breadCrumbs.headOption.map(_.callable).getOrElse(callable) val possiblyNotRootWorkflowId = id.toPossiblyNotRoot val rootWorkflowId = breadCrumbs.headOption.map(_.id).getOrElse(id).toRoot + val possibleParentWorkflowId = breadCrumbs.lastOption.map(_.id) override def toString: String = s"[BackendWorkflowDescriptor id=${id.shortString} workflowName=${callable.name}]" def getWorkflowOption(key: WorkflowOption) = workflowOptions.get(key).toOption diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala index a345e87ebf7..4a6e77641e3 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala @@ -2,14 +2,13 @@ package cromwell.backend.impl.tes import common.collections.EnhancedCollections._ import common.util.StringUtil._ import cromwell.backend.impl.tes.OutputMode.OutputMode -import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptor} +import 
cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptor, BackendWorkflowDescriptor} import cromwell.core.logging.JobLogger import cromwell.core.path.{DefaultPathBuilder, Path} import net.ceedubs.ficus.Ficus._ import scala.language.postfixOps import scala.util.Try - import wdl.draft2.model.FullyQualifiedName import wdl4s.parser.MemoryUnit import wom.InstantiatedCommand @@ -244,6 +243,8 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, preferedWorkflowExecutionIdentity, Option(tesPaths.tesTaskRoot) ) + + val tags: Map[String, Option[String]] = TesTask.makeTags(jobDescriptor.workflowDescriptor) } object TesTask { @@ -287,6 +288,16 @@ object TesTask { ) } + def makeTags(workflowDescriptor: BackendWorkflowDescriptor): Map[String, Option[String]] = { + // In addition to passing through any workflow labels, include relevant workflow ids as tags. + val baseTags = workflowDescriptor.customLabels.asMap.map { case (k, v) => (k, Option(v)) } + baseTags ++ Map( + "workflow_id" -> Option(workflowDescriptor.id.toString), + "root_workflow_id" -> Option(workflowDescriptor.rootWorkflowId.toString), + "parent_workflow_id" -> workflowDescriptor.possibleParentWorkflowId.map(_.toString) + ) + } + def makeTask(tesTask: TesTask): Task = { Task( id = None, @@ -298,7 +309,7 @@ object TesTask { resources = Option(tesTask.resources), executors = tesTask.executors, volumes = None, - tags = Option(tesTask.jobDescriptor.workflowDescriptor.customLabels.asMap), + tags = Option(tesTask.tags), logs = None ) } @@ -314,7 +325,7 @@ final case class Task(id: Option[String], resources: Option[Resources], executors: Seq[Executor], volumes: Option[Seq[String]], - tags: Option[Map[String, String]], + tags: Option[Map[String, Option[String]]], logs: Option[Seq[TaskLog]]) final case class Executor(image: String, diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala index 25a8f55f682..b7887b29944 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala @@ -3,8 +3,8 @@ package cromwell.backend.impl.tes import common.assertion.CromwellTimeoutSpec import common.mock.MockSugar import cromwell.backend.validation.ContinueOnReturnCodeSet -import cromwell.backend.{BackendSpec, TestConfig} -import cromwell.core.WorkflowOptions +import cromwell.backend.{BackendSpec, BackendWorkflowDescriptor, TestConfig} +import cromwell.core.{RootWorkflowId, WorkflowId, WorkflowOptions} import cromwell.core.labels.Labels import cromwell.core.logging.JobLogger import cromwell.core.path.DefaultPathBuilder @@ -13,6 +13,8 @@ import org.scalatest.matchers.should.Matchers import spray.json.{JsObject, JsValue} import wom.InstantiatedCommand +import java.util.UUID + class TesTaskSpec extends AnyFlatSpec with CromwellTimeoutSpec @@ -117,6 +119,42 @@ class TesTaskSpec val emptyWorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])) val workflowDescriptor = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld, labels = Labels("foo" -> "bar")) + val jobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, + Map.empty, + emptyWorkflowOptions, + Set.empty) + val tesPaths = TesJobPaths(jobDescriptor.key, + jobDescriptor.workflowDescriptor, + TestConfig.emptyConfig) + val tesTask = TesTask(jobDescriptor, + TestConfig.emptyBackendConfigDescriptor, + jobLogger, + tesPaths, + 
runtimeAttributes, + DefaultPathBuilder.build("").get, + "", + InstantiatedCommand("command"), + "", + Map.empty, + "", + OutputMode.ROOT) + + val task = TesTask.makeTask(tesTask) + + task.tags shouldBe Option( + Map( + "foo" -> Option("bar"), + "workflow_id" -> Option(workflowDescriptor.id.toString), + "root_workflow_id" -> Option(workflowDescriptor.id.toString), + "parent_workflow_id" -> None + ) + ) + } + + it should "put workflow ids in tags" in { + val jobLogger = mock[JobLogger] + val emptyWorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])) + val workflowDescriptor = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld) val jobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, Map.empty, emptyWorkflowOptions, @@ -139,6 +177,32 @@ class TesTaskSpec val task = TesTask.makeTask(tesTask) - task.tags shouldBe Option(Map("foo" -> "bar")) + task.tags shouldBe Option( + Map( + "workflow_id" -> Option(workflowDescriptor.id.toString), + "root_workflow_id" -> Option(workflowDescriptor.id.toString), + "parent_workflow_id" -> None + ) + ) + } + + it should "put non-root workflow ids in tags" in { + // Doing this test with mocks rather than real job/workflow descriptors as above because + // getting the subworkflow structure build was really hard. + val rootWorkflowId = RootWorkflowId(UUID.randomUUID()) + val subWorkflowId = WorkflowId(UUID.randomUUID()) + val subSubWorkflowId = WorkflowId(UUID.randomUUID()) + + val workflowDescriptor = mock[BackendWorkflowDescriptor] + workflowDescriptor.customLabels returns Labels.empty + workflowDescriptor.id returns subSubWorkflowId + workflowDescriptor.rootWorkflowId returns rootWorkflowId + workflowDescriptor.possibleParentWorkflowId returns Option(subWorkflowId) + + TesTask.makeTags(workflowDescriptor) shouldBe Map( + "workflow_id" -> Option(subSubWorkflowId.toString), + "root_workflow_id" -> Option(rootWorkflowId.toString), + "parent_workflow_id" -> Option(subWorkflowId.toString) + ) } } From 4d16f01c9d969e726a9efc6debd19dcd968bdf01 Mon Sep 17 00:00:00 2001 From: Justin Variath Thomas Date: Thu, 26 Oct 2023 15:33:33 -0400 Subject: [PATCH 09/87] WX-1307 Azure E2E test (#7239) Co-authored-by: Janet Gainer-Dewar --- .github/workflows/azure_e2e_run_workflow.yml | 136 +++++++++++++++++++ .gitignore | 6 + 2 files changed, 142 insertions(+) create mode 100644 .github/workflows/azure_e2e_run_workflow.yml diff --git a/.github/workflows/azure_e2e_run_workflow.yml b/.github/workflows/azure_e2e_run_workflow.yml new file mode 100644 index 00000000000..d2df9b38441 --- /dev/null +++ b/.github/workflows/azure_e2e_run_workflow.yml @@ -0,0 +1,136 @@ +name: 'Azure e2e - Run Workflow' +on: + schedule: + - cron: '0 16 * * *' # UTC 4pm, EST 11am, EDT 12pm + workflow_dispatch: + inputs: + target-branch: + description: 'Branch name of dsp-reusable-workflows repo to run tests on' + required: true + default: 'main' + type: string + owner-subject: + description: 'Owner of billing project' + required: true + default: 'hermione.owner@quality.firecloud.org' + type: string + service-account: + description: 'Email address or unique identifier of the Google Cloud service account for which to generate credentials' + required: true + default: 'firecloud-qa@broad-dsde-qa.iam.gserviceaccount.com' + type: string + +env: + BROADBOT_TOKEN: '${{ secrets.BROADBOT_GITHUB_TOKEN }}' # github token for access to kick off a job in the private repo + RUN_NAME_SUFFIX: '${{ github.event.repository.name }}-${{ github.run_id }}-${{ github.run_attempt }}' + +jobs: + + # 
This job provisions useful parameters for e2e tests + params-gen: + runs-on: ubuntu-latest + permissions: + contents: 'read' + id-token: 'write' + outputs: + project-name: ${{ steps.gen.outputs.project_name }} + bee-name: '${{ github.event.repository.name }}-${{ github.run_id }}-${{ github.run_attempt}}-dev' + steps: + - name: Generate a random billing project name + id: 'gen' + run: | + project_name=$(echo "tmp-billing-project-$(uuidgen)" | cut -c -30) + echo "project_name=${project_name}" >> $GITHUB_OUTPUT + + create-bee-workflow: + runs-on: ubuntu-latest + needs: [params-gen] + permissions: + contents: 'read' + id-token: 'write' + steps: + - name: Dispatch to terra-github-workflows + uses: broadinstitute/workflow-dispatch@v3 + with: + workflow: bee-create + repo: broadinstitute/terra-github-workflows + ref: refs/heads/main + token: ${{ env.BROADBOT_TOKEN }} + # NOTE: Opting to use "prod" instead of custom tag since I specifically want to test against the current prod state + # NOTE: For testing/development purposes I'm using dev + inputs: '{ "bee-name": "${{ needs.params-gen.outputs.bee-name }}", "version-template": "dev", "bee-template-name": "rawls-e2e-azure-tests"}' + + create-and-attach-billing-project-to-landing-zone-workflow: + runs-on: ubuntu-latest + needs: [create-bee-workflow, params-gen] + steps: + - name: dispatch to terra-github-workflows + uses: broadinstitute/workflow-dispatch@v3 + with: + workflow: attach-billing-project-to-landing-zone.yaml + repo: broadinstitute/terra-github-workflows + ref: refs/heads/main + token: ${{ env.BROADBOT_TOKEN }} + inputs: '{ + "run-name": "attach-billing-project-to-landing-zone-${{ env.RUN_NAME_SUFFIX }}", + "bee-name": "${{ needs.params-gen.outputs.bee-name }}", + "billing-project": "${{ needs.params-gen.outputs.project-name }}", + "billing-project-creator": "${{ inputs.owner-subject }}", + "service-account": "${{inputs.service-account}}" }' + + run-cromwell-az-e2e: + needs: [params-gen, create-and-attach-billing-project-to-landing-zone-workflow] + permissions: + contents: read + id-token: write + uses: "broadinstitute/dsp-reusable-workflows/.github/workflows/cromwell-az-e2e-test.yaml@main" + with: + branch: "${{ inputs.target-branch }}" + bee-name: "${{ needs.params-gen.outputs.bee-name }}" + billing-project-name: "${{ needs.params-gen.outputs.project-name }}" + + delete-billing-project-v2-from-bee-workflow: + continue-on-error: true + runs-on: ubuntu-latest + needs: [run-cromwell-az-e2e, create-and-attach-billing-project-to-landing-zone-workflow, params-gen] + if: always() + steps: + - name: dispatch to terra-github-workflows + uses: broadinstitute/workflow-dispatch@v3 + with: + workflow: .github/workflows/delete-billing-project-v2-from-bee.yaml + repo: broadinstitute/terra-github-workflows + ref: refs/heads/main + token: ${{ env.BROADBOT_TOKEN }} + inputs: '{ + "run-name": "delete-billing-project-v2-from-bee-${{ env.RUN_NAME_SUFFIX }}", + "bee-name": "${{ needs.params-gen.outputs.bee-name }}", + "billing-project": "${{ needs.params-gen.outputs.project-name }}", + "billing-project-owner": "${{ inputs.owner-subject }}", + "service-account": "${{ inputs.service-account }}", + "silent-on-failure": "false" }' + + destroy-bee-workflow: + runs-on: ubuntu-latest + needs: [params-gen, create-bee-workflow, delete-billing-project-v2-from-bee-workflow] + if: always() + permissions: + contents: 'read' + id-token: 'write' + steps: + - name: dispatch to terra-github-workflows + uses: broadinstitute/workflow-dispatch@v3 + with: + workflow: 
bee-destroy.yaml + repo: broadinstitute/terra-github-workflows + ref: refs/heads/main + token: ${{ env.BROADBOT_TOKEN }} + inputs: '{ "bee-name": "${{ needs.params-gen.outputs.bee-name }}" }' + wait-for-completion: true + + report-workflow: + uses: broadinstitute/sherlock/.github/workflows/client-report-workflow.yaml@main + with: + notify-slack-channels-upon-workflow-failure: "#cromwell_jenkins_ci_errors" + permissions: + id-token: write diff --git a/.gitignore b/.gitignore index a5b72f6b263..250b6aa3c16 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,9 @@ console_output.txt expected.json run_mode_metadata.json +#bloop files +/.bloop + # custom config cromwell-executions cromwell-test-executions @@ -55,3 +58,6 @@ tesk_application.conf **/venv/ exome_germline_single_sample_v1.3/ **/*.pyc + +# GHA credentials +gha-creds-*.json From 5bc7ac314d1320e0a68c4fae7920993a711a80fe Mon Sep 17 00:00:00 2001 From: dspeck1 Date: Wed, 1 Nov 2023 08:42:19 -0500 Subject: [PATCH 10/87] WX-1340 GCP Batch: Mount with extra colon issue and multiple zones support (#7240) Co-authored-by: Adam Nichols --- .../google/batch/runnable/RunnableBuilder.scala | 11 ++++++++--- .../google/batch/util/BatchUtilityConversions.scala | 3 +-- .../google/batch/runnable/RunnableBuilderSpec.scala | 4 ++-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala index c68e3dbdd29..7f35528fad3 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala @@ -4,7 +4,6 @@ import com.google.cloud.batch.v1.Runnable.Container import com.google.cloud.batch.v1.{Environment,Runnable, Volume} import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.GcsTransferConfiguration import cromwell.backend.google.batch.models.{BatchParameter, GcpBatchInput, GcpBatchOutput} -//import cromwell.backend.google.batch.runnable.RunnableLabels._ import cromwell.core.path.Path import mouse.all.anySyntaxMouse @@ -60,8 +59,14 @@ object RunnableBuilder { def withVolumes(volumes: List[Volume]): Runnable.Builder = { val formattedVolumes = volumes.map { volume => val mountPath = volume.getMountPath - val mountOptions = Option(volume.getMountOptionsList).map(_.asScala.toList).getOrElse(List.empty) - s"$mountPath:$mountPath:${mountOptions.mkString(",")}" + + val mountOptions = Option(volume.getMountOptionsList) + .map(_.asScala) + .filter(_.nonEmpty) + .map(_.mkString(":", ",", "")) + .getOrElse("") + + s"$mountPath:$mountPath$mountOptions" } builder.setContainer( diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala index 87b4762f5a5..612c95651e9 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala @@ -8,10 +8,9 @@ import wom.format.MemorySize trait BatchUtilityConversions { - // construct zones string def toZonesPath(zones: Vector[String]): String = { - "zones/" + zones.mkString(",") + 
zones.map(zone => "zones/" + zone).mkString(" ") } // lowercase text to match gcp label requirements diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala index d0e4eac0b87..04d2c7d43f5 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala @@ -76,7 +76,7 @@ class RunnableBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc runnable.getContainer.getCommandsList.asScala shouldBe expectedCommand runnable.getAlwaysRun shouldBe true runnable.getLabelsMap shouldBe memoryRetryRunnableExpectedLabels - runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => s"${v.getMountPath}:${v.getMountPath}:") + runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => s"${v.getMountPath}:${v.getMountPath}") } it should "return cloud sdk runnable for multiple keys in retry-with-double-memory" in { @@ -89,6 +89,6 @@ class RunnableBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc runnable.getContainer.getCommandsList.asScala shouldBe expectedCommand runnable.getAlwaysRun shouldBe true runnable.getLabelsMap shouldBe memoryRetryRunnableExpectedLabels - runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => s"${v.getMountPath}:${v.getMountPath}:") + runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => s"${v.getMountPath}:${v.getMountPath}") } } \ No newline at end of file From e880371efd2f5f85bc4c9ef99b3cec78a3e94447 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Wed, 1 Nov 2023 20:40:40 -0400 Subject: [PATCH 11/87] WX-1339 Make `throwExceptionOnExecuteError` false for PAPI aborts (#7245) --- .../pipelines/v2beta/api/request/AbortRequestHandler.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala index 974c8710766..b4d9421b451 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala @@ -35,7 +35,7 @@ trait AbortRequestHandler extends LazyLogging { this: RequestHandler => // The Genomics batch endpoint doesn't seem to be able to handle abort requests on V2 operations at the moment // For now, don't batch the request and execute it on its own def handleRequest(abortQuery: PAPIAbortRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] = { - Future(abortQuery.httpRequest.execute()) map { + Future(abortQuery.httpRequest.setThrowExceptionOnExecuteError(false).execute()) map { case response if response.isSuccessStatusCode => abortQuery.requester ! 
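      // setThrowExceptionOnExecuteError(false) above means a non-2xx response comes back as a
      // plain HttpResponse instead of an exception, so it can be pattern-matched on status
      // here rather than failing the Future.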
PAPIAbortRequestSuccessful(abortQuery.jobId.jobId) Success(()) From 74de9e33d1c35885db1d084a4b2b1ac22c144796 Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Fri, 3 Nov 2023 09:58:03 -0400 Subject: [PATCH 12/87] WX-1338 Fix cron invocation of E2E test (#7244) --- .github/workflows/azure_e2e_run_workflow.yml | 23 ++------------------ 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/.github/workflows/azure_e2e_run_workflow.yml b/.github/workflows/azure_e2e_run_workflow.yml index d2df9b38441..5071fc37e8b 100644 --- a/.github/workflows/azure_e2e_run_workflow.yml +++ b/.github/workflows/azure_e2e_run_workflow.yml @@ -3,22 +3,6 @@ on: schedule: - cron: '0 16 * * *' # UTC 4pm, EST 11am, EDT 12pm workflow_dispatch: - inputs: - target-branch: - description: 'Branch name of dsp-reusable-workflows repo to run tests on' - required: true - default: 'main' - type: string - owner-subject: - description: 'Owner of billing project' - required: true - default: 'hermione.owner@quality.firecloud.org' - type: string - service-account: - description: 'Email address or unique identifier of the Google Cloud service account for which to generate credentials' - required: true - default: 'firecloud-qa@broad-dsde-qa.iam.gserviceaccount.com' - type: string env: BROADBOT_TOKEN: '${{ secrets.BROADBOT_GITHUB_TOKEN }}' # github token for access to kick off a job in the private repo @@ -75,8 +59,7 @@ jobs: "run-name": "attach-billing-project-to-landing-zone-${{ env.RUN_NAME_SUFFIX }}", "bee-name": "${{ needs.params-gen.outputs.bee-name }}", "billing-project": "${{ needs.params-gen.outputs.project-name }}", - "billing-project-creator": "${{ inputs.owner-subject }}", - "service-account": "${{inputs.service-account}}" }' + "service-account": "firecloud-qa@broad-dsde-qa.iam.gserviceaccount.com" }' run-cromwell-az-e2e: needs: [params-gen, create-and-attach-billing-project-to-landing-zone-workflow] @@ -85,7 +68,6 @@ jobs: id-token: write uses: "broadinstitute/dsp-reusable-workflows/.github/workflows/cromwell-az-e2e-test.yaml@main" with: - branch: "${{ inputs.target-branch }}" bee-name: "${{ needs.params-gen.outputs.bee-name }}" billing-project-name: "${{ needs.params-gen.outputs.project-name }}" @@ -106,8 +88,7 @@ jobs: "run-name": "delete-billing-project-v2-from-bee-${{ env.RUN_NAME_SUFFIX }}", "bee-name": "${{ needs.params-gen.outputs.bee-name }}", "billing-project": "${{ needs.params-gen.outputs.project-name }}", - "billing-project-owner": "${{ inputs.owner-subject }}", - "service-account": "${{ inputs.service-account }}", + "service-account": "firecloud-qa@broad-dsde-qa.iam.gserviceaccount.com", "silent-on-failure": "false" }' destroy-bee-workflow: From 110ca3e3c1bcd205eb290b95dfc986581f6c5e3c Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Fri, 3 Nov 2023 13:06:19 -0400 Subject: [PATCH 13/87] WX-1341 Better logging when a runner stops picking up new workflows (#7246) --- .../engine/workflow/WorkflowManagerActor.scala | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala index e84d9091ad0..c3b1b998a94 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala @@ -179,11 +179,16 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) params.jobExecutionTokenDispenserActor ! 
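      // The dispenser answers with ReplyLimitedGroups (handled below), listing any groups
      // currently at their job execution token limit so they can be excluded from the next
      // workflow fetch.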
FetchLimitedGroups stay() case Event(ReplyLimitedGroups(groups), stateData) => + val wfCount = stateData.workflows.size + val swfCount = stateData.subWorkflows.size + val maxNewWorkflows = maxWorkflowsToLaunch min (maxWorkflowsRunning - wfCount - swfCount) + val fetchCountLog = s"Fetching $maxNewWorkflows new workflows ($wfCount workflows and $swfCount subworkflows in flight)" if (groups.nonEmpty) - log.info(s"Excluding groups from workflow launch: ${groups.mkString(", ")}") + log.info(s"${fetchCountLog}, excluding groups: ${groups.mkString(", ")}") + else if (maxNewWorkflows < 1) + log.info(s"${fetchCountLog}, no groups excluded from workflow launch.") else - log.debug("No groups excluded from workflow launch.") - val maxNewWorkflows = maxWorkflowsToLaunch min (maxWorkflowsRunning - stateData.workflows.size - stateData.subWorkflows.size) + log.debug(s"${fetchCountLog}, no groups excluded from workflow launch.") params.workflowStore ! WorkflowStoreActor.FetchRunnableWorkflows(maxNewWorkflows, excludedGroups = groups) stay() case Event(WorkflowStoreEngineActor.NoNewWorkflowsToStart, _) => From 2a93f2810452b6c4b9ded2aa9bb2c183f930025b Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Mon, 13 Nov 2023 13:19:39 -0500 Subject: [PATCH 14/87] [WX-1260] Acquire sas token from task runner (#7241) Co-authored-by: Adam Nichols --- .../StandardAsyncExecutionActor.scala | 9 +- .../blob/BlobFileSystemManager.scala | 70 ++++++-- .../filesystems/blob/BlobPathBuilder.scala | 5 +- .../WorkspaceManagerApiClientProvider.scala | 2 + ...cpBatchAsyncBackendJobExecutionActor.scala | 6 +- ...inesApiAsyncBackendJobExecutionActor.scala | 4 +- .../TesAsyncBackendJobExecutionActor.scala | 161 ++++++++++++++++-- .../impl/tes/TesRuntimeAttributes.scala | 39 ++++- .../cromwell/backend/impl/tes/TesTask.scala | 42 +++-- ...TesAsyncBackendJobExecutionActorSpec.scala | 155 +++++++++++++++++ .../impl/tes/TesInitializationActorSpec.scala | 4 +- .../impl/tes/TesRuntimeAttributesSpec.scala | 12 ++ .../backend/impl/tes/TesTaskSpec.scala | 1 + 13 files changed, 455 insertions(+), 55 deletions(-) create mode 100644 supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala index c98e429c63d..59eb7f08269 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala @@ -1,7 +1,6 @@ package cromwell.backend.standard import java.io.IOException - import akka.actor.{Actor, ActorLogging, ActorRef} import akka.event.LoggingReceive import cats.implicits._ @@ -329,7 +328,7 @@ trait StandardAsyncExecutionActor } /** Any custom code that should be run within commandScriptContents before the instantiated command. */ - def scriptPreamble: String = "" + def scriptPreamble: ErrorOr[String] = "".valid def cwd: Path = commandDirectory def rcPath: Path = cwd./(jobPaths.returnCodeFilename) @@ -427,10 +426,12 @@ trait StandardAsyncExecutionActor |find . -type d -exec sh -c '[ -z "$$(ls -A '"'"'{}'"'"')" ] && touch '"'"'{}'"'"'/.file' \\; |)""".stripMargin) + val errorOrPreamble: ErrorOr[String] = scriptPreamble + // The `tee` trickery below is to be able to redirect to known filenames for CWL while also streaming // stdout and stderr for PAPI to periodically upload to cloud storage. 
// https://stackoverflow.com/questions/692000/how-do-i-write-stderr-to-a-file-while-using-tee-with-a-pipe - (errorOrDirectoryOutputs, errorOrGlobFiles).mapN((directoryOutputs, globFiles) => + (errorOrDirectoryOutputs, errorOrGlobFiles, errorOrPreamble).mapN((directoryOutputs, globFiles, preamble) => s"""|#!$jobShell |DOCKER_OUTPUT_DIR_LINK |cd ${cwd.pathAsString} @@ -464,7 +465,7 @@ trait StandardAsyncExecutionActor |) |mv $rcTmpPath $rcPath |""".stripMargin - .replace("SCRIPT_PREAMBLE", scriptPreamble) + .replace("SCRIPT_PREAMBLE", preamble) .replace("ENVIRONMENT_VARIABLES", environmentVariables) .replace("INSTANTIATED_COMMAND", commandString) .replace("SCRIPT_EPILOGUE", scriptEpilogue) diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala index e3de6783d85..8f03dbe7e33 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala @@ -15,6 +15,7 @@ import java.nio.file.spi.FileSystemProvider import java.time.temporal.ChronoUnit import java.time.{Duration, OffsetDateTime} import java.util.UUID +import scala.collection.mutable import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} @@ -160,12 +161,14 @@ object BlobSasTokenGenerator { */ def createBlobTokenGenerator(workspaceManagerClient: WorkspaceManagerApiClientProvider, overrideWsmAuthToken: Option[String]): BlobSasTokenGenerator = { - WSMBlobSasTokenGenerator(workspaceManagerClient, overrideWsmAuthToken) + new WSMBlobSasTokenGenerator(workspaceManagerClient, overrideWsmAuthToken) } } -case class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClientProvider, +case class WSMTerraCoordinates(wsmEndpoint: String, workspaceId: UUID, containerResourceId: UUID) + +class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClientProvider, overrideWsmAuthToken: Option[String]) extends BlobSasTokenGenerator { /** @@ -178,17 +181,14 @@ case class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClient * @return an AzureSasCredential for accessing a blob container */ def generateBlobSasToken(endpoint: EndpointURL, container: BlobContainerName): Try[AzureSasCredential] = { - val wsmAuthToken: Try[String] = overrideWsmAuthToken match { - case Some(t) => Success(t) - case None => AzureCredentials.getAccessToken(None).toTry - } + val wsmAuthToken: Try[String] = getWsmAuth container.workspaceId match { // If this is a Terra workspace, request a token from WSM case Success(workspaceId) => { (for { wsmAuth <- wsmAuthToken wsmAzureResourceClient = wsmClientProvider.getControlledAzureResourceApi(wsmAuth) - resourceId <- getContainerResourceId(workspaceId, container, wsmAuth) + resourceId <- getContainerResourceId(workspaceId, container, Option(wsmAuth)) sasToken <- wsmAzureResourceClient.createAzureStorageContainerSasToken(workspaceId, resourceId) } yield sasToken).recoverWith { // If the storage account was still not found in WSM, this may be a public filesystem @@ -201,9 +201,59 @@ case class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClient } } - def getContainerResourceId(workspaceId: UUID, container: BlobContainerName, wsmAuth : String): Try[UUID] = { - val wsmResourceClient = wsmClientProvider.getResourceApi(wsmAuth) - wsmResourceClient.findContainerResourceId(workspaceId, container) + private val 
cachedContainerResourceIds = new mutable.HashMap[BlobContainerName, UUID]() + + // Optionally provide wsmAuth to avoid acquiring it twice in generateBlobSasToken. + // In the case that the resourceId is not cached and no auth is provided, this function will acquire a new auth as necessary. + private def getContainerResourceId(workspaceId: UUID, container: BlobContainerName, precomputedWsmAuth: Option[String]): Try[UUID] = { + cachedContainerResourceIds.get(container) match { + case Some(id) => Try(id) //cache hit + case _ => { //cache miss + val auth: Try[String] = precomputedWsmAuth.map(auth => Try(auth)).getOrElse(getWsmAuth) + val resourceId = for { + wsmAuth <- auth + wsmResourceApi = wsmClientProvider.getResourceApi(wsmAuth) + resourceId <- wsmResourceApi.findContainerResourceId(workspaceId, container) + } yield resourceId + resourceId.map(id => cachedContainerResourceIds.put(container, id)) //NB: Modifying cache state here. + cachedContainerResourceIds.get(container) match { + case Some(uuid) => Try(uuid) + case _ => Failure(new NoSuchElementException("Could not retrieve container resource ID from WSM")) + } + } + } + } + + private def getWsmAuth: Try[String] = { + overrideWsmAuthToken match { + case Some(t) => Success(t) + case None => AzureCredentials.getAccessToken(None).toTry + } + } + + private def parseTerraWorkspaceIdFromPath(blobPath: BlobPath): Try[UUID] = { + if (blobPath.container.value.startsWith("sc-")) Try(UUID.fromString(blobPath.container.value.substring(3))) + else Failure(new Exception("Could not parse workspace ID from storage container. Are you sure this is a file in a Terra Workspace?")) + } + + /** + * Return a REST endpoint that will reply with a sas token for the blob storage container associated with the provided blob path. + * @param blobPath A blob path of a file living in a blob container that WSM knows about (likely a workspace container). + * @param tokenDuration How long will the token last after being generated. Default is 8 hours. Sas tokens won't last longer than 24h. + * NOTE: If a blobPath is provided for a file in a container other than what this token generator was constructed for, + * this function will make two REST requests. Otherwise, the relevant data is already cached locally. 
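+   * Example of the returned endpoint shape (hypothetical host and IDs; the sasExpirationDuration query
+   * parameter is only appended when a tokenDuration is supplied):
+   *   https://wsm.example.org/api/workspaces/v1/<workspaceId>/resources/controlled/azure/storageContainer/<containerResourceId>/getSasToken?sasExpirationDuration=28800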
+ */ + def getWSMSasFetchEndpoint(blobPath: BlobPath, tokenDuration: Option[Duration] = None): Try[String] = { + val wsmEndpoint = wsmClientProvider.getBaseWorkspaceManagerUrl + val lifetimeQueryParameters: String = tokenDuration.map(d => s"?sasExpirationDuration=${d.toSeconds.intValue}").getOrElse("") + val terraInfo: Try[WSMTerraCoordinates] = for { + workspaceId <- parseTerraWorkspaceIdFromPath(blobPath) + containerResourceId <- getContainerResourceId(workspaceId, blobPath.container, None) + coordinates = WSMTerraCoordinates(wsmEndpoint, workspaceId, containerResourceId) + } yield coordinates + terraInfo.map{terraCoordinates => + s"${terraCoordinates.wsmEndpoint}/api/workspaces/v1/${terraCoordinates.workspaceId.toString}/resources/controlled/azure/storageContainer/${terraCoordinates.containerResourceId.toString}/getSasToken${lifetimeQueryParameters}" + } } } diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala index 3aa26eb3c11..3acb99857e0 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala @@ -185,6 +185,9 @@ case class BlobPath private[blob](pathString: String, endpoint: EndpointURL, con * @return Path string relative to the container root. */ def pathWithoutContainer : String = pathString - + + def getFilesystemManager: BlobFileSystemManager = fsm + override def getSymlinkSafePath(options: LinkOption*): Path = toAbsolutePath + } diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala index 276738c98b6..490d0fcc704 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala @@ -20,6 +20,7 @@ import scala.util.Try trait WorkspaceManagerApiClientProvider { def getControlledAzureResourceApi(token: String): WsmControlledAzureResourceApi def getResourceApi(token: String): WsmResourceApi + def getBaseWorkspaceManagerUrl: String } class HttpWorkspaceManagerClientProvider(baseWorkspaceManagerUrl: WorkspaceManagerURL) extends WorkspaceManagerApiClientProvider { @@ -40,6 +41,7 @@ class HttpWorkspaceManagerClientProvider(baseWorkspaceManagerUrl: WorkspaceManag apiClient.setAccessToken(token) WsmControlledAzureResourceApi(new ControlledAzureResourceApi(apiClient)) } + def getBaseWorkspaceManagerUrl: String = baseWorkspaceManagerUrl.value } case class WsmResourceApi(resourcesApi : ResourceApi) { diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala index ae228ad503b..766a8f2552f 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala @@ -663,12 +663,12 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar private val DockerMonitoringLogPath: Path = 
GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringLogFilename) private val DockerMonitoringScriptPath: Path = GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringScriptFilename) - override def scriptPreamble: String = { + override def scriptPreamble: ErrorOr[String] = { if (monitoringOutput.isDefined) { s"""|touch $DockerMonitoringLogPath |chmod u+x $DockerMonitoringScriptPath - |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin - } else "" + |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin.valid + } else "".valid } private[actors] def generateInputs(jobDescriptor: BackendJobDescriptor): Set[GcpBatchInput] = { diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala index 745e11bee35..942838f8125 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala @@ -380,12 +380,12 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta private lazy val isDockerImageCacheUsageRequested = runtimeAttributes.useDockerImageCache.getOrElse(useDockerImageCache(jobDescriptor.workflowDescriptor)) - override def scriptPreamble: String = { + override def scriptPreamble: ErrorOr[String] = { if (monitoringOutput.isDefined) { s"""|touch $DockerMonitoringLogPath |chmod u+x $DockerMonitoringScriptPath |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin - } else "" + }.valid else "".valid } override def globParentDirectory(womGlobFile: WomGlobFile): Path = { diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala index ad8daca6ec7..100ed6137e9 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala @@ -1,10 +1,5 @@ package cromwell.backend.impl.tes -import common.exception.AggregatedMessageException - -import java.io.FileNotFoundException -import java.nio.file.FileAlreadyExistsException -import cats.syntax.apply._ import akka.http.scaladsl.Http import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import akka.http.scaladsl.marshalling.Marshal @@ -13,23 +8,31 @@ import akka.http.scaladsl.model._ import akka.http.scaladsl.unmarshalling.{Unmarshal, Unmarshaller} import akka.stream.ActorMaterializer import akka.util.ByteString +import cats.implicits._ +import common.collections.EnhancedCollections._ +import common.exception.AggregatedMessageException import common.validation.ErrorOr.ErrorOr import common.validation.Validation._ import cromwell.backend.BackendJobLifecycleActor import cromwell.backend.async.{AbortedExecutionHandle, ExecutionHandle, FailedNonRetryableExecutionHandle, PendingExecutionHandle} +import cromwell.backend.impl.tes.TesAsyncBackendJobExecutionActor.{determineWSMSasEndpointFromInputs, generateLocalizedSasScriptPreamble} import 
cromwell.backend.impl.tes.TesResponseJsonFormatter._ import cromwell.backend.standard.{StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} +import cromwell.core.logging.JobLogger import cromwell.core.path.{DefaultPathBuilder, Path} -import cromwell.core.retry.SimpleExponentialBackoff import cromwell.core.retry.Retry._ -import cromwell.filesystems.blob.BlobPath +import cromwell.core.retry.SimpleExponentialBackoff +import cromwell.filesystems.blob.{BlobPath, WSMBlobSasTokenGenerator} import cromwell.filesystems.drs.{DrsPath, DrsResolver} -import wom.values.WomFile import net.ceedubs.ficus.Ficus._ +import wom.values.WomFile +import java.io.FileNotFoundException +import java.nio.file.FileAlreadyExistsException +import java.time.Duration +import java.time.temporal.ChronoUnit import scala.concurrent.Future -import scala.util.{Failure, Success} - +import scala.util.{Failure, Success, Try} sealed trait TesRunStatus { def isTerminal: Boolean def sysLogs: Seq[String] = Seq.empty[String] @@ -59,6 +62,110 @@ case object Cancelled extends TesRunStatus { object TesAsyncBackendJobExecutionActor { val JobIdKey = "tes_job_id" + + def generateLocalizedSasScriptPreamble(environmentVariableName: String, getSasWsmEndpoint: String) : String = { + // BEARER_TOKEN: https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/how-to-use-vm-token#get-a-token-using-http + // NB: Scala string interpolation and bash variable substitution use similar syntax. $$ is an escaped $, much like \\ is an escaped \. + s""" + |### BEGIN ACQUIRE LOCAL SAS TOKEN ### + |# Function to check if a command exists on this machine + |command_exists() { + | command -v "$$1" > /dev/null 2>&1 + |} + | + |# Check if curl exists; install if not + |if ! command_exists curl; then + | if command_exists apt-get; then + | apt-get -y update && apt-get -y install curl + | if [ $$? -ne 0 ]; then + | echo "Error: Failed to install curl via apt-get." + | exit 1 + | fi + | else + | echo "Error: apt-get is not available, and curl is not installed." + | exit 1 + | fi + |fi + | + |# Check if jq exists; install if not + |if ! command_exists jq; then + | if command_exists apt-get; then + | apt-get -y update && apt-get -y install jq + | if [ $$? -ne 0 ]; then + | echo "Error: Failed to install jq via apt-get." + | exit 1 + | fi + | else + | echo "Error: apt-get is not available, and jq is not installed." + | exit 1 + | fi + |fi + | + |# Acquire bearer token, relying on the User Assigned Managed Identity of this VM. + |echo Acquiring Bearer Token using User Assigned Managed Identity... + |BEARER_TOKEN=$$(curl 'http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https%3A%2F%2Fmanagement.azure.com%2F' -H Metadata:true -s | jq .access_token) + | + |# Remove the leading and trailing quotes + |BEARER_TOKEN="$${BEARER_TOKEN#\\"}" + |BEARER_TOKEN="$${BEARER_TOKEN%\\"}" + | + |# Use the precomputed endpoint from cromwell + WSM to acquire a sas token + |echo Requesting sas token from WSM... + |sas_response_json=$$(curl -s \\ + | --retry 3 \\ + | --retry-delay 2 \\ + | -X POST "$getSasWsmEndpoint" \\ + | -H "Content-Type: application/json" \\ + | -H "accept: */*" \\ + | -H "Authorization: Bearer $${BEARER_TOKEN}") + | + |# Store token as environment variable + |export $environmentVariableName=$$(echo "$${sas_response_json}" | jq -r '.token') + | + |# Echo the first characters for logging/debugging purposes. "null" indicates something went wrong. 
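+      |# (In the echo below, $${VAR:0:4} is bash substring expansion - offset 0, length 4 - so a token such as
+      |# "sv=2021-06-08&sig=..." logs as "sv=2****".)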
+      |echo Saving sas token: $${$environmentVariableName:0:4}**** to environment variable $environmentVariableName...
+      |### END ACQUIRE LOCAL SAS TOKEN ###
+      |""".stripMargin
+  }
+
+  private def maybeConvertToBlob(pathToTest: Try[Path]): Try[BlobPath] = {
+    pathToTest.collect { case blob: BlobPath => blob }
+  }
+
+  /**
+    * Computes an endpoint that can be used to retrieve a sas token for a particular blob storage container.
+    * This assumes that some of the task inputs are blob files, all blob files are in the same container, and we can get a sas
+    * token for this container from WSM.
+    * The task VM will use the user assigned managed identity that it is running as in order to authenticate.
+    * @param taskInputs The inputs to this particular TesTask. If any are blob files, the first will be used to
+    *                   determine the storage container to retrieve the sas token for.
+    * @param pathGetter A function to convert a string filepath into a cromwell Path object.
+    * @param blobConverter A function to convert a Path into a BlobPath, if possible. Provided for testing purposes.
+    * @return A URL endpoint that, when called with proper authentication, will return a sas token.
+    *         Returns a Failure if no endpoint should be used for this task (for example, when none of the inputs are blob files).
+    */
+  def determineWSMSasEndpointFromInputs(taskInputs: List[Input],
+                                        pathGetter: String => Try[Path],
+                                        logger: JobLogger,
+                                        blobConverter: Try[Path] => Try[BlobPath] = maybeConvertToBlob): Try[String] = {
+    // Collect all of the inputs that are valid blob paths
+    val blobFiles = taskInputs
+      .collect{ case Input(_, _, Some(url), _, _, _) => blobConverter(pathGetter(url)) }
+      .collect{ case Success(blob) => blob }
+
+    // Log if not all input files live in the same container.
+    if (blobFiles.map(_.container).distinct.size > 1) {
+      logger.info(s"While parsing blob inputs, found more than one container. Generating SAS token based on first file in the list.")
+    }
+
+    // We use the first blob file in the list to determine the correct blob container.
+    blobFiles.headOption.map{blobPath =>
+      blobPath.getFilesystemManager.blobTokenGenerator match {
+        case wsmGenerator: WSMBlobSasTokenGenerator => wsmGenerator.getWSMSasFetchEndpoint(blobPath, Some(Duration.of(24, ChronoUnit.HOURS)))
+        case _ => Failure(new UnsupportedOperationException("Blob file does not have an associated WSMBlobSasTokenGenerator"))
+      }
+    }.getOrElse(Failure(new NoSuchElementException("Could not infer blob storage container from task inputs: No valid blob files provided.")))
+  }
 }
 
 class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams)
@@ -71,7 +178,6 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn
   override type StandardAsyncRunState = TesRunStatus
 
   def statusEquivalentTo(thiz: StandardAsyncRunState)(that: StandardAsyncRunState): Boolean = thiz == that
-
   override lazy val pollBackOff: SimpleExponentialBackoff = tesConfiguration.pollBackoff
 
   override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff = tesConfiguration.executeOrRecoverBackoff
@@ -90,6 +196,38 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn
     )
   }
 
+  /**
+   * This script preamble is bash code that is executed at the start of a task inside the user's container.
+   * It is executed directly before the user's instantiated command, which gives cromwell a chance to adjust the
+   * container environment before the actual task runs. See commandScriptContents in StandardAsyncExecutionActor for more context.
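+   * (Illustrative example with a hypothetical value: a WDL runtime attribute azureSasEnvironmentVariable: "AZURE_SAS_TOKEN"
+   * causes the preamble to export $AZURE_SAS_TOKEN, which the instantiated command can then append to blob URLs it accesses.)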
+ * + * For TES tasks, we sometimes want to acquire and save an azure sas token to an environment variable. + * If the user provides a value for runtimeAttributes.localizedSasEnvVar, we will add the relevant bash code to the preamble + * that acquires/exports the sas token to an environment variable. Once there, it will be visible to the user's task code. + * + * If runtimeAttributes.localizedSasEnvVar is provided in the WDL (and determineWSMSasEndpointFromInputs is successful), + * we will export the sas token to an environment variable named to be the value of runtimeAttributes.localizedSasEnvVar. + * Otherwise, we won't alter the preamble. + * + * See determineWSMSasEndpointFromInputs to see how we use taskInputs to infer *which* container to get a sas token for. + * + * @return Bash code to run at the start of a task. + */ + override def scriptPreamble: ErrorOr[String] = { + runtimeAttributes.localizedSasEnvVar match { + case Some(environmentVariableName) => { // Case: user wants a sas token. Return the computed preamble or die trying. + val workflowName = workflowDescriptor.callable.name + val callInputFiles = jobDescriptor.fullyQualifiedInputs.safeMapValues { + _.collectAsSeq { case w: WomFile => w } + } + val taskInputs: List[Input] = TesTask.buildTaskInputs(callInputFiles, workflowName, mapCommandLineWomFile) + val computedEndpoint = determineWSMSasEndpointFromInputs(taskInputs, getPath, jobLogger) + computedEndpoint.map(endpoint => generateLocalizedSasScriptPreamble(environmentVariableName, endpoint)) + }.toErrorOr + case _ => "".valid // Case: user doesn't want a sas token. Empty preamble is the correct preamble. + } + } + override def mapCommandLineWomFile(womFile: WomFile): WomFile = { womFile.mapFile(value => (getPath(value), asAdHocFile(womFile)) match { @@ -173,7 +311,6 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn } override def executeAsync(): Future[ExecutionHandle] = { - // create call exec dir tesJobPaths.callExecutionRoot.createPermissionedDirectories() val taskMessageFuture = createTaskMessage().fold( diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala index c5b3c4df66d..48ade7b234a 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala @@ -1,5 +1,6 @@ package cromwell.backend.impl.tes +import cats.data.Validated import cats.syntax.validated._ import com.typesafe.config.Config import common.validation.ErrorOr.ErrorOr @@ -15,6 +16,8 @@ import wom.format.MemorySize import wom.types.{WomIntegerType, WomStringType} import wom.values._ +import java.util.regex.Pattern + case class TesRuntimeAttributes(continueOnReturnCode: ContinueOnReturnCode, dockerImage: String, dockerWorkingDir: Option[String], @@ -23,13 +26,14 @@ case class TesRuntimeAttributes(continueOnReturnCode: ContinueOnReturnCode, memory: Option[MemorySize], disk: Option[MemorySize], preemptible: Boolean, + localizedSasEnvVar: Option[String], backendParameters: Map[String, Option[String]]) object TesRuntimeAttributes { - val DockerWorkingDirKey = "dockerWorkingDir" val DiskSizeKey = "disk" val PreemptibleKey = "preemptible" + val LocalizedSasKey = "azureSasEnvironmentVariable" private def cpuValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Int Refined Positive] = 
CpuValidation.optional @@ -47,8 +51,8 @@ object TesRuntimeAttributes { private val dockerValidation: RuntimeAttributesValidation[String] = DockerValidation.instance private val dockerWorkingDirValidation: OptionalRuntimeAttributesValidation[String] = DockerWorkingDirValidation.optional - private def preemptibleValidation(runtimeConfig: Option[Config]) = PreemptibleValidation.default(runtimeConfig) + private def localizedSasValidation: OptionalRuntimeAttributesValidation[String] = LocalizedSasValidation.optional def runtimeAttributesBuilder(backendRuntimeConfig: Option[Config]): StandardValidatedRuntimeAttributesBuilder = // !! NOTE !! If new validated attributes are added to TesRuntimeAttributes, be sure to include @@ -62,6 +66,7 @@ object TesRuntimeAttributes { dockerValidation, dockerWorkingDirValidation, preemptibleValidation(backendRuntimeConfig), + localizedSasValidation ) def makeBackendParameters(runtimeAttributes: Map[String, WomValue], @@ -124,8 +129,10 @@ object TesRuntimeAttributes { RuntimeAttributesValidation.extract(failOnStderrValidation(backendRuntimeConfig), validatedRuntimeAttributes) val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract(continueOnReturnCodeValidation(backendRuntimeConfig), validatedRuntimeAttributes) - val preemptible: Boolean = + val preemptible: Boolean = { RuntimeAttributesValidation.extract(preemptibleValidation(backendRuntimeConfig), validatedRuntimeAttributes) + } + val localizedSas: Option[String] = RuntimeAttributesValidation.extractOption(localizedSasValidation.key, validatedRuntimeAttributes) // !! NOTE !! If new validated attributes are added to TesRuntimeAttributes, be sure to include // their validations here so that they will be handled correctly with backendParameters. @@ -139,7 +146,8 @@ object TesRuntimeAttributes { diskSizeCompatValidation(backendRuntimeConfig), failOnStderrValidation(backendRuntimeConfig), continueOnReturnCodeValidation(backendRuntimeConfig), - preemptibleValidation(backendRuntimeConfig) + preemptibleValidation(backendRuntimeConfig), + localizedSasValidation ) // BT-458 any strings included in runtime attributes that aren't otherwise used should be @@ -156,6 +164,7 @@ object TesRuntimeAttributes { memory, disk, preemptible, + localizedSas, backendParameters ) } @@ -218,3 +227,25 @@ class PreemptibleValidation extends BooleanRuntimeAttributesValidation(TesRuntim override protected def missingValueMessage: String = s"Expecting $key runtime attribute to be an Integer, Boolean, or a String with values of 'true' or 'false'" } + +object LocalizedSasValidation { + lazy val instance: RuntimeAttributesValidation[String] = new LocalizedSasValidation + lazy val optional: OptionalRuntimeAttributesValidation[String] = instance.optional +} + +class LocalizedSasValidation extends StringRuntimeAttributesValidation(TesRuntimeAttributes.LocalizedSasKey) { + private def isValidBashVariableName(str: String): Boolean = { + // require string be only letters, numbers, and underscores + val pattern = Pattern.compile("^[a-zA-Z0-9_]+$", Pattern.CASE_INSENSITIVE) + val matcher = pattern.matcher(str) + matcher.find + } + + override protected def invalidValueMessage(value: WomValue): String = { + s"Invalid Runtime Attribute value for ${TesRuntimeAttributes.LocalizedSasKey}. Value must be a string containing only letters, numbers, and underscores." 
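+    // e.g. "AZURE_SAS_TOKEN" and "sasToken1" satisfy the pattern; "MY-TOKEN" and "MY TOKEN" do not.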
+ } + + override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { + case WomString(value) => if(isValidBashVariableName(value)) value.validNel else Validated.invalidNel(invalidValueMessage(WomString(value))) + } +} diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala index 4a6e77641e3..d775367ac74 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala @@ -16,6 +16,8 @@ import wom.callable.Callable.OutputDefinition import wom.expression.NoIoFunctionSet import wom.values._ +import scala.collection.immutable.Map + final case class WorkflowExecutionIdentityConfig(value: String) {override def toString: String = value.toString} final case class WorkflowExecutionIdentityOption(value: String) {override def toString: String = value} final case class TesTask(jobDescriptor: BackendJobDescriptor, @@ -79,24 +81,7 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, } lazy val inputs: Seq[Input] = { - val result = (callInputFiles ++ writeFunctionFiles).flatMap { - case (fullyQualifiedName, files) => files.flatMap(_.flattenFiles).zipWithIndex.map { - case (f, index) => - val inputType = f match { - case _: WomUnlistedDirectory => "DIRECTORY" - case _: WomSingleFile => "FILE" - case _: WomGlobFile => "FILE" - } - Input( - name = Option(fullyQualifiedName + "." + index), - description = Option(workflowName + "." + fullyQualifiedName + "." + index), - url = Option(f.value), - path = mapCommandLineWomFile(f).value, - `type` = Option(inputType), - content = None - ) - } - }.toList ++ Seq(commandScript) + val result = TesTask.buildTaskInputs(callInputFiles ++ writeFunctionFiles, workflowName, mapCommandLineWomFile) ++ Seq(commandScript) jobLogger.info(s"Calculated TES inputs (found ${result.size}): " + result.mkString(System.lineSeparator(),System.lineSeparator(),System.lineSeparator())) result } @@ -288,6 +273,27 @@ object TesTask { ) } + def buildTaskInputs(taskFiles: Map[FullyQualifiedName, Seq[WomFile]], workflowName: String, womMapFn: WomFile => WomFile): List[Input] = { + taskFiles.flatMap { + case (fullyQualifiedName, files) => files.flatMap(_.flattenFiles).zipWithIndex.map { + case (f, index) => + val inputType = f match { + case _: WomUnlistedDirectory => "DIRECTORY" + case _: WomSingleFile => "FILE" + case _: WomGlobFile => "FILE" + } + Input( + name = Option(fullyQualifiedName + "." + index), + description = Option(workflowName + "." + fullyQualifiedName + "." + index), + url = Option(f.value), + path = womMapFn(f).value, + `type` = Option(inputType), + content = None + ) + } + }.toList + } + def makeTags(workflowDescriptor: BackendWorkflowDescriptor): Map[String, Option[String]] = { // In addition to passing through any workflow labels, include relevant workflow ids as tags. 
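    // (Custom labels arrive as key/value pairs; each value is wrapped in Option below to match the
    // Map[String, Option[String]] shape that TES expects for tags.)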
val baseTags = workflowDescriptor.customLabels.asMap.map { case (k, v) => (k, Option(v)) } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala new file mode 100644 index 00000000000..a28fce3d445 --- /dev/null +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala @@ -0,0 +1,155 @@ +package cromwell.backend.impl.tes + +import common.mock.MockSugar +import cromwell.core.logging.JobLogger +import cromwell.core.path.NioPath +import cromwell.filesystems.blob.{BlobFileSystemManager, BlobPath, WSMBlobSasTokenGenerator} +import org.mockito.ArgumentMatchers.any +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import java.time.Duration +import java.time.temporal.ChronoUnit +import scala.util.{Failure, Try} + +class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers with MockSugar { + behavior of "TesAsyncBackendJobExecutionActor" + + val fullyQualifiedName = "this.name.is.more.than.qualified" + val workflowName = "mockWorkflow" + val someBlobUrl = "https://lz813a3d637adefec2c6e88f.blob.core.windows.net/sc-d8143fd8-aa07-446d-9ba0-af72203f1794/nyxp6c/tes-internal/configuration/supported-vm-sizes" + val someNotBlobUrl = "https://www.google.com/path/to/exile" + var index = 0 + + val blobInput_0 = Input( + name = Option(fullyQualifiedName + "." + index), + description = Option(workflowName + "." + fullyQualifiedName + "." + index), + url = Option(someBlobUrl), + path = someBlobUrl, + `type` = Option("FILE"), + content = None + ) + index = index+1 + + val blobInput_1 = Input( + name = Option(fullyQualifiedName + "." + index), + description = Option(workflowName + "." + fullyQualifiedName + "." + index), + url = Option(someBlobUrl), + path = someBlobUrl, + `type` = Option("FILE"), + content = None + ) + index = index+1 + + val notBlobInput_1 = Input( + name = Option(fullyQualifiedName + "." + index), + description = Option(workflowName + "." + fullyQualifiedName + "." + index), + url = Option(someNotBlobUrl + index), + path = someNotBlobUrl + index, + `type` = Option("FILE"), + content = None + ) + index = index+1 + + val notBlobInput_2 = Input( + name = Option(fullyQualifiedName + "." + index), + description = Option(workflowName + "." + fullyQualifiedName + "." + index), + url = Option(someNotBlobUrl + index), + path = someNotBlobUrl + index, + `type` = Option("FILE"), + content = None + ) + + // Mock blob path functionality. 
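+  // The mocks below reproduce the object chain that determineWSMSasEndpointFromInputs walks to find an endpoint:
+  // BlobPath -> BlobFileSystemManager -> WSMBlobSasTokenGenerator.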
+ val testWsmEndpoint = "https://wsm.mock.com/endpoint" + val testWorkspaceId = "e58ed763-928c-4155-0000-fdbaaadc15f3" + val testContainerResourceId = "e58ed763-928c-4155-1111-fdbaaadc15f3" + + def generateMockWsmTokenGenerator: WSMBlobSasTokenGenerator = { + val mockTokenGenerator = mock[WSMBlobSasTokenGenerator] + val expectedTokenDuration: Duration = Duration.of(24, ChronoUnit.HOURS) + mockTokenGenerator.getWSMSasFetchEndpoint(any[BlobPath], any[Option[Duration]]) returns Try(s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken?sasExpirationDuration=${expectedTokenDuration.getSeconds.toInt}") + mockTokenGenerator + } + def generateMockFsm: BlobFileSystemManager = { + val mockFsm: BlobFileSystemManager = mock[BlobFileSystemManager] + val mockGenerator: WSMBlobSasTokenGenerator = generateMockWsmTokenGenerator + mockFsm.blobTokenGenerator returns mockGenerator + mockFsm + } + //path to a blob file + def generateMockBlobPath: BlobPath = { + val mockBlobPath = mock[BlobPath] + mockBlobPath.pathAsString returns someBlobUrl + + val mockFsm = generateMockFsm + mockBlobPath.getFilesystemManager returns mockFsm + + val mockNioPath: NioPath = mock[NioPath] + mockBlobPath.nioPath returns mockNioPath + mockBlobPath + } + + //Path to a file that isn't a blob file + def generateMockDefaultPath: cromwell.core.path.Path = { + val mockDefaultPath: cromwell.core.path.Path = mock[cromwell.core.path.Path] + mockDefaultPath.pathAsString returns someNotBlobUrl + mockDefaultPath + } + def pathGetter(pathString: String): Try[cromwell.core.path.Path] = { + val mockBlob: BlobPath = generateMockBlobPath + val mockDefault: cromwell.core.path.Path = generateMockDefaultPath + if(pathString.contains(someBlobUrl)) Try(mockBlob) else Try(mockDefault) + } + + def blobConverter(pathToConvert: Try[cromwell.core.path.Path]): Try[BlobPath] = { + val mockBlob: BlobPath = generateMockBlobPath + if(pathToConvert.get.pathAsString.contains(someBlobUrl)) Try(mockBlob) else Failure(new Exception("failed")) + } + + it should "not return sas endpoint when no blob paths are provided" in { + val mockLogger: JobLogger = mock[JobLogger] + val emptyInputs: List[Input] = List() + val bloblessInputs: List[Input] = List(notBlobInput_1, notBlobInput_2) + TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(emptyInputs, pathGetter, mockLogger, blobConverter).isFailure shouldBe true + TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(bloblessInputs, pathGetter, mockLogger, blobConverter).isFailure shouldBe true + } + + it should "return a sas endpoint based on inputs when blob paths are provided" in { + val mockLogger: JobLogger = mock[JobLogger] + val expectedTokenLifetimeSeconds = 24 * 60 * 60 //assert that cromwell asks for 24h token duration. 
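+    // (24 * 60 * 60 = 86400, so the expected URL below should end in sasExpirationDuration=86400.)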
+ val expected = s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken?sasExpirationDuration=${expectedTokenLifetimeSeconds}" + val blobInput: List[Input] = List(blobInput_0) + val blobInputs: List[Input] = List(blobInput_0, blobInput_1) + val mixedInputs: List[Input] = List(notBlobInput_1, blobInput_0, blobInput_1) + TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(blobInput, pathGetter, mockLogger, blobConverter).get shouldEqual expected + TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(blobInputs, pathGetter, mockLogger, blobConverter).get shouldEqual expected + TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(mixedInputs, pathGetter, mockLogger, blobConverter).get shouldEqual expected + } + + it should "contain expected strings in the bash script" in { + val mockEnvironmentVariableNameFromWom = "mock_env_var_for_storing_sas_token" + val expectedEndpoint = s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken" + + val beginSubstring = "### BEGIN ACQUIRE LOCAL SAS TOKEN ###" + val endSubstring = "### END ACQUIRE LOCAL SAS TOKEN ###" + val curlCommandSubstring = + s""" + |sas_response_json=$$(curl -s \\ + | --retry 3 \\ + | --retry-delay 2 \\ + | -X POST "$expectedEndpoint" \\ + | -H "Content-Type: application/json" \\ + | -H "accept: */*" \\ + | -H "Authorization: Bearer $${BEARER_TOKEN}") + |""".stripMargin + val exportCommandSubstring = s"""export $mockEnvironmentVariableNameFromWom=$$(echo "$${sas_response_json}" | jq -r '.token')""" + + val generatedBashScript = TesAsyncBackendJobExecutionActor.generateLocalizedSasScriptPreamble(mockEnvironmentVariableNameFromWom, expectedEndpoint) + + generatedBashScript should include (beginSubstring) + generatedBashScript should include (endSubstring) + generatedBashScript should include (curlCommandSubstring) + generatedBashScript should include (exportCommandSubstring) + } +} diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala index 731dd3c6c70..a081f26c910 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala @@ -63,6 +63,7 @@ class TesInitializationActorSpec extends TestKitSuite | # The keys below have been commented out as they are optional runtime attributes. 
| # dockerWorkingDir | # docker + | # azureSasEnvironmentVariable |} |""".stripMargin @@ -107,6 +108,7 @@ class TesInitializationActorSpec extends TestKitSuite } def nonStringErrorMessage(key: String) = s"Workflow option $key must be a string" + val bothRequiredErrorMessage = s"Workflow options ${TesWorkflowOptionKeys.WorkflowExecutionIdentity} and ${TesWorkflowOptionKeys.DataAccessIdentity} are both required if one is provided" "fail when WorkflowExecutionIdentity is not a string and DataAccessIdentity is missing" in { @@ -120,7 +122,7 @@ class TesInitializationActorSpec extends TestKitSuite case InitializationFailed(failure) => val expectedMsg = nonStringErrorMessage(TesWorkflowOptionKeys.WorkflowExecutionIdentity) if (!(failure.getMessage.contains(expectedMsg) && - failure.getMessage.contains(bothRequiredErrorMessage))) { + failure.getMessage.contains(bothRequiredErrorMessage))) { fail(s"Exception message did not contain both '$expectedMsg' and '$bothRequiredErrorMessage'. Was '$failure'") } } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala index e1984fb65dc..830e0cbe70c 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala @@ -25,6 +25,7 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec None, None, false, + None, Map.empty ) @@ -71,6 +72,17 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec assertSuccess(runtimeAttributes, expectedRuntimeAttributes) } + "validate a valid azureSasEnvironmentVariable entry" in { + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), TesRuntimeAttributes.LocalizedSasKey -> WomString("THIS_IS_VALID")) + val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(localizedSasEnvVar = Some("THIS_IS_VALID")) + assertSuccess(runtimeAttributes, expectedRuntimeAttributes) + } + + "fail to validate an invalid azureSasEnvironmentVariable entry" in { + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), TesRuntimeAttributes.LocalizedSasKey -> WomString("THIS IS INVALID")) + assertFailure(runtimeAttributes, "Value must be a string containing only letters, numbers, and underscores.") + } + "convert a positive integer preemptible entry to true boolean" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "preemptible" -> WomInteger(3)) val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(preemptible = true) diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala index b7887b29944..a5fd3a3a7e2 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala @@ -31,6 +31,7 @@ class TesTaskSpec None, None, false, + None, Map.empty ) val internalPathPrefix = Option("mock/path/to/tes/task") From 9b7ad989c886c7f23a40bc4dfcdcf3f489d11bb2 Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Tue, 14 Nov 2023 09:32:19 -0500 Subject: [PATCH 15/87] [WX-1183] Ice Lake (#7252) --- .../standardTestCases/papi_cpu_platform.test | 3 +- .../papi_cpu_platform/papi_cpu_platform.wdl | 9 ++--- docs/RuntimeAttributes.md 
| 1 + .../pipelines/common/MachineConstraints.scala | 9 +++-- .../PipelinesApiRuntimeAttributes.scala | 1 + .../common/MachineConstraintsSpec.scala | 36 ++++++++++++------- 6 files changed, 40 insertions(+), 19 deletions(-) diff --git a/centaur/src/main/resources/standardTestCases/papi_cpu_platform.test b/centaur/src/main/resources/standardTestCases/papi_cpu_platform.test index 5aca2634edb..7b38c3a25d7 100644 --- a/centaur/src/main/resources/standardTestCases/papi_cpu_platform.test +++ b/centaur/src/main/resources/standardTestCases/papi_cpu_platform.test @@ -8,9 +8,10 @@ files { metadata { status: Succeeded - "outputs.cpus.cascadeLake.cpuPlatform": "Intel Cascade Lake" "outputs.cpus.broadwell.cpuPlatform": "Intel Broadwell" "outputs.cpus.haswell.cpuPlatform": "Intel Haswell" + "outputs.cpus.cascadeLake.cpuPlatform": "Intel Cascade Lake" + "outputs.cpus.iceLake.cpuPlatform": "Intel Ice Lake" "outputs.cpus.rome.cpuPlatform": "AMD Rome" } diff --git a/centaur/src/main/resources/standardTestCases/papi_cpu_platform/papi_cpu_platform.wdl b/centaur/src/main/resources/standardTestCases/papi_cpu_platform/papi_cpu_platform.wdl index 39d25e6fe47..3ec1bd77cae 100644 --- a/centaur/src/main/resources/standardTestCases/papi_cpu_platform/papi_cpu_platform.wdl +++ b/centaur/src/main/resources/standardTestCases/papi_cpu_platform/papi_cpu_platform.wdl @@ -24,8 +24,9 @@ task cpu_platform { } workflow cpus { - call cpu_platform as haswell { input: cpu_platform = "Intel Haswell" } - call cpu_platform as broadwell { input: cpu_platform = "Intel Broadwell" } - call cpu_platform as cascadeLake { input: cpu_platform = "Intel Cascade Lake" } - call cpu_platform as rome {input: cpu_platform = "AMD Rome" } + call cpu_platform as haswell { input: cpu_platform = "Intel Haswell" } + call cpu_platform as broadwell { input: cpu_platform = "Intel Broadwell" } + call cpu_platform as cascadeLake { input: cpu_platform = "Intel Cascade Lake" } + call cpu_platform as iceLake { input: cpu_platform = "Intel Ice Lake" } + call cpu_platform as rome { input: cpu_platform = "AMD Rome" } } diff --git a/docs/RuntimeAttributes.md b/docs/RuntimeAttributes.md index 3924c1de3e4..60216946a37 100644 --- a/docs/RuntimeAttributes.md +++ b/docs/RuntimeAttributes.md @@ -429,6 +429,7 @@ runtime { Note that when this options is specified, make sure the requested CPU platform is [available](https://cloud.google.com/compute/docs/regions-zones/#available) in the `zones` you selected. The following CPU platforms are currently supported by the Google Cloud backend: +- `Intel Ice Lake` - `Intel Cascade Lake` - `Intel Skylake` - `Intel Broadwell` diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala index fa1835dbad7..7e707f959e1 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala @@ -16,10 +16,15 @@ object MachineConstraints { if (googleLegacyMachineSelection) { s"predefined-$cpu-${memory.to(MemoryUnit.MB).amount.intValue()}" } else { - // If someone requests Intel Cascade Lake as their CPU platform then switch the machine type to n2. - // Similarly, CPU platform of AMD Rome corresponds to the machine type n2d. 
+ // Users specify a CPU platform in their WDL, but GCP also needs to know which machine type to use. + // The below logic infers the machine type from the requested CPU. + // The heuristic we're using is: find the newest 'General Purpose' type that supports the given CPU. + // https://cloud.google.com/compute/docs/machine-resource + // For example, if someone requests Intel Cascade Lake as their CPU platform, then infer the n2 machine type. + // Infer n2d from AMD Rome, etc. val customMachineType = cpuPlatformOption match { + case Some(PipelinesApiRuntimeAttributes.CpuPlatformIntelIceLakeValue) => N2CustomMachineType case Some(PipelinesApiRuntimeAttributes.CpuPlatformIntelCascadeLakeValue) => N2CustomMachineType case Some(PipelinesApiRuntimeAttributes.CpuPlatformAMDRomeValue) => N2DCustomMachineType case _ => N1CustomMachineType diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala index f1a8767ee5b..db87797de37 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala @@ -78,6 +78,7 @@ object PipelinesApiRuntimeAttributes { // via `gcloud compute zones describe us-central1-a` val CpuPlatformIntelCascadeLakeValue = "Intel Cascade Lake" val CpuPlatformAMDRomeValue = "AMD Rome" + val CpuPlatformIntelIceLakeValue = "Intel Ice Lake" val UseDockerImageCacheKey = "useDockerImageCache" private val useDockerImageCacheValidationInstance = new BooleanRuntimeAttributesValidation(UseDockerImageCacheKey).optional diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala index 6eeb1102ec4..48ace9666e0 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala @@ -14,9 +14,11 @@ import wom.format.MemorySize class MachineConstraintsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "MachineConstraints" - private val n2Option = Option(PipelinesApiRuntimeAttributes.CpuPlatformIntelCascadeLakeValue) + private val n2OptionCascade = Option(PipelinesApiRuntimeAttributes.CpuPlatformIntelCascadeLakeValue) - private val n2dOption = Option(PipelinesApiRuntimeAttributes.CpuPlatformAMDRomeValue) + private val n2dOption = Option(PipelinesApiRuntimeAttributes.CpuPlatformAMDRomeValue) + + private val n2OptionIceLake = Option(PipelinesApiRuntimeAttributes.CpuPlatformIntelIceLakeValue) it should "generate valid machine types" in { val validTypes = Table( @@ -40,7 +42,6 @@ class MachineConstraintsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M // Same tests as above but with legacy machine type selection (cpu and memory as specified. 
No 'custom machine // requirement' adjustments are expected this time, except float->int) - (MemorySize(1024, MemoryUnit.MB), refineMV[Positive](1), None, true, "predefined-1-1024"), (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), None, true, "predefined-3-4096"), (MemorySize(1, MemoryUnit.GB), refineMV[Positive](1), None, true, "predefined-1-1024"), @@ -52,15 +53,26 @@ class MachineConstraintsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M (MemorySize(2, MemoryUnit.GB), refineMV[Positive](33), None, true, "predefined-33-2048"), // Same tests but with cascade lake (n2) - (MemorySize(1024, MemoryUnit.MB), refineMV[Positive](1), n2Option, false, "n2-custom-2-2048"), - (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), n2Option, false, "n2-custom-4-4096"), - (MemorySize(1, MemoryUnit.GB), refineMV[Positive](1), n2Option, false, "n2-custom-2-2048"), - (MemorySize(1, MemoryUnit.GB), refineMV[Positive](4), n2Option, false, "n2-custom-4-4096"), - (MemorySize(14, MemoryUnit.GB), refineMV[Positive](16), n2Option, false, "n2-custom-16-16384"), - (MemorySize(13.65, MemoryUnit.GB), refineMV[Positive](1), n2Option, false, "n2-custom-2-14080"), - (MemorySize(1520.96, MemoryUnit.MB), refineMV[Positive](1), n2Option, false, "n2-custom-2-2048"), - (MemorySize(1024.0, MemoryUnit.MB), refineMV[Positive](1), n2Option, false, "n2-custom-2-2048"), - (MemorySize(2, MemoryUnit.GB), refineMV[Positive](33), n2Option, false, "n2-custom-36-36864"), + (MemorySize(1024, MemoryUnit.MB), refineMV[Positive](1), n2OptionCascade, false, "n2-custom-2-2048"), + (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), n2OptionCascade, false, "n2-custom-4-4096"), + (MemorySize(1, MemoryUnit.GB), refineMV[Positive](1), n2OptionCascade, false, "n2-custom-2-2048"), + (MemorySize(1, MemoryUnit.GB), refineMV[Positive](4), n2OptionCascade, false, "n2-custom-4-4096"), + (MemorySize(14, MemoryUnit.GB), refineMV[Positive](16), n2OptionCascade, false, "n2-custom-16-16384"), + (MemorySize(13.65, MemoryUnit.GB), refineMV[Positive](1), n2OptionCascade, false, "n2-custom-2-14080"), + (MemorySize(1520.96, MemoryUnit.MB), refineMV[Positive](1), n2OptionCascade, false, "n2-custom-2-2048"), + (MemorySize(1024.0, MemoryUnit.MB), refineMV[Positive](1), n2OptionCascade, false, "n2-custom-2-2048"), + (MemorySize(2, MemoryUnit.GB), refineMV[Positive](33), n2OptionCascade, false, "n2-custom-36-36864"), + + // Same tests, but with ice lake. Should produce same results as cascade lake since they're both n2. 
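+      // (Custom machine types take the form <family>-custom-<vCPUs>-<memoryMB>, with requested CPU and memory
+      // rounded up to a valid combination - which is why 1 CPU / 1 GB maps to "n2-custom-2-2048" below.)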
+ (MemorySize(1024, MemoryUnit.MB), refineMV[Positive](1), n2OptionIceLake, false, "n2-custom-2-2048"), + (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), n2OptionIceLake, false, "n2-custom-4-4096"), + (MemorySize(1, MemoryUnit.GB), refineMV[Positive](1), n2OptionIceLake, false, "n2-custom-2-2048"), + (MemorySize(1, MemoryUnit.GB), refineMV[Positive](4), n2OptionIceLake, false, "n2-custom-4-4096"), + (MemorySize(14, MemoryUnit.GB), refineMV[Positive](16), n2OptionIceLake, false, "n2-custom-16-16384"), + (MemorySize(13.65, MemoryUnit.GB), refineMV[Positive](1), n2OptionIceLake, false, "n2-custom-2-14080"), + (MemorySize(1520.96, MemoryUnit.MB), refineMV[Positive](1), n2OptionIceLake, false, "n2-custom-2-2048"), + (MemorySize(1024.0, MemoryUnit.MB), refineMV[Positive](1), n2OptionIceLake, false, "n2-custom-2-2048"), + (MemorySize(2, MemoryUnit.GB), refineMV[Positive](33), n2OptionIceLake, false, "n2-custom-36-36864"), // Same tests but with AMD Rome (n2d) #cpu > 16 are in increments of 16 (MemorySize(1024, MemoryUnit.MB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1024"), From 14c31840fddb6d7f7feea8c011b2a94fea6436af Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Thu, 16 Nov 2023 18:07:32 -0500 Subject: [PATCH 16/87] WX-1333 Improve logging visibility for load management (#7253) --- .../main/scala/cromwell/engine/io/IoActor.scala | 1 + .../services/ServiceRegistryActor.scala | 14 +++++++++++++- .../common/api/PipelinesApiRequestManager.scala | 17 +++++++++++++---- 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/engine/src/main/scala/cromwell/engine/io/IoActor.scala b/engine/src/main/scala/cromwell/engine/io/IoActor.scala index b4b3a0b191f..80a362a7b78 100644 --- a/engine/src/main/scala/cromwell/engine/io/IoActor.scala +++ b/engine/src/main/scala/cromwell/engine/io/IoActor.scala @@ -136,6 +136,7 @@ final class IoActor(ioConfig: IoConfig, override def onBackpressure(scale: Option[Double] = None): Unit = { incrementBackpressure() + log.warning("IoActor notifying HighLoad") serviceRegistryActor ! LoadMetric("IO", HighLoad) val uncappedDelay = scale.getOrElse(1.0d) * LoadConfig.IoNormalWindowMinimum diff --git a/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala b/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala index 0c613f307ba..2e43e651b2b 100644 --- a/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala +++ b/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala @@ -6,6 +6,7 @@ import akka.routing.Listen import cats.data.NonEmptyList import com.typesafe.config.{Config, ConfigFactory, ConfigObject} import cromwell.core.Dispatcher.ServiceDispatcher +import cromwell.services.loadcontroller.LoadControllerService.LoadMetric import cromwell.util.GracefulShutdownHelper import cromwell.util.GracefulShutdownHelper.ShutdownCommand import net.ceedubs.ficus.Ficus._ @@ -82,7 +83,9 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging def receive = { case msg: ServiceRegistryMessage => services.get(msg.serviceName) match { - case Some(ref) => ref.tell(transform(msg, sender()), sender()) + case Some(ref) => + debugLogLoadMessages(msg, sender()) + ref.tell(transform(msg, sender()), sender()) case None => log.error("Received ServiceRegistryMessage requesting service '{}' for which no service is configured. Message: {}", msg.serviceName, msg) sender() ! 
ServiceRegistryFailure(msg.serviceName)
@@ -107,6 +110,15 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging
       sender() ! ServiceRegistryFailure("Message is not a ServiceRegistryMessage: " + fool)
   }
 
+  private def debugLogLoadMessages(msg: ServiceRegistryMessage, sender: ActorRef): Unit = {
+    msg match {
+      case msg: LoadMetric =>
+        log.debug(s"Service Registry Actor receiving $msg message from $sender")
+      case _ =>
+        ()
+    }
+  }
+
   /**
    * Set the supervision strategy such that any of the individual service actors fails to initialize that we'll pass
    * the error up the chain
diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala
index 02950e8bec8..81e3c2d9fda 100644
--- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala
+++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala
@@ -2,7 +2,6 @@ package cromwell.backend.google.pipelines.common.api
 
 import java.io.IOException
 import java.util.UUID
-
 import akka.actor.{Actor, ActorLogging, ActorRef, Props, SupervisorStrategy, Terminated, Timers}
 import akka.dispatch.ControlMessage
 import cats.data.NonEmptyList
@@ -18,7 +17,7 @@ import cromwell.core.Dispatcher.BackendDispatcher
 import cromwell.core.retry.SimpleExponentialBackoff
 import cromwell.core.{CromwellFatalExceptionMarker, LoadConfig, Mailbox, WorkflowId}
 import cromwell.services.instrumentation.CromwellInstrumentationScheduler
-import cromwell.services.loadcontroller.LoadControllerService.{HighLoad, LoadMetric, NormalLoad}
+import cromwell.services.loadcontroller.LoadControllerService.{HighLoad, LoadLevel, LoadMetric, NormalLoad}
 import eu.timepit.refined.api.Refined
 import eu.timepit.refined.numeric._
 
@@ -89,6 +88,8 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers:
   protected[api] var statusPollers: Vector[ActorRef] = Vector.empty
   self ! ResetAllRequestWorkers
 
+  private var previousLoad: LoadLevel = NormalLoad
+
   override def preStart() = {
     log.info("Running with {} PAPI request workers", requestWorkers.value)
     startInstrumentationTimer()
@@ -96,8 +97,16 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers:
   }
 
   def monitorQueueSize() = {
-    val load = if (workQueue.size > LoadConfig.PAPIThreshold) HighLoad else NormalLoad
-    serviceRegistryActor ! LoadMetric("PAPIQueryManager", load)
+    val newLoad = if (workQueue.size > LoadConfig.PAPIThreshold) HighLoad else NormalLoad
+
+    if (previousLoad == NormalLoad && newLoad == HighLoad)
+      log.warning(s"PAPI Request Manager transitioned to HighLoad with queue size ${workQueue.size} exceeding limit of ${LoadConfig.PAPIThreshold}")
+    else if (previousLoad == HighLoad && newLoad == NormalLoad)
+      log.info("PAPI Request Manager transitioned back to NormalLoad")
+
+    previousLoad = newLoad
+
+    serviceRegistryActor !
LoadMetric("PAPIQueryManager", newLoad) updateQueueSize(workQueue.size) } From 84b4480dbacf05ef3051fc1aab5c2f1789a46f00 Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Fri, 1 Dec 2023 14:59:41 -0500 Subject: [PATCH 17/87] [WX-1391] Fix Bash Bug (#7326) --- .../StandardAsyncExecutionActor.scala | 28 +++++++++++++++---- ...cpBatchAsyncBackendJobExecutionActor.scala | 13 +++++---- ...inesApiAsyncBackendJobExecutionActor.scala | 12 ++++---- .../TesAsyncBackendJobExecutionActor.scala | 10 +++---- ...TesAsyncBackendJobExecutionActorSpec.scala | 3 +- 5 files changed, 44 insertions(+), 22 deletions(-) diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala index 59eb7f08269..3b116ae1b99 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala @@ -61,6 +61,10 @@ case class DefaultStandardAsyncExecutionActorParams override val minimumRuntimeSettings: MinimumRuntimeSettings ) extends StandardAsyncExecutionActorParams +// Typically we want to "executeInSubshell" for encapsulation of bash code. +// Override to `false` when we need the script to set an environment variable in the parent shell. +case class ScriptPreambleData(bashString: String, executeInSubshell: Boolean = true) + /** * An extension of the generic AsyncBackendJobExecutionActor providing a standard abstract implementation of an * asynchronous polling backend. @@ -328,7 +332,7 @@ trait StandardAsyncExecutionActor } /** Any custom code that should be run within commandScriptContents before the instantiated command. */ - def scriptPreamble: ErrorOr[String] = "".valid + def scriptPreamble: ErrorOr[ScriptPreambleData] = ScriptPreambleData("").valid def cwd: Path = commandDirectory def rcPath: Path = cwd./(jobPaths.returnCodeFilename) @@ -426,7 +430,22 @@ trait StandardAsyncExecutionActor |find . -type d -exec sh -c '[ -z "$$(ls -A '"'"'{}'"'"')" ] && touch '"'"'{}'"'"'/.file' \\; |)""".stripMargin) - val errorOrPreamble: ErrorOr[String] = scriptPreamble + val errorOrPreamble: ErrorOr[String] = scriptPreamble.map{ preambleData => + preambleData.executeInSubshell match { + case true => + s""" + |( + |cd ${cwd.pathAsString} + |${preambleData.bashString} + |) + |""".stripMargin + case false => + s""" + |cd ${cwd.pathAsString} + |${preambleData.bashString} + |""".stripMargin + } + } // The `tee` trickery below is to be able to redirect to known filenames for CWL while also streaming // stdout and stderr for PAPI to periodically upload to cloud storage. 
@@ -440,10 +459,9 @@ trait StandardAsyncExecutionActor |export _JAVA_OPTIONS=-Djava.io.tmpdir="$$tmpDir" |export TMPDIR="$$tmpDir" |export HOME="$home" - |( - |cd ${cwd.pathAsString} + | |SCRIPT_PREAMBLE - |) + | |$out="$${tmpDir}/out.$$$$" $err="$${tmpDir}/err.$$$$" |mkfifo "$$$out" "$$$err" |trap 'rm "$$$out" "$$$err"' EXIT diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala index 766a8f2552f..b3f9d47d316 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala @@ -23,8 +23,9 @@ import cromwell.backend.google.batch.runnable.WorkflowOptionKeys import cromwell.backend.google.batch.util.{GcpBatchReferenceFilesMappingOperations, RuntimeOutputMapping} import cromwell.filesystems.gcs.GcsPathBuilder import cromwell.filesystems.gcs.GcsPathBuilder.ValidFullGcsPath + import java.io.FileNotFoundException -import cromwell.backend.standard.{StandardAdHocValue, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} +import cromwell.backend.standard.{ScriptPreambleData, StandardAdHocValue, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} import cromwell.core._ import cromwell.core.io.IoCommandBuilder import cromwell.core.path.{DefaultPathBuilder, Path} @@ -49,6 +50,7 @@ import wom.core.FullyQualifiedName import wom.expression.{FileEvaluation, NoIoFunctionSet} import wom.format.MemorySize import wom.values._ + import java.io.OutputStreamWriter import java.nio.charset.Charset import java.util.Base64 @@ -663,12 +665,13 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar private val DockerMonitoringLogPath: Path = GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringLogFilename) private val DockerMonitoringScriptPath: Path = GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringScriptFilename) - override def scriptPreamble: ErrorOr[String] = { - if (monitoringOutput.isDefined) { + override def scriptPreamble: ErrorOr[ScriptPreambleData] = { + if (monitoringOutput.isDefined) + ScriptPreambleData( s"""|touch $DockerMonitoringLogPath |chmod u+x $DockerMonitoringScriptPath - |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin.valid - } else "".valid + |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin).valid + else ScriptPreambleData("").valid } private[actors] def generateInputs(jobDescriptor: BackendJobDescriptor): Set[GcpBatchInput] = { diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala index 942838f8125..fee63573ff1 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala @@ -1,7 +1,6 @@ package 
cromwell.backend.google.pipelines.common import java.net.SocketTimeoutException - import _root_.io.grpc.Status import akka.actor.ActorRef import akka.http.scaladsl.model.{ContentType, ContentTypes} @@ -27,7 +26,7 @@ import cromwell.backend.google.pipelines.common.errors.FailedToDelocalizeFailure import cromwell.backend.google.pipelines.common.io._ import cromwell.backend.google.pipelines.common.monitoring.{CheckpointingConfiguration, MonitoringImage} import cromwell.backend.io.DirectoryFunctions -import cromwell.backend.standard.{StandardAdHocValue, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} +import cromwell.backend.standard.{ScriptPreambleData, StandardAdHocValue, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} import cromwell.core._ import cromwell.core.io.IoCommandBuilder import cromwell.core.path.{DefaultPathBuilder, Path} @@ -380,12 +379,13 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta private lazy val isDockerImageCacheUsageRequested = runtimeAttributes.useDockerImageCache.getOrElse(useDockerImageCache(jobDescriptor.workflowDescriptor)) - override def scriptPreamble: ErrorOr[String] = { - if (monitoringOutput.isDefined) { + override def scriptPreamble: ErrorOr[ScriptPreambleData] = { + if (monitoringOutput.isDefined) + ScriptPreambleData( s"""|touch $DockerMonitoringLogPath |chmod u+x $DockerMonitoringScriptPath - |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin - }.valid else "".valid + |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin).valid + else ScriptPreambleData("").valid } override def globParentDirectory(womGlobFile: WomGlobFile): Path = { diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala index 100ed6137e9..92391296533 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala @@ -17,7 +17,7 @@ import cromwell.backend.BackendJobLifecycleActor import cromwell.backend.async.{AbortedExecutionHandle, ExecutionHandle, FailedNonRetryableExecutionHandle, PendingExecutionHandle} import cromwell.backend.impl.tes.TesAsyncBackendJobExecutionActor.{determineWSMSasEndpointFromInputs, generateLocalizedSasScriptPreamble} import cromwell.backend.impl.tes.TesResponseJsonFormatter._ -import cromwell.backend.standard.{StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} +import cromwell.backend.standard.{ScriptPreambleData, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} import cromwell.core.logging.JobLogger import cromwell.core.path.{DefaultPathBuilder, Path} import cromwell.core.retry.Retry._ @@ -123,7 +123,7 @@ object TesAsyncBackendJobExecutionActor { |export $environmentVariableName=$$(echo "$${sas_response_json}" | jq -r '.token') | |# Echo the first characters for logging/debugging purposes. "null" indicates something went wrong. - |echo Saving sas token: $${$environmentVariableName:0:4}**** to environment variable $environmentVariableName... + |echo "Saving sas token: $${$environmentVariableName:0:4}**** to environment variable $environmentVariableName..." 
|### END ACQUIRE LOCAL SAS TOKEN ### |""".stripMargin } @@ -213,7 +213,7 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn * * @return Bash code to run at the start of a task. */ - override def scriptPreamble: ErrorOr[String] = { + override def scriptPreamble: ErrorOr[ScriptPreambleData] = { runtimeAttributes.localizedSasEnvVar match { case Some(environmentVariableName) => { // Case: user wants a sas token. Return the computed preamble or die trying. val workflowName = workflowDescriptor.callable.name @@ -222,9 +222,9 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn } val taskInputs: List[Input] = TesTask.buildTaskInputs(callInputFiles, workflowName, mapCommandLineWomFile) val computedEndpoint = determineWSMSasEndpointFromInputs(taskInputs, getPath, jobLogger) - computedEndpoint.map(endpoint => generateLocalizedSasScriptPreamble(environmentVariableName, endpoint)) + computedEndpoint.map(endpoint => ScriptPreambleData(generateLocalizedSasScriptPreamble(environmentVariableName, endpoint), executeInSubshell = false)) }.toErrorOr - case _ => "".valid // Case: user doesn't want a sas token. Empty preamble is the correct preamble. + case _ => ScriptPreambleData("", executeInSubshell = false).valid // Case: user doesn't want a sas token. Empty preamble is the correct preamble. } } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala index a28fce3d445..b9082d2e01f 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala @@ -144,12 +144,13 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit | -H "Authorization: Bearer $${BEARER_TOKEN}") |""".stripMargin val exportCommandSubstring = s"""export $mockEnvironmentVariableNameFromWom=$$(echo "$${sas_response_json}" | jq -r '.token')""" - + val echoCommandSubstring = s"""echo "Saving sas token: $${$mockEnvironmentVariableNameFromWom:0:4}**** to environment variable $mockEnvironmentVariableNameFromWom..."""" val generatedBashScript = TesAsyncBackendJobExecutionActor.generateLocalizedSasScriptPreamble(mockEnvironmentVariableNameFromWom, expectedEndpoint) generatedBashScript should include (beginSubstring) generatedBashScript should include (endSubstring) generatedBashScript should include (curlCommandSubstring) + generatedBashScript should include (echoCommandSubstring) generatedBashScript should include (exportCommandSubstring) } } From 408bc298f10e9a2ba8a9d05462658e75f61a0b6c Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Mon, 4 Dec 2023 13:42:03 -0500 Subject: [PATCH 18/87] [WX-1393] Add Content Length to Curl request (#7328) --- .../impl/tes/TesAsyncBackendJobExecutionActor.scala | 8 +++++--- .../impl/tes/TesAsyncBackendJobExecutionActorSpec.scala | 4 +++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala index 92391296533..bd171d757b4 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala +++ 
b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala @@ -100,11 +100,11 @@ object TesAsyncBackendJobExecutionActor { | exit 1 | fi |fi - | + |curl --version + |jq --version |# Acquire bearer token, relying on the User Assigned Managed Identity of this VM. |echo Acquiring Bearer Token using User Assigned Managed Identity... |BEARER_TOKEN=$$(curl 'http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https%3A%2F%2Fmanagement.azure.com%2F' -H Metadata:true -s | jq .access_token) - | |# Remove the leading and trailing quotes |BEARER_TOKEN="$${BEARER_TOKEN#\\"}" |BEARER_TOKEN="$${BEARER_TOKEN%\\"}" @@ -117,7 +117,9 @@ object TesAsyncBackendJobExecutionActor { | -X POST "$getSasWsmEndpoint" \\ | -H "Content-Type: application/json" \\ | -H "accept: */*" \\ - | -H "Authorization: Bearer $${BEARER_TOKEN}") + | -H "Authorization: Bearer $${BEARER_TOKEN}" \\ + | -H "Content-Length: 0" \\ + | -d "") | |# Store token as environment variable |export $environmentVariableName=$$(echo "$${sas_response_json}" | jq -r '.token') diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala index b9082d2e01f..11267d238cf 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala @@ -141,7 +141,9 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit | -X POST "$expectedEndpoint" \\ | -H "Content-Type: application/json" \\ | -H "accept: */*" \\ - | -H "Authorization: Bearer $${BEARER_TOKEN}") + | -H "Authorization: Bearer $${BEARER_TOKEN}" \\ + | -H "Content-Length: 0" \\ + | -d "") |""".stripMargin val exportCommandSubstring = s"""export $mockEnvironmentVariableNameFromWom=$$(echo "$${sas_response_json}" | jq -r '.token')""" val echoCommandSubstring = s"""echo "Saving sas token: $${$mockEnvironmentVariableNameFromWom:0:4}**** to environment variable $mockEnvironmentVariableNameFromWom..."""" From 9b98728520850b195f28cb02c652a8df24838f3e Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Mon, 4 Dec 2023 15:39:48 -0500 Subject: [PATCH 19/87] [WX-1346] Scalafmt (#7257) --- .scalafmt.conf | 18 + .../main/scala/cromiam/auth/Collection.scala | 23 +- .../src/main/scala/cromiam/auth/User.scala | 1 - .../cromiam/cromwell/CromwellClient.scala | 63 +- .../CromIamInstrumentation.scala | 17 +- .../main/scala/cromiam/sam/SamClient.scala | 66 +- .../scala/cromiam/server/CromIamServer.scala | 20 +- .../server/config/CromIamServerConfig.scala | 37 +- .../status/StatusCheckedSubsystem.scala | 5 +- .../cromiam/server/status/StatusService.scala | 7 +- .../webservice/CromIamApiService.scala | 127 +- .../webservice/EngineRouteSupport.scala | 8 +- .../cromiam/webservice/QuerySupport.scala | 27 +- .../cromiam/webservice/RequestSupport.scala | 23 +- .../webservice/SubmissionSupport.scala | 105 +- .../webservice/SwaggerUiHttpService.scala | 19 +- .../scala/cromiam/auth/CollectionSpec.scala | 1 - .../cromiam/cromwell/CromwellClientSpec.scala | 69 +- .../scala/cromiam/sam/SamClientSpec.scala | 92 +- .../server/status/MockStatusService.scala | 7 +- .../webservice/CromIamApiServiceSpec.scala | 195 +- .../webservice/EngineRouteSupportSpec.scala | 8 +- .../cromiam/webservice/MockClients.scala | 67 +- 
.../cromiam/webservice/QuerySupportSpec.scala | 14 +- .../webservice/SubmissionSupportSpec.scala | 9 +- .../webservice/SwaggerServiceSpec.scala | 54 +- .../webservice/SwaggerUiHttpServiceSpec.scala | 42 +- .../webservice/WomtoolRouteSupportSpec.scala | 8 +- .../backend/BackendCacheHitCopyingActor.scala | 8 +- .../backend/BackendInitializationData.scala | 6 +- .../backend/BackendJobExecutionActor.scala | 62 +- .../backend/BackendLifecycleActor.scala | 11 +- .../BackendLifecycleActorFactory.scala | 49 +- .../BackendWorkflowFinalizationActor.scala | 4 +- .../BackendWorkflowInitializationActor.scala | 71 +- .../main/scala/cromwell/backend/Command.scala | 13 +- .../cromwell/backend/FileSizeTooBig.scala | 1 - .../cromwell/backend/OutputEvaluator.scala | 68 +- .../backend/RuntimeAttributeDefinition.scala | 16 +- .../cromwell/backend/RuntimeEnvironment.scala | 48 +- .../cromwell/backend/SlowJobWarning.scala | 10 +- .../cromwell/backend/WriteFunctions.scala | 14 +- .../async/AsyncBackendJobExecutionActor.scala | 33 +- .../backend/async/ExecutionHandle.scala | 12 +- .../backend/async/ExecutionResult.scala | 3 +- .../async/KnownJobFailureException.scala | 52 +- .../cromwell/backend/async/package.scala | 1 - .../main/scala/cromwell/backend/backend.scala | 43 +- .../dummy/DummyAsyncExecutionActor.scala | 83 +- .../dummy/DummyInitializationActor.scala | 15 +- .../dummy/DummyLifecycleActorFactory.scala | 17 +- .../backend/dummy/DummySingletonActor.scala | 5 +- .../backend/io/DirectoryFunctions.scala | 45 +- .../io/FileEvaluationIoFunctionSet.scala | 3 +- .../cromwell/backend/io/GlobFunctions.scala | 7 +- .../scala/cromwell/backend/io/JobPaths.scala | 4 +- .../backend/io/JobPathsWithDocker.scala | 36 +- .../cromwell/backend/io/WorkflowPaths.scala | 19 +- .../backend/io/WorkflowPathsWithDocker.scala | 11 +- .../main/scala/cromwell/backend/package.scala | 1 + .../StandardAsyncExecutionActor.scala | 738 +++--- .../standard/StandardCachingActorHelper.scala | 8 +- .../StandardExpressionFunctions.scala | 10 +- .../standard/StandardFinalizationActor.scala | 19 +- .../StandardInitializationActor.scala | 34 +- .../standard/StandardInitializationData.scala | 8 +- .../StandardJobExecutionActorParams.scala | 8 +- .../StandardLifecycleActorFactory.scala | 228 +- .../standard/StandardSyncExecutionActor.scala | 50 +- ...ardValidatedRuntimeAttributesBuilder.scala | 14 +- .../standard/callcaching/BlacklistCache.scala | 35 +- .../CallCachingBlacklistManager.scala | 28 +- .../CopyingActorBlacklistCacheSupport.scala | 36 +- .../RootWorkflowFileHashCacheActor.scala | 42 +- .../StandardCacheHitCopyingActor.scala | 278 ++- .../StandardFileHashingActor.scala | 33 +- .../cromwell/backend/standard/package.scala | 2 +- .../validation/ContinueOnReturnCode.scala | 4 +- .../ContinueOnReturnCodeValidation.scala | 13 +- .../backend/validation/DockerValidation.scala | 4 +- .../validation/FailOnStderrValidation.scala | 7 +- .../validation/InformationValidation.scala | 63 +- .../validation/MaxRetriesValidation.scala | 4 +- .../backend/validation/MemoryValidation.scala | 16 +- ...PrimitiveRuntimeAttributesValidation.scala | 23 +- .../validation/RuntimeAttributesDefault.scala | 39 +- .../RuntimeAttributesValidation.scala | 114 +- .../ValidatedRuntimeAttributesBuilder.scala | 23 +- .../ValidationAggregatedException.scala | 3 +- .../wfs/DefaultWorkflowPathBuilder.scala | 1 - .../backend/wfs/WorkflowPathBuilder.scala | 12 +- .../scala/cromwell/backend/BackendSpec.scala | 153 +- ...ckendWorkflowInitializationActorSpec.scala | 105 +- 
.../cromwell/backend/MemorySizeSpec.scala | 2 +- .../backend/OutputEvaluatorSpec.scala | 85 +- .../RuntimeAttributeValidationSpec.scala | 76 +- .../scala/cromwell/backend/TestConfig.scala | 3 +- .../backend/io/DirectoryFunctionsSpec.scala | 10 +- .../cromwell/backend/io/JobPathsSpec.scala | 7 +- .../cromwell/backend/io/TestWorkflows.scala | 109 +- .../backend/io/WorkflowPathsSpec.scala | 18 +- ...alidatedRuntimeAttributesBuilderSpec.scala | 126 +- .../CallCachingBlacklistManagerSpec.scala | 3 +- .../RootWorkflowHashCacheActorSpec.scala | 47 +- .../StandardFileHashingActorSpec.scala | 23 +- .../validation/ContinueOnReturnCodeSpec.scala | 15 +- .../RuntimeAttributesDefaultSpec.scala | 12 +- .../RuntimeAttributesValidationSpec.scala | 256 +- .../centaur/AbstractCentaurTestCaseSpec.scala | 60 +- ...llEngineOrBackendUpgradeTestCaseSpec.scala | 48 +- .../it/scala/centaur/CentaurTestSuite.scala | 10 +- .../centaur/EngineUpgradeTestCaseSpec.scala | 7 +- .../scala/centaur/ExternalTestCaseSpec.scala | 7 +- .../centaur/PapiUpgradeTestCaseSpec.scala | 2 +- .../scala/centaur/ParallelTestCaseSpec.scala | 6 +- .../centaur/SequentialTestCaseSpec.scala | 4 +- .../centaur/WdlUpgradeTestCaseSpec.scala | 4 +- .../CromwellDatabaseCallCaching.scala | 3 +- .../centaur/reporting/AggregatedIo.scala | 19 +- .../centaur/reporting/BigQueryReporter.scala | 125 +- .../centaur/reporting/CiEnvironment.scala | 8 +- .../centaur/reporting/ErrorReporter.scala | 8 +- .../ErrorReporterCromwellDatabase.scala | 24 +- .../reporting/ErrorReporterParams.scala | 3 +- .../centaur/reporting/ErrorReporters.scala | 32 +- .../scala/centaur/reporting/GcsReporter.scala | 16 +- .../centaur/reporting/Slf4jReporter.scala | 14 +- .../centaur/reporting/SuccessReporter.scala | 8 +- .../main/scala/centaur/CentaurConfig.scala | 11 +- .../main/scala/centaur/CromwellManager.scala | 14 +- .../main/scala/centaur/CromwellTracker.scala | 10 +- .../DockerComposeCromwellConfiguration.scala | 17 +- .../centaur/JarCromwellConfiguration.scala | 17 +- .../centaur/api/CentaurCromwellClient.scala | 104 +- .../src/main/scala/centaur/api/Retry.scala | 18 +- .../main/scala/centaur/json/JsonUtils.scala | 18 +- .../centaur/test/CentaurTestException.scala | 27 +- .../scala/centaur/test/ObjectCounter.scala | 14 +- .../src/main/scala/centaur/test/Test.scala | 589 +++-- .../main/scala/centaur/test/TestOptions.scala | 8 +- .../centaur/test/formulas/TestFormulas.scala | 161 +- .../centaur/test/markers/CallMarker.scala | 3 +- .../test/metadata/CallAttemptFailure.scala | 22 +- .../test/metadata/WorkflowFlatMetadata.scala | 41 +- .../test/standard/CentaurTestCase.scala | 33 +- .../test/standard/CentaurTestFormat.scala | 69 +- .../centaur/test/submit/SubmitResponse.scala | 16 +- .../workflow/DirectoryContentCountCheck.scala | 19 +- .../workflow/SubmittedWorkflowTracker.scala | 3 +- .../centaur/test/workflow/Workflow.scala | 60 +- .../centaur/test/workflow/WorkflowData.scala | 57 +- .../DaemonizedDefaultThreadFactorySpec.scala | 2 +- .../scala/centaur/json/JsonUtilsSpec.scala | 8 +- .../centaur/test/CentaurOperationsSpec.scala | 24 +- .../metadata/CallAttemptFailureSpec.scala | 1 - ...actJobManagerStyleMetadataFieldsSpec.scala | 5 +- .../testfilecheck/FileCheckerSpec.scala | 20 +- .../impl/drs/DrsCloudNioFileProvider.scala | 47 +- .../drs/DrsCloudNioFileSystemProvider.scala | 7 +- .../DrsCloudNioRegularFileAttributes.scala | 45 +- .../scala/cloud/nio/impl/drs/DrsConfig.scala | 21 +- .../cloud/nio/impl/drs/DrsCredentials.scala | 24 +- 
.../cloud/nio/impl/drs/DrsPathResolver.scala | 86 +- .../DrsResolverHttpRequestRetryStrategy.scala | 14 +- .../nio/impl/drs/EngineDrsPathResolver.scala | 6 +- .../drs/DrsCloudNioFileProviderSpec.scala | 31 +- .../DrsCloudNioFileSystemProviderSpec.scala | 2 +- .../nio/impl/drs/DrsPathResolverSpec.scala | 118 +- ...ResolverHttpRequestRetryStrategySpec.scala | 2 +- .../MockDrsCloudNioFileSystemProvider.scala | 13 +- .../impl/drs/MockEngineDrsPathResolver.scala | 16 +- .../cloud/nio/impl/ftp/FtpClientPool.scala | 29 +- .../impl/ftp/FtpCloudNioFileProvider.scala | 70 +- .../nio/impl/ftp/FtpCloudNioFileSystem.scala | 11 +- .../ftp/FtpCloudNioFileSystemProvider.scala | 24 +- .../cloud/nio/impl/ftp/FtpCredentials.scala | 3 +- .../cloud/nio/impl/ftp/FtpFileSystems.scala | 16 +- .../ftp/FtpFileSystemsConfiguration.scala | 3 +- .../scala/cloud/nio/impl/ftp/FtpUtil.scala | 16 +- .../nio/impl/ftp/InputOutputStreams.scala | 8 +- .../nio/impl/ftp/LeasedInputStream.scala | 6 +- .../nio/impl/ftp/LeasedOutputStream.scala | 6 +- .../impl/ftp/operations/FtpOperation.scala | 78 +- .../nio/impl/ftp/FtpClientPoolSpec.scala | 10 +- .../ftp/FtpCloudNioFileProviderSpec.scala | 3 +- .../FtpCloudNioFileSystemProviderSpec.scala | 15 +- .../impl/ftp/FtpCloudNioFileSystemSpec.scala | 8 +- .../nio/impl/ftp/FtpCredentialsSpec.scala | 10 +- .../nio/impl/ftp/FtpFileSystemsSpec.scala | 4 +- .../cloud/nio/impl/ftp/FtpUtilSpec.scala | 6 +- .../nio/impl/ftp/LeaseInputStreamSpec.scala | 6 +- .../nio/impl/ftp/LeaseOutputStreamSpec.scala | 6 +- .../nio/impl/ftp/MockFtpFileSystem.scala | 15 +- .../scala/cloud/nio/spi/CloudNioBackoff.scala | 26 +- .../nio/spi/CloudNioDirectoryStream.scala | 13 +- .../nio/spi/CloudNioFileAttributeView.scala | 10 +- .../cloud/nio/spi/CloudNioFileSystem.scala | 31 +- .../nio/spi/CloudNioFileSystemProvider.scala | 76 +- .../scala/cloud/nio/spi/CloudNioPath.scala | 56 +- .../cloud/nio/spi/CloudNioReadChannel.scala | 49 +- .../scala/cloud/nio/spi/CloudNioRetry.scala | 8 +- .../cloud/nio/spi/CloudNioWriteChannel.scala | 7 +- .../main/scala/cloud/nio/spi/UnixPath.scala | 62 +- .../scala/cloud/nio/util/ChannelUtil.scala | 3 +- .../scala/cloud/nio/util/CloudNioFiles.scala | 3 +- .../scala/cloud/nio/util/CloudNioPaths.scala | 21 +- .../main/scala/cloud/nio/util/IoUtil.scala | 15 +- .../cloud/nio/util/TryWithResource.scala | 16 +- .../scala/cloud/nio/util/VersionUtil.scala | 9 +- .../cloudsupport/aws/AwsConfiguration.scala | 19 +- .../cloudsupport/aws/auth/AwsAuthMode.scala | 71 +- .../cloudsupport/aws/s3/S3Storage.scala | 6 +- .../cloudsupport/azure/AzureCredentials.scala | 8 +- .../cloudsupport/azure/AzureUtils.scala | 35 +- .../gcp/GoogleConfiguration.scala | 24 +- .../gcp/auth/GoogleAuthMode.scala | 60 +- .../cloudsupport/gcp/gcs/GcsStorage.scala | 25 +- .../gcp/http/GoogleHttpTransportOptions.scala | 3 +- .../aws/AwsConfigurationSpec.scala | 69 +- .../cloudsupport/aws/s3/S3StorageSpec.scala | 1 - .../gcp/GoogleConfigurationSpec.scala | 75 +- .../gcp/auth/ApplicationDefaultModeSpec.scala | 1 - .../gcp/auth/GoogleAuthModeSpec.scala | 7 +- .../gcp/auth/MockAuthModeSpec.scala | 1 - .../gcp/auth/ServiceAccountModeSpec.scala | 6 +- .../gcp/auth/ServiceAccountTestSupport.scala | 10 +- .../cloudsupport/gcp/auth/UserModeSpec.scala | 1 - .../gcp/auth/UserServiceAccountModeSpec.scala | 1 - .../cloudsupport/gcp/gcs/GcsStorageSpec.scala | 4 +- codegen_java/project/Artifactory.scala | 2 +- codegen_java/project/Publishing.scala | 16 +- codegen_java/project/Version.scala | 16 +- .../collections/EnhancedCollections.scala | 
51 +- .../main/scala/common/collections/Table.scala | 9 +- .../common/collections/WeightedQueue.scala | 22 +- .../exception/ExceptionAggregation.scala | 21 +- .../main/scala/common/exception/package.scala | 3 +- .../scala/common/numeric/IntegerUtil.scala | 6 +- .../scala/common/transforms/package.scala | 48 +- .../src/main/scala/common/util/Backoff.scala | 2 + .../src/main/scala/common/util/IORetry.scala | 18 +- .../common/util/IntrospectableLazy.scala | 15 +- .../main/scala/common/util/StringUtil.scala | 6 +- .../main/scala/common/util/TerminalUtil.scala | 14 +- .../src/main/scala/common/util/TimeUtil.scala | 10 +- .../src/main/scala/common/util/TryUtil.scala | 3 +- .../src/main/scala/common/util/UriUtil.scala | 25 +- .../main/scala/common/util/VersionUtil.scala | 9 +- .../scala/common/validation/ErrorOr.scala | 341 ++- .../scala/common/validation/IOChecked.scala | 38 +- .../scala/common/validation/Validation.scala | 53 +- .../assertion/CaseClassAssertions.scala | 3 +- .../common/assertion/ErrorOrAssertions.scala | 1 - .../scala/common/collections/TableSpec.scala | 17 +- .../collections/WeightedQueueSpec.scala | 4 +- .../exception/ExceptionAggregationSpec.scala | 1 - .../scala/common/mock/MockImplicits.scala | 38 +- .../test/scala/common/mock/MockSugar.scala | 15 +- .../common/numeric/IntegerUtilSpec.scala | 33 +- .../common/util/IntrospectableLazySpec.scala | 12 +- .../test/scala/common/util/IoRetrySpec.scala | 4 +- .../scala/common/util/StringUtilSpec.scala | 46 +- .../scala/common/util/TerminalUtilSpec.scala | 1 - .../test/scala/common/util/TryUtilSpec.scala | 31 +- .../scala/common/util/VersionUtilSpec.scala | 7 +- .../scala/common/validation/CheckedSpec.scala | 3 +- .../scala/common/validation/ErrorOrSpec.scala | 261 +- .../common/validation/ValidationSpec.scala | 1 - .../core/BackendDockerConfiguration.scala | 4 +- .../main/scala/cromwell/core/ConfigUtil.scala | 21 +- .../cromwell/core/DockerConfiguration.scala | 43 +- .../main/scala/cromwell/core/Encryption.scala | 21 +- .../scala/cromwell/core/ExecutionIndex.scala | 3 +- .../scala/cromwell/core/ExecutionStatus.scala | 5 +- .../main/scala/cromwell/core/HogGroup.scala | 10 +- .../src/main/scala/cromwell/core/JobKey.scala | 4 +- .../core/MonitoringCompanionActor.scala | 16 +- .../main/scala/cromwell/core/WorkflowId.scala | 6 +- .../scala/cromwell/core/WorkflowOptions.scala | 42 +- .../core/WorkflowProcessingEvents.scala | 6 +- .../core/WorkflowSourceFilesCollection.scala | 25 +- .../scala/cromwell/core/WorkflowState.scala | 17 +- .../cromwell/core/actor/BatchActor.scala | 16 +- .../core/actor/RobustClientHelper.scala | 23 +- .../core/actor/StreamActorHelper.scala | 45 +- .../cromwell/core/actor/ThrottlerActor.scala | 3 +- .../core/callcaching/CallCachingMode.scala | 14 +- .../core/callcaching/HashResultMessage.scala | 1 - core/src/main/scala/cromwell/core/core.scala | 18 +- .../core/filesystem/CromwellFileSystems.scala | 105 +- .../main/scala/cromwell/core/io/AsyncIo.scala | 31 +- .../cromwell/core/io/AsyncIoFunctions.scala | 1 + .../core/io/CorePathFunctionSet.scala | 11 +- .../cromwell/core/io/DefaultIoCommand.scala | 30 +- .../main/scala/cromwell/core/io/IoAck.scala | 8 +- .../cromwell/core/io/IoClientHelper.scala | 17 +- .../scala/cromwell/core/io/IoCommand.scala | 21 +- .../cromwell/core/io/IoCommandBuilder.scala | 64 +- .../core/io/IoPromiseProxyActor.scala | 16 +- .../scala/cromwell/core/io/Throttle.scala | 3 +- .../scala/cromwell/core/labels/Label.scala | 9 +- .../scala/cromwell/core/labels/Labels.scala | 6 +- 
.../core/logging/EnhancedDateConverter.scala | 7 +- .../core/logging/EnhancedSlf4jLogger.scala | 1 + .../core/logging/JavaLoggingBridge.scala | 1 + .../cromwell/core/logging/JobLogger.scala | 16 +- .../cromwell/core/logging/LoggerWrapper.scala | 49 +- .../core/logging/WorkflowLogger.scala | 54 +- .../core/path/BetterFileMethods.scala | 164 +- .../core/path/CustomRetryParams.scala | 5 +- .../core/path/DefaultPathBuilder.scala | 6 +- .../core/path/DefaultPathBuilderFactory.scala | 3 +- .../core/path/EvenBetterPathMethods.scala | 58 +- .../core/path/JavaWriterImplicits.scala | 1 + .../cromwell/core/path/NioPathMethods.scala | 6 +- .../scala/cromwell/core/path/Obsolete.scala | 6 +- .../cromwell/core/path/PathBuilder.scala | 7 +- .../core/path/PathBuilderFactory.scala | 9 +- .../scala/cromwell/core/path/PathCopier.scala | 9 +- .../cromwell/core/path/PathFactory.scala | 28 +- .../core/path/PathObjectMethods.scala | 3 +- .../scala/cromwell/core/path/PathWriter.scala | 3 +- .../cromwell/core/retry/GoogleBackoff.scala | 52 +- .../scala/cromwell/core/retry/Retry.scala | 23 +- .../core/simpleton/WomValueBuilder.scala | 118 +- .../core/simpleton/WomValueSimpleton.scala | 31 +- .../scala/cromwell/util/DatabaseUtil.scala | 2 +- .../util/GracefulShutdownHelper.scala | 7 +- .../WomValueJsonFormatter.scala | 9 +- .../scala/cromwell/util/PromiseActor.scala | 12 +- .../cromwell/util/StopAndLogSupervisor.scala | 9 +- .../scala/cromwell/util/TryWithResource.scala | 16 +- .../cromwell/core/DockerCredentialsSpec.scala | 6 +- .../scala/cromwell/core/LoadConfigSpec.scala | 2 +- .../scala/cromwell/core/MockIoActor.scala | 7 +- .../scala/cromwell/core/SimpleIoActor.scala | 36 +- .../scala/cromwell/core/TestKitSuite.scala | 3 +- .../cromwell/core/WorkflowOptionsSpec.scala | 6 +- .../cromwell/core/actor/BatchActorSpec.scala | 17 +- .../core/actor/RobustClientHelperSpec.scala | 63 +- .../core/actor/StreamActorHelperSpec.scala | 25 +- .../core/callcaching/HashKeySpec.scala | 5 +- .../filesystem/CromwellFileSystemsSpec.scala | 108 +- .../scala/cromwell/core/io/AsyncIoSpec.scala | 13 +- .../cromwell/core/io/IoClientHelperSpec.scala | 34 +- .../cromwell/core/labels/LabelSpec.scala | 2 +- .../core/logging/LoggerWrapperSpec.scala | 66 +- .../core/path/DefaultPathBuilderSpec.scala | 80 +- .../core/path/PathBuilderFactorySpec.scala | 30 +- .../core/path/PathBuilderSpecUtils.scala | 6 +- .../cromwell/core/retry/BackoffSpec.scala | 15 +- .../scala/cromwell/core/retry/RetrySpec.scala | 24 +- .../core/simpleton/WomValueBuilderSpec.scala | 271 ++- .../scala/cromwell/util/AkkaTestUtil.scala | 6 +- .../scala/cromwell/util/EncryptionSpec.scala | 3 +- .../util/GracefulShutdownHelperSpec.scala | 12 +- .../test/scala/cromwell/util/SampleWdl.scala | 881 +++---- .../scala/cromwell/util/TestFileUtil.scala | 4 +- .../cromwell/util/TryWithResourceSpec.scala | 18 +- .../test/scala/cromwell/util/WomMocks.scala | 73 +- .../util/WomValueJsonFormatterSpec.scala | 4 +- .../drs/localizer/CommandLineParser.scala | 46 +- .../DrsLocalizerDrsPathResolver.scala | 4 +- .../drs/localizer/DrsLocalizerMain.scala | 153 +- .../downloaders/BulkAccessUrlDownloader.scala | 59 +- .../downloaders/DownloaderFactory.scala | 5 +- .../downloaders/GcsUriDownloader.scala | 47 +- .../localizer/downloaders/GetmChecksum.scala | 13 +- .../drs/localizer/CommandLineParserSpec.scala | 68 +- .../drs/localizer/DrsLocalizerMainSpec.scala | 302 ++- .../BulkAccessUrlDownloaderSpec.scala | 54 +- .../downloaders/GetmChecksumSpec.scala | 31 +- 
.../scala/cromwell/api/CromwellClient.scala | 171 +- .../cromwell/api/model/CallCacheDiff.scala | 12 +- .../api/model/CromwellQueryResult.scala | 8 +- .../main/scala/cromwell/api/model/Label.scala | 2 +- .../scala/cromwell/api/model/TimeUtil.scala | 10 +- .../cromwell/api/model/WaasDescription.scala | 9 +- .../api/model/WorkflowDescribeRequest.scala | 3 +- .../scala/cromwell/api/model/WorkflowId.scala | 1 - .../cromwell/api/model/WorkflowStatus.scala | 3 +- .../api/model/WorkflowSubmission.scala | 6 +- .../scala/cromwell/api/model/package.scala | 17 +- .../cromwell/api/CromwellClientSpec.scala | 168 +- .../api/CromwellResponseFailedSpec.scala | 26 +- ...CromwellQueryResultJsonFormatterSpec.scala | 77 +- .../api/model/LabelsJsonFormatterSpec.scala | 11 +- .../WaasDescriptionJsonSupportSpec.scala | 8 +- .../migration/WdlTransformation.scala | 9 +- .../migration/custom/BatchedTaskChange.scala | 46 +- .../custom/MigrationTaskChange.scala | 3 +- .../migration/custom/QueryPaginator.scala | 7 +- .../DeduplicateFailureMessageIds.scala | 6 +- .../liquibase/DiffResultFilter.scala | 64 +- .../migration/liquibase/LiquibaseUtils.scala | 42 +- .../metadata/MetadataCustomSql.scala | 8 +- .../table/ExecutionTableMigration.scala | 50 +- .../table/FailureEventTableMigration.scala | 47 +- ...cutionEventTableDescriptionMigration.scala | 3 +- .../ExecutionEventTableEndMigration.scala | 47 +- .../ExecutionEventTableStartMigration.scala | 50 +- .../CallOutputSymbolTableMigration.scala | 17 +- .../symbol/InputSymbolTableMigration.scala | 15 +- .../table/symbol/MetadataStatement.scala | 22 +- .../table/symbol/SymbolTableMigration.scala | 35 +- .../WorkflowOutputSymbolTableMigration.scala | 3 +- .../WorkflowExecutionTableMigration.scala | 194 +- .../table/JobStoreSimpletonMigration.scala | 11 +- .../RenameWorkflowOptionKeysMigration.scala | 4 +- .../ClearMetadataEntryWorkflowOptions.scala | 3 +- .../RenameWorkflowOptionsInMetadata.scala | 6 +- .../WorkflowOptionsChange.scala | 8 +- .../WorkflowOptionsRenaming.scala | 3 +- .../slick/CallCachingSlickDatabase.scala | 158 +- .../slick/DockerHashStoreSlickDatabase.scala | 14 +- .../database/slick/EngineSlickDatabase.scala | 2 +- .../slick/JobKeyValueSlickDatabase.scala | 50 +- .../slick/JobStoreSlickDatabase.scala | 26 +- .../slick/MetadataSlickDatabase.scala | 435 ++-- .../database/slick/SlickDatabase.scala | 58 +- .../slick/SubWorkflowStoreSlickDatabase.scala | 45 +- .../slick/SummaryQueueSlickDatabase.scala | 9 +- .../slick/SummaryStatusSlickDatabase.scala | 13 +- .../slick/WorkflowStoreSlickDatabase.scala | 73 +- ...CallCachingAggregationEntryComponent.scala | 79 +- .../CallCachingDetritusEntryComponent.scala | 13 +- .../tables/CallCachingEntryComponent.scala | 53 +- .../CallCachingHashEntryComponent.scala | 12 +- .../CallCachingSimpletonEntryComponent.scala | 12 +- .../tables/CustomLabelEntryComponent.scala | 56 +- .../DockerHashStoreEntryComponent.scala | 19 +- .../slick/tables/DriverComponent.scala | 7 +- .../tables/EngineDataAccessComponent.scala | 25 +- .../tables/JobKeyValueEntryComponent.scala | 45 +- .../slick/tables/JobStoreEntryComponent.scala | 27 +- .../JobStoreSimpletonEntryComponent.scala | 8 +- .../tables/MetadataDataAccessComponent.scala | 22 +- .../slick/tables/MetadataEntryComponent.scala | 349 +-- .../SubWorkflowStoreEntryComponent.scala | 41 +- .../tables/SummaryStatusEntryComponent.scala | 4 +- ...orkflowMetadataSummaryEntryComponent.scala | 251 +- .../tables/WorkflowStoreEntryComponent.scala | 104 +- 
.../database/sql/CallCachingSqlDatabase.scala | 30 +- .../sql/DockerHashStoreSqlDatabase.scala | 8 +- .../database/sql/EngineSqlDatabase.scala | 15 +- .../database/sql/JobKeyValueSqlDatabase.scala | 20 +- .../database/sql/JobStoreSqlDatabase.scala | 5 +- .../database/sql/MetadataSqlDatabase.scala | 135 +- .../cromwell/database/sql/SqlDatabase.scala | 1 + .../sql/SubWorkflowStoreSqlDatabase.scala | 8 +- .../sql/WorkflowStoreSqlDatabase.scala | 27 +- .../sql/joins/CallCachingDiffJoin.scala | 5 +- .../database/sql/joins/CallCachingJoin.scala | 3 +- .../database/sql/joins/JobStoreJoin.scala | 3 +- .../tables/CallCachingAggregationEntry.scala | 3 +- .../sql/tables/CallCachingDetritusEntry.scala | 3 +- .../sql/tables/CallCachingEntry.scala | 3 +- .../sql/tables/CallCachingHashEntry.scala | 3 +- .../tables/CallCachingSimpletonEntry.scala | 3 +- .../sql/tables/CustomLabelEntry.scala | 3 +- .../sql/tables/DockerHashStoreEntry.scala | 3 +- .../sql/tables/InformationSchemaEntry.scala | 3 +- .../sql/tables/JobKeyValueEntry.scala | 3 +- .../database/sql/tables/JobStoreEntry.scala | 3 +- .../sql/tables/JobStoreSimpletonEntry.scala | 3 +- .../database/sql/tables/MetadataEntry.scala | 3 +- .../sql/tables/SubWorkflowStoreEntry.scala | 3 +- .../sql/tables/SummaryStatusEntry.scala | 3 +- .../tables/WorkflowMetadataSummaryEntry.scala | 3 +- .../sql/tables/WorkflowStoreEntry.scala | 3 +- .../cromwell/docker/DockerClientHelper.scala | 11 +- .../cromwell/docker/DockerHashResult.scala | 8 +- .../docker/DockerImageIdentifier.scala | 32 +- .../cromwell/docker/DockerInfoActor.scala | 72 +- .../cromwell/docker/DockerRegistry.scala | 4 +- .../docker/DockerRegistryConfig.scala | 6 +- .../docker/local/DockerCliClient.scala | 21 +- .../cromwell/docker/local/DockerCliFlow.scala | 47 +- .../docker/registryv2/DockerManifest.scala | 2 +- .../registryv2/DockerRegistryV2Abstract.scala | 135 +- .../flows/azure/AzureContainerRegistry.scala | 37 +- .../flows/dockerhub/DockerHubRegistry.scala | 8 +- .../flows/google/GoogleRegistry.scala | 36 +- .../registryv2/flows/quay/QuayRegistry.scala | 7 +- .../cromwell/docker/DockerHashMocks.scala | 33 +- .../docker/DockerImageIdentifierSpec.scala | 51 +- .../cromwell/docker/DockerInfoActorSpec.scala | 53 +- .../cromwell/docker/DockerRegistrySpec.scala | 5 +- .../docker/local/DockerCliClientSpec.scala | 15 +- .../cromwell/docker/local/DockerCliSpec.scala | 23 +- .../docker/local/DockerCliTimeoutSpec.scala | 62 +- .../DockerRegistryV2AbstractSpec.scala | 11 +- .../src/main/scala/cromwell/Simpletons.scala | 6 +- .../cromwell/engine/EngineFilesystems.scala | 11 +- .../cromwell/engine/EngineIoFunctions.scala | 23 +- .../engine/EngineWorkflowDescriptor.scala | 3 +- .../engine/backend/BackendConfiguration.scala | 25 +- .../engine/backend/CromwellBackends.scala | 16 +- .../main/scala/cromwell/engine/engine.scala | 4 +- .../instrumentation/HttpInstrumentation.scala | 34 +- .../instrumentation/IoInstrumentation.scala | 35 +- .../instrumentation/JobInstrumentation.scala | 14 +- .../WorkflowInstrumentation.scala | 25 +- .../scala/cromwell/engine/io/IoActor.scala | 80 +- .../cromwell/engine/io/IoActorProxy.scala | 5 +- .../scala/cromwell/engine/io/IoAttempts.scala | 17 +- .../io/IoCommandStalenessBackpressuring.scala | 7 +- .../engine/io/RetryableRequestSupport.scala | 25 +- .../io/gcs/GcsBatchCommandContext.scala | 41 +- .../cromwell/engine/io/gcs/GcsBatchFlow.scala | 66 +- .../cromwell/engine/io/gcs/GcsResponse.scala | 9 +- .../engine/io/gcs/ParallelGcsBatchFlow.scala | 9 +- 
.../cromwell/engine/io/nio/NioFlow.scala | 73 +- .../main/scala/cromwell/engine/package.scala | 4 +- .../workflow/SingleWorkflowRunnerActor.scala | 112 +- .../engine/workflow/WorkflowActor.scala | 372 ++- .../workflow/WorkflowDockerLookupActor.scala | 196 +- .../workflow/WorkflowManagerActor.scala | 74 +- .../workflow/WorkflowMetadataHelper.scala | 22 +- .../WorkflowProcessingEventPublishing.scala | 14 +- .../engine/workflow/lifecycle/TimedFSM.scala | 9 +- .../lifecycle/WorkflowLifecycleActor.scala | 30 +- .../deletion/DeleteWorkflowFilesActor.scala | 182 +- .../execution/CallMetadataHelper.scala | 62 +- .../execution/SubWorkflowExecutionActor.scala | 231 +- .../execution/WorkflowExecutionActor.scala | 403 ++-- .../WorkflowExecutionActorData.scala | 105 +- .../execution/callcaching/CallCache.scala | 155 +- .../callcaching/CallCacheDiffActor.scala | 158 +- .../CallCacheDiffActorJsonFormatting.scala | 22 +- .../CallCacheDiffQueryParameter.scala | 38 +- .../CallCacheHashingJobActor.scala | 159 +- .../CallCacheInvalidateActor.scala | 13 +- .../callcaching/CallCacheReadActor.scala | 31 +- .../CallCacheReadingJobActor.scala | 51 +- .../callcaching/CallCacheWriteActor.scala | 20 +- .../callcaching/EngineJobHashingActor.scala | 84 +- .../callcaching/FetchCachedResultsActor.scala | 34 +- .../execution/callcaching/package.scala | 5 +- .../job/EngineJobExecutionActor.scala | 522 ++-- .../job/preparation/CallPreparation.scala | 8 +- .../job/preparation/JobPreparationActor.scala | 210 +- .../job/preparation/KeyValueLookups.scala | 8 +- .../SubWorkflowPreparationActor.scala | 48 +- .../keys/ConditionalCollectorKey.scala | 8 +- .../execution/keys/ConditionalKey.scala | 20 +- .../execution/keys/ExpressionKey.scala | 18 +- .../execution/keys/ScatterCollectorKey.scala | 10 +- .../lifecycle/execution/keys/ScatterKey.scala | 41 +- .../keys/ScatterVariableInputKey.scala | 2 +- .../keys/ScatteredCallCompletionKey.scala | 6 +- .../execution/keys/SubWorkflowKey.scala | 3 +- .../lifecycle/execution/keys/package.scala | 9 +- .../execution/stores/ExecutionStore.scala | 90 +- .../execution/stores/ValueStore.scala | 29 +- .../finalization/CopyWorkflowLogsActor.scala | 70 +- .../CopyWorkflowOutputsActor.scala | 103 +- .../finalization/WorkflowCallbackActor.scala | 116 +- .../WorkflowCallbackJsonSupport.scala | 3 +- .../WorkflowFinalizationActor.scala | 127 +- .../WorkflowInitializationActor.scala | 84 +- .../MaterializeWorkflowDescriptorActor.scala | 269 ++- .../workflow/tokens/DynamicRateLimiter.scala | 3 +- .../tokens/JobTokenDispenserActor.scala | 67 +- .../tokens/RoundRobinQueueIterator.scala | 7 +- .../workflow/tokens/TokenEventLogger.scala | 13 +- .../engine/workflow/tokens/TokenQueue.scala | 23 +- .../workflow/tokens/UnhoggableTokenPool.scala | 49 +- .../AbortRequestScanningActor.scala | 21 +- .../InMemorySubWorkflowStore.scala | 23 +- .../workflowstore/InMemoryWorkflowStore.scala | 55 +- .../workflowstore/SqlWorkflowStore.scala | 122 +- .../WorkflowHeartbeatConfig.scala | 22 +- .../workflowstore/WorkflowStore.scala | 14 +- .../workflowstore/WorkflowStoreAccess.scala | 91 +- .../workflowstore/WorkflowStoreActor.scala | 71 +- .../WorkflowStoreCoordinatedAccessActor.scala | 13 +- .../WorkflowStoreEngineActor.scala | 153 +- .../WorkflowStoreHeartbeatWriteActor.scala | 131 +- .../WorkflowStoreSubmitActor.scala | 140 +- .../workflowstore/workflowstore_.scala | 3 +- .../jobstore/JobResultJsonFormatter.scala | 8 +- .../scala/cromwell/jobstore/JobStore.scala | 8 +- .../cromwell/jobstore/JobStoreActor.scala | 22 +- 
.../jobstore/JobStoreReaderActor.scala | 10 +- .../jobstore/JobStoreWriterActor.scala | 79 +- .../scala/cromwell/jobstore/SqlJobStore.scala | 55 +- .../scala/cromwell/jobstore/jobstore_.scala | 17 +- .../scala/cromwell/jobstore/package.scala | 3 +- .../cromwell/logging/TerminalLayout.scala | 13 +- .../server/CromwellAkkaLogFilter.scala | 16 +- .../server/CromwellDeadLetterListener.scala | 11 +- .../cromwell/server/CromwellRootActor.scala | 154 +- .../cromwell/server/CromwellServer.scala | 33 +- .../cromwell/server/CromwellShutdown.scala | 185 +- .../cromwell/server/CromwellSystem.scala | 17 +- .../EmptySubWorkflowStoreActor.scala | 2 +- .../SqlSubWorkflowStore.scala | 16 +- .../subworkflowstore/SubWorkflowStore.scala | 8 +- .../SubWorkflowStoreActor.scala | 36 +- .../cromwell/webservice/ApiDataModels.scala | 15 +- .../webservice/EngineStatsActor.scala | 9 +- .../webservice/LabelsManagerActor.scala | 11 +- .../webservice/PartialWorkflowSources.scala | 148 +- .../webservice/SwaggerUiHttpService.scala | 27 +- .../cromwell/webservice/WebServiceUtils.scala | 47 +- .../webservice/WorkflowJsonSupport.scala | 18 +- .../routes/CromwellApiService.scala | 136 +- .../routes/MetadataRouteSupport.scala | 150 +- .../routes/WomtoolRouteSupport.scala | 7 +- .../routes/wes/CromwellMetadata.scala | 25 +- .../routes/wes/RunListResponse.scala | 6 +- .../webservice/routes/wes/ServiceInfo.scala | 19 +- .../webservice/routes/wes/WesResponse.scala | 27 +- .../routes/wes/WesRouteSupport.scala | 125 +- .../webservice/routes/wes/WesRunLog.scala | 5 +- .../webservice/routes/wes/WesState.scala | 48 +- .../webservice/routes/wes/WesSubmission.scala | 34 +- .../cromwell/webservice/webservice_.scala | 2 +- .../scala/cromwell/MetadataWatchActor.scala | 34 +- .../engine/MockCromwellTerminator.scala | 3 +- .../DefaultBackendJobExecutionActor.scala | 36 +- .../RetryableBackendJobExecutionActor.scala | 31 +- ...etryableBackendLifecycleActorFactory.scala | 14 +- .../engine/backend/mock/package.scala | 3 +- .../engine/io/IoActorProxyGcsBatchSpec.scala | 34 +- .../cromwell/engine/io/IoActorSpec.scala | 80 +- .../io/gcs/GcsBatchCommandContextSpec.scala | 23 +- .../engine/io/gcs/GcsBatchFlowSpec.scala | 38 +- .../cromwell/engine/io/nio/NioFlowSpec.scala | 51 +- .../WorkflowDockerLookupActorSpec.scala | 110 +- .../ValidatingCachingConfigSpec.scala | 38 +- .../ValidatingCallCachingModeSpec.scala | 61 +- .../DeleteWorkflowFilesActorSpec.scala | 521 ++-- .../execution/ExecutionStoreBenchmark.scala | 17 +- .../SubWorkflowExecutionActorSpec.scala | 137 +- .../callcaching/CallCacheDiffActorSpec.scala | 134 +- .../CallCacheHashingJobActorDataSpec.scala | 79 +- .../CallCacheHashingJobActorSpec.scala | 174 +- .../CallCacheReadingJobActorSpec.scala | 111 +- .../CallCachingSlickDatabaseSpec.scala | 45 +- .../EngineJobHashingActorSpec.scala | 103 +- .../job/preparation/CallPreparationSpec.scala | 3 +- .../preparation/JobPreparationActorSpec.scala | 136 +- .../JobPreparationTestHelper.scala | 87 +- .../execution/stores/ExecutionStoreSpec.scala | 19 +- .../CopyWorkflowLogsActorSpec.scala | 41 +- .../WorkflowCallbackActorSpec.scala | 39 +- .../workflow/mocks/DeclarationMock.scala | 4 +- .../engine/workflow/mocks/TaskMock.scala | 6 +- .../tokens/JobTokenDispenserActorSpec.scala | 177 +- .../tokens/RoundRobinQueueIteratorSpec.scala | 4 +- .../tokens/TestTokenGrabbingActor.scala | 8 +- .../workflow/tokens/TokenQueueSpec.scala | 2 +- .../tokens/UnhoggableTokenPoolSpec.scala | 21 +- ...LargeScaleJobTokenDispenserActorSpec.scala | 230 +- 
.../large/MultipleTokenUsingActor.scala | 13 +- .../large/PatientTokenNeedingActor.scala | 4 +- .../large/TokenDispenserBenchmark.scala | 5 +- .../workflowstore/SqlWorkflowStoreSpec.scala | 206 +- .../WorkflowHeartbeatConfigSpec.scala | 54 +- ...kflowStoreCoordinatedAccessActorSpec.scala | 70 +- ...WorkflowStoreHeartbeatWriteActorSpec.scala | 28 +- .../cromwell/jobstore/JobResultSpec.scala | 11 +- .../webservice/EngineStatsActorSpec.scala | 4 +- .../webservice/MetadataBuilderActorSpec.scala | 265 +- .../PartialWorkflowSourcesSpec.scala | 19 +- .../webservice/SwaggerServiceSpec.scala | 53 +- .../webservice/SwaggerUiHttpServiceSpec.scala | 37 +- .../webservice/WorkflowJsonSupportSpec.scala | 3 +- .../routes/CromwellApiServiceSpec.scala | 747 +++--- .../routes/MetadataRouteSupportSpec.scala | 138 +- .../routes/WomtoolRouteSupportSpec.scala | 63 +- .../routes/wes/ServiceInfoSpec.scala | 7 +- .../routes/wes/WesRouteSupportSpec.scala | 70 +- .../blob/BlobFileSystemConfig.scala | 17 +- .../blob/BlobFileSystemManager.scala | 122 +- .../filesystems/blob/BlobPathBuilder.scala | 54 +- .../blob/BlobPathBuilderFactory.scala | 25 +- .../WorkspaceManagerApiClientProvider.scala | 47 +- .../blob/AzureFileSystemSpec.scala | 18 +- .../blob/BlobFileSystemConfigSpec.scala | 31 +- .../blob/BlobPathBuilderFactorySpec.scala | 35 +- .../blob/BlobPathBuilderSpec.scala | 49 +- .../cromwell/filesystems/drs/DrsPath.scala | 14 +- .../filesystems/drs/DrsPathBuilder.scala | 16 +- .../drs/DrsPathBuilderFactory.scala | 40 +- .../cromwell/filesystems/drs/DrsReader.scala | 61 +- .../filesystems/drs/DrsResolver.scala | 28 +- .../drs/DrsPathBuilderFactorySpec.scala | 2 +- .../filesystems/drs/DrsPathBuilderSpec.scala | 71 +- .../filesystems/drs/DrsReaderSpec.scala | 32 +- .../filesystems/drs/DrsResolverSpec.scala | 20 +- .../ftp/CromwellFtpFileSystems.scala | 16 +- .../ftp/FtpInstanceConfiguration.scala | 8 +- .../filesystems/ftp/FtpPathBuilder.scala | 3 +- .../ftp/FtpPathBuilderFactory.scala | 39 +- .../ftp/CromwellFtpFileSystemsSpec.scala | 15 +- .../ftp/FtpInstanceConfigurationSpec.scala | 24 +- .../filesystems/ftp/FtpPathSpec.scala | 74 +- .../filesystems/gcs/GcsEnhancedRequest.scala | 31 +- .../filesystems/gcs/GcsPathBuilder.scala | 71 +- .../gcs/GcsPathBuilderFactory.scala | 12 +- .../cromwell/filesystems/gcs/GoogleUtil.scala | 22 +- .../filesystems/gcs/RequesterPaysErrors.scala | 2 +- .../gcs/batch/GcsBatchCommandBuilder.scala | 26 +- .../gcs/batch/GcsBatchIoCommand.scala | 136 +- .../gcs/GcsEnhancedRequestSpec.scala | 22 +- .../filesystems/gcs/GcsPathBuilderSpec.scala | 102 +- .../filesystems/gcs/MockGcsPathBuilder.scala | 5 +- .../gcs/batch/GcsBatchIoCommandSpec.scala | 4 +- .../filesystems/http/HttpPathBuilder.scala | 23 +- .../http/HttpPathBuilderFactory.scala | 6 +- .../filesystems/s3/S3PathBuilder.scala | 35 +- .../filesystems/s3/S3PathBuilderFactory.scala | 15 +- .../s3/batch/S3BatchCommandBuilder.scala | 16 +- .../s3/batch/S3BatchIoCommand.scala | 48 +- .../filesystems/s3/S3PathBuilderSpec.scala | 50 +- .../filesystems/sra/SraPathBuilder.scala | 4 +- .../sra/SraPathBuilderFactory.scala | 6 +- .../filesystems/sra/SraPathBuilderSpec.scala | 35 +- .../cromwell/languages/LanguageFactory.scala | 25 +- .../languages/ValidatedWomNamespace.scala | 3 +- .../languages/config/CromwellLanguages.scala | 18 +- .../config/LanguageConfiguration.scala | 15 +- .../languages/util/ImportResolver.scala | 61 +- .../languages/util/LanguageFactoryUtil.scala | 61 +- .../cromwell/languages/util/ParserCache.scala | 33 +- 
.../util/ResolvedExecutableInputsPoly.scala | 11 +- .../languages/util/ImportResolverSpec.scala | 42 +- .../biscayne/WdlBiscayneLanguageFactory.scala | 43 +- .../cascades/WdlCascadesLanguageFactory.scala | 43 +- .../wdl/draft2/WdlDraft2LanguageFactory.scala | 63 +- .../ArrayCoercionsSpec.scala | 22 +- .../MapWorkflowSpec.scala | 34 +- .../NamespaceCacheSpec.scala | 20 +- .../WdlWorkflowHttpImportSpec.scala | 67 +- .../wdl/draft3/WdlDraft3LanguageFactory.scala | 64 +- .../wdl/draft3/WdlDraft3CachingSpec.scala | 20 +- .../cromwell/consumer/DrsHubClient.scala | 26 +- .../workbench/cromwell/consumer/Helper.scala | 22 +- .../consumer/BlobFileSystemContractSpec.scala | 42 +- .../cromwell/consumer/DrsHubClientSpec.scala | 88 +- perf/src/main/scala/cromwell/perf/Call.scala | 41 +- .../scala/cromwell/perf/CompareMetadata.scala | 59 +- .../main/scala/cromwell/perf/Metadata.scala | 68 +- project/ContinuousIntegration.scala | 37 +- project/GenerateRestApiDocs.scala | 24 +- project/Merging.scala | 28 +- project/Publishing.scala | 41 +- project/Testing.scala | 19 +- project/Version.scala | 14 +- .../scala/cromwell/CommandLineArguments.scala | 33 +- .../scala/cromwell/CommandLineParser.scala | 80 +- .../src/main/scala/cromwell/CromwellApp.scala | 5 +- .../scala/cromwell/CromwellEntryPoint.scala | 150 +- .../cromwell/CromwellCommandLineSpec.scala | 46 +- .../scala/cromwell/CromwellTestKitSpec.scala | 163 +- .../cromwell/DeclarationWorkflowSpec.scala | 13 +- .../cromwell/FilePassingWorkflowSpec.scala | 2 +- ...ultipleFilesWithSameNameWorkflowSpec.scala | 2 +- .../cromwell/OptionalParamWorkflowSpec.scala | 36 +- .../scala/cromwell/ReferenceConfSpec.scala | 11 +- .../cromwell/SimpleWorkflowActorSpec.scala | 83 +- .../scala/cromwell/WorkflowFailSlowSpec.scala | 5 +- .../scala/cromwell/WorkflowOutputsSpec.scala | 8 +- .../engine/WorkflowManagerActorSpec.scala | 18 +- .../engine/WorkflowStoreActorSpec.scala | 171 +- ...CoordinatedWorkflowStoreActorBuilder.scala | 6 +- .../workflow/SqlWorkflowStoreBuilder.scala | 7 +- .../engine/workflow/WorkflowActorSpec.scala | 209 +- .../WorkflowDescriptorBuilderForSpecs.scala | 57 +- ...terializeWorkflowDescriptorActorSpec.scala | 375 ++- .../WorkflowExecutionActorSpec.scala | 83 +- ...jeaBackendIsCopyingCachedOutputsSpec.scala | 351 +-- .../EjeaCheckingCacheEntryExistenceSpec.scala | 14 +- .../ejea/EjeaCheckingCallCacheSpec.scala | 46 +- .../ejea/EjeaCheckingJobStoreSpec.scala | 30 +- ...etchingCachedOutputsFromDatabaseSpec.scala | 96 +- .../ejea/EjeaInvalidatingCacheEntrySpec.scala | 47 +- ...jeaMultipleCallCacheCopyAttemptsSpec.scala | 54 +- .../execution/ejea/EjeaPendingSpec.scala | 17 +- .../execution/ejea/EjeaPreparingJobSpec.scala | 55 +- .../EjeaRequestingExecutionTokenSpec.scala | 12 +- .../EjeaRequestingRestartCheckTokenSpec.scala | 7 +- .../execution/ejea/EjeaRunningJobSpec.scala | 70 +- .../ejea/EjeaUpdatingCallCacheSpec.scala | 8 +- .../ejea/EjeaUpdatingJobStoreSpec.scala | 14 +- .../ejea/EjeaWaitingForValueStoreSpec.scala | 10 +- .../ejea/EngineJobExecutionActorSpec.scala | 43 +- .../EngineJobExecutionActorSpecUtil.scala | 46 +- ...gineJobExecutionActorTransitionsSpec.scala | 44 +- .../lifecycle/execution/ejea/ExpectOne.scala | 14 +- .../execution/ejea/PerTestHelper.scala | 217 +- .../jobstore/JobStoreServiceSpec.scala | 36 +- .../jobstore/JobStoreWriterSpec.scala | 50 +- .../SubWorkflowStoreSpec.scala | 38 +- .../services/EnhancedBatchActor.scala | 2 +- .../services/EnhancedThrottlerActor.scala | 8 +- .../cromwell/services/IoActorRequester.scala | 5 +- 
.../services/ServiceRegistryActor.scala | 76 +- .../ProtoHealthMonitorServiceActor.scala | 49 +- .../impl/HealthMonitorServiceActor.scala | 6 +- .../impl/common/DockerHubMonitor.scala | 11 +- .../impl/common/EngineDatabaseMonitor.scala | 5 +- .../WorkbenchHealthMonitorServiceActor.scala | 89 +- ...ynchronousThrottlingGaugeMetricActor.scala | 13 +- .../CromwellInstrumentation.scala | 47 +- .../instrumentation/CromwellMetric.scala | 9 +- .../InstrumentedBatchActor.scala | 12 +- .../NoopInstrumentationServiceActor.scala | 7 +- ...ectiveTsvInstrumentationServiceActor.scala | 46 +- .../impl/stackdriver/StackdriverConfig.scala | 51 +- ...ackdriverInstrumentationServiceActor.scala | 91 +- .../impl/statsd/CromwellStatsD.scala | 5 +- .../impl/statsd/StatsDConfig.scala | 24 +- .../StatsDInstrumentationServiceActor.scala | 25 +- .../services/keyvalue/KeyValueReadActor.scala | 13 +- .../keyvalue/KeyValueServiceActor.scala | 4 +- .../keyvalue/KeyValueWriteActor.scala | 30 +- .../cromwell/services/keyvalue/KvClient.scala | 17 +- .../impl/BackendKeyValueDatabaseAccess.scala | 44 +- .../keyvalue/impl/SqlKeyValueReadActor.scala | 6 +- .../impl/SqlKeyValueServiceActor.scala | 11 +- .../keyvalue/impl/SqlKeyValueWriteActor.scala | 22 +- .../LoadControlledBatchActor.scala | 18 +- .../impl/LoadControllerServiceActor.scala | 39 +- .../metadata/MetadataArchiveStatus.scala | 12 +- .../metadata/MetadataJsonSupport.scala | 12 +- .../services/metadata/MetadataQuery.scala | 58 +- .../services/metadata/MetadataService.scala | 127 +- .../services/metadata/WorkflowQueryKey.scala | 40 +- .../metadata/WorkflowQueryParameters.scala | 122 +- .../impl/MetadataDatabaseAccess.scala | 281 ++- .../metadata/impl/MetadataServiceActor.scala | 136 +- .../impl/MetadataStatisticsRecorder.scala | 84 +- .../impl/MetadataSummaryRefreshActor.scala | 64 +- .../ReadDatabaseMetadataWorkerActor.scala | 118 +- .../impl/ReadMetadataRegulatorActor.scala | 52 +- .../metadata/impl/WriteMetadataActor.scala | 38 +- .../impl/archiver/ArchiveMetadataConfig.scala | 34 +- .../ArchiveMetadataSchedulerActor.scala | 194 +- .../impl/builder/MetadataBuilderActor.scala | 232 +- .../impl/builder/MetadataComponent.scala | 52 +- .../impl/deleter/DeleteMetadataActor.scala | 83 +- .../impl/deleter/DeleteMetadataConfig.scala | 12 +- .../HybridPubSubMetadataServiceActor.scala | 11 +- .../pubsub/PubSubMetadataServiceActor.scala | 34 +- .../cromwell/services/metadata/metadata.scala | 2 +- .../cromwell/services/metadata/package.scala | 18 +- .../cromwell/services/womtool/Describer.scala | 17 +- .../impl/WomtoolServiceInCromwellActor.scala | 8 +- .../womtool/models/InputDescription.scala | 9 +- .../models/MetaValueElementJsonSupport.scala | 9 +- .../womtool/models/OutputDescription.scala | 2 +- .../womtool/models/WomTypeJsonSupport.scala | 37 +- .../womtool/models/WorkflowDescription.scala | 123 +- .../services/IoActorRequesterSpec.scala | 23 +- .../cromwell/services/NooPServiceActor.scala | 4 +- .../services/ServiceRegistryActorSpec.scala | 67 +- .../cromwell/services/ServicesSpec.scala | 3 +- .../database/ConnectionMetadata.scala | 5 +- .../database/CromwellDatabaseType.scala | 8 +- .../services/database/DatabaseSystem.scala | 2 +- .../services/database/DatabaseTestKit.scala | 142 +- .../HsqldbTransactionIsolationSpec.scala | 11 +- .../database/LiquibaseChangeSetSpec.scala | 4 +- .../database/LiquibaseComparisonSpec.scala | 132 +- .../services/database/LiquibaseOrdering.scala | 3 +- .../cromwell/services/database/LobSpec.scala | 16 +- 
.../database/MetadataSlickDatabaseSpec.scala | 1215 ++++++++-- .../services/database/QueryTimeoutSpec.scala | 86 +- .../RootAndSubworkflowLabelsSpec.scala | 48 +- .../services/database/SchemaManager.scala | 2 +- .../services/database/SchemaManagerSpec.scala | 86 +- .../database/SlickDeadlocksSpec.scala | 14 +- .../services/database/TestSlickDatabase.scala | 5 +- .../HealthMonitorServiceActorSpec.scala | 20 +- .../ProtoHealthMonitorServiceActorSpec.scala | 27 +- ...ronousThrottlingGaugeMetricActorSpec.scala | 49 +- .../stackdriver/StackdriverConfigSpec.scala | 4 - ...riverInstrumentationServiceActorSpec.scala | 38 +- ...ackdriverInstrumentationServiceActor.scala | 17 +- .../impl/statsd/StatsDConfigSpec.scala | 2 +- ...rumentationServiceActorBenchmarkSpec.scala | 21 +- ...tatsDInstrumentationServiceActorSpec.scala | 100 +- .../keyvalue/InMemoryKvServiceActor.scala | 3 +- .../services/keyvalue/KvClientSpec.scala | 2 - .../keyvalue/impl/KeyValueDatabaseSpec.scala | 20 +- .../impl/KeyValueServiceActorSpec.scala | 40 +- .../impl/LoadControllerServiceActorSpec.scala | 9 +- .../services/metadata/MetadataQuerySpec.scala | 42 +- .../metadata/MetadataServiceSpec.scala | 54 +- .../MetadataStatisticsRecorderSpec.scala | 94 +- ...ryForWorkflowsMatchingParametersSpec.scala | 12 +- .../impl/MetadataDatabaseAccessSpec.scala | 442 ++-- .../impl/MetadataServiceActorSpec.scala | 175 +- .../impl/WriteMetadataActorBenchmark.scala | 19 +- .../impl/WriteMetadataActorSpec.scala | 291 ++- .../PubSubMetadataServiceActorSpec.scala | 56 +- .../services/womtool/DescriberSpec.scala | 6 +- .../WomtoolServiceInCromwellActorSpec.scala | 111 +- ...wsBatchAsyncBackendJobExecutionActor.scala | 221 +- .../backend/impl/aws/AwsBatchAttributes.scala | 54 +- .../AwsBatchBackendInitializationData.scala | 23 +- ...AwsBatchBackendLifecycleActorFactory.scala | 76 +- .../impl/aws/AwsBatchConfiguration.scala | 17 +- .../aws/AwsBatchExpressionFunctions.scala | 10 +- .../impl/aws/AwsBatchFinalizationActor.scala | 12 +- .../aws/AwsBatchInitializationActor.scala | 41 +- .../backend/impl/aws/AwsBatchJob.scala | 530 ++-- .../aws/AwsBatchJobCachingActorHelper.scala | 19 +- .../impl/aws/AwsBatchJobDefinition.scala | 152 +- .../backend/impl/aws/AwsBatchJobPaths.scala | 5 +- .../impl/aws/AwsBatchMetadataKeys.scala | 1 - .../backend/impl/aws/AwsBatchParameters.scala | 8 +- .../impl/aws/AwsBatchRuntimeAttributes.scala | 205 +- .../impl/aws/AwsBatchSingletonActor.scala | 18 +- .../impl/aws/AwsBatchWorkflowPaths.scala | 21 +- .../IntervalLimitedAwsJobSubmitActor.scala | 42 +- .../aws/OccasionalStatusPollingActor.scala | 39 +- .../cromwell/backend/impl/aws/RunStatus.scala | 33 +- .../AwsBatchBackendCacheHitCopyingActor.scala | 67 +- .../AwsBatchBackendFileHashingActor.scala | 14 +- .../AwsBatchCacheHitDuplicationStrategy.scala | 2 +- .../aws/errors/AwsBatchKnownJobFailure.scala | 6 +- .../backend/impl/aws/io/AwsBatchVolume.scala | 24 +- .../cromwell/backend/impl/aws/package.scala | 11 +- ...tchAsyncBackendJobExecutionActorSpec.scala | 588 +++-- .../impl/aws/AwsBatchAttachedDiskSpec.scala | 7 +- .../impl/aws/AwsBatchAttributesSpec.scala | 37 +- .../impl/aws/AwsBatchConfigurationSpec.scala | 68 +- .../aws/AwsBatchInitializationActorSpec.scala | 138 +- .../aws/AwsBatchJobExecutionActorSpec.scala | 68 +- .../backend/impl/aws/AwsBatchJobSpec.scala | 83 +- .../aws/AwsBatchRuntimeAttributesSpec.scala | 255 +- .../backend/impl/aws/AwsBatchTestConfig.scala | 14 +- ...GcpBatchBackendLifecycleActorFactory.scala | 112 +- .../batch/actors/BatchApiAbortClient.scala 
| 3 +- .../batch/actors/BatchApiFetchJobClient.scala | 3 +- .../actors/BatchApiRunCreationClient.scala | 3 +- ...cpBatchAsyncBackendJobExecutionActor.scala | 657 +++-- .../GcpBatchBackendSingletonActor.scala | 20 +- .../actors/GcpBatchFinalizationActor.scala | 11 +- .../actors/GcpBatchInitializationActor.scala | 124 +- .../GcpBatchJobCachingActorHelper.scala | 35 +- .../batch/api/GcpBatchApiRequestHandler.scala | 7 +- .../batch/api/GcpBatchRequestFactory.scala | 31 +- .../api/GcpBatchRequestFactoryImpl.scala | 175 +- .../GcpBatchVMAuthentication.scala | 19 +- .../BatchBackendCacheHitCopyingActor.scala | 44 +- .../BatchBackendFileHashingActor.scala | 3 +- .../BatchCacheHitDuplicationStrategy.scala | 1 - ...validGcsPathsInManifestFileException.scala | 5 +- .../batch/io/GcpBatchAttachedDisk.scala | 33 +- .../backend/google/batch/io/package.scala | 13 +- .../models/CreateGcpBatchParameters.scala | 2 +- .../models/GcpBackendInitializationData.scala | 17 +- .../batch/models/GcpBatchConfiguration.scala | 14 +- .../GcpBatchConfigurationAttributes.scala | 326 +-- .../models/GcpBatchCustomMachineType.scala | 35 +- .../batch/models/GcpBatchJobPaths.scala | 14 +- .../batch/models/GcpBatchParameters.scala | 43 +- .../google/batch/models/GcpBatchRequest.scala | 3 +- .../models/GcpBatchRuntimeAttributes.scala | 257 +- .../batch/models/GcpBatchWorkflowPaths.scala | 43 +- .../google/batch/models/GcpLabel.scala | 49 +- .../batch/models/PreviousRetryReasons.scala | 8 +- .../backend/google/batch/models/Run.scala | 2 +- .../google/batch/models/RunStatus.scala | 101 +- .../VpcAndSubnetworkProjectLabelValues.scala | 4 +- .../CheckpointingConfiguration.scala | 17 +- .../backend/google/batch/monitoring/Env.scala | 6 +- .../batch/monitoring/MonitoringImage.scala | 29 +- .../runnable/CheckpointingRunnable.scala | 86 +- .../batch/runnable/Delocalization.scala | 53 +- .../google/batch/runnable/Localization.scala | 64 +- .../runnable/MemoryRetryCheckRunnable.scala | 3 +- .../batch/runnable/MonitoringRunnable.scala | 15 +- .../batch/runnable/RunnableBuilder.scala | 131 +- .../batch/runnable/RunnableCommands.scala | 42 +- .../batch/runnable/RunnableLabels.scala | 1 + .../google/batch/runnable/RunnableUtils.scala | 32 +- .../google/batch/runnable/UserRunnable.scala | 1 - .../batch/util/BatchExpressionFunctions.scala | 11 +- .../util/BatchParameterConversions.scala | 71 +- .../batch/util/BatchUtilityConversions.scala | 34 +- ...GcpBatchDockerCacheMappingOperations.scala | 42 +- .../util/GcpBatchExpressionFunctions.scala | 11 +- .../util/GcpBatchMachineConstraints.scala | 16 +- ...BatchReferenceFilesMappingOperations.scala | 67 +- .../google/batch/util/GpuTypeValidation.scala | 6 +- .../google/batch/util/GpuValidation.scala | 8 +- .../backend/google/batch/GcpBatchIoSpec.scala | 10 +- ...tchAsyncBackendJobExecutionActorSpec.scala | 448 ++-- ...atchBackendLifecycleActorFactorySpec.scala | 9 +- .../GcpBatchInitializationActorSpec.scala | 136 +- .../batch/io/GcpBatchAttachedDiskSpec.scala | 2 +- .../batch/models/GcpBatchAttributeSpec.scala | 37 +- .../GcpBatchConfigurationAttributesSpec.scala | 122 +- .../models/GcpBatchConfigurationSpec.scala | 58 +- .../batch/models/GcpBatchJobPathsSpec.scala | 27 +- .../GcpBatchRuntimeAttributesSpec.scala | 101 +- .../batch/models/GcpBatchTestConfig.scala | 8 +- ...cAndSubnetworkProjectLabelValuesSpec.scala | 6 +- .../batch/runnable/LocalizationSpec.scala | 31 +- .../batch/runnable/RunnableBuilderSpec.scala | 67 +- .../batch/runnable/RunnableCommandSpec.scala | 38 +- 
.../util/GcpBatchMachineConstraintsSpec.scala | 6 +- .../pipelines/common/CustomMachineType.scala | 35 +- .../pipelines/common/GoogleCloudScopes.scala | 1 + .../pipelines/common/GoogleLabels.scala | 49 +- .../pipelines/common/GpuTypeValidation.scala | 3 +- .../pipelines/common/GpuValidation.scala | 6 +- .../pipelines/common/MachineConstraints.scala | 7 +- .../common/PapiInstrumentation.scala | 21 +- ...inesApiAsyncBackendJobExecutionActor.scala | 511 ++-- ...ipelinesApiBackendInitializationData.scala | 3 +- ...linesApiBackendLifecycleActorFactory.scala | 117 +- .../PipelinesApiBackendSingletonActor.scala | 19 +- .../common/PipelinesApiConfiguration.scala | 11 +- .../PipelinesApiConfigurationAttributes.scala | 352 +-- ...linesApiDockerCacheMappingOperations.scala | 40 +- .../PipelinesApiExpressionFunctions.scala | 11 +- .../PipelinesApiFinalizationActor.scala | 6 +- .../PipelinesApiInitializationActor.scala | 123 +- .../PipelinesApiJobCachingActorHelper.scala | 35 +- .../common/PipelinesApiJobPaths.scala | 15 +- .../common/PipelinesApiParameters.scala | 12 +- ...esApiReferenceFilesMappingOperations.scala | 74 +- .../PipelinesApiRuntimeAttributes.scala | 232 +- .../common/PipelinesApiWorkflowPaths.scala | 54 +- .../common/PreviousRetryReasons.scala | 13 +- .../VpcAndSubnetworkProjectLabelValues.scala | 4 +- .../common/action/ActionCommands.scala | 42 +- .../common/action/ActionLabels.scala | 1 + .../pipelines/common/action/ActionUtils.scala | 14 +- .../common/api/PipelinesApiBatchHandler.scala | 5 +- .../api/PipelinesApiFactoryInterface.scala | 2 +- .../api/PipelinesApiRequestFactory.scala | 31 +- .../api/PipelinesApiRequestHandler.scala | 17 +- .../api/PipelinesApiRequestManager.scala | 186 +- .../api/PipelinesApiRequestWorker.scala | 45 +- .../pipelines/common/api/RunStatus.scala | 51 +- .../api/clients/PipelinesApiAbortClient.scala | 13 +- .../PipelinesApiRunCreationClient.scala | 22 +- .../PipelinesApiStatusRequestClient.scala | 9 +- .../PipelinesApiVMAuthentication.scala | 20 +- ...elinesApiBackendCacheHitCopyingActor.scala | 94 +- .../PipelinesApiBackendFileHashingActor.scala | 3 +- ...validGcsPathsInManifestFileException.scala | 3 +- .../errors/PipelinesApiKnownJobFailure.scala | 2 +- .../common/io/PipelinesApiAttachedDisk.scala | 40 +- .../google/pipelines/common/io/package.scala | 10 +- .../CheckpointingConfiguration.scala | 17 +- .../pipelines/common/monitoring/Env.scala | 6 +- .../common/monitoring/MonitoringImage.scala | 29 +- .../google/pipelines/common/IoSpec.scala | 10 +- .../common/MachineConstraintsSpec.scala | 10 +- ...ApiAsyncBackendJobExecutionActorSpec.scala | 1160 ++++++--- .../common/PipelinesApiAttachedDiskSpec.scala | 9 +- ...sApiBackendLifecycleActorFactorySpec.scala | 15 +- .../common/PipelinesApiCallPathsSpec.scala | 27 +- ...elinesApiConfigurationAttributesSpec.scala | 166 +- .../PipelinesApiConfigurationSpec.scala | 67 +- ...sApiDockerCacheMappingOperationsSpec.scala | 53 +- .../PipelinesApiGpuAttributesSpec.scala | 35 +- .../PipelinesApiInitializationActorSpec.scala | 171 +- .../PipelinesApiJobExecutionActorSpec.scala | 69 +- ...iReferenceFilesMappingOperationsSpec.scala | 44 +- .../PipelinesApiRuntimeAttributesSpec.scala | 160 +- .../common/PipelinesApiTestConfig.scala | 19 +- .../PipelinesApiWorkflowPathsSpec.scala | 9 +- ...cAndSubnetworkProjectLabelValuesSpec.scala | 6 +- .../api/PipelinesApiRequestManagerSpec.scala | 83 +- .../api/PipelinesApiRequestWorkerSpec.scala | 90 +- ...esApiBackendCacheHitCopyingActorSpec.scala | 89 +- 
.../pipelines/v2alpha1/GenomicsFactory.scala | 77 +- ...inesApiAsyncBackendJobExecutionActor.scala | 315 ++- .../PipelinesApiLifecycleActorFactory.scala | 12 +- .../PipelinesParameterConversions.scala | 85 +- .../PipelinesUtilityConversions.scala | 9 +- .../v2alpha1/api/ActionBuilder.scala | 75 +- .../v2alpha1/api/CheckpointingAction.scala | 33 +- .../v2alpha1/api/Delocalization.scala | 90 +- .../v2alpha1/api/Deserialization.scala | 45 +- .../pipelines/v2alpha1/api/Localization.scala | 52 +- .../v2alpha1/api/MemoryRetryCheckAction.scala | 5 +- .../v2alpha1/api/MonitoringAction.scala | 16 +- .../v2alpha1/api/SSHAccessAction.scala | 6 +- .../api/request/AbortRequestHandler.scala | 41 +- .../v2alpha1/api/request/ErrorReporter.scala | 69 +- .../api/request/GetRequestHandler.scala | 61 +- .../v2alpha1/api/request/RequestHandler.scala | 23 +- .../api/request/RunRequestHandler.scala | 19 +- ...ApiAsyncBackendJobExecutionActorSpec.scala | 11 +- .../v2alpha1/PipelinesConversionsSpec.scala | 17 +- .../v2alpha1/api/ActionBuilderSpec.scala | 52 +- .../v2alpha1/api/ActionCommandsSpec.scala | 36 +- .../v2alpha1/api/DeserializationSpec.scala | 30 +- .../api/request/GetRequestHandlerSpec.scala | 494 ++-- .../v2beta/LifeSciencesFactory.scala | 78 +- ...inesApiAsyncBackendJobExecutionActor.scala | 325 ++- .../PipelinesApiLifecycleActorFactory.scala | 12 +- .../PipelinesParameterConversions.scala | 87 +- .../v2beta/PipelinesUtilityConversions.scala | 17 +- .../pipelines/v2beta/api/ActionBuilder.scala | 77 +- .../v2beta/api/CheckpointingAction.scala | 36 +- .../pipelines/v2beta/api/Delocalization.scala | 88 +- .../v2beta/api/Deserialization.scala | 31 +- .../pipelines/v2beta/api/Localization.scala | 70 +- .../v2beta/api/MemoryRetryCheckAction.scala | 5 +- .../v2beta/api/MonitoringAction.scala | 16 +- .../v2beta/api/SSHAccessAction.scala | 6 +- .../api/request/AbortRequestHandler.scala | 41 +- .../v2beta/api/request/ErrorReporter.scala | 72 +- .../api/request/GetRequestHandler.scala | 63 +- .../v2beta/api/request/RequestHandler.scala | 23 +- .../api/request/RunRequestHandler.scala | 19 +- ...ApiAsyncBackendJobExecutionActorSpec.scala | 27 +- .../v2beta/api/ActionBuilderSpec.scala | 53 +- .../v2beta/api/ActionCommandsSpec.scala | 36 +- .../v2beta/api/DeserializationSpec.scala | 33 +- .../v2beta/api/LocalizationSpec.scala | 12 +- .../api/request/GetRequestHandlerSpec.scala | 1229 +++++----- .../config/ConfigAsyncJobExecutionActor.scala | 79 +- .../sfs/config/ConfigBackendFileHashing.scala | 6 +- .../ConfigBackendFileHashingActor.scala | 14 +- .../ConfigBackendLifecycleActorFactory.scala | 16 +- .../sfs/config/ConfigHashingStrategy.scala | 82 +- .../config/ConfigInitializationActor.scala | 31 +- .../impl/sfs/config/ConfigWdlNamespace.scala | 28 +- .../sfs/config/CpuDeclarationValidation.scala | 8 +- .../sfs/config/DeclarationValidation.scala | 56 +- .../config/MemoryDeclarationValidation.scala | 20 +- .../BackgroundAsyncJobExecutionActor.scala | 44 +- .../backend/sfs/SharedFileSystem.scala | 130 +- ...aredFileSystemAsyncJobExecutionActor.scala | 76 +- ...leSystemBackendLifecycleActorFactory.scala | 3 +- ...SharedFileSystemCacheHitCopyingActor.scala | 27 +- .../SharedFileSystemExpressionFunctions.scala | 30 +- .../SharedFileSystemInitializationActor.scala | 14 +- ...haredFileSystemJobCachingActorHelper.scala | 8 +- .../ConfigAsyncJobExecutionActorSpec.scala | 10 +- .../config/ConfigHashingStrategySpec.scala | 28 +- .../ConfigInitializationActorSpec.scala | 44 +- .../MemoryDeclarationValidationSpec.scala | 62 +- 
...redFileSystemInitializationActorSpec.scala | 46 +- ...haredFileSystemJobExecutionActorSpec.scala | 242 +- .../backend/sfs/SharedFileSystemSpec.scala | 32 +- .../sfs/TestLocalAsyncJobExecutionActor.scala | 35 +- .../config/DeclarationValidationSpec.scala | 21 +- .../config/HttpFilesystemEnablementSpec.scala | 15 +- .../TesAsyncBackendJobExecutionActor.scala | 198 +- .../tes/TesBackendInitializationData.scala | 3 +- .../tes/TesBackendLifecycleActorFactory.scala | 11 +- .../backend/impl/tes/TesConfiguration.scala | 12 +- .../impl/tes/TesExpressionFunctions.scala | 6 +- .../impl/tes/TesInitializationActor.scala | 22 +- .../impl/tes/TesJobCachingActorHelper.scala | 10 +- .../backend/impl/tes/TesJobPaths.scala | 20 +- .../impl/tes/TesRuntimeAttributes.scala | 127 +- .../cromwell/backend/impl/tes/TesTask.scala | 179 +- .../backend/impl/tes/TesWorkflowPaths.scala | 12 +- ...TesAsyncBackendJobExecutionActorSpec.scala | 92 +- .../impl/tes/TesConfigurationSpec.scala | 14 +- .../impl/tes/TesInitializationActorSpec.scala | 75 +- .../impl/tes/TesRuntimeAttributesSpec.scala | 111 +- .../backend/impl/tes/TesTaskSpec.scala | 145 +- .../backend/impl/tes/TesTestConfig.scala | 1 - .../impl/tes/TesWorkflowPathsSpec.scala | 4 +- .../backend/impl/tes/TestWorkflows.scala | 115 +- .../scala/wdl/draft2/model/AstTools.scala | 275 ++- .../scala/wdl/draft2/model/Declaration.scala | 72 +- .../src/main/scala/wdl/draft2/model/If.scala | 7 +- .../main/scala/wdl/draft2/model/Scatter.scala | 4 +- .../main/scala/wdl/draft2/model/Scope.scala | 156 +- .../scala/wdl/draft2/model/TaskOutput.scala | 15 +- .../main/scala/wdl/draft2/model/WdlCall.scala | 59 +- .../wdl/draft2/model/WdlExpression.scala | 124 +- .../scala/wdl/draft2/model/WdlGraphNode.scala | 32 +- .../scala/wdl/draft2/model/WdlNamespace.scala | 394 +-- .../draft2/model/WdlRuntimeAttributes.scala | 3 +- .../model/WdlSyntaxErrorFormatter.scala | 283 +-- .../main/scala/wdl/draft2/model/WdlTask.scala | 32 +- .../scala/wdl/draft2/model/WdlWorkflow.scala | 56 +- .../wdl/draft2/model/WorkflowOutput.scala | 7 +- .../draft2/model/WorkflowOutputWildcard.scala | 3 +- .../wdl/draft2/model/WorkflowScoped.scala | 10 +- .../model/command/ParameterCommandPart.scala | 36 +- .../model/command/StringCommandPart.scala | 7 +- .../draft2/model/command/WdlCommandPart.scala | 16 +- .../scala/wdl/draft2/model/examples/ex1.scala | 16 +- .../scala/wdl/draft2/model/examples/ex2.scala | 22 +- .../scala/wdl/draft2/model/examples/ex3.scala | 22 +- .../scala/wdl/draft2/model/examples/ex4.scala | 14 +- .../scala/wdl/draft2/model/examples/ex5.scala | 28 +- .../scala/wdl/draft2/model/examples/ex6.scala | 13 +- .../scala/wdl/draft2/model/examples/ex7.scala | 27 +- .../scala/wdl/draft2/model/examples/ex8.scala | 2 +- .../model/exception/LookupException.scala | 20 +- .../exception/UnsatisfiedInputException.scala | 2 +- .../model/exception/ValidationException.scala | 2 +- .../draft2/model/expression/Evaluator.scala | 1 - .../model/expression/FileEvaluator.scala | 61 +- .../model/expression/TypeEvaluator.scala | 71 +- .../model/expression/ValueEvaluator.scala | 167 +- .../model/expression/WdlFunctions.scala | 10 +- .../WdlStandardLibraryFunctions.scala | 410 +++- .../model/formatter/SyntaxFormatter.scala | 105 +- .../main/scala/wdl/draft2/model/package.scala | 3 +- .../types/WdlCallOutputsObjectType.scala | 4 +- .../model/types/WdlFlavoredWomType.scala | 16 +- .../draft2/model/types/WdlNamespaceType.scala | 4 +- .../draft2/src/test/scala/wdl/AstSpec.scala | 10 +- 
.../src/test/scala/wdl/DeclarationSpec.scala | 78 +- .../src/test/scala/wdl/NamespaceSpec.scala | 5 +- .../wdl/ParameterWdlCommandPartSpec.scala | 48 +- .../test/scala/wdl/RuntimeAttributeSpec.scala | 18 +- .../scala/wdl/SameNameParametersSpec.scala | 21 +- .../draft2/src/test/scala/wdl/SampleWdl.scala | 367 ++- .../draft2/src/test/scala/wdl/ScopeSpec.scala | 6 +- .../src/test/scala/wdl/SyntaxErrorSpec.scala | 140 +- .../test/scala/wdl/SyntaxHighlightSpec.scala | 378 +-- .../draft2/src/test/scala/wdl/TaskSpec.scala | 66 +- .../src/test/scala/wdl/TestFileUtil.scala | 3 +- .../wdl/ThreeStepImportNamespaceSpec.scala | 90 +- .../test/scala/wdl/ThreeStepImportSpec.scala | 88 +- .../src/test/scala/wdl/WdlCallSpec.scala | 276 ++- .../draft2/src/test/scala/wdl/WdlTest.scala | 13 +- .../src/test/scala/wdl/WdlWiringSpec.scala | 109 +- .../scala/wdl/WdlWorkflowImportsSpec.scala | 208 +- .../src/test/scala/wdl/WdlWorkflowSpec.scala | 231 +- .../wdl/expression/DNAxTypeEvalTest.scala | 20 +- .../expression/Draft2SizeFunctionSpec.scala | 78 +- .../wdl/expression/FileEvaluatorSpec.scala | 75 +- .../PureStandardLibraryFunctionsSpec.scala | 73 +- .../wdl/expression/TypeEvaluatorSpec.scala | 19 +- .../wdl/expression/ValueEvaluatorSpec.scala | 326 +-- .../WdlStandardLibraryFunctionsSpec.scala | 2 - .../scala/wdl/types/WdlArrayTypeSpec.scala | 2 +- .../test/scala/wdl/types/WdlMapTypeSpec.scala | 22 +- .../scala/wdl/types/WdlObjectTypeSpec.scala | 29 +- .../scala/wdl/types/WdlPairTypeSpec.scala | 105 +- .../scala/wdl/types/WomArrayTypeSpec.scala | 38 +- .../test/scala/wdl/util/StringUtilSpec.scala | 1 - .../test/scala/wdl/values/WdlValueSpec.scala | 119 +- .../model/draft3/elements/CallElement.scala | 5 +- .../draft3/elements/CommandPartElement.scala | 4 +- .../draft3/elements/DeclarationElement.scala | 33 +- .../draft3/elements/ExpressionElement.scala | 62 +- .../model/draft3/elements/FileElement.scala | 7 +- .../wdl/model/draft3/elements/IfElement.scala | 4 +- .../model/draft3/elements/ImportElement.scala | 5 +- .../elements/InputsSectionElement.scala | 4 +- .../draft3/elements/LanguageElement.scala | 2 +- .../draft3/elements/MetaSectionElement.scala | 4 +- .../elements/OutputsSectionElement.scala | 4 +- .../ParameterMetaSectionElement.scala | 4 +- .../PlaceholderAttributeElement.scala | 6 +- .../draft3/elements/ScatterElement.scala | 6 +- .../elements/StringEscapeSequence.scala | 11 +- .../elements/TaskDefinitionElement.scala | 3 +- .../model/draft3/elements/TypeElement.scala | 2 +- .../elements/WorkflowDefinitionElement.scala | 3 +- .../draft3/graph/GeneratedValueHandle.scala | 3 +- .../wdl/model/draft3/graph/LinkedGraph.scala | 4 +- .../graph/UnlinkedConsumedValueHook.scala | 4 +- .../draft3/graph/UnlinkedValueConsumer.scala | 12 +- .../draft3/graph/UnlinkedValueGenerator.scala | 5 +- .../graph/expression/FileEvaluator.scala | 17 +- .../graph/expression/TypeEvaluator.scala | 6 +- .../graph/expression/ValueEvaluator.scala | 5 +- .../graph/expression/WomExpressionMaker.scala | 3 +- .../wdl/shared/FileSizeLimitationConfig.scala | 17 +- .../model/expression/FileEvaluatorUtil.scala | 15 +- .../model/expression/ValueEvaluation.scala | 28 +- .../AstToNewExpressionElements.scala | 21 +- .../ast2wdlom/BiscayneGenericAstNode.scala | 3 +- .../biscayne/ast2wdlom/ast2wdlom.scala | 83 +- .../BiscayneExpressionValueConsumers.scala | 58 +- .../expression/consumed/consumed.scala | 188 +- .../files/BiscayneFileEvaluators.scala | 9 +- .../linking/expression/files/files.scala | 165 +- .../types/BiscayneTypeEvaluators.scala | 
70 +- .../linking/expression/types/types.scala | 10 +- .../values/BiscayneValueEvaluators.scala | 167 +- .../linking/expression/values/values.scala | 195 +- .../biscayne/parsing/BiscayneParser.scala | 2 +- .../WdlBiscayneSyntaxErrorFormatter.scala | 42 +- .../biscayne/wdlom2wom/package.scala | 17 +- .../transforms/biscayne/Ast2WdlomSpec.scala | 31 +- .../ast2wdlom/WdlFileToWdlomSpec.scala | 408 +++- ...BiscayneExpressionValueConsumersSpec.scala | 20 +- .../files/BiscayneFileEvaluatorSpec.scala | 17 +- .../types/BiscayneTypeEvaluatorSpec.scala | 20 +- .../values/BiscayneValueEvaluatorSpec.scala | 102 +- .../AstToNewExpressionElements.scala | 21 +- .../ast2wdlom/CascadesGenericAstNode.scala | 3 +- .../cascades/ast2wdlom/ast2wdlom.scala | 83 +- .../CascadesExpressionValueConsumers.scala | 58 +- .../expression/consumed/consumed.scala | 188 +- .../files/CascadesFileEvaluators.scala | 9 +- .../linking/expression/files/files.scala | 165 +- .../types/CascadesTypeEvaluators.scala | 70 +- .../linking/expression/types/types.scala | 10 +- .../values/CascadesValueEvaluators.scala | 167 +- .../linking/expression/values/values.scala | 195 +- .../cascades/parsing/CascadesParser.scala | 2 +- .../WdlCascadesSyntaxErrorFormatter.scala | 42 +- .../cascades/wdlom2wom/package.scala | 17 +- .../transforms/cascades/Ast2WdlomSpec.scala | 31 +- .../ast2wdlom/WdlFileToWdlomSpec.scala | 408 +++- ...CascadesExpressionValueConsumersSpec.scala | 20 +- .../files/CascadesFileEvaluatorSpec.scala | 17 +- .../types/CascadesTypeEvaluatorSpec.scala | 20 +- .../values/CascadesValueEvaluatorSpec.scala | 102 +- .../wdlom2wom/WdlDraft2WomBundleMakers.scala | 24 +- .../wdlom2wom/WdlDraft2WomCallNodeMaker.scala | 131 +- ...lDraft2WomCommandTaskDefinitionMaker.scala | 16 +- .../WdlDraft2WomConditionalNodeMaker.scala | 65 +- .../WdlDraft2WomExecutableMakers.scala | 35 +- .../wdlom2wom/WdlDraft2WomGraphMaker.scala | 67 +- .../WdlDraft2WomScatterNodeMaker.scala | 49 +- .../WdlDraft2WomWorkflowDefinitionMaker.scala | 21 +- .../wdlwom/Draft2ReadFileLimitsSpec.scala | 12 +- .../transforms/wdlwom/WdlAliasWomSpec.scala | 15 +- .../wdlwom/WdlConditionalWomSpec.scala | 79 +- .../wdlwom/WdlInputValidationSpec.scala | 68 +- .../wdlwom/WdlNamespaceWomSpec.scala | 46 +- .../wdlwom/WdlNestedConditionalWomSpec.scala | 10 +- .../transforms/wdlwom/WdlScatterWomSpec.scala | 133 +- .../wdlwom/WdlSubworkflowWomSpec.scala | 57 +- .../WdlWomExpressionsAsInputsSpec.scala | 10 +- .../ast2wdlom/Draft3GenericAstNode.scala | 3 +- .../transforms/ast2wdlom/ast2wdlom.scala | 115 +- .../linking/expression/consumed/package.scala | 148 +- .../linking/expression/files/package.scala | 157 +- .../linking/expression/types/package.scala | 10 +- .../linking/expression/values/package.scala | 33 +- .../transforms/parsing/Draft3Parser.scala | 2 +- .../WdlDraft3SyntaxErrorFormatter.scala | 42 +- .../draft3/transforms/wdlom2wom/package.scala | 17 +- .../AstToWorkflowDefinitionElementSpec.scala | 73 +- .../transforms/ast2wdlom/Ast2WdlomSpec.scala | 8 +- .../transforms/ast2wdlom/ExpressionSet.scala | 28 +- .../ast2wdlom/WdlFileToWdlomSpec.scala | 2146 +++++++++++------ .../MemberAccessTypeEvaluatorSpec.scala | 22 +- .../MemberAccessValueEvaluatorSpec.scala | 18 +- .../expression/TernaryIfEvaluatorSpec.scala | 6 +- ...UnaryAndBinaryOperatorsEvaluatorSpec.scala | 5 - .../consumed/ValueConsumerSpec.scala | 4 +- .../values/Draft3ReadFileLimitsSpec.scala | 19 +- .../values/Draft3SizeFunctionSpec.scala | 129 +- .../expression/files/FileEvaluatorSpec.scala | 6 +- 
.../wdlom2wdl/WdlomToWdlFileSpec.scala | 36 +- .../wdlom2wom/WdlFileToWomSpec.scala | 146 +- .../AstNodeToCommandPartElement.scala | 26 +- .../AstNodeToExpressionElement.scala | 108 +- .../base/ast2wdlom/AstNodeToKvPair.scala | 5 +- .../base/ast2wdlom/AstNodeToMetaKvPair.scala | 22 +- .../AstNodeToPlaceholderAttributeSet.scala | 64 +- .../ast2wdlom/AstNodeToStaticString.scala | 54 +- .../base/ast2wdlom/AstNodeToTypeElement.scala | 61 +- .../base/ast2wdlom/AstToCallElement.scala | 18 +- .../AstToCommandSectionElement.scala | 20 +- .../ast2wdlom/AstToDeclarationContent.scala | 16 +- .../base/ast2wdlom/AstToFileBodyElement.scala | 14 +- .../base/ast2wdlom/AstToFileElement.scala | 14 +- .../base/ast2wdlom/AstToIfElement.scala | 10 +- .../base/ast2wdlom/AstToImportElement.scala | 16 +- .../AstToInputDeclarationElement.scala | 11 +- .../ast2wdlom/AstToInputsSectionElement.scala | 6 +- .../ast2wdlom/AstToMetaSectionElement.scala | 6 +- .../AstToOutputsSectionElement.scala | 10 +- .../AstToParameterMetaSectionElement.scala | 15 +- ...AstToRuntimeAttributesSectionElement.scala | 13 +- .../base/ast2wdlom/AstToScatterElement.scala | 23 +- .../base/ast2wdlom/AstToStructElement.scala | 13 +- .../AstToTaskDefinitionElement.scala | 63 +- .../ast2wdlom/AstToTaskSectionElement.scala | 17 +- .../ast2wdlom/AstToWorkflowBodyElement.scala | 29 +- .../AstToWorkflowDefinitionElement.scala | 79 +- .../AstToWorkflowGraphNodeElement.scala | 35 +- .../base/ast2wdlom/GenericAst.scala | 26 +- .../transforms/base/ast2wdlom/ast2wdlom.scala | 14 +- .../consumed/BinaryOperatorEvaluators.scala | 6 +- .../consumed/EngineFunctionEvaluators.scala | 60 +- .../consumed/LiteralEvaluators.scala | 85 +- .../consumed/LookupEvaluators.scala | 37 +- .../consumed/TernaryIfEvaluator.scala | 12 +- .../consumed/UnaryOperatorEvaluators.scala | 5 +- .../files/BinaryOperatorEvaluators.scala | 15 +- .../files/EngineFunctionEvaluators.scala | 161 +- .../expression/files/LiteralEvaluators.scala | 76 +- .../expression/files/LookupEvaluators.scala | 64 +- .../expression/files/TernaryIfEvaluator.scala | 14 +- .../files/UnaryOperatorEvaluators.scala | 8 +- .../base/linking/expression/package.scala | 16 +- .../types/BinaryOperatorEvaluators.scala | 11 +- .../types/EngineFunctionEvaluators.scala | 294 +-- .../expression/types/LiteralEvaluators.scala | 54 +- .../expression/types/LookupEvaluators.scala | 79 +- .../expression/types/TernaryIfEvaluator.scala | 9 +- .../types/UnaryOperatorEvaluators.scala | 6 +- .../values/BinaryOperatorEvaluators.scala | 21 +- .../values/EngineFunctionEvaluators.scala | 590 +++-- .../expression/values/LiteralEvaluators.scala | 57 +- .../expression/values/LookupEvaluators.scala | 119 +- .../values/TernaryIfEvaluator.scala | 17 +- .../values/UnaryOperatorEvaluators.scala | 5 +- .../base/linking/graph/LinkedGraphMaker.scala | 89 +- .../base/linking/graph/package.scala | 263 +- .../base/linking/typemakers/package.scala | 42 +- .../transforms/base/wdlom2wdl/WdlWriter.scala | 2 +- .../base/wdlom2wdl/WdlWriterImpl.scala | 470 ++-- .../CommandPartElementToWomCommandPart.scala | 68 +- .../wdlom2wom/FileElementToWomBundle.scala | 143 +- .../base/wdlom2wom/StructEvaluation.scala | 17 +- ...DefinitionElementToWomTaskDefinition.scala | 232 +- .../wdlom2wom/WomBundleToWomExecutable.scala | 6 +- ...nitionElementToWomWorkflowDefinition.scala | 161 +- .../expression/WdlomWomExpression.scala | 51 +- .../renaming/BinaryOperatorEvaluators.scala | 23 +- .../renaming/EngineFunctionEvaluators.scala | 54 +- 
.../renaming/IdentifierLookupRenamer.scala | 4 +- .../renaming/LiteralEvaluators.scala | 92 +- .../renaming/LookupEvaluators.scala | 74 +- .../renaming/TernaryIfEvaluator.scala | 4 +- .../renaming/UnaryOperatorEvaluators.scala | 4 +- .../expression/renaming/package.scala | 151 +- .../graph/CallElementToGraphNode.scala | 139 +- .../graph/IfElementToGraphNode.scala | 93 +- .../InputDeclarationElementToGraphNode.scala | 23 +- .../graph/ScatterElementToGraphNode.scala | 207 +- .../WorkflowGraphElementToGraphNode.scala | 70 +- .../GraphIdentifierLookupRenamer.scala | 7 +- .../wdlom2wom/graph/renaming/package.scala | 152 +- .../evaluation/values/EngineFunctions.scala | 22 +- .../wdlom2wom/WdlSharedInputParsing.scala | 19 +- .../wdlom2wom/WomGraphMakerTools.scala | 26 +- wom/src/main/scala/wdl/util/StringUtil.scala | 6 +- wom/src/main/scala/wom/CommandPart.scala | 3 +- .../main/scala/wom/RuntimeAttributes.scala | 3 + wom/src/main/scala/wom/WomFileMapper.scala | 19 +- .../main/scala/wom/callable/Callable.scala | 115 +- .../wom/callable/CommandTaskDefinition.scala | 102 +- .../ContainerizedInputExpression.scala | 3 +- .../wom/callable/RuntimeEnvironment.scala | 4 +- .../wom/callable/WorkflowDefinition.scala | 3 +- .../scala/wom/executable/Executable.scala | 78 +- .../wom/executable/ExecutableValidation.scala | 21 +- .../main/scala/wom/executable/WomBundle.scala | 6 +- .../expression/InputPointerToWomValue.scala | 44 +- .../wom/expression/IoFunctionSetAdapter.scala | 5 +- .../wom/expression/NoIoFunctionSet.scala | 33 +- .../scala/wom/expression/WomExpression.scala | 26 +- .../main/scala/wom/format/MemorySize.scala | 9 +- wom/src/main/scala/wom/graph/CallNode.scala | 234 +- .../scala/wom/graph/ConditionalNode.scala | 18 +- wom/src/main/scala/wom/graph/Graph.scala | 40 +- .../main/scala/wom/graph/GraphInputNode.scala | 131 +- wom/src/main/scala/wom/graph/GraphNode.scala | 18 +- .../wom/graph/GraphNodeInputExpression.scala | 9 +- .../main/scala/wom/graph/GraphNodePort.scala | 36 +- .../graph/GraphNodeWithSingleOutputPort.scala | 1 + .../scala/wom/graph/GraphOutputNode.scala | 10 +- .../main/scala/wom/graph/ScatterNode.scala | 74 +- .../main/scala/wom/graph/WomIdentifier.scala | 6 +- .../expression/AnonymousExpressionNode.scala | 15 +- .../expression/ExposedExpressionNode.scala | 7 +- .../wom/graph/expression/ExpressionNode.scala | 38 +- .../graph/expression/ExpressionNodeLike.scala | 4 +- wom/src/main/scala/wom/package.scala | 16 +- .../runtime/WomOutputRuntimeExtractor.scala | 4 +- .../wom/transforms/WomCallNodeMaker.scala | 7 +- .../transforms/WomConditionalNodeMaker.scala | 7 +- .../wom/transforms/WomExecutableMaker.scala | 6 +- .../scala/wom/transforms/WomGraphMaker.scala | 7 +- .../wom/transforms/WomScatterNodeMaker.scala | 7 +- wom/src/main/scala/wom/types/WomAnyType.scala | 11 +- .../main/scala/wom/types/WomArrayType.scala | 26 +- .../main/scala/wom/types/WomBooleanType.scala | 2 +- .../scala/wom/types/WomCompositeType.scala | 33 +- .../scala/wom/types/WomCoproductType.scala | 33 +- .../scala/wom/types/WomEnumerationType.scala | 10 +- .../main/scala/wom/types/WomFileType.scala | 4 +- .../main/scala/wom/types/WomFloatType.scala | 2 +- .../main/scala/wom/types/WomIntegerLike.scala | 1 - .../main/scala/wom/types/WomIntegerType.scala | 14 +- .../main/scala/wom/types/WomNothingType.scala | 4 +- .../main/scala/wom/types/WomObjectType.scala | 24 +- .../scala/wom/types/WomOptionalType.scala | 13 +- .../main/scala/wom/types/WomPairType.scala | 22 +- .../scala/wom/types/WomPrimitiveType.scala | 3 +- 
.../main/scala/wom/types/WomStringType.scala | 2 +- wom/src/main/scala/wom/types/WomType.scala | 71 +- .../wom/types/WomTypeJsonFormatter.scala | 4 +- .../wom/types/coercion/defaults/package.scala | 22 +- wom/src/main/scala/wom/util/YamlUtils.scala | 50 +- wom/src/main/scala/wom/values/WomArray.scala | 58 +- .../main/scala/wom/values/WomBoolean.scala | 10 +- .../scala/wom/values/WomCoproductValue.scala | 3 +- wom/src/main/scala/wom/values/WomFile.scala | 93 +- wom/src/main/scala/wom/values/WomFloat.scala | 66 +- .../main/scala/wom/values/WomInteger.scala | 44 +- wom/src/main/scala/wom/values/WomLong.scala | 2 +- wom/src/main/scala/wom/values/WomMap.scala | 66 +- wom/src/main/scala/wom/values/WomObject.scala | 57 +- .../scala/wom/values/WomOptionalValue.scala | 13 +- wom/src/main/scala/wom/values/WomPair.scala | 3 +- wom/src/main/scala/wom/values/WomValue.scala | 29 +- wom/src/main/scala/wom/views/GraphPrint.scala | 49 +- wom/src/test/scala/wom/WomMatchers.scala | 2 +- .../callable/CommandTaskDefinitionSpec.scala | 21 +- .../expression/PlaceholderWomExpression.scala | 13 +- .../scala/wom/format/MemorySizeSpec.scala | 7 +- .../wom/graph/ExpressionAsCallInputSpec.scala | 8 +- .../scala/wom/graph/ExpressionNodeSpec.scala | 17 +- .../scala/wom/graph/GraphOutputNodeSpec.scala | 14 +- wom/src/test/scala/wom/graph/GraphSpec.scala | 76 +- .../scala/wom/graph/ScatterNodeSpec.scala | 21 +- .../scala/wom/types/WomArrayTypeSpec.scala | 18 +- .../scala/wom/types/WomCoercionSpec.scala | 10 +- .../wom/types/WomCompositeTypeSpec.scala | 62 +- .../scala/wom/types/WomCoproductSpec.scala | 1 - .../scala/wom/types/WomFileTypeSpec.scala | 176 +- .../test/scala/wom/types/WomMapTypeSpec.scala | 84 +- .../scala/wom/types/WomObjectTypeSpec.scala | 48 +- .../scala/wom/types/WomOptionalTypeSpec.scala | 56 +- .../test/scala/wom/types/WomTypeSpec.scala | 88 +- .../test/scala/wom/util/YamlUtilsSpec.scala | 40 +- .../test/scala/wom/values/WomFileSpec.scala | 250 +- .../test/scala/wom/values/WomObjectSpec.scala | 8 +- .../wom/values/WomOptionalValueSpec.scala | 1 - .../src/main/scala/womtool/WomtoolMain.scala | 71 +- .../PartialWomtoolCommandLineArguments.scala | 12 +- .../cmdline/WomtoolCommandLineParser.scala | 66 +- .../main/scala/womtool/graph/WomGraph.scala | 66 +- .../main/scala/womtool/graph/package.scala | 4 +- .../scala/womtool/input/WomGraphMaker.scala | 11 +- .../main/scala/womtool/inputs/Inputs.scala | 24 +- .../main/scala/womtool/outputs/Outputs.scala | 19 +- .../scala/womtool/validate/Validate.scala | 3 +- .../scala/womtool/wom2wdlom/WomToWdlom.scala | 317 ++- .../src/test/scala/womtool/SampleWdl.scala | 4 +- .../womtool/WomtoolJsonCommandSpec.scala | 17 +- .../test/scala/womtool/WomtoolMainSpec.scala | 16 +- .../scala/womtool/WomtoolValidateSpec.scala | 73 +- .../ExpressionBasedGraphOutputNodeSpec.scala | 16 +- .../womtool/graph/ExpressionNodeSpec.scala | 6 +- .../graph/OutputNameCollisionSpec.scala | 7 +- .../scala/womtool/graph/WomDotGraphTest.scala | 10 +- 1553 files changed, 49441 insertions(+), 32619 deletions(-) create mode 100644 .scalafmt.conf diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 00000000000..336b0fd7145 --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1,18 @@ +version = 3.7.17 +align.preset = none +align.openParenCallSite = true +align.openParenDefnSite = true +maxColumn = 120 +continuationIndent.defnSite = 2 +assumeStandardLibraryStripMargin = true +align.stripMargin = true +danglingParentheses.preset = true +rewrite.rules = [Imports, RedundantBraces, RedundantParens, 
SortModifiers] +rewrite.imports.sort = scalastyle +docstrings.style = keep +project.excludeFilters = [ + Dependencies.scala, + Settings.scala, + build.sbt +] +runner.dialect = scala213 diff --git a/CromIAM/src/main/scala/cromiam/auth/Collection.scala b/CromIAM/src/main/scala/cromiam/auth/Collection.scala index 84e72194ab2..47785ae2005 100644 --- a/CromIAM/src/main/scala/cromiam/auth/Collection.scala +++ b/CromIAM/src/main/scala/cromiam/auth/Collection.scala @@ -9,6 +9,7 @@ import scala.util.{Success, Try} final case class Collection(name: String) extends AnyVal object Collection { + /** * Parses a raw JSON string to make sure it fits the standard pattern (see below) for labels, * performs some CromIAM-specific checking to ensure the user isn't attempting to manipulate the @@ -19,13 +20,14 @@ object Collection { */ def validateLabels(labelsJson: Option[String]): Directive1[Option[Map[String, JsValue]]] = { - val labels = labelsJson map { l => - Try(l.parseJson) match { - case Success(JsObject(json)) if json.keySet.contains(CollectionLabelName) => throw new LabelContainsCollectionException - case Success(JsObject(json)) => json - case _ => throw InvalidLabelsException(l) - } + val labels = labelsJson map { l => + Try(l.parseJson) match { + case Success(JsObject(json)) if json.keySet.contains(CollectionLabelName) => + throw new LabelContainsCollectionException + case Success(JsObject(json)) => json + case _ => throw InvalidLabelsException(l) } + } provide(labels) } @@ -34,15 +36,16 @@ object Collection { val LabelsKey = "labels" // LabelContainsCollectionException is a class because of ScalaTest, some of the constructs don't play well w/ case objects - final class LabelContainsCollectionException extends Exception(s"Submitted labels contain the key $CollectionLabelName, which is not allowed\n") - final case class InvalidLabelsException(labels: String) extends Exception(s"Labels must be a valid JSON object, received: $labels\n") + final class LabelContainsCollectionException + extends Exception(s"Submitted labels contain the key $CollectionLabelName, which is not allowed\n") + final case class InvalidLabelsException(labels: String) + extends Exception(s"Labels must be a valid JSON object, received: $labels\n") /** * Returns the default collection for a user. 
*/ - def forUser(user: User): Collection = { + def forUser(user: User): Collection = Collection(user.userId.value) - } implicit val collectionJsonReader = new JsonReader[Collection] { import spray.json.DefaultJsonProtocol._ diff --git a/CromIAM/src/main/scala/cromiam/auth/User.scala b/CromIAM/src/main/scala/cromiam/auth/User.scala index d123f8fa2f7..fec64ebc5f9 100644 --- a/CromIAM/src/main/scala/cromiam/auth/User.scala +++ b/CromIAM/src/main/scala/cromiam/auth/User.scala @@ -7,4 +7,3 @@ import org.broadinstitute.dsde.workbench.model.WorkbenchUserId * Wraps the concept of an authenticated workbench user including their numeric ID as well as their bearer token */ final case class User(userId: WorkbenchUserId, authorization: Authorization) - diff --git a/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala b/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala index a95b8df89b5..4a419b62255 100644 --- a/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala +++ b/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala @@ -25,10 +25,16 @@ import scala.concurrent.{ExecutionContextExecutor, Future} * * FIXME: Look for ways to synch this up with the mothership */ -class CromwellClient(scheme: String, interface: String, port: Int, log: LoggingAdapter, serviceRegistryActorRef: ActorRef)(implicit system: ActorSystem, - ece: ExecutionContextExecutor, - materializer: ActorMaterializer) - extends SprayJsonSupport with DefaultJsonProtocol with StatusCheckedSubsystem with CromIamInstrumentation{ +class CromwellClient(scheme: String, + interface: String, + port: Int, + log: LoggingAdapter, + serviceRegistryActorRef: ActorRef +)(implicit system: ActorSystem, ece: ExecutionContextExecutor, materializer: ActorMaterializer) + extends SprayJsonSupport + with DefaultJsonProtocol + with StatusCheckedSubsystem + with CromIamInstrumentation { val cromwellUrl = new URL(s"$scheme://$interface:$port") val cromwellApiVersion = "v1" @@ -41,21 +47,23 @@ class CromwellClient(scheme: String, interface: String, port: Int, log: LoggingA def collectionForWorkflow(workflowId: String, user: User, - cromIamRequest: HttpRequest): FailureResponseOrT[Collection] = { + cromIamRequest: HttpRequest + ): FailureResponseOrT[Collection] = { import CromwellClient.EnhancedWorkflowLabels log.info("Requesting collection for " + workflowId + " for user " + user.userId + " from metadata") // Look up in Cromwell what the collection is for this workflow. 
If it doesn't exist, fail the Future - val cromwellApiLabelFunc = () => cromwellApiClient.labels(WorkflowId.fromString(workflowId), headers = List(user.authorization)) flatMap { - _.caasCollection match { - case Some(c) => FailureResponseOrT.pure[IO, HttpResponse](c) - case None => - val exception = new IllegalArgumentException(s"Workflow $workflowId has no associated collection") - val failure = IO.raiseError[Collection](exception) - FailureResponseOrT.right[HttpResponse](failure) + val cromwellApiLabelFunc = () => + cromwellApiClient.labels(WorkflowId.fromString(workflowId), headers = List(user.authorization)) flatMap { + _.caasCollection match { + case Some(c) => FailureResponseOrT.pure[IO, HttpResponse](c) + case None => + val exception = new IllegalArgumentException(s"Workflow $workflowId has no associated collection") + val failure = IO.raiseError[Collection](exception) + FailureResponseOrT.right[HttpResponse](failure) + } } - } instrumentRequest(cromwellApiLabelFunc, cromIamRequest, wfCollectionPrefix) } @@ -63,13 +71,14 @@ class CromwellClient(scheme: String, interface: String, port: Int, log: LoggingA def forwardToCromwell(httpRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { val future = { // See CromwellClient's companion object for info on these header modifications - val headers = httpRequest.headers.filterNot(header => header.name == TimeoutAccessHeader || header.name == HostHeader) + val headers = + httpRequest.headers.filterNot(header => header.name == TimeoutAccessHeader || header.name == HostHeader) val cromwellRequest = httpRequest .copy(uri = httpRequest.uri.withAuthority(interface, port).withScheme(scheme)) .withHeaders(headers) Http().singleRequest(cromwellRequest) - } recoverWith { - case e => Future.failed(CromwellConnectionFailure(e)) + } recoverWith { case e => + Future.failed(CromwellConnectionFailure(e)) } future.asFailureResponseOrT } @@ -86,7 +95,7 @@ class CromwellClient(scheme: String, interface: String, port: Int, log: LoggingA use the current workflow id. This is all called from inside the context of a Future, so exceptions will be properly caught. - */ + */ metadata.value.parseJson.asJsObject.fields.get("rootWorkflowId").map(_.convertTo[String]).getOrElse(workflowId) } @@ -96,11 +105,13 @@ class CromwellClient(scheme: String, interface: String, port: Int, log: LoggingA Grab the metadata from Cromwell filtered down to the rootWorkflowId. 
Then transform the response to get just the root workflow ID itself */ - val cromwellApiMetadataFunc = () => cromwellApiClient.metadata( - WorkflowId.fromString(workflowId), - args = Option(Map("includeKey" -> List("rootWorkflowId"))), - headers = List(user.authorization)).map(metadataToRootWorkflowId - ) + val cromwellApiMetadataFunc = () => + cromwellApiClient + .metadata(WorkflowId.fromString(workflowId), + args = Option(Map("includeKey" -> List("rootWorkflowId"))), + headers = List(user.authorization) + ) + .map(metadataToRootWorkflowId) instrumentRequest(cromwellApiMetadataFunc, cromIamRequest, rootWfIdPrefix) } @@ -120,14 +131,14 @@ object CromwellClient { // See: https://broadworkbench.atlassian.net/browse/DDO-2190 val HostHeader = "Host" - final case class CromwellConnectionFailure(f: Throwable) extends Exception(s"Unable to connect to Cromwell (${f.getMessage})", f) + final case class CromwellConnectionFailure(f: Throwable) + extends Exception(s"Unable to connect to Cromwell (${f.getMessage})", f) implicit class EnhancedWorkflowLabels(val wl: WorkflowLabels) extends AnyVal { - import Collection.{CollectionLabelName, collectionJsonReader} + import Collection.{collectionJsonReader, CollectionLabelName} - def caasCollection: Option[Collection] = { + def caasCollection: Option[Collection] = wl.labels.fields.get(CollectionLabelName).map(_.convertTo[Collection]) - } } } diff --git a/CromIAM/src/main/scala/cromiam/instrumentation/CromIamInstrumentation.scala b/CromIAM/src/main/scala/cromiam/instrumentation/CromIamInstrumentation.scala index 65b164f00f6..c79e0b6e14c 100644 --- a/CromIAM/src/main/scala/cromiam/instrumentation/CromIamInstrumentation.scala +++ b/CromIAM/src/main/scala/cromiam/instrumentation/CromIamInstrumentation.scala @@ -27,10 +27,11 @@ trait CromIamInstrumentation extends CromwellInstrumentation { val rootWfIdPrefix = NonEmptyList.one("root-workflow-id") val wfCollectionPrefix = NonEmptyList.one("workflow-collection") - def convertRequestToPath(httpRequest: HttpRequest): NonEmptyList[String] = NonEmptyList.of( // Returns the path of the URI only, without query parameters (e.g: api/engine/workflows/metadata) - httpRequest.uri.path.toString().stripPrefix("/") + httpRequest.uri.path + .toString() + .stripPrefix("/") // Replace UUIDs with [id] to keep paths same regardless of the workflow .replaceAll(CromIamInstrumentation.UUIDRegex, "[id]"), // Name of the method (e.g: GET) @@ -43,15 +44,19 @@ trait CromIamInstrumentation extends CromwellInstrumentation { def makePathFromRequestAndResponse(httpRequest: HttpRequest, httpResponse: HttpResponse): InstrumentationPath = convertRequestToPath(httpRequest).concatNel(NonEmptyList.of(httpResponse.status.intValue.toString)) - def sendTimingApi(statsDPath: InstrumentationPath, timing: FiniteDuration, prefixToStatsd: NonEmptyList[String]): Unit = { + def sendTimingApi(statsDPath: InstrumentationPath, + timing: FiniteDuration, + prefixToStatsd: NonEmptyList[String] + ): Unit = sendTiming(prefixToStatsd.concatNel(statsDPath), timing, CromIamPrefix) - } - def instrumentationPrefixForSam(methodPrefix: NonEmptyList[String]): NonEmptyList[String] = samPrefix.concatNel(methodPrefix) + def instrumentationPrefixForSam(methodPrefix: NonEmptyList[String]): NonEmptyList[String] = + samPrefix.concatNel(methodPrefix) def instrumentRequest[A](func: () => FailureResponseOrT[A], httpRequest: HttpRequest, - prefix: NonEmptyList[String]): FailureResponseOrT[A] = { + prefix: NonEmptyList[String] + ): FailureResponseOrT[A] = { def now(): Deadline = 
Deadline.now val startTimestamp = now() diff --git a/CromIAM/src/main/scala/cromiam/sam/SamClient.scala b/CromIAM/src/main/scala/cromiam/sam/SamClient.scala index d6a315f8241..8fa0cc8fd87 100644 --- a/CromIAM/src/main/scala/cromiam/sam/SamClient.scala +++ b/CromIAM/src/main/scala/cromiam/sam/SamClient.scala @@ -33,20 +33,21 @@ class SamClient(scheme: String, port: Int, checkSubmitWhitelist: Boolean, log: LoggingAdapter, - serviceRegistryActorRef: ActorRef) - (implicit system: ActorSystem, ece: ExecutionContextExecutor, materializer: ActorMaterializer) extends StatusCheckedSubsystem with CromIamInstrumentation { + serviceRegistryActorRef: ActorRef +)(implicit system: ActorSystem, ece: ExecutionContextExecutor, materializer: ActorMaterializer) + extends StatusCheckedSubsystem + with CromIamInstrumentation { - private implicit val cs = IO.contextShift(ece) + implicit private val cs = IO.contextShift(ece) override val statusUri = uri"$samBaseUri/status" override val serviceRegistryActor: ActorRef = serviceRegistryActorRef - def isSubmitWhitelisted(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[Boolean] = { + def isSubmitWhitelisted(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[Boolean] = checkSubmitWhitelist.fold( isSubmitWhitelistedSam(user, cromIamRequest), FailureResponseOrT.pure(true) ) - } def isSubmitWhitelistedSam(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[Boolean] = { val request = HttpRequest( @@ -64,7 +65,7 @@ class SamClient(scheme: String, whitelisted <- response.status match { case StatusCodes.OK => // Does not seem to be already provided? - implicit val entityToBooleanUnmarshaller : Unmarshaller[HttpEntity, Boolean] = + implicit val entityToBooleanUnmarshaller: Unmarshaller[HttpEntity, Boolean] = (Unmarshaller.stringUnmarshaller flatMap Unmarshaller.booleanFromStringUnmarshaller).asScala val unmarshal = IO.fromFuture(IO(Unmarshal(response.entity).to[Boolean])) FailureResponseOrT.right[HttpResponse](unmarshal) @@ -95,14 +96,19 @@ class SamClient(scheme: String, userInfo.enabled } case _ => - log.error("Could not verify access with Sam for user {}, error was {} {}", user.userId, response.status, response.toString().take(100)) + log.error("Could not verify access with Sam for user {}, error was {} {}", + user.userId, + response.status, + response.toString().take(100) + ) FailureResponseOrT.pure[IO, HttpResponse](false) } } yield userEnabled } def collectionsForUser(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[List[Collection]] = { - val request = HttpRequest(method = HttpMethods.GET, uri = samBaseCollectionUri, headers = List[HttpHeader](user.authorization)) + val request = + HttpRequest(method = HttpMethods.GET, uri = samBaseCollectionUri, headers = List[HttpHeader](user.authorization)) for { response <- instrumentRequest( @@ -120,24 +126,25 @@ class SamClient(scheme: String, * @return Successful future if the auth is accepted, a Failure otherwise. 
*/ def requestAuth(authorizationRequest: CollectionAuthorizationRequest, - cromIamRequest: HttpRequest): FailureResponseOrT[Unit] = { + cromIamRequest: HttpRequest + ): FailureResponseOrT[Unit] = { val logString = authorizationRequest.action + " access for user " + authorizationRequest.user.userId + - " on a request to " + authorizationRequest.action + " for collection " + authorizationRequest.collection.name + " on a request to " + authorizationRequest.action + " for collection " + authorizationRequest.collection.name - def validateEntityBytes(byteString: ByteString): FailureResponseOrT[Unit] = { + def validateEntityBytes(byteString: ByteString): FailureResponseOrT[Unit] = if (byteString.utf8String == "true") { Monad[FailureResponseOrT].unit } else { log.warning("Sam denied " + logString) FailureResponseOrT[IO, HttpResponse, Unit](IO.raiseError(new SamDenialException)) } - } log.info("Requesting authorization for " + logString) val request = HttpRequest(method = HttpMethods.GET, - uri = samAuthorizeActionUri(authorizationRequest), - headers = List[HttpHeader](authorizationRequest.user.authorization)) + uri = samAuthorizeActionUri(authorizationRequest), + headers = List[HttpHeader](authorizationRequest.user.authorization) + ) for { response <- instrumentRequest( @@ -158,10 +165,7 @@ class SamClient(scheme: String, - If user has the 'add' permission we're ok - else fail the future */ - def requestSubmission(user: User, - collection: Collection, - cromIamRequest: HttpRequest - ): FailureResponseOrT[Unit] = { + def requestSubmission(user: User, collection: Collection, cromIamRequest: HttpRequest): FailureResponseOrT[Unit] = { log.info("Verifying user " + user.userId + " can submit a workflow to collection " + collection.name) val createCollection = registerCreation(user, collection, cromIamRequest) @@ -169,15 +173,20 @@ class SamClient(scheme: String, case r if r.status == StatusCodes.NoContent => Monad[FailureResponseOrT].unit case r => FailureResponseOrT[IO, HttpResponse, Unit](IO.raiseError(SamRegisterCollectionException(r.status))) } recoverWith { - case r if r.status == StatusCodes.Conflict => requestAuth(CollectionAuthorizationRequest(user, collection, "add"), cromIamRequest) + case r if r.status == StatusCodes.Conflict => + requestAuth(CollectionAuthorizationRequest(user, collection, "add"), cromIamRequest) case r => FailureResponseOrT[IO, HttpResponse, Unit](IO.raiseError(SamRegisterCollectionException(r.status))) } } protected def registerCreation(user: User, collection: Collection, - cromIamRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { - val request = HttpRequest(method = HttpMethods.POST, uri = samRegisterUri(collection), headers = List[HttpHeader](user.authorization)) + cromIamRequest: HttpRequest + ): FailureResponseOrT[HttpResponse] = { + val request = HttpRequest(method = HttpMethods.POST, + uri = samRegisterUri(collection), + headers = List[HttpHeader](user.authorization) + ) instrumentRequest( () => Http().singleRequest(request).asFailureResponseOrT, @@ -186,9 +195,9 @@ class SamClient(scheme: String, ) } - private def samAuthorizeActionUri(authorizationRequest: CollectionAuthorizationRequest) = { - akka.http.scaladsl.model.Uri(s"${samBaseUriForWorkflow(authorizationRequest.collection)}/action/${authorizationRequest.action}") - } + private def samAuthorizeActionUri(authorizationRequest: CollectionAuthorizationRequest) = + akka.http.scaladsl.model + .Uri(s"${samBaseUriForWorkflow(authorizationRequest.collection)}/action/${authorizationRequest.action}") private 
def samRegisterUri(collection: Collection) = akka.http.scaladsl.model.Uri(samBaseUriForWorkflow(collection)) @@ -207,15 +216,18 @@ object SamClient { class SamDenialException extends Exception("Access Denied") - final case class SamConnectionFailure(phase: String, f: Throwable) extends Exception(s"Unable to connect to Sam during $phase (${f.getMessage})", f) + final case class SamConnectionFailure(phase: String, f: Throwable) + extends Exception(s"Unable to connect to Sam during $phase (${f.getMessage})", f) - final case class SamRegisterCollectionException(errorCode: StatusCode) extends Exception(s"Can't register collection with Sam. Status code: ${errorCode.value}") + final case class SamRegisterCollectionException(errorCode: StatusCode) + extends Exception(s"Can't register collection with Sam. Status code: ${errorCode.value}") final case class CollectionAuthorizationRequest(user: User, collection: Collection, action: String) val SamDenialResponse = HttpResponse(status = StatusCodes.Forbidden, entity = new SamDenialException().getMessage) - def SamRegisterCollectionExceptionResp(statusCode: StatusCode) = HttpResponse(status = statusCode, entity = SamRegisterCollectionException(statusCode).getMessage) + def SamRegisterCollectionExceptionResp(statusCode: StatusCode) = + HttpResponse(status = statusCode, entity = SamRegisterCollectionException(statusCode).getMessage) case class UserStatusInfo(adminEnabled: Boolean, enabled: Boolean, userEmail: String, userSubjectId: String) diff --git a/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala b/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala index 9f5af038b12..b18366490c5 100644 --- a/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala +++ b/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala @@ -15,7 +15,6 @@ import org.broadinstitute.dsde.workbench.util.health.Subsystems.{Cromwell, Sam} import scala.concurrent.{ExecutionContext, ExecutionContextExecutor, Future, Promise} - object CromIamServer extends HttpApp with CromIamApiService with SwaggerService { final val rootConfig: Config = ConfigFactory.load() @@ -35,21 +34,28 @@ object CromIamServer extends HttpApp with CromIamApiService with SwaggerService If there is a reason then leave a comment why there should be two actor systems. 
https://github.com/broadinstitute/cromwell/issues/3851 */ - CromIamServer.startServer(configuration.cromIamConfig.http.interface, configuration.cromIamConfig.http.port, configuration.cromIamConfig.serverSettings) + CromIamServer.startServer(configuration.cromIamConfig.http.interface, + configuration.cromIamConfig.http.port, + configuration.cromIamConfig.serverSettings + ) } - override implicit val system: ActorSystem = ActorSystem() - override implicit lazy val executor: ExecutionContextExecutor = system.dispatcher - override implicit val materializer: ActorMaterializer = ActorMaterializer() + implicit override val system: ActorSystem = ActorSystem() + implicit override lazy val executor: ExecutionContextExecutor = system.dispatcher + implicit override val materializer: ActorMaterializer = ActorMaterializer() override val log = Logging(system, getClass) override val routes: Route = allRoutes ~ swaggerUiResourceRoute - override val statusService: StatusService = new StatusService(() => Map(Cromwell -> cromwellClient.subsystemStatus(), Sam -> samClient.subsystemStatus())) + override val statusService: StatusService = new StatusService(() => + Map(Cromwell -> cromwellClient.subsystemStatus(), Sam -> samClient.subsystemStatus()) + ) // Override default shutdownsignal which was just "hit return/enter" - override def waitForShutdownSignal(actorSystem: ActorSystem)(implicit executionContext: ExecutionContext): Future[Done] = { + override def waitForShutdownSignal( + actorSystem: ActorSystem + )(implicit executionContext: ExecutionContext): Future[Done] = { val promise = Promise[Done]() sys.addShutdownHook { // we can add anything we want the server to do when someone shutdowns the server (Ctrl-c) diff --git a/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala b/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala index 8d27c7b980d..ce8c5934acd 100644 --- a/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala +++ b/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala @@ -15,7 +15,8 @@ import scala.util.{Failure, Success, Try} final case class CromIamServerConfig(cromIamConfig: CromIamConfig, cromwellConfig: ServiceConfig, samConfig: SamClientConfig, - swaggerOauthConfig: SwaggerOauthConfig) + swaggerOauthConfig: SwaggerOauthConfig +) object CromIamServerConfig { def getFromConfig(conf: Config): ErrorOr[CromIamServerConfig] = { @@ -27,22 +28,28 @@ object CromIamServerConfig { (cromIamConfig, cromwellConfig, samConfig, googleConfig) mapN CromIamServerConfig.apply } - private[config] def getValidatedConfigPath[A](conf: Config, path: String, getter: (Config, String) => A, default: Option[A] = None): ErrorOr[A] = { + private[config] def getValidatedConfigPath[A](conf: Config, + path: String, + getter: (Config, String) => A, + default: Option[A] = None + ): ErrorOr[A] = if (conf.hasPath(path)) { Try(getter.apply(conf, path)) match { case Success(s) => s.validNel case Failure(e) => s"Unable to read valid value at '$path': ${e.getMessage}".invalidNel } - } else default match { - case Some(d) => d.validNel - case None => s"Configuration does not have path $path".invalidNel - } - } + } else + default match { + case Some(d) => d.validNel + case None => s"Configuration does not have path $path".invalidNel + } - private[config] implicit final class ValidatingConfig(val conf: Config) extends AnyVal { - def getValidatedString(path: String, default: Option[String] = None): ErrorOr[String] = getValidatedConfigPath(conf, path, (c, p) => 
c.getString(p), default) + implicit final private[config] class ValidatingConfig(val conf: Config) extends AnyVal { + def getValidatedString(path: String, default: Option[String] = None): ErrorOr[String] = + getValidatedConfigPath(conf, path, (c, p) => c.getString(p), default) def getValidatedInt(path: String): ErrorOr[Int] = getValidatedConfigPath(conf, path, (c, p) => c.getInt(p)) - def getValidatedStringList(path: String): ErrorOr[List[String]] = getValidatedConfigPath[List[String]](conf, path, (c, p) => c.getStringList(p).asScala.toList) + def getValidatedStringList(path: String): ErrorOr[List[String]] = + getValidatedConfigPath[List[String]](conf, path, (c, p) => c.getStringList(p).asScala.toList) } } @@ -50,13 +57,12 @@ final case class CromIamConfig(http: ServiceConfig, serverSettings: ServerSettin object CromIamConfig { - private def getValidatedServerSettings(conf: Config): ErrorOr[ServerSettings] = { + private def getValidatedServerSettings(conf: Config): ErrorOr[ServerSettings] = Try(ServerSettings(conf)) match { case Success(serverSettings) => serverSettings.validNel case Failure(e) => s"Unable to generate server settings from configuration file: ${e.getMessage}".invalidNel } - } private[config] def getFromConfig(conf: Config, basePath: String): ErrorOr[CromIamConfig] = { val serviceConfig = ServiceConfig.getFromConfig(conf, basePath) @@ -94,6 +100,9 @@ object SwaggerOauthConfig { private[config] def getFromConfig(conf: Config, basePath: String): ErrorOr[SwaggerOauthConfig] = { def getValidatedOption(option: String) = conf.getValidatedString(s"$basePath.$option") - (getValidatedOption("client_id"), getValidatedOption("realm"), getValidatedOption("app_name")) mapN SwaggerOauthConfig.apply - } + (getValidatedOption("client_id"), + getValidatedOption("realm"), + getValidatedOption("app_name") + ) mapN SwaggerOauthConfig.apply + } } diff --git a/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala b/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala index 7aa6c0af752..b267b201bb0 100644 --- a/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala +++ b/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala @@ -1,7 +1,7 @@ package cromiam.server.status import com.softwaremill.sttp.asynchttpclient.future.AsyncHttpClientFutureBackend -import com.softwaremill.sttp.{Uri, sttp} +import com.softwaremill.sttp.{sttp, Uri} import org.broadinstitute.dsde.workbench.util.health.SubsystemStatus import scala.concurrent.{ExecutionContext, Future} @@ -18,12 +18,11 @@ trait StatusCheckedSubsystem { * Make a call to the status endpoint. 
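* (StatusService polls this for each subsystem, via the checkStatus map wired up in CromIamServer.)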
If we receive a 200 OK fill in the SubsystemStatus w/ OK = true and no * error messages, otherwise OK = false and include the response body */ - def subsystemStatus()(implicit ec: ExecutionContext): Future[SubsystemStatus] = { + def subsystemStatus()(implicit ec: ExecutionContext): Future[SubsystemStatus] = sttp.get(statusUri).send() map { x => x.body match { case Right(_) => SubsystemStatus(true, None) case Left(errors) => SubsystemStatus(false, Option(List(errors))) } } - } } diff --git a/CromIAM/src/main/scala/cromiam/server/status/StatusService.scala b/CromIAM/src/main/scala/cromiam/server/status/StatusService.scala index 97b16e98a5e..c3f38865224 100644 --- a/CromIAM/src/main/scala/cromiam/server/status/StatusService.scala +++ b/CromIAM/src/main/scala/cromiam/server/status/StatusService.scala @@ -15,13 +15,14 @@ import scala.concurrent.duration._ */ class StatusService(checkStatus: () => Map[Subsystem, Future[SubsystemStatus]], initialDelay: FiniteDuration = Duration.Zero, - pollInterval: FiniteDuration = 1.minute)(implicit system: ActorSystem, executionContext: ExecutionContext) { + pollInterval: FiniteDuration = 1.minute +)(implicit system: ActorSystem, executionContext: ExecutionContext) { implicit val askTimeout = Timeout(5.seconds) private val healthMonitor = system.actorOf(HealthMonitor.props(Set(Cromwell, Sam))(checkStatus), "HealthMonitorActor") system.scheduler.schedule(initialDelay, pollInterval, healthMonitor, HealthMonitor.CheckAll) - def status(): Future[StatusCheckResponse] = healthMonitor.ask(GetCurrentStatus).asInstanceOf[Future[StatusCheckResponse]] + def status(): Future[StatusCheckResponse] = + healthMonitor.ask(GetCurrentStatus).asInstanceOf[Future[StatusCheckResponse]] } - diff --git a/CromIAM/src/main/scala/cromiam/webservice/CromIamApiService.scala b/CromIAM/src/main/scala/cromiam/webservice/CromIamApiService.scala index 7a16e5ea797..d046694b0f9 100644 --- a/CromIAM/src/main/scala/cromiam/webservice/CromIamApiService.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/CromIamApiService.scala @@ -16,7 +16,12 @@ import cromiam.auth.{Collection, User} import cromiam.cromwell.CromwellClient import cromiam.instrumentation.CromIamInstrumentation import cromiam.sam.SamClient -import cromiam.sam.SamClient.{CollectionAuthorizationRequest, SamConnectionFailure, SamDenialException, SamDenialResponse} +import cromiam.sam.SamClient.{ + CollectionAuthorizationRequest, + SamConnectionFailure, + SamDenialException, + SamDenialResponse +} import cromiam.server.config.CromIamServerConfig import cromiam.server.status.StatusService import cromiam.webservice.CromIamApiService._ @@ -30,12 +35,13 @@ trait SwaggerService extends SwaggerUiResourceHttpService { } // NB: collection name *must* follow label value rules in cromwell. This needs to be documented somewhere. 
(although those restrictions are soon to die) -trait CromIamApiService extends RequestSupport - with EngineRouteSupport - with SubmissionSupport - with QuerySupport - with WomtoolRouteSupport - with CromIamInstrumentation { +trait CromIamApiService + extends RequestSupport + with EngineRouteSupport + with SubmissionSupport + with QuerySupport + with WomtoolRouteSupport + with CromIamInstrumentation { implicit val system: ActorSystem implicit def executor: ExecutionContextExecutor @@ -44,23 +50,23 @@ trait CromIamApiService extends RequestSupport protected def rootConfig: Config protected def configuration: CromIamServerConfig - override lazy val serviceRegistryActor: ActorRef = system.actorOf(ServiceRegistryActor.props(rootConfig), "ServiceRegistryActor") + override lazy val serviceRegistryActor: ActorRef = + system.actorOf(ServiceRegistryActor.props(rootConfig), "ServiceRegistryActor") val log: LoggingAdapter - val CromIamExceptionHandler: ExceptionHandler = { - ExceptionHandler { - case e: Exception => - log.error(e, "Request failed {}", e) - complete(HttpResponse(InternalServerError, entity = e.getMessage)) // FIXME: use workbench-model ErrorReport + val CromIamExceptionHandler: ExceptionHandler = + ExceptionHandler { case e: Exception => + log.error(e, "Request failed {}", e) + complete(HttpResponse(InternalServerError, entity = e.getMessage)) // FIXME: use workbench-model ErrorReport } - } lazy val cromwellClient = new CromwellClient(configuration.cromwellConfig.scheme, - configuration.cromwellConfig.interface, - configuration.cromwellConfig.port, - log, - serviceRegistryActor) + configuration.cromwellConfig.interface, + configuration.cromwellConfig.port, + log, + serviceRegistryActor + ) lazy val samClient = new SamClient( configuration.samConfig.http.scheme, @@ -68,7 +74,8 @@ trait CromIamApiService extends RequestSupport configuration.samConfig.http.port, configuration.samConfig.checkSubmitWhitelist, log, - serviceRegistryActor) + serviceRegistryActor + ) val statusService: StatusService @@ -76,8 +83,7 @@ trait CromIamApiService extends RequestSupport workflowLogsRoute ~ abortRoute ~ metadataRoute ~ timingRoute ~ statusRoute ~ backendRoute ~ labelPatchRoute ~ callCacheDiffRoute ~ labelGetRoute ~ releaseHoldRoute - - val allRoutes: Route = handleExceptions(CromIamExceptionHandler) { workflowRoutes ~ engineRoutes ~ womtoolRoutes } + val allRoutes: Route = handleExceptions(CromIamExceptionHandler)(workflowRoutes ~ engineRoutes ~ womtoolRoutes) def abortRoute: Route = path("api" / "workflows" / Segment / Segment / Abort) { (_, workflowId) => post { @@ -90,8 +96,8 @@ trait CromIamApiService extends RequestSupport } } - //noinspection MutatorLikeMethodIsParameterless - def releaseHoldRoute: Route = path("api" / "workflows" / Segment / Segment / ReleaseHold) { (_, workflowId) => + // noinspection MutatorLikeMethodIsParameterless + def releaseHoldRoute: Route = path("api" / "workflows" / Segment / Segment / ReleaseHold) { (_, workflowId) => post { extractUserAndStrictRequest { (user, req) => logUserWorkflowAction(user, workflowId, ReleaseHold) @@ -109,22 +115,22 @@ trait CromIamApiService extends RequestSupport def statusRoute: Route = workflowGetRouteWithId("status") def labelGetRoute: Route = workflowGetRouteWithId(Labels) - def labelPatchRoute: Route = { + def labelPatchRoute: Route = path("api" / "workflows" / Segment / Segment / Labels) { (_, workflowId) => patch { extractUserAndStrictRequest { (user, req) => entity(as[String]) { labels => logUserWorkflowAction(user, workflowId, 
Labels) - validateLabels(Option(labels)) { _ => // Not using the labels, just using this to verify they didn't specify labels we don't want them to - complete { - authorizeUpdateThenForwardToCromwell(user, workflowId, req).asHttpResponse - } + validateLabels(Option(labels)) { + _ => // Not using the labels, just using this to verify they didn't specify labels we don't want them to + complete { + authorizeUpdateThenForwardToCromwell(user, workflowId, req).asHttpResponse + } } } } } } - } def backendRoute: Route = workflowGetRoute("backends") @@ -137,7 +143,10 @@ trait CromIamApiService extends RequestSupport complete { (paramMap.get("workflowA"), paramMap.get("workflowB")) match { case (Some(a), Some(b)) => authorizeReadThenForwardToCromwell(user, List(a, b), req).asHttpResponse - case _ => HttpResponse(status = BadRequest, entity = "Must supply both workflowA and workflowB to the /callcaching/diff endpoint") + case _ => + HttpResponse(status = BadRequest, + entity = "Must supply both workflowA and workflowB to the /callcaching/diff endpoint" + ) } } } @@ -162,31 +171,31 @@ trait CromIamApiService extends RequestSupport */ private def workflowGetRouteWithId(urlSuffix: String): Route = workflowRoute(urlSuffix, get) - private def workflowRoute(urlSuffix: String, method: Directive0): Route = path("api" / "workflows" / Segment / Segment / urlSuffix) { (_, workflowId) => - method { - extractUserAndStrictRequest { (user, req) => - logUserWorkflowAction(user, workflowId, urlSuffix) - complete { - authorizeReadThenForwardToCromwell(user, List(workflowId), req).asHttpResponse + private def workflowRoute(urlSuffix: String, method: Directive0): Route = + path("api" / "workflows" / Segment / Segment / urlSuffix) { (_, workflowId) => + method { + extractUserAndStrictRequest { (user, req) => + logUserWorkflowAction(user, workflowId, urlSuffix) + complete { + authorizeReadThenForwardToCromwell(user, List(workflowId), req).asHttpResponse + } } } } - } private def authorizeThenForwardToCromwell(user: User, workflowIds: List[String], action: String, request: HttpRequest, - cromwellClient: CromwellClient): - FailureResponseOrT[HttpResponse] = { - def authForCollection(collection: Collection): FailureResponseOrT[Unit] = { + cromwellClient: CromwellClient + ): FailureResponseOrT[HttpResponse] = { + def authForCollection(collection: Collection): FailureResponseOrT[Unit] = samClient.requestAuth(CollectionAuthorizationRequest(user, collection, action), request) mapErrorWith { case e: SamDenialException => IO.raiseError(e) case e => log.error(e, "Unable to connect to Sam {}", e) IO.raiseError(SamConnectionFailure("authorization", e)) } - } val cromwellResponseT = for { rootWorkflowIds <- workflowIds.traverse(cromwellClient.getRootWorkflow(_, user, request)) @@ -209,31 +218,29 @@ trait CromIamApiService extends RequestSupport private def authorizeReadThenForwardToCromwell(user: User, workflowIds: List[String], request: HttpRequest - ): FailureResponseOrT[HttpResponse] = { - authorizeThenForwardToCromwell( - user = user, - workflowIds = workflowIds, - action = "view", - request = request, - cromwellClient = cromwellClient) - } + ): FailureResponseOrT[HttpResponse] = + authorizeThenForwardToCromwell(user = user, + workflowIds = workflowIds, + action = "view", + request = request, + cromwellClient = cromwellClient + ) private def authorizeUpdateThenForwardToCromwell(user: User, workflowId: String, request: HttpRequest - ): FailureResponseOrT[HttpResponse] = { - authorizeThenForwardToCromwell( - user = user, - 
workflowIds = List(workflowId), - action = "update", - request = request, - cromwellClient = cromwellClient) - } + ): FailureResponseOrT[HttpResponse] = + authorizeThenForwardToCromwell(user = user, + workflowIds = List(workflowId), + action = "update", + request = request, + cromwellClient = cromwellClient + ) private def authorizeAbortThenForwardToCromwell(user: User, workflowId: String, request: HttpRequest - ): FailureResponseOrT[HttpResponse] = { + ): FailureResponseOrT[HttpResponse] = // Do all the authing for the abort with "this" cromwell instance (cromwellClient), but the actual abort command // must go to the dedicated abort server (cromwellAbortClient). authorizeThenForwardToCromwell( @@ -243,13 +250,11 @@ trait CromIamApiService extends RequestSupport request = request, cromwellClient = cromwellClient ) - } private def logUserAction(user: User, action: String) = log.info("User " + user.userId + " requesting " + action) - private def logUserWorkflowAction(user: User, wfId: String, action: String) = { + private def logUserWorkflowAction(user: User, wfId: String, action: String) = log.info("User " + user.userId + " requesting " + action + " with " + wfId) - } } object CromIamApiService { diff --git a/CromIAM/src/main/scala/cromiam/webservice/EngineRouteSupport.scala b/CromIAM/src/main/scala/cromiam/webservice/EngineRouteSupport.scala index 5719bda3479..42f69ee6449 100644 --- a/CromIAM/src/main/scala/cromiam/webservice/EngineRouteSupport.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/EngineRouteSupport.scala @@ -13,7 +13,6 @@ import org.broadinstitute.dsde.workbench.util.health.StatusJsonSupport._ import scala.concurrent.ExecutionContextExecutor - trait EngineRouteSupport extends RequestSupport with SprayJsonSupport { val statusService: StatusService val cromwellClient: CromwellClient @@ -25,7 +24,7 @@ trait EngineRouteSupport extends RequestSupport with SprayJsonSupport { def versionRoute: Route = path("engine" / Segment / "version") { _ => get { extractStrictRequest { req => - complete { cromwellClient.forwardToCromwell(req).asHttpResponse } + complete(cromwellClient.forwardToCromwell(req).asHttpResponse) } } } @@ -39,10 +38,11 @@ trait EngineRouteSupport extends RequestSupport with SprayJsonSupport { } } - def statsRoute: Route = path("engine" / Segment / "stats") { _ => complete(CromIamStatsForbidden) } + def statsRoute: Route = path("engine" / Segment / "stats")(_ => complete(CromIamStatsForbidden)) } object EngineRouteSupport { - private[webservice] val CromIamStatsForbidden = HttpResponse(status = Forbidden, entity = "CromIAM does not allow access to the /stats endpoint") + private[webservice] val CromIamStatsForbidden = + HttpResponse(status = Forbidden, entity = "CromIAM does not allow access to the /stats endpoint") } diff --git a/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala b/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala index e9397605c6a..68ad1f21230 100644 --- a/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala @@ -43,7 +43,7 @@ trait QuerySupport extends RequestSupport { post { preprocessQuery { (user, collections, request) => processLabelsForPostQuery(user, collections) { entity => - complete { cromwellClient.forwardToCromwell(request.withEntity(entity)).asHttpResponse } + complete(cromwellClient.forwardToCromwell(request.withEntity(entity)).asHttpResponse) } } } @@ -54,7 +54,7 @@ trait QuerySupport extends RequestSupport { * retrieves the collections 
for the user, grabs the underlying HttpRequest and forwards it on to the specific * directive */ - private def preprocessQuery: Directive[(User, List[Collection], HttpRequest)] = { + private def preprocessQuery: Directive[(User, List[Collection], HttpRequest)] = extractUserAndStrictRequest tflatMap { case (user, cromIamRequest) => log.info("Received query " + cromIamRequest.method.value + " request for user " + user.userId) @@ -71,13 +71,12 @@ trait QuerySupport extends RequestSupport { throw new RuntimeException(s"Unable to look up collections for user ${user.userId}: ${e.getMessage}", e) } } - } /** * Will verify that none of the GET query parameters are specifying the collection label, and then tack * on query parameters for the user's collections on to the query URI */ - private def processLabelsForGetQuery(user: User, collections: List[Collection]): Directive1[Uri] = { + private def processLabelsForGetQuery(user: User, collections: List[Collection]): Directive1[Uri] = extractUri flatMap { uri => val query = uri.query() @@ -95,7 +94,6 @@ trait QuerySupport extends RequestSupport { provide(uri.withQuery(newQueryBuilder.result())) } - } /** * Will verify that none of the POSTed query parameters are specifying the collection label, and then tack @@ -115,7 +113,7 @@ trait QuerySupport extends RequestSupport { case jsObject if jsObject.fields.keySet.exists(key => key.equalsIgnoreCase(LabelOrKey)) => jsObject.fields.values.map(_.convertTo[String]) } - ).flatten + ).flatten // DO NOT REMOVE THE NEXT LINE WITHOUT READING THE SCALADOC ON ensureNoLabelOrs ensureNoLabelOrs(user, labelOrs) @@ -152,12 +150,11 @@ trait QuerySupport extends RequestSupport { * - https://github.com/persvr/rql#rql-rules * - https://github.com/jirutka/rsql-parser#grammar-and-semantic */ - protected[this] def ensureNoLabelOrs(user: User, labelOrs: Iterable[String]): Unit = { + protected[this] def ensureNoLabelOrs(user: User, labelOrs: Iterable[String]): Unit = labelOrs.toList match { case Nil => () case head :: tail => throw new LabelContainsOrException(user, NonEmptyList(head, tail)) } - } /** * Returns the user's collections as a set of labels @@ -169,12 +166,14 @@ trait QuerySupport extends RequestSupport { } object QuerySupport { - final case class InvalidQueryException(e: Throwable) extends - Exception(s"Invalid JSON in query POST body: ${e.getMessage}", e) - - final class LabelContainsOrException(val user: User, val labelOrs: NonEmptyList[String]) extends - Exception(s"User ${user.userId} submitted a labels query containing an OR which CromIAM is blocking: " + - labelOrs.toList.mkString("LABELS CONTAIN '", "' OR LABELS CONTAIN '", "'")) + final case class InvalidQueryException(e: Throwable) + extends Exception(s"Invalid JSON in query POST body: ${e.getMessage}", e) + + final class LabelContainsOrException(val user: User, val labelOrs: NonEmptyList[String]) + extends Exception( + s"User ${user.userId} submitted a labels query containing an OR which CromIAM is blocking: " + + labelOrs.toList.mkString("LABELS CONTAIN '", "' OR LABELS CONTAIN '", "'") + ) val LabelAndKey = "label" val LabelOrKey = "labelor" diff --git a/CromIAM/src/main/scala/cromiam/webservice/RequestSupport.scala b/CromIAM/src/main/scala/cromiam/webservice/RequestSupport.scala index c9b6a196368..22575738ae7 100644 --- a/CromIAM/src/main/scala/cromiam/webservice/RequestSupport.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/RequestSupport.scala @@ -15,32 +15,34 @@ import cromiam.sam.SamClient import scala.util.{Failure, Success} trait 
RequestSupport { - def extractStrictRequest: Directive1[HttpRequest] = { + def extractStrictRequest: Directive1[HttpRequest] = toStrictEntity(Timeout) tflatMap { _ => extractRequest flatMap { request => provide(request) } } - } /** * Obtain both the user id header from the proxy as well as the bearer token and pass that back * into the route logic as a User object */ - def extractUser: Directive1[User] = { - (headerValueByName("OIDC_CLAIM_user_id") & headerValuePF { case a: Authorization => a }) tmap { case (userId, auth) => - User(WorkbenchUserId(userId), auth) + def extractUser: Directive1[User] = + (headerValueByName("OIDC_CLAIM_user_id") & headerValuePF { case a: Authorization => a }) tmap { + case (userId, auth) => + User(WorkbenchUserId(userId), auth) } - } - def extractUserAndStrictRequest: Directive[(User, HttpRequest)] = { + def extractUserAndStrictRequest: Directive[(User, HttpRequest)] = for { user <- extractUser request <- extractStrictRequest } yield (user, request) - } - def forwardIfUserEnabled(user: User, req: HttpRequest, cromwellClient: CromwellClient, samClient: SamClient): Route = { + def forwardIfUserEnabled(user: User, + req: HttpRequest, + cromwellClient: CromwellClient, + samClient: SamClient + ): Route = { import cromwell.api.model.EnhancedFailureResponseOrHttpResponseT onComplete(samClient.isUserEnabledSam(user, req).value.unsafeToFuture()) { @@ -52,7 +54,8 @@ trait RequestSupport { } } case Failure(e) => - val message = s"Unable to look up enablement status for user ${user.userId}: ${e.getMessage}. Please try again later." + val message = + s"Unable to look up enablement status for user ${user.userId}: ${e.getMessage}. Please try again later." throw new RuntimeException(message, e) } } diff --git a/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala b/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala index 52a05d1cdc7..79c66a77313 100644 --- a/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala @@ -7,7 +7,7 @@ import akka.http.scaladsl.server._ import akka.stream.ActorMaterializer import akka.util.ByteString import cats.effect.IO -import cromiam.auth.Collection.{CollectionLabelName, LabelsKey, validateLabels} +import cromiam.auth.Collection.{validateLabels, CollectionLabelName, LabelsKey} import cromiam.auth.{Collection, User} import cromiam.cromwell.CromwellClient import cromiam.sam.SamClient @@ -57,16 +57,18 @@ trait SubmissionSupport extends RequestSupport { private def forwardSubmissionToCromwell(user: User, collection: Collection, - submissionRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { - log.info("Forwarding submission request for " + user.userId + " with collection " + collection.name + " to Cromwell") + submissionRequest: HttpRequest + ): FailureResponseOrT[HttpResponse] = { + log.info( + "Forwarding submission request for " + user.userId + " with collection " + collection.name + " to Cromwell" + ) - def registerWithSam(collection: Collection, httpRequest: HttpRequest): FailureResponseOrT[Unit] = { + def registerWithSam(collection: Collection, httpRequest: HttpRequest): FailureResponseOrT[Unit] = samClient.requestSubmission(user, collection, httpRequest) mapErrorWith { case e: SamDenialException => IO.raiseError(e) case SamRegisterCollectionException(statusCode) => IO.raiseError(SamRegisterCollectionException(statusCode)) case e => IO.raiseError(SamConnectionFailure("new workflow registration", e)) } - } FailureResponseOrT( 
(for { @@ -81,36 +83,34 @@ trait SubmissionSupport extends RequestSupport { } object SubmissionSupport { - def extractCollection(user: User): Directive1[Collection] = { + def extractCollection(user: User): Directive1[Collection] = formField(CollectionNameKey.?) map { maybeCollectionName => maybeCollectionName.map(Collection(_)).getOrElse(Collection.forUser(user)) } - } - def extractSubmission(user: User): Directive1[WorkflowSubmission] = { + def extractSubmission(user: User): Directive1[WorkflowSubmission] = ( extractCollection(user) & - formFields(( - WorkflowSourceKey.?, - WorkflowUrlKey.?, - WorkflowTypeKey.?, - WorkflowTypeVersionKey.?, - WorkflowInputsKey.?, - WorkflowOptionsKey.?, - WorkflowOnHoldKey.as[Boolean].?, - WorkflowDependenciesKey.as[ByteString].?)) & - extractLabels & - extractInputAux + formFields( + (WorkflowSourceKey.?, + WorkflowUrlKey.?, + WorkflowTypeKey.?, + WorkflowTypeVersionKey.?, + WorkflowInputsKey.?, + WorkflowOptionsKey.?, + WorkflowOnHoldKey.as[Boolean].?, + WorkflowDependenciesKey.as[ByteString].? + ) + ) & + extractLabels & + extractInputAux ).as(WorkflowSubmission) - } - def extractLabels: Directive1[Option[Map[String, JsValue]]] = { + def extractLabels: Directive1[Option[Map[String, JsValue]]] = formField(LabelsKey.?) flatMap validateLabels - } - def extractInputAux: Directive1[Map[String, String]] = { + def extractInputAux: Directive1[Map[String, String]] = formFieldMap.map(_.view.filterKeys(_.startsWith(WorkflowInputsAuxPrefix)).toMap) - } // FIXME: Much like CromwellClient see if there are ways of unifying this a bit w/ the mothership final case class WorkflowSubmission(collection: Collection, @@ -123,32 +123,61 @@ object SubmissionSupport { workflowOnHold: Option[Boolean], workflowDependencies: Option[ByteString], origLabels: Option[Map[String, JsValue]], - workflowInputsAux: Map[String, String]) { + workflowInputsAux: Map[String, String] + ) { // For auto-validation, if origLabels defined, can't have CaaS collection label set. Was checked previously, but ... 
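    // i.e. callers may not set the collection label (CollectionLabelName) themselves;
    // it is injected below from `collection` via collectionLabels.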
require(origLabels.forall(!_.keySet.contains(CollectionLabelName))) // Inject the collection name into the labels and convert to a String private val collectionLabels = Map(CollectionLabelName -> JsString(collection.name)) - private val labels: String = JsObject(origLabels.map(o => o ++ collectionLabels).getOrElse(collectionLabels)).toString + private val labels: String = JsObject( + origLabels.map(o => o ++ collectionLabels).getOrElse(collectionLabels) + ).toString val entity: MessageEntity = { - val sourcePart = workflowSource map { s => Multipart.FormData.BodyPart(WorkflowSourceKey, HttpEntity(MediaTypes.`application/json`, s)) } - val urlPart = workflowUrl map { u => Multipart.FormData.BodyPart(WorkflowUrlKey, HttpEntity(MediaTypes.`application/json`, u))} - val typePart = workflowType map { t => Multipart.FormData.BodyPart(WorkflowTypeKey, HttpEntity(MediaTypes.`application/json`, t)) } - val typeVersionPart = workflowTypeVersion map { v => Multipart.FormData.BodyPart(WorkflowTypeVersionKey, HttpEntity(MediaTypes.`application/json`, v)) } - val inputsPart = workflowInputs map { i => Multipart.FormData.BodyPart(WorkflowInputsKey, HttpEntity(MediaTypes.`application/json`, i)) } - val optionsPart = workflowOptions map { o => Multipart.FormData.BodyPart(WorkflowOptionsKey, HttpEntity(MediaTypes.`application/json`, o)) } - val importsPart = workflowDependencies map { d => Multipart.FormData.BodyPart(WorkflowDependenciesKey, HttpEntity(MediaTypes.`application/octet-stream`, d)) } - val onHoldPart = workflowOnHold map { h => Multipart.FormData.BodyPart(WorkflowOnHoldKey, HttpEntity(h.toString)) } + val sourcePart = workflowSource map { s => + Multipart.FormData.BodyPart(WorkflowSourceKey, HttpEntity(MediaTypes.`application/json`, s)) + } + val urlPart = workflowUrl map { u => + Multipart.FormData.BodyPart(WorkflowUrlKey, HttpEntity(MediaTypes.`application/json`, u)) + } + val typePart = workflowType map { t => + Multipart.FormData.BodyPart(WorkflowTypeKey, HttpEntity(MediaTypes.`application/json`, t)) + } + val typeVersionPart = workflowTypeVersion map { v => + Multipart.FormData.BodyPart(WorkflowTypeVersionKey, HttpEntity(MediaTypes.`application/json`, v)) + } + val inputsPart = workflowInputs map { i => + Multipart.FormData.BodyPart(WorkflowInputsKey, HttpEntity(MediaTypes.`application/json`, i)) + } + val optionsPart = workflowOptions map { o => + Multipart.FormData.BodyPart(WorkflowOptionsKey, HttpEntity(MediaTypes.`application/json`, o)) + } + val importsPart = workflowDependencies map { d => + Multipart.FormData.BodyPart(WorkflowDependenciesKey, HttpEntity(MediaTypes.`application/octet-stream`, d)) + } + val onHoldPart = workflowOnHold map { h => + Multipart.FormData.BodyPart(WorkflowOnHoldKey, HttpEntity(h.toString)) + } val labelsPart = Multipart.FormData.BodyPart(LabelsKey, HttpEntity(MediaTypes.`application/json`, labels)) - val parts = List(sourcePart, urlPart, typePart, typeVersionPart, inputsPart, optionsPart, importsPart, onHoldPart, Option(labelsPart)).flatten ++ auxParts + val parts = List(sourcePart, + urlPart, + typePart, + typeVersionPart, + inputsPart, + optionsPart, + importsPart, + onHoldPart, + Option(labelsPart) + ).flatten ++ auxParts Multipart.FormData(parts: _*).toEntity() } - private def auxParts = { - workflowInputsAux map { case (k, v) => Multipart.FormData.BodyPart(k, HttpEntity(MediaTypes.`application/json`, v)) } - } + private def auxParts = + workflowInputsAux map { case (k, v) => + Multipart.FormData.BodyPart(k, HttpEntity(MediaTypes.`application/json`, v)) 
+ } } // FIXME: Unify these w/ Cromwell.PartialWorkflowSources (via common?) diff --git a/CromIAM/src/main/scala/cromiam/webservice/SwaggerUiHttpService.scala b/CromIAM/src/main/scala/cromiam/webservice/SwaggerUiHttpService.scala index 9fed12ca163..874949a5244 100644 --- a/CromIAM/src/main/scala/cromiam/webservice/SwaggerUiHttpService.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/SwaggerUiHttpService.scala @@ -27,7 +27,7 @@ trait SwaggerUiHttpService extends Directives { s"META-INF/resources/webjars/swagger-ui/$swaggerUiVersion" } - private val serveIndex: server.Route = { + private val serveIndex: server.Route = mapResponseEntity { entityFromJar => entityFromJar.transformDataBytes(Flow.fromFunction[ByteString, ByteString] { original: ByteString => ByteString(rewriteSwaggerIndex(original.utf8String)) @@ -35,14 +35,13 @@ trait SwaggerUiHttpService extends Directives { } { getFromResource(s"$resourceDirectory/index.html") } - } /** * Serves up the swagger UI only. Redirects requests to the root of the UI path to the index.html. * * @return Route serving the swagger UI. */ - final def swaggerUiRoute: Route = { + final def swaggerUiRoute: Route = pathEndOrSingleSlash { get { serveIndex @@ -68,16 +67,15 @@ trait SwaggerUiHttpService extends Directives { } } - } /** Rewrite the swagger index.html. Default passes through the origin data. */ protected def rewriteSwaggerIndex(original: String): String = { val swaggerOptions = s""" - | validatorUrl: null, - | apisSorter: "alpha", - | oauth2RedirectUrl: window.location.origin + "/swagger/oauth2-redirect.html", - | operationsSorter: "alpha" + | validatorUrl: null, + | apisSorter: "alpha", + | oauth2RedirectUrl: window.location.origin + "/swagger/oauth2-redirect.html", + | operationsSorter: "alpha" """.stripMargin val initOAuthOriginal = "window.ui = ui" @@ -94,7 +92,6 @@ trait SwaggerUiHttpService extends Directives { |$initOAuthOriginal |""".stripMargin - original .replace(initOAuthOriginal, initOAuthReplacement) .replace("""url: "https://petstore.swagger.io/v2/swagger.json"""", "url: 'cromiam.yaml'") @@ -109,6 +106,7 @@ trait SwaggerUiHttpService extends Directives { * swagger UI, but defaults to "yaml". This is an alternative to spray-swagger's SwaggerHttpService. */ trait SwaggerResourceHttpService { + /** * @return The directory for the resource under the classpath, and in the url */ @@ -134,7 +132,8 @@ trait SwaggerResourceHttpService { */ final def swaggerResourceRoute: Route = { // Serve CromIAM API docs from either `/swagger/cromiam.yaml` or just `cromiam.yaml`. - val swaggerDocsDirective = path(separateOnSlashes(swaggerDocsPath)) | path(s"$swaggerServiceName.$swaggerResourceType") + val swaggerDocsDirective = + path(separateOnSlashes(swaggerDocsPath)) | path(s"$swaggerServiceName.$swaggerResourceType") val route = get { swaggerDocsDirective { // Return /uiPath/serviceName.resourceType from the classpath resources. 
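A minimal standalone sketch of the dual-path matching used by swaggerResourceRoute above
(the object name and the resource location are assumed for illustration): the combined
directive serves the same classpath YAML from both `/swagger/cromiam.yaml` and
`/cromiam.yaml`.

    import akka.http.scaladsl.server.Directives._
    import akka.http.scaladsl.server.Route

    object SwaggerDocsSketch {
      // Either path alternative matches; both hand the request to the same resource.
      val docsRoute: Route = get {
        (path("swagger" / "cromiam.yaml") | path("cromiam.yaml")) {
          getFromResource("swagger/cromiam.yaml")
        }
      }
    }

The swagger UI above is pointed at the relative `cromiam.yaml` (see the
`url: 'cromiam.yaml'` replacement in rewriteSwaggerIndex), so both spellings resolve to
the same document.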
diff --git a/CromIAM/src/test/scala/cromiam/auth/CollectionSpec.scala b/CromIAM/src/test/scala/cromiam/auth/CollectionSpec.scala index 5f3f4fd3791..0ee89b9bbb6 100644 --- a/CromIAM/src/test/scala/cromiam/auth/CollectionSpec.scala +++ b/CromIAM/src/test/scala/cromiam/auth/CollectionSpec.scala @@ -42,7 +42,6 @@ class CollectionSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } } - behavior of "validateLabels" it should "not throw exception when labels are valid" in { val labels = """{"key-1":"foo","key-2":"bar"}""" diff --git a/CromIAM/src/test/scala/cromiam/cromwell/CromwellClientSpec.scala b/CromIAM/src/test/scala/cromiam/cromwell/CromwellClientSpec.scala index ce6f16935d6..303aff01dc4 100644 --- a/CromIAM/src/test/scala/cromiam/cromwell/CromwellClientSpec.scala +++ b/CromIAM/src/test/scala/cromiam/cromwell/CromwellClientSpec.scala @@ -39,25 +39,35 @@ class CromwellClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfter } "CromwellClient" should "eventually return a subworkflow's root workflow id" in { - cromwellClient.getRootWorkflow(SubworkflowId.id.toString, FictitiousUser, fakeHttpRequest).map(w => assert(w == RootWorkflowId.id.toString)) - .asIo.unsafeToFuture() + cromwellClient + .getRootWorkflow(SubworkflowId.id.toString, FictitiousUser, fakeHttpRequest) + .map(w => assert(w == RootWorkflowId.id.toString)) + .asIo + .unsafeToFuture() } it should "eventually return a top level workflow's ID when requesting root workflow id" in { - cromwellClient.getRootWorkflow(RootWorkflowId.id.toString, FictitiousUser, fakeHttpRequest).map(w => assert(w == RootWorkflowId.id.toString)) - .asIo.unsafeToFuture() + cromwellClient + .getRootWorkflow(RootWorkflowId.id.toString, FictitiousUser, fakeHttpRequest) + .map(w => assert(w == RootWorkflowId.id.toString)) + .asIo + .unsafeToFuture() } it should "properly fetch the collection for a workflow with a collection name" in { - cromwellClient.collectionForWorkflow(RootWorkflowId.id.toString, FictitiousUser, fakeHttpRequest).map(c => - assert(c.name == CollectionName) - ).asIo.unsafeToFuture() + cromwellClient + .collectionForWorkflow(RootWorkflowId.id.toString, FictitiousUser, fakeHttpRequest) + .map(c => assert(c.name == CollectionName)) + .asIo + .unsafeToFuture() } it should "throw an exception if the workflow doesn't have a collection" in { recoverToExceptionIf[IllegalArgumentException] { - cromwellClient.collectionForWorkflow(WorkflowIdWithoutCollection.id.toString, FictitiousUser, fakeHttpRequest) - .asIo.unsafeToFuture() + cromwellClient + .collectionForWorkflow(WorkflowIdWithoutCollection.id.toString, FictitiousUser, fakeHttpRequest) + .asIo + .unsafeToFuture() } map { exception => assert(exception.getMessage == s"Workflow $WorkflowIdWithoutCollection has no associated collection") } @@ -65,24 +75,25 @@ class CromwellClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfter } object CromwellClientSpec { - final class MockCromwellClient()(implicit system: ActorSystem, - ece: ExecutionContextExecutor, - materializer: ActorMaterializer) - extends CromwellClient("http", "bar", 1, NoLogging, ActorRef.noSender) { + final class MockCromwellClient()(implicit + system: ActorSystem, + ece: ExecutionContextExecutor, + materializer: ActorMaterializer + ) extends CromwellClient("http", "bar", 1, NoLogging, ActorRef.noSender) { override val cromwellApiClient: CromwellApiClient = new MockCromwellApiClient() override def sendTimingApi(statsDPath: InstrumentationPath, timing: FiniteDuration, prefixToStatsd: NonEmptyList[String] - 
): Unit = () + ): Unit = () } final class MockCromwellApiClient()(implicit actorSystem: ActorSystem, materializer: ActorMaterializer) - extends CromwellApiClient(new URL("http://foo.com"), "bar") { + extends CromwellApiClient(new URL("http://foo.com"), "bar") { - - override def labels(workflowId: WorkflowId, headers: List[HttpHeader] = defaultHeaders) - (implicit ec: ExecutionContext): FailureResponseOrT[WorkflowLabels] = { + override def labels(workflowId: WorkflowId, headers: List[HttpHeader] = defaultHeaders)(implicit + ec: ExecutionContext + ): FailureResponseOrT[WorkflowLabels] = if (workflowId == RootWorkflowId) { FailureResponseOrT.pure(FictitiousWorkflowLabelsWithCollection) } else if (workflowId == WorkflowIdWithoutCollection) { @@ -92,18 +103,17 @@ object CromwellClientSpec { IO.raiseError(new RuntimeException("Unexpected workflow ID sent to MockCromwellApiClient")) } } - } override def metadata(workflowId: WorkflowId, - args: Option[Map[String, List[String]]] = None, - headers: List[HttpHeader] = defaultHeaders - )(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowMetadata] = { + args: Option[Map[String, List[String]]] = None, + headers: List[HttpHeader] = defaultHeaders + )(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowMetadata] = if (workflowId == RootWorkflowId) FailureResponseOrT.pure(RootWorkflowMetadata) else if (workflowId == SubworkflowId) FailureResponseOrT.pure(SubWorkflowMetadata) - else FailureResponseOrT[IO, HttpResponse, WorkflowMetadata] { - IO.raiseError(new RuntimeException("Unexpected workflow ID sent to MockCromwellApiClient")) - } - } + else + FailureResponseOrT[IO, HttpResponse, WorkflowMetadata] { + IO.raiseError(new RuntimeException("Unexpected workflow ID sent to MockCromwellApiClient")) + } } private val SubworkflowId = WorkflowId.fromString("58114f5c-f439-4488-8d73-092273cf92d9") @@ -126,7 +136,8 @@ object CromwellClientSpec { }""") val CollectionName = "foo" - val FictitiousWorkflowLabelsWithCollection = WorkflowLabels(RootWorkflowId.id.toString, JsObject(Map("caas-collection-name" -> JsString(CollectionName)))) - val FictitiousWorkflowLabelsWithoutCollection = WorkflowLabels(RootWorkflowId.id.toString, JsObject(Map("something" -> JsString("foo")))) + val FictitiousWorkflowLabelsWithCollection = + WorkflowLabels(RootWorkflowId.id.toString, JsObject(Map("caas-collection-name" -> JsString(CollectionName)))) + val FictitiousWorkflowLabelsWithoutCollection = + WorkflowLabels(RootWorkflowId.id.toString, JsObject(Map("something" -> JsString("foo")))) } - diff --git a/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala b/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala index 40f0cda2e86..95869718fb5 100644 --- a/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala +++ b/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala @@ -28,7 +28,8 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { HttpResponse(StatusCodes.InternalServerError, entity = HttpEntity("expected error")) private val authorization = Authorization(OAuth2BearerToken("my-token")) - private val authorizedUserWithCollection = User(WorkbenchUserId(MockSamClient.AuthorizedUserCollectionStr), authorization) + private val authorizedUserWithCollection = + User(WorkbenchUserId(MockSamClient.AuthorizedUserCollectionStr), authorization) private val unauthorizedUserWithNoCollection = User(WorkbenchUserId(MockSamClient.UnauthorizedUserCollectionStr), authorization) private val notWhitelistedUser = 
User(WorkbenchUserId(MockSamClient.NotWhitelistedUser), authorization) @@ -47,25 +48,25 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { super.afterAll() } - behavior of "SamClient" it should "return true if user is whitelisted" in { val samClient = new MockSamClient() - samClient.isSubmitWhitelisted(authorizedUserWithCollection, emptyHttpRequest).map(v => assert(v)) - .asIo.unsafeToFuture() + samClient + .isSubmitWhitelisted(authorizedUserWithCollection, emptyHttpRequest) + .map(v => assert(v)) + .asIo + .unsafeToFuture() } it should "return false if user is not whitelisted" in { val samClient = new MockSamClient() - samClient.isSubmitWhitelisted(notWhitelistedUser, emptyHttpRequest).map(v => assert(!v)) - .asIo.unsafeToFuture() + samClient.isSubmitWhitelisted(notWhitelistedUser, emptyHttpRequest).map(v => assert(!v)).asIo.unsafeToFuture() } it should "return sam errors while checking is whitelisted" in { val samClient = new MockSamClient() { - override def isSubmitWhitelistedSam(user: User, cromiamRequest: HttpRequest): FailureResponseOrT[Boolean] = { + override def isSubmitWhitelistedSam(user: User, cromiamRequest: HttpRequest): FailureResponseOrT[Boolean] = MockSamClient.returnResponse(expectedErrorResponse) - } } samClient.isSubmitWhitelisted(notWhitelistedUser, emptyHttpRequest).value.unsafeToFuture() map { _ should be(Left(expectedErrorResponse)) @@ -74,32 +75,33 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { it should "eventually return the collection(s) of user" in { val samClient = new MockSamClient() - samClient.collectionsForUser(authorizedUserWithCollection, emptyHttpRequest).map(collectionList => - assert(collectionList == MockSamClient.UserCollectionList) - ).asIo.unsafeToFuture() + samClient + .collectionsForUser(authorizedUserWithCollection, emptyHttpRequest) + .map(collectionList => assert(collectionList == MockSamClient.UserCollectionList)) + .asIo + .unsafeToFuture() } it should "fail if user doesn't have any collections" in { val samClient = new MockSamClient() recoverToExceptionIf[Exception] { - samClient.collectionsForUser(unauthorizedUserWithNoCollection, emptyHttpRequest) - .asIo.unsafeToFuture() - } map(exception => - assert(exception.getMessage == s"Unable to look up collections for user ${unauthorizedUserWithNoCollection.userId.value}!") + samClient.collectionsForUser(unauthorizedUserWithNoCollection, emptyHttpRequest).asIo.unsafeToFuture() + } map (exception => + assert( + exception.getMessage == s"Unable to look up collections for user ${unauthorizedUserWithNoCollection.userId.value}!" 
+ ) ) } it should "return true if user is authorized to perform action on collection" in { val samClient = new MockSamClient() - samClient.requestAuth(authorizedCollectionRequest, emptyHttpRequest).map(_ => succeed) - .asIo.unsafeToFuture() + samClient.requestAuth(authorizedCollectionRequest, emptyHttpRequest).map(_ => succeed).asIo.unsafeToFuture() } it should "throw SamDenialException if user is not authorized to perform action on collection" in { val samClient = new MockSamClient() recoverToExceptionIf[SamDenialException] { - samClient.requestAuth(unauthorizedCollectionRequest, emptyHttpRequest) - .asIo.unsafeToFuture() + samClient.requestAuth(unauthorizedCollectionRequest, emptyHttpRequest).asIo.unsafeToFuture() } map { exception => assert(exception.getMessage == "Access Denied") } @@ -107,15 +109,21 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { it should "register collection to Sam if user has authorization to create/add to collection" in { val samClient = new MockSamClient() - samClient.requestSubmission(authorizedUserWithCollection, authorizedCollection, emptyHttpRequest).map(_ => succeed) - .asIo.unsafeToFuture() + samClient + .requestSubmission(authorizedUserWithCollection, authorizedCollection, emptyHttpRequest) + .map(_ => succeed) + .asIo + .unsafeToFuture() } it should "throw SamRegisterCollectionException if user doesn't have authorization to create/add to collection" in { val samClient = new MockSamClient() recoverToExceptionIf[SamRegisterCollectionException] { - samClient.requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest).map(_ => succeed) - .asIo.unsafeToFuture() + samClient + .requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) + .map(_ => succeed) + .asIo + .unsafeToFuture() } map { exception => assert(exception.getMessage == "Can't register collection with Sam. 
Status code: 400 Bad Request") } @@ -125,15 +133,16 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { val samClient = new BaseMockSamClient() { override protected def registerCreation(user: User, collection: Collection, - cromiamRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { + cromiamRequest: HttpRequest + ): FailureResponseOrT[HttpResponse] = { val conflictResponse = HttpResponse(StatusCodes.Conflict, entity = HttpEntity("expected conflict")) returnResponse(conflictResponse) } override def requestAuth(authorizationRequest: CollectionAuthorizationRequest, - cromiamRequest: HttpRequest): FailureResponseOrT[Unit] = { + cromiamRequest: HttpRequest + ): FailureResponseOrT[Unit] = Monad[FailureResponseOrT].unit - } } samClient .requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) @@ -146,19 +155,22 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { val samClient = new BaseMockSamClient() { override protected def registerCreation(user: User, collection: Collection, - cromiamRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { + cromiamRequest: HttpRequest + ): FailureResponseOrT[HttpResponse] = { val conflictResponse = HttpResponse(StatusCodes.Conflict, entity = HttpEntity("expected conflict")) returnResponse(conflictResponse) } override def requestAuth(authorizationRequest: CollectionAuthorizationRequest, - cromiamRequest: HttpRequest): FailureResponseOrT[Unit] = { + cromiamRequest: HttpRequest + ): FailureResponseOrT[Unit] = returnResponse(expectedErrorResponse) - } } recoverToExceptionIf[UnsuccessfulRequestException] { - samClient.requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) - .asIo.unsafeToFuture() + samClient + .requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) + .asIo + .unsafeToFuture() } map { exception => assert(exception.getMessage == "expected error") } @@ -168,14 +180,17 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { val samClient = new BaseMockSamClient() { override protected def registerCreation(user: User, collection: Collection, - cromiamRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { + cromiamRequest: HttpRequest + ): FailureResponseOrT[HttpResponse] = { val unexpectedOkResponse = HttpResponse(StatusCodes.OK, entity = HttpEntity("elided ok message")) returnResponse(unexpectedOkResponse) } } recoverToExceptionIf[SamRegisterCollectionException] { - samClient.requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) - .asIo.unsafeToFuture() + samClient + .requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) + .asIo + .unsafeToFuture() } map { exception => exception.getMessage should be("Can't register collection with Sam. 
Status code: 200 OK") } @@ -185,14 +200,17 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { val samClient = new BaseMockSamClient() { override protected def registerCreation(user: User, collection: Collection, - cromiamRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { + cromiamRequest: HttpRequest + ): FailureResponseOrT[HttpResponse] = { val unexpectedFailureResponse = HttpResponse(StatusCodes.ImATeapot, entity = HttpEntity("elided error message")) returnResponse(unexpectedFailureResponse) } } recoverToExceptionIf[SamRegisterCollectionException] { - samClient.requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) - .asIo.unsafeToFuture() + samClient + .requestSubmission(unauthorizedUserWithNoCollection, unauthorizedCollection, emptyHttpRequest) + .asIo + .unsafeToFuture() } map { exception => exception.getMessage should be("Can't register collection with Sam. Status code: 418 I'm a teapot") } diff --git a/CromIAM/src/test/scala/cromiam/server/status/MockStatusService.scala b/CromIAM/src/test/scala/cromiam/server/status/MockStatusService.scala index 9010da8bda0..c7866c68089 100644 --- a/CromIAM/src/test/scala/cromiam/server/status/MockStatusService.scala +++ b/CromIAM/src/test/scala/cromiam/server/status/MockStatusService.scala @@ -6,8 +6,10 @@ import org.broadinstitute.dsde.workbench.util.health.Subsystems.{Cromwell, Sam, import scala.concurrent.{ExecutionContext, Future} -class MockStatusService(checkStatus: () => Map[Subsystem, Future[SubsystemStatus]])(implicit system: ActorSystem, executionContext: ExecutionContext) extends - StatusService(checkStatus)(system, executionContext) { +class MockStatusService(checkStatus: () => Map[Subsystem, Future[SubsystemStatus]])(implicit + system: ActorSystem, + executionContext: ExecutionContext +) extends StatusService(checkStatus)(system, executionContext) { override def status(): Future[StatusCheckResponse] = { val subsystemStatus: SubsystemStatus = SubsystemStatus(ok = true, None) @@ -16,4 +18,3 @@ class MockStatusService(checkStatus: () => Map[Subsystem, Future[SubsystemStatus Future.successful(StatusCheckResponse(ok = true, subsystems)) } } - diff --git a/CromIAM/src/test/scala/cromiam/webservice/CromIamApiServiceSpec.scala b/CromIAM/src/test/scala/cromiam/webservice/CromIamApiServiceSpec.scala index 89945c5bfcb..e93acd51a2a 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/CromIamApiServiceSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/CromIamApiServiceSpec.scala @@ -13,14 +13,23 @@ import cromiam.server.status.StatusService import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with CromIamApiService with ScalatestRouteTest { +class CromIamApiServiceSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with CromIamApiService + with ScalatestRouteTest { override def testConfigSource = "akka.loglevel = DEBUG" val log = NoLogging - override def rootConfig: Config = throw new UnsupportedOperationException("This spec shouldn't need to access the real config") + override def rootConfig: Config = throw new UnsupportedOperationException( + "This spec shouldn't need to access the real config" + ) - override def configuration = throw new UnsupportedOperationException("This spec shouldn't need to access the real interface/port") + override def configuration = throw new UnsupportedOperationException( + "This spec 
shouldn't need to access the real interface/port" + ) override lazy val cromwellClient = new MockCromwellClient() override lazy val samClient = new MockSamClient() @@ -29,13 +38,15 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma val version = "v1" val authorization = Authorization(OAuth2BearerToken("my-token")) - val badAuthHeaders: List[HttpHeader] = List(authorization, RawHeader("OIDC_CLAIM_user_id", cromwellClient.unauthorizedUserCollectionStr)) - val goodAuthHeaders: List[HttpHeader] = List(authorization, RawHeader("OIDC_CLAIM_user_id", cromwellClient.authorizedUserCollectionStr)) - + val badAuthHeaders: List[HttpHeader] = + List(authorization, RawHeader("OIDC_CLAIM_user_id", cromwellClient.unauthorizedUserCollectionStr)) + val goodAuthHeaders: List[HttpHeader] = + List(authorization, RawHeader("OIDC_CLAIM_user_id", cromwellClient.authorizedUserCollectionStr)) behavior of "Status endpoint" it should "return 200 for authorized user who has collection associated with root workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/status").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/status") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -43,7 +54,8 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 200 for authorized user who has collection associated with subworkflow" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/status").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/status") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -51,15 +63,19 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who doesn't have collection associated with workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/status").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/status") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have view permissions" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/status").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/status") + .withHeaders(badAuthHeaders) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -72,10 +88,10 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - 
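  // The Outputs, Metadata, Logs, and GET Labels groups below repeat the same
  // authorization cases as the Status group: all of these endpoints forward
  // through the same authorizeReadThenForwardToCromwell path.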
behavior of "Outputs endpoint" it should "return 200 for authorized user who has collection associated with root workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/outputs").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/outputs") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -83,7 +99,8 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 200 for authorized user who has collection associated with subworkflow" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/outputs").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/outputs") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -91,15 +108,19 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who doesn't have collection associated with workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/outputs").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/outputs") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have view permissions" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/outputs").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/outputs") + .withHeaders(badAuthHeaders) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -112,10 +133,10 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - behavior of "Metadata endpoint" it should "return 200 for authorized user who has collection associated with root workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/metadata").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/metadata") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -123,7 +144,8 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 200 for authorized user who has collection associated with subworkflow" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/metadata").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + 
Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/metadata") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -131,15 +153,19 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who doesn't have collection associated with workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/metadata").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/metadata") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have view permissions" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/metadata").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/metadata") + .withHeaders(badAuthHeaders) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -152,10 +178,10 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - behavior of "Logs endpoint" it should "return 200 for authorized user who has collection associated with root workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/logs").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/logs") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -163,7 +189,8 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 200 for authorized user who has collection associated with subworkflow" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/logs").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/logs") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -171,15 +198,19 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who doesn't have collection associated with workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/logs").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/logs") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow 
${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have view permissions" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/logs").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/logs") + .withHeaders(badAuthHeaders) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -192,10 +223,10 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - behavior of "GET Labels endpoint" it should "return 200 for authorized user who has collection associated with root workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -203,7 +234,8 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 200 for authorized user who has collection associated with subworkflow" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/labels").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/labels") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -211,15 +243,19 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who doesn't have collection associated with workflow" in { - Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/labels").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/labels") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have view permissions" in { - Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/labels").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/${cromwellClient.subworkflowId}/labels") + .withHeaders(badAuthHeaders) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -232,12 +268,13 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - behavior of 
"PATCH Labels endpoint" it should "successfully forward request to Cromwell if nothing is untoward" in { val labels = """{"key-1":"foo","key-2":"bar"}""" val labelEntity = HttpEntity(ContentTypes.`application/json`, labels) - Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels").withHeaders(goodAuthHeaders).withEntity(labelEntity) ~> allRoutes ~> check { + Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels") + .withHeaders(goodAuthHeaders) + .withEntity(labelEntity) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -248,7 +285,9 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma val labels = """{"key-1":"foo","caas-collection-name":"bar"}""" val labelEntity = HttpEntity(ContentTypes.`application/json`, labels) - Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels").withHeaders(goodAuthHeaders).withEntity(labelEntity) ~> allRoutes ~> check { + Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels") + .withHeaders(goodAuthHeaders) + .withEntity(labelEntity) ~> allRoutes ~> check { status shouldBe InternalServerError responseAs[String] shouldBe "Submitted labels contain the key caas-collection-name, which is not allowed\n" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -259,7 +298,9 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma val labels = """"key-1":"foo"""" val labelEntity = HttpEntity(ContentTypes.`application/json`, labels) - Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels").withHeaders(goodAuthHeaders).withEntity(labelEntity) ~> allRoutes ~> check { + Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels") + .withHeaders(goodAuthHeaders) + .withEntity(labelEntity) ~> allRoutes ~> check { status shouldBe InternalServerError responseAs[String] shouldBe "Labels must be a valid JSON object, received: \"key-1\":\"foo\"\n" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -269,9 +310,13 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma it should "return 500 for authorized user who doesn't have collection associated with workflow" in { val labels = """{"key-1":"foo","key-2":"bar"}""" val labelEntity = HttpEntity(ContentTypes.`application/json`, labels) - Patch(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/labels").withHeaders(goodAuthHeaders).withEntity(labelEntity) ~> allRoutes ~> check { + Patch(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/labels") + .withHeaders(goodAuthHeaders) + .withEntity(labelEntity) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } @@ -279,7 +324,9 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma it should "return SamDenialException for user who doesn't have update permissions" in { val labels = 
"""{"key-1":"foo","key-2":"bar"}""" val labelEntity = HttpEntity(ContentTypes.`application/json`, labels) - Patch(s"/api/workflows/$version/${cromwellClient.subworkflowId}/labels").withHeaders(badAuthHeaders).withEntity(labelEntity) ~> allRoutes ~> check { + Patch(s"/api/workflows/$version/${cromwellClient.subworkflowId}/labels") + .withHeaders(badAuthHeaders) + .withEntity(labelEntity) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -289,12 +336,12 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma it should "reject request if it doesn't contain OIDC_CLAIM_user_id in header" in { val labels = """{"key-1":"foo","key-2":"bar"}""" val labelEntity = HttpEntity(ContentTypes.`application/json`, labels) - Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels").withEntity(labelEntity) ~> allRoutes ~> check { + Patch(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/labels") + .withEntity(labelEntity) ~> allRoutes ~> check { rejection shouldEqual MissingHeaderRejection("OIDC_CLAIM_user_id") } } - behavior of "Backends endpoint" it should "successfully forward request to Cromwell if auth header is provided" in { Get(s"/api/workflows/$version/backends").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { @@ -346,7 +393,8 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma behavior of "ReleaseHold endpoint" it should "return 200 for authorized user who has collection associated with root workflow" in { - Post(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/releaseHold").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Post(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/releaseHold") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -354,15 +402,19 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who doesn't have collection associated with workflow" in { - Post(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/releaseHold").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Post(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/releaseHold") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have update permissions" in { - Post(s"/api/workflows/$version/${cromwellClient.subworkflowId}/releaseHold").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + Post(s"/api/workflows/$version/${cromwellClient.subworkflowId}/releaseHold") + .withHeaders(badAuthHeaders) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -375,10 +427,10 @@ class 
CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - behavior of "Abort endpoint" it should "return 200 for authorized user who has collection associated with root workflow" in { - Post(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/abort").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Post(s"/api/workflows/$version/${cromwellClient.rootWorkflowIdWithCollection}/abort") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -386,15 +438,19 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who doesn't have collection associated with workflow" in { - Post(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/abort").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Post(s"/api/workflows/$version/${cromwellClient.workflowIdWithoutCollection}/abort") + .withHeaders(goodAuthHeaders) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have abort permissions" in { - Post(s"/api/workflows/$version/${cromwellClient.subworkflowId}/abort").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + Post(s"/api/workflows/$version/${cromwellClient.subworkflowId}/abort") + .withHeaders(badAuthHeaders) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -407,11 +463,13 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - behavior of "CallCacheDiff endpoint" it should "return 200 for authorized user who has collection associated with both workflows" in { - val callCacheDiffParams = s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.anotherRootWorkflowIdWithCollection}&callB=helloCall" - Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + val callCacheDiffParams = + s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.anotherRootWorkflowIdWithCollection}&callB=helloCall" + Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders( + goodAuthHeaders + ) ~> allRoutes ~> check { status shouldBe OK responseAs[String] shouldBe "Response from Cromwell" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -420,7 +478,9 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma it should "return BadRequest if request is malformed" in { val callCacheDiffParams = s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall" - Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders( + goodAuthHeaders + ) ~> allRoutes ~> check { 
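      // callCacheDiffParams above omits workflowB and callB; the endpoint requires both
      // workflowA and workflowB, hence the BadRequest asserted below.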
status shouldBe BadRequest responseAs[String] shouldBe "Must supply both workflowA and workflowB to the /callcaching/diff endpoint" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -428,17 +488,25 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "return 500 for authorized user who has doesn't have collection associated with any one workflow" in { - val callCacheDiffParams = s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.workflowIdWithoutCollection}&callB=helloCall" - Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders(goodAuthHeaders) ~> allRoutes ~> check { + val callCacheDiffParams = + s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.workflowIdWithoutCollection}&callB=helloCall" + Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders( + goodAuthHeaders + ) ~> allRoutes ~> check { status shouldBe InternalServerError - responseAs[String] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" + responseAs[ + String + ] shouldBe s"CromIAM unexpected error: java.lang.IllegalArgumentException: Workflow ${cromwellClient.workflowIdWithoutCollection} has no associated collection" contentType should be(ContentTypes.`text/plain(UTF-8)`) } } it should "return SamDenialException for user who doesn't have read permissions" in { - val callCacheDiffParams = s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.anotherRootWorkflowIdWithCollection}&callB=helloCall" - Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders(badAuthHeaders) ~> allRoutes ~> check { + val callCacheDiffParams = + s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.anotherRootWorkflowIdWithCollection}&callB=helloCall" + Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams").withHeaders( + badAuthHeaders + ) ~> allRoutes ~> check { status shouldBe Forbidden responseAs[String] shouldBe "Access Denied" contentType should be(ContentTypes.`text/plain(UTF-8)`) @@ -446,7 +514,8 @@ class CromIamApiServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "reject request if it doesn't contain OIDC_CLAIM_user_id in header" in { - val callCacheDiffParams = s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.anotherRootWorkflowIdWithCollection}&callB=helloCall" + val callCacheDiffParams = + s"workflowA=${cromwellClient.rootWorkflowIdWithCollection}&callA=helloCall&workflowB=${cromwellClient.anotherRootWorkflowIdWithCollection}&callB=helloCall" Get(s"/api/workflows/$version/callcaching/diff?$callCacheDiffParams") ~> allRoutes ~> check { rejection shouldEqual MissingHeaderRejection("OIDC_CLAIM_user_id") } diff --git a/CromIAM/src/test/scala/cromiam/webservice/EngineRouteSupportSpec.scala b/CromIAM/src/test/scala/cromiam/webservice/EngineRouteSupportSpec.scala index f6f1e3b75de..9d431f6af05 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/EngineRouteSupportSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/EngineRouteSupportSpec.scala @@ -9,8 +9,12 @@ import cromiam.server.status.{MockStatusService, StatusService} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - 
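// The hunk below applies the class-header rewrap used throughout this patch: a spec
// declaration that mixed several traits on one line gains one indented line per
// extends/with clause. Shape of the change, with a hypothetical spec name:
//
//   class FooSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest { ... }
//
// becomes
//
//   class FooSpec
//       extends AnyFlatSpec
//       with Matchers
//       with ScalatestRouteTest { ... }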
-class EngineRouteSupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalatestRouteTest with EngineRouteSupport { +class EngineRouteSupportSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalatestRouteTest + with EngineRouteSupport { override val cromwellClient = new MockCromwellClient() val samClient = new MockSamClient() override val statusService: StatusService = new MockStatusService(() => Map.empty) diff --git a/CromIAM/src/test/scala/cromiam/webservice/MockClients.scala b/CromIAM/src/test/scala/cromiam/webservice/MockClients.scala index 59e75347159..ff976e24b4b 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/MockClients.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/MockClients.scala @@ -16,10 +16,8 @@ import cromwell.api.model._ import scala.concurrent.ExecutionContextExecutor -class MockCromwellClient()(implicit system: ActorSystem, - ece: ExecutionContextExecutor, - materializer: ActorMaterializer) - extends CromwellClient("http", "bar", 1, NoLogging, ActorRef.noSender)(system, ece, materializer) { +class MockCromwellClient()(implicit system: ActorSystem, ece: ExecutionContextExecutor, materializer: ActorMaterializer) + extends CromwellClient("http", "bar", 1, NoLogging, ActorRef.noSender)(system, ece, materializer) { val version = "v1" val unauthorizedUserCollectionStr: String = "987654321" @@ -56,7 +54,7 @@ class MockCromwellClient()(implicit system: ActorSystem, val womtoolRoutePath = s"/api/womtool/$version/describe" httpRequest.uri.path.toString match { - //version endpoint doesn't require authentication + // version endpoint doesn't require authentication case `versionRoutePath` => FailureResponseOrT.pure(HttpResponse(status = OK, entity = "Response from Cromwell")) // womtool endpoint requires authn which it gets for free from the proxy, does not care about authz @@ -68,18 +66,19 @@ class MockCromwellClient()(implicit system: ActorSystem, override def getRootWorkflow(workflowId: String, user: User, - cromIamRequest: HttpRequest): FailureResponseOrT[String] = { + cromIamRequest: HttpRequest + ): FailureResponseOrT[String] = workflowId match { case `subworkflowId` | `rootWorkflowIdWithCollection` => FailureResponseOrT.pure(rootWorkflowIdWithCollection) case `anotherRootWorkflowIdWithCollection` => FailureResponseOrT.pure(anotherRootWorkflowIdWithCollection) case _ => FailureResponseOrT.pure(workflowIdWithoutCollection) } - } override def collectionForWorkflow(workflowId: String, user: User, - cromIamRequest: HttpRequest): FailureResponseOrT[Collection] = { + cromIamRequest: HttpRequest + ): FailureResponseOrT[Collection] = workflowId match { case `rootWorkflowIdWithCollection` | `anotherRootWorkflowIdWithCollection` => FailureResponseOrT.pure(userCollection) @@ -87,33 +86,32 @@ class MockCromwellClient()(implicit system: ActorSystem, val exception = new IllegalArgumentException(s"Workflow $workflowId has no associated collection") FailureResponseOrT.left(IO.raiseError[HttpResponse](exception)) } - } } /** * Overrides some values, but doesn't override methods. 
*/ -class BaseMockSamClient(checkSubmitWhitelist: Boolean = true) - (implicit system: ActorSystem, - ece: ExecutionContextExecutor, - materializer: ActorMaterializer) - extends SamClient( - "http", - "bar", - 1, - checkSubmitWhitelist, - NoLogging, - ActorRef.noSender - )(system, ece, materializer) +class BaseMockSamClient(checkSubmitWhitelist: Boolean = true)(implicit + system: ActorSystem, + ece: ExecutionContextExecutor, + materializer: ActorMaterializer +) extends SamClient( + "http", + "bar", + 1, + checkSubmitWhitelist, + NoLogging, + ActorRef.noSender + )(system, ece, materializer) /** * Extends the base mock client with overriden methods. */ -class MockSamClient(checkSubmitWhitelist: Boolean = true) - (implicit system: ActorSystem, - ece: ExecutionContextExecutor, - materializer: ActorMaterializer) - extends BaseMockSamClient(checkSubmitWhitelist) { +class MockSamClient(checkSubmitWhitelist: Boolean = true)(implicit + system: ActorSystem, + ece: ExecutionContextExecutor, + materializer: ActorMaterializer +) extends BaseMockSamClient(checkSubmitWhitelist) { override def collectionsForUser(user: User, httpRequest: HttpRequest): FailureResponseOrT[List[Collection]] = { val userId = user.userId.value @@ -127,7 +125,8 @@ class MockSamClient(checkSubmitWhitelist: Boolean = true) override def requestSubmission(user: User, collection: Collection, - cromIamRequest: HttpRequest): FailureResponseOrT[Unit] = { + cromIamRequest: HttpRequest + ): FailureResponseOrT[Unit] = collection match { case c if c.name.equalsIgnoreCase(UnauthorizedUserCollectionStr) => val exception = SamRegisterCollectionException(StatusCodes.BadRequest) @@ -135,28 +134,25 @@ class MockSamClient(checkSubmitWhitelist: Boolean = true) case c if c.name.equalsIgnoreCase(AuthorizedUserCollectionStr) => Monad[FailureResponseOrT].unit case _ => Monad[FailureResponseOrT].unit } - } - override def isSubmitWhitelistedSam(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[Boolean] = { + override def isSubmitWhitelistedSam(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[Boolean] = FailureResponseOrT.pure(!user.userId.value.equalsIgnoreCase(NotWhitelistedUser)) - } - override def isUserEnabledSam(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[Boolean] = { + override def isUserEnabledSam(user: User, cromIamRequest: HttpRequest): FailureResponseOrT[Boolean] = if (user.userId.value == "enabled@example.com" || user.userId.value == MockSamClient.AuthorizedUserCollectionStr) FailureResponseOrT.pure(true) else if (user.userId.value == "disabled@example.com") FailureResponseOrT.pure(false) else throw new Exception("Misconfigured test") - } override def requestAuth(authorizationRequest: CollectionAuthorizationRequest, - cromIamRequest: HttpRequest): FailureResponseOrT[Unit] = { + cromIamRequest: HttpRequest + ): FailureResponseOrT[Unit] = authorizationRequest.user.userId.value match { case AuthorizedUserCollectionStr => Monad[FailureResponseOrT].unit case _ => FailureResponseOrT.left(IO.raiseError[HttpResponse](new SamDenialException)) } - } } object MockSamClient { @@ -165,7 +161,6 @@ object MockSamClient { val NotWhitelistedUser: String = "ABC123" val UserCollectionList: List[Collection] = List(Collection("col1"), Collection("col2")) - def returnResponse[T](response: HttpResponse): FailureResponseOrT[T] = { + def returnResponse[T](response: HttpResponse): FailureResponseOrT[T] = FailureResponseOrT.left(IO.pure(response)) - } } diff --git a/CromIAM/src/test/scala/cromiam/webservice/QuerySupportSpec.scala 
b/CromIAM/src/test/scala/cromiam/webservice/QuerySupportSpec.scala index 216691c1c35..57dedbaefdf 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/QuerySupportSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/QuerySupportSpec.scala @@ -11,8 +11,12 @@ import org.broadinstitute.dsde.workbench.model.WorkbenchUserId import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - -class QuerySupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalatestRouteTest with QuerySupport { +class QuerySupportSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalatestRouteTest + with QuerySupport { override val cromwellClient = new MockCromwellClient() override val samClient = new MockSamClient() override val log: LoggingAdapter = NoLogging @@ -27,7 +31,8 @@ class QuerySupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val getQuery = s"$queryPath?status=Submitted&label=foo:bar&label=foo:baz" val badGetQuery = s"$queryPath?status=Submitted&labelor=foo:bar&label=foo:baz" - val goodPostEntity = HttpEntity(ContentTypes.`application/json`, + val goodPostEntity = HttpEntity( + ContentTypes.`application/json`, """|[ | { | "start": "2015-11-01T00:00:00-04:00" @@ -50,7 +55,8 @@ class QuerySupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher |] |""".stripMargin ) - val badPostEntity = HttpEntity(ContentTypes.`application/json`, + val badPostEntity = HttpEntity( + ContentTypes.`application/json`, """|[ | { | "start": "2015-11-01T00:00:00-04:00" diff --git a/CromIAM/src/test/scala/cromiam/webservice/SubmissionSupportSpec.scala b/CromIAM/src/test/scala/cromiam/webservice/SubmissionSupportSpec.scala index c4a424747e6..bfbd38075f5 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/SubmissionSupportSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/SubmissionSupportSpec.scala @@ -12,8 +12,12 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ - -class SubmissionSupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalatestRouteTest with SubmissionSupport { +class SubmissionSupportSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalatestRouteTest + with SubmissionSupport { override val cromwellClient = new MockCromwellClient() override val samClient = new MockSamClient() override val log: LoggingAdapter = NoLogging @@ -56,7 +60,6 @@ class SubmissionSupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(helloWorldWdl)) val formData = Multipart.FormData(workflowSource).toEntity() - "Submit endpoint" should "forward the request to Cromwell for authorized SAM user" in { Post(submitPath).withHeaders(goodAuthHeaders).withEntity(formData) ~> submitRoute ~> check { status shouldEqual StatusCodes.OK diff --git a/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala b/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala index 838a523f4eb..32a7511c6de 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala @@ -16,9 +16,13 @@ import org.yaml.snakeyaml.{LoaderOptions, Yaml => SnakeYaml} import scala.jdk.CollectionConverters._ - -class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with SwaggerService with ScalatestRouteTest with Matchers - with TableDrivenPropertyChecks { +class 
SwaggerServiceSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with SwaggerService + with ScalatestRouteTest + with Matchers + with TableDrivenPropertyChecks { def actorRefFactory = system override def oauthConfig: SwaggerOauthConfig = SwaggerOauthConfig("clientId", "realm", "appName") val yamlLoaderOptions = new LoaderOptions @@ -33,7 +37,8 @@ class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Swagg contentType should be(ContentTypes.`application/octet-stream`) val body = responseAs[String] - val yaml = new SnakeYaml(new UniqueKeyConstructor(new LoaderOptions)).loadAs(body, classOf[java.util.Map[String, AnyRef]]) + val yaml = new SnakeYaml(new UniqueKeyConstructor(new LoaderOptions)) + .loadAs(body, classOf[java.util.Map[String, AnyRef]]) yaml.get("swagger") should be("2.0") } @@ -62,27 +67,42 @@ class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Swagg resultWithInfo.getSwagger.getDefinitions.asScala foreach { // If no properties, `getProperties` returns `null` instead of an empty map - case (defKey, defVal) => Option(defVal.getProperties).map(_.asScala).getOrElse(Map.empty) foreach { - /* + case (defKey, defVal) => + Option(defVal.getProperties).map(_.asScala).getOrElse(Map.empty) foreach { + /* Two against one. Swagger parser implementation lets a RefProperty have descriptions. http://swagger.io/specification/#referenceObject & http://editor.swagger.io both say it's ref ONLY! - */ - case (propKey, propVal: RefProperty) => - withClue(s"RefProperty $defKey.$propKey has a description: ") { - propVal.getDescription should be(null) - } - case _ => /* ignore */ - } + */ + case (propKey, propVal: RefProperty) => + withClue(s"RefProperty $defKey.$propKey has a description: ") { + propVal.getDescription should be(null) + } + case _ => /* ignore */ + } } } } it should "return status OK when getting OPTIONS on paths" in { - val pathExamples = Table("path", "/", "/swagger", "/swagger/cromwell.yaml", "/swagger/index.html", "/api", - "/api/workflows/", "/api/workflows/v1", "/workflows/v1/outputs", "/workflows/v1/status", - "/api/workflows/v1/validate", "/workflows", "/workflows/v1", "/workflows/v1/outputs", "/workflows/v1/status", - "/workflows/v1/validate") + val pathExamples = Table( + "path", + "/", + "/swagger", + "/swagger/cromwell.yaml", + "/swagger/index.html", + "/api", + "/api/workflows/", + "/api/workflows/v1", + "/workflows/v1/outputs", + "/workflows/v1/status", + "/api/workflows/v1/validate", + "/workflows", + "/workflows/v1", + "/workflows/v1/outputs", + "/workflows/v1/status", + "/workflows/v1/validate" + ) forAll(pathExamples) { path => Options(path) ~> diff --git a/CromIAM/src/test/scala/cromiam/webservice/SwaggerUiHttpServiceSpec.scala b/CromIAM/src/test/scala/cromiam/webservice/SwaggerUiHttpServiceSpec.scala index 1d0103a09fd..b39466ec4f0 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/SwaggerUiHttpServiceSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/SwaggerUiHttpServiceSpec.scala @@ -10,19 +10,38 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks - -trait SwaggerUiHttpServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalatestRouteTest with SwaggerUiHttpService { +trait SwaggerUiHttpServiceSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalatestRouteTest + with SwaggerUiHttpService { def actorRefFactory = system } -trait SwaggerResourceHttpServiceSpec extends 
AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalatestRouteTest with -TableDrivenPropertyChecks with SwaggerResourceHttpService { - - val testPathsForOptions = Table("endpoint", "/", "/swagger", "/swagger/index.html", "/api", "/api/example", - "/api/example?with=param", "/api/example/path") +trait SwaggerResourceHttpServiceSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalatestRouteTest + with TableDrivenPropertyChecks + with SwaggerResourceHttpService { + + val testPathsForOptions = Table("endpoint", + "/", + "/swagger", + "/swagger/index.html", + "/api", + "/api/example", + "/api/example?with=param", + "/api/example/path" + ) } -trait SwaggerUiResourceHttpServiceSpec extends SwaggerUiHttpServiceSpec with SwaggerResourceHttpServiceSpec with SwaggerUiResourceHttpService +trait SwaggerUiResourceHttpServiceSpec + extends SwaggerUiHttpServiceSpec + with SwaggerResourceHttpServiceSpec + with SwaggerUiResourceHttpService object SwaggerUiHttpServiceSpec { // TODO: Re-common-ize swagger out of cromwell's engine and reuse. @@ -40,7 +59,7 @@ class BasicSwaggerUiHttpServiceSpec extends SwaggerUiHttpServiceSpec { behavior of "SwaggerUiHttpService" override protected def rewriteSwaggerIndex(data: String): String = - // Replace same magic string used in SwaggerUiResourceHttpService.rewriteSwaggerIndex + // Replace same magic string used in SwaggerUiResourceHttpService.rewriteSwaggerIndex data.replace("window.ui = ui", "replaced-client-id") it should "redirect /swagger to /" in { @@ -80,7 +99,9 @@ class BasicSwaggerUiHttpServiceSpec extends SwaggerUiHttpServiceSpec { } override def oauthConfig: SwaggerOauthConfig = SwaggerOauthConfig( - clientId = "test-client-id", realm = "test-realm", appName = "test-appname" + clientId = "test-client-id", + realm = "test-realm", + appName = "test-appname" ) } @@ -195,7 +216,6 @@ class YamlSwaggerUiResourceHttpServiceSpec extends SwaggerUiResourceHttpServiceS } } - class JsonSwaggerUiResourceHttpServiceSpec extends SwaggerUiResourceHttpServiceSpec { override def oauthConfig: SwaggerOauthConfig = SwaggerOauthConfig("clientId", "realm", "appName") diff --git a/CromIAM/src/test/scala/cromiam/webservice/WomtoolRouteSupportSpec.scala b/CromIAM/src/test/scala/cromiam/webservice/WomtoolRouteSupportSpec.scala index 785c887c374..a229f2db255 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/WomtoolRouteSupportSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/WomtoolRouteSupportSpec.scala @@ -10,8 +10,12 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - -class WomtoolRouteSupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with WomtoolRouteSupport with ScalatestRouteTest { +class WomtoolRouteSupportSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with WomtoolRouteSupport + with ScalatestRouteTest { override lazy val cromwellClient = new MockCromwellClient() override lazy val samClient = new MockSamClient() diff --git a/backend/src/main/scala/cromwell/backend/BackendCacheHitCopyingActor.scala b/backend/src/main/scala/cromwell/backend/BackendCacheHitCopyingActor.scala index 54849df4250..549bf761e38 100644 --- a/backend/src/main/scala/cromwell/backend/BackendCacheHitCopyingActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendCacheHitCopyingActor.scala @@ -6,13 +6,19 @@ import cromwell.core.simpleton.WomValueSimpleton import cromwell.services.CallCaching.CallCachingEntryId object 
BackendCacheHitCopyingActor { - final case class CopyOutputsCommand(womValueSimpletons: Seq[WomValueSimpleton], jobDetritusFiles: Map[String, String], cacheHit: CallCachingEntryId, returnCode: Option[Int]) + final case class CopyOutputsCommand(womValueSimpletons: Seq[WomValueSimpleton], + jobDetritusFiles: Map[String, String], + cacheHit: CallCachingEntryId, + returnCode: Option[Int] + ) final case class CopyingOutputsFailedResponse(jobKey: JobKey, cacheCopyAttempt: Int, failure: CacheCopyFailure) sealed trait CacheCopyFailure + /** A cache hit copy was attempted but failed. */ final case class CopyAttemptError(failure: Throwable) extends CacheCopyFailure + /** Copying was requested for a blacklisted cache hit, however the cache hit copying actor found the hit had already * been blacklisted so no novel copy attempt was made. */ final case class BlacklistSkip(failureCategory: MetricableCacheCopyErrorCategory) extends CacheCopyFailure diff --git a/backend/src/main/scala/cromwell/backend/BackendInitializationData.scala b/backend/src/main/scala/cromwell/backend/BackendInitializationData.scala index b1099b07155..8a9daa3cc39 100644 --- a/backend/src/main/scala/cromwell/backend/BackendInitializationData.scala +++ b/backend/src/main/scala/cromwell/backend/BackendInitializationData.scala @@ -15,12 +15,11 @@ object BackendInitializationData { * @tparam A The type to cast the initialization data. * @return The initialization data as the type A. */ - def as[A <: BackendInitializationData](initializationDataOption: Option[BackendInitializationData]): A = { + def as[A <: BackendInitializationData](initializationDataOption: Option[BackendInitializationData]): A = initializationDataOption match { case Some(initializationData) => initializationData.asInstanceOf[A] case None => throw new RuntimeException("Initialization data was not found.") } - } } object AllBackendInitializationData { @@ -30,5 +29,6 @@ object AllBackendInitializationData { // Holds initialization data for all backends initialized for a workflow. 
case class AllBackendInitializationData(data: Map[String, Option[BackendInitializationData]]) { def get(backendName: String): Option[BackendInitializationData] = data.get(backendName).flatten - def getWorkflowRoots(): Set[Path] = data.values.collect({case Some(i: StandardInitializationData) => i.workflowPaths.workflowRoot}).toSet[Path] + def getWorkflowRoots(): Set[Path] = + data.values.collect { case Some(i: StandardInitializationData) => i.workflowPaths.workflowRoot }.toSet[Path] } diff --git a/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala b/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala index a58131363d3..493221ec3ce 100644 --- a/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendJobExecutionActor.scala @@ -33,7 +33,8 @@ object BackendJobExecutionActor { jobDetritusFiles: Option[Map[String, Path]], executionEvents: Seq[ExecutionEvent], dockerImageUsed: Option[String], - resultGenerationMode: ResultGenerationMode) extends BackendJobExecutionResponse + resultGenerationMode: ResultGenerationMode + ) extends BackendJobExecutionResponse sealed trait ResultGenerationMode case object RunOnBackend extends ResultGenerationMode @@ -41,25 +42,30 @@ object BackendJobExecutionActor { case object FetchedFromJobStore extends ResultGenerationMode case class JobAbortedResponse(jobKey: BackendJobDescriptorKey) extends BackendJobExecutionResponse - - sealed trait BackendJobFailedResponse extends BackendJobExecutionResponse { def throwable: Throwable; def returnCode: Option[Int] } - case class JobFailedNonRetryableResponse(jobKey: JobKey, throwable: Throwable, returnCode: Option[Int]) extends BackendJobFailedResponse - case class JobFailedRetryableResponse(jobKey: BackendJobDescriptorKey, - throwable: Throwable, - returnCode: Option[Int]) extends BackendJobFailedResponse - - // Reconnection Exceptions - case class JobReconnectionNotSupportedException(jobKey: BackendJobDescriptorKey) extends Exception( - s"This backend does not support job reconnection. The status of the underlying job for ${jobKey.tag} cannot be known." - ) with CromwellFatalExceptionMarker - case class JobNotFoundException(jobKey: BackendJobDescriptorKey) extends Exception ( - s"No backend job for ${jobKey.tag} could be found. The status of the underlying job cannot be known." - ) with CromwellFatalExceptionMarker + sealed trait BackendJobFailedResponse extends BackendJobExecutionResponse { + def throwable: Throwable; def returnCode: Option[Int] + } + case class JobFailedNonRetryableResponse(jobKey: JobKey, throwable: Throwable, returnCode: Option[Int]) + extends BackendJobFailedResponse + case class JobFailedRetryableResponse(jobKey: BackendJobDescriptorKey, throwable: Throwable, returnCode: Option[Int]) + extends BackendJobFailedResponse - def buildJobExecutionActorName(workflowId: WorkflowId, jobKey: BackendJobDescriptorKey) = { + // Reconnection Exceptions + case class JobReconnectionNotSupportedException(jobKey: BackendJobDescriptorKey) + extends Exception( + s"This backend does not support job reconnection. The status of the underlying job for ${jobKey.tag} cannot be known." + ) + with CromwellFatalExceptionMarker + + case class JobNotFoundException(jobKey: BackendJobDescriptorKey) + extends Exception( + s"No backend job for ${jobKey.tag} could be found. The status of the underlying job cannot be known." 
+ ) + with CromwellFatalExceptionMarker + + def buildJobExecutionActorName(workflowId: WorkflowId, jobKey: BackendJobDescriptorKey) = s"$workflowId-BackendJobExecutionActor-${jobKey.tag}" - } } /** @@ -92,7 +98,9 @@ trait BackendJobExecutionActor extends BackendJobLifecycleActor with ActorLoggin */ def recover: Future[BackendJobExecutionResponse] = { log.warning("{} backend currently doesn't support recovering jobs. Starting {} again.", - jobTag, jobDescriptor.key.call.fullyQualifiedName) + jobTag, + jobDescriptor.key.call.fullyQualifiedName + ) execute } @@ -100,28 +108,24 @@ trait BackendJobExecutionActor extends BackendJobLifecycleActor with ActorLoggin * Tries to reconnect to a previously started job. This method differs from recover by sending a ReconnectionFailure * if it can't reconnect to the job for whatever reason. It should NOT execute the job if reconnection is impossible. */ - def reconnect: Future[BackendJobExecutionResponse] = { + def reconnect: Future[BackendJobExecutionResponse] = Future.failed(JobReconnectionNotSupportedException(jobDescriptor.key)) - } /** * Similar to reconnect, except that if the reconnection succeeds and the job is still running, * an abort attempt should be made. */ - def reconnectToAborting: Future[BackendJobExecutionResponse] = { + def reconnectToAborting: Future[BackendJobExecutionResponse] = Future.failed(JobReconnectionNotSupportedException(jobDescriptor.key)) - } /** * Abort a running job. */ - def abort(): Unit = { - log.warning("{} backend currently doesn't support abort for {}.", - jobTag, jobDescriptor.key.call.fullyQualifiedName) - } + def abort(): Unit = + log.warning("{} backend currently doesn't support abort for {}.", jobTag, jobDescriptor.key.call.fullyQualifiedName) - def evaluateOutputs(wdlFunctions: IoFunctionSet, - postMapper: WomValue => Try[WomValue] = v => Success(v))(implicit ec: ExecutionContext): EvaluatedJobOutputs = { + def evaluateOutputs(wdlFunctions: IoFunctionSet, postMapper: WomValue => Try[WomValue] = v => Success(v))(implicit + ec: ExecutionContext + ): EvaluatedJobOutputs = Await.result(OutputEvaluator.evaluateOutputs(jobDescriptor, wdlFunctions, postMapper), Duration.Inf) - } } diff --git a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala index 72b0c24a800..450179c43e4 100644 --- a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala @@ -30,7 +30,7 @@ trait BackendLifecycleActor extends Actor { /** * The execution context for the actor */ - protected implicit def ec: ExecutionContext = context.dispatcher + implicit protected def ec: ExecutionContext = context.dispatcher /** * The configuration for the backend, in the context of the entire Cromwell configuration file. 
@@ -39,7 +39,8 @@ trait BackendLifecycleActor extends Actor { protected def performActionThenRespond(operation: => Future[BackendWorkflowLifecycleActorResponse], onFailure: Throwable => BackendWorkflowLifecycleActorResponse, - andThen: => Unit = ()) = { + andThen: => Unit = () + ) = { val respondTo: ActorRef = sender() operation onComplete { case Success(r) => @@ -54,9 +55,9 @@ trait BackendLifecycleActor extends Actor { trait BackendWorkflowLifecycleActor extends BackendLifecycleActor with WorkflowLogging { - //For Logging and boilerplate - override lazy final val workflowIdForLogging = workflowDescriptor.possiblyNotRootWorkflowId - override lazy final val rootWorkflowIdForLogging = workflowDescriptor.rootWorkflowId + // For Logging and boilerplate + final override lazy val workflowIdForLogging = workflowDescriptor.possiblyNotRootWorkflowId + final override lazy val rootWorkflowIdForLogging = workflowDescriptor.rootWorkflowId /** * The workflow descriptor for the workflow in which this Backend is being used diff --git a/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala b/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala index 0d38d87b01f..8be9a7f327c 100644 --- a/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala +++ b/backend/src/main/scala/cromwell/backend/BackendLifecycleActorFactory.scala @@ -25,7 +25,8 @@ trait BackendLifecycleActorFactory { */ def name: String - def nameForCallCachingPurposes: String = configurationDescriptor.backendConfig.getOrElse("name-for-call-caching-purposes", name) + def nameForCallCachingPurposes: String = + configurationDescriptor.backendConfig.getOrElse("name-for-call-caching-purposes", name) /** * Config values for the backend, and a pointer to the global config. 
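// nameForCallCachingPurposes above falls back to the factory's name unless the backend
// stanza sets the key explicitly; a hypothetical backend configuration illustrating the
// override (HOCON, names illustrative):
//
//   backend.providers.MyBackend.config {
//     name-for-call-caching-purposes = "StableCacheName"
//   }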
@@ -44,7 +45,8 @@ trait BackendLifecycleActorFactory { ioActor: ActorRef, calls: Set[CommandCallNode], serviceRegistryActor: ActorRef, - restarting: Boolean): Option[Props] = None + restarting: Boolean + ): Option[Props] = None /* ****************************** */ /* Job Execution */ @@ -54,7 +56,8 @@ trait BackendLifecycleActorFactory { initializationData: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActor: Option[ActorRef]): Props + backendSingletonActor: Option[ActorRef] + ): Props lazy val jobExecutionTokenType: JobTokenType = { val concurrentJobLimit = configurationDescriptor.backendConfig.as[Option[Int]]("concurrent-job-limit") @@ -67,7 +70,8 @@ trait BackendLifecycleActorFactory { } lazy val jobRestartCheckTokenType: JobTokenType = { - val concurrentRestartCheckLimit = configurationDescriptor.globalConfig.as[Option[Int]]("system.job-restart-check-rate-control.max-jobs") + val concurrentRestartCheckLimit = + configurationDescriptor.globalConfig.as[Option[Int]]("system.job-restart-check-rate-control.max-jobs") // if defined, use per-backend hog-factor, otherwise use system-level value val hogFactor = configurationDescriptor.backendConfig.as[Option[Int]]("hog-factor") match { case Some(backendHogFactorValue) => backendHogFactorValue @@ -85,13 +89,16 @@ trait BackendLifecycleActorFactory { calls: Set[CommandCallNode], jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, - initializationData: Option[BackendInitializationData]): Option[Props] = None + initializationData: Option[BackendInitializationData] + ): Option[Props] = None /* ****************************** */ /* Call Caching */ /* ****************************** */ - def fileHashingActorProps: Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Option[ActorRef]) => Props] = None + def fileHashingActorProps + : Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Option[ActorRef]) => Props] = + None /** * Providing this method to generate Props for a cache hit copying actor is optional. @@ -102,7 +109,9 @@ trait BackendLifecycleActorFactory { * * Simples! */ - def cacheHitCopyingActorProps: Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Int, Option[BlacklistCache]) => Props] = None + def cacheHitCopyingActorProps: Option[ + (BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Int, Option[BlacklistCache]) => Props + ] = None /* ****************************** */ /* Misc. 
*/ @@ -114,19 +123,26 @@ trait BackendLifecycleActorFactory { jobKey: BackendJobDescriptorKey, initializationData: Option[BackendInitializationData], ioActor: ActorRef, - ec: ExecutionContext): IoFunctionSet = NoIoFunctionSet + ec: ExecutionContext + ): IoFunctionSet = NoIoFunctionSet def pathBuilders(initializationDataOption: Option[BackendInitializationData]): PathBuilders = List.empty - def getExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, backendConfig: Config, initializationData: Option[BackendInitializationData]): Path = { + def getExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, + backendConfig: Config, + initializationData: Option[BackendInitializationData] + ): Path = new WorkflowPathsWithDocker(workflowDescriptor, backendConfig).executionRoot - } - def getWorkflowExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, backendConfig: Config, initializationData: Option[BackendInitializationData]): Path = { + def getWorkflowExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, + backendConfig: Config, + initializationData: Option[BackendInitializationData] + ): Path = new WorkflowPathsWithDocker(workflowDescriptor, backendConfig).workflowRoot - } - def runtimeAttributeDefinitions(initializationDataOption: Option[BackendInitializationData]): Set[RuntimeAttributeDefinition] = Set.empty + def runtimeAttributeDefinitions( + initializationDataOption: Option[BackendInitializationData] + ): Set[RuntimeAttributeDefinition] = Set.empty /** * A set of KV store keys that this backend requests that the engine lookup before running each job. @@ -136,13 +152,16 @@ trait BackendLifecycleActorFactory { /** * A set of KV store keys that are requested and looked up on behalf of all backends before running each job. */ - def defaultKeyValueStoreKeys: Seq[String] = Seq(BackendLifecycleActorFactory.FailedRetryCountKey, BackendLifecycleActorFactory.MemoryMultiplierKey) + def defaultKeyValueStoreKeys: Seq[String] = + Seq(BackendLifecycleActorFactory.FailedRetryCountKey, BackendLifecycleActorFactory.MemoryMultiplierKey) /* * Returns credentials that can be used to authenticate to a docker registry server * in order to obtain a docker hash. 
*/ - def dockerHashCredentials(workflowDescriptor: BackendWorkflowDescriptor, initializationDataOption: Option[BackendInitializationData]): List[Any] = List.empty + def dockerHashCredentials(workflowDescriptor: BackendWorkflowDescriptor, + initializationDataOption: Option[BackendInitializationData] + ): List[Any] = List.empty } object BackendLifecycleActorFactory { diff --git a/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala b/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala index 2f77c2ebc2a..04abe76e372 100644 --- a/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala @@ -27,8 +27,8 @@ object BackendWorkflowFinalizationActor { */ trait BackendWorkflowFinalizationActor extends BackendWorkflowLifecycleActor with ActorLogging { - def receive: Receive = LoggingReceive { - case Finalize => performActionThenRespond(afterAll() map { _ => FinalizationSuccess }, onFailure = FinalizationFailed) + def receive: Receive = LoggingReceive { case Finalize => + performActionThenRespond(afterAll() map { _ => FinalizationSuccess }, onFailure = FinalizationFailed) } /** diff --git a/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala b/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala index bc3dd963f63..9f59a451478 100644 --- a/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendWorkflowInitializationActor.scala @@ -34,7 +34,8 @@ object BackendWorkflowInitializationActor { // Responses sealed trait BackendWorkflowInitializationActorResponse extends BackendWorkflowLifecycleActorResponse sealed trait InitializationResponse extends BackendWorkflowInitializationActorResponse - case class InitializationSuccess(backendInitializationData: Option[BackendInitializationData]) extends InitializationResponse + case class InitializationSuccess(backendInitializationData: Option[BackendInitializationData]) + extends InitializationResponse case class InitializationFailed(reason: Throwable) extends Exception with InitializationResponse /** @@ -70,24 +71,25 @@ object BackendWorkflowInitializationActor { * - It would be nice to memoize as much of the work that gets done here as possible so it doesn't have to all be * repeated when the various `FooRuntimeAttributes` classes are created, in the spirit of #1076. 
*/ - def validateRuntimeAttributes( - taskName: String, - defaultRuntimeAttributes: Map[String, WomValue], - runtimeAttributes: Map[String, WomExpression], - runtimeAttributeValidators: Map[String, Option[WomExpression] => Boolean] - ): ValidatedNel[RuntimeAttributeValidationFailure, Unit] = { - - //This map append will overwrite default key/values with runtime settings upon key collisions - val lookups = defaultRuntimeAttributes.safeMapValues(_.asWomExpression) ++ runtimeAttributes - - runtimeAttributeValidators.toList.traverse{ - case (attributeName, validator) => - val runtimeAttributeValue: Option[WomExpression] = lookups.get(attributeName) - validator(runtimeAttributeValue).fold( - validNel(()), - Invalid(NonEmptyList.of(RuntimeAttributeValidationFailure(taskName, attributeName, runtimeAttributeValue))) - ) - }.map(_ => ()) + def validateRuntimeAttributes( + taskName: String, + defaultRuntimeAttributes: Map[String, WomValue], + runtimeAttributes: Map[String, WomExpression], + runtimeAttributeValidators: Map[String, Option[WomExpression] => Boolean] + ): ValidatedNel[RuntimeAttributeValidationFailure, Unit] = { + + // This map append will overwrite default key/values with runtime settings upon key collisions + val lookups = defaultRuntimeAttributes.safeMapValues(_.asWomExpression) ++ runtimeAttributes + + runtimeAttributeValidators.toList + .traverse { case (attributeName, validator) => + val runtimeAttributeValue: Option[WomExpression] = lookups.get(attributeName) + validator(runtimeAttributeValue).fold( + validNel(()), + Invalid(NonEmptyList.of(RuntimeAttributeValidationFailure(taskName, attributeName, runtimeAttributeValue))) + ) + } + .map(_ => ()) } } @@ -110,7 +112,9 @@ trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor w * declarations will fail evaluation and return `true` from this predicate, even if the type could be determined * to be wrong with consideration of task declarations or inputs. */ - protected def womTypePredicate(valueRequired: Boolean, predicate: WomType => Boolean)(womExpressionMaybe: Option[WomExpression]): Boolean = { + protected def womTypePredicate(valueRequired: Boolean, predicate: WomType => Boolean)( + womExpressionMaybe: Option[WomExpression] + ): Boolean = womExpressionMaybe match { case None => !valueRequired case Some(womExpression: WomExpression) => @@ -119,29 +123,31 @@ trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor w case Invalid(_) => true // If we can't evaluate it, we'll let it pass for now... } } - } /** * This predicate is only appropriate for validation during workflow initialization. The logic does not differentiate * between evaluation failures due to missing call inputs or evaluation failures due to malformed expressions, and will * return `true` in both cases. 
*/ - protected def continueOnReturnCodePredicate(valueRequired: Boolean)(womExpressionMaybe: Option[WomValue]): Boolean = { - ContinueOnReturnCodeValidation.default(configurationDescriptor.backendRuntimeAttributesConfig).validateOptionalWomValue(womExpressionMaybe) - } + protected def continueOnReturnCodePredicate(valueRequired: Boolean)(womExpressionMaybe: Option[WomValue]): Boolean = + ContinueOnReturnCodeValidation + .default(configurationDescriptor.backendRuntimeAttributesConfig) + .validateOptionalWomValue(womExpressionMaybe) protected def runtimeAttributeValidators: Map[String, Option[WomExpression] => Boolean] // FIXME: If a workflow executes jobs using multiple backends, // each backend will try to write its own workflow root and override any previous one. // They should be structured differently or at least be prefixed by the backend name - protected def publishWorkflowRoot(workflowRoot: String): Unit = { - serviceRegistryActor ! PutMetadataAction(MetadataEvent(MetadataKey(workflowDescriptor.id, None, WorkflowMetadataKeys.WorkflowRoot), MetadataValue(workflowRoot))) - } + protected def publishWorkflowRoot(workflowRoot: String): Unit = + serviceRegistryActor ! PutMetadataAction( + MetadataEvent(MetadataKey(workflowDescriptor.id, None, WorkflowMetadataKeys.WorkflowRoot), + MetadataValue(workflowRoot) + ) + ) protected def coerceDefaultRuntimeAttributes(options: WorkflowOptions): Try[Map[String, WomValue]] - def receive: Receive = LoggingReceive { case Initialize => performActionThenRespond(initSequence(), onFailure = InitializationFailed) case Abort => abortInitialization() @@ -154,10 +160,11 @@ trait BackendWorkflowInitializationActor extends BackendWorkflowLifecycleActor w for { defaultRuntimeAttributes <- coerceDefaultRuntimeAttributes(workflowDescriptor.workflowOptions) |> Future.fromTry _ taskList = calls.toList.map(_.callable).map(t => t.name -> t.runtimeAttributes.attributes) - _ <- taskList. 
- traverse{ - case (name, runtimeAttributes) => validateRuntimeAttributes(name, defaultRuntimeAttributes, runtimeAttributes, runtimeAttributeValidators) - }.toFuture(errors => RuntimeAttributeValidationFailures(errors.toList)) + _ <- taskList + .traverse { case (name, runtimeAttributes) => + validateRuntimeAttributes(name, defaultRuntimeAttributes, runtimeAttributes, runtimeAttributeValidators) + } + .toFuture(errors => RuntimeAttributeValidationFailures(errors.toList)) _ <- validate() data <- beforeAll() } yield InitializationSuccess(data) diff --git a/backend/src/main/scala/cromwell/backend/Command.scala b/backend/src/main/scala/cromwell/backend/Command.scala index 40310c68485..3a4eaf3b88a 100644 --- a/backend/src/main/scala/cromwell/backend/Command.scala +++ b/backend/src/main/scala/cromwell/backend/Command.scala @@ -24,11 +24,16 @@ object Command { */ def instantiate(jobDescriptor: BackendJobDescriptor, callEngineFunction: IoFunctionSet, - inputsPreProcessor: WomEvaluatedCallInputs => Try[WomEvaluatedCallInputs] = (i: WomEvaluatedCallInputs) => Success(i), + inputsPreProcessor: WomEvaluatedCallInputs => Try[WomEvaluatedCallInputs] = + (i: WomEvaluatedCallInputs) => Success(i), valueMapper: WomValue => WomValue, - runtimeEnvironment: RuntimeEnvironment): ErrorOr[InstantiatedCommand] = { + runtimeEnvironment: RuntimeEnvironment + ): ErrorOr[InstantiatedCommand] = inputsPreProcessor(jobDescriptor.evaluatedTaskInputs).toErrorOr flatMap { mappedInputs => - jobDescriptor.taskCall.callable.instantiateCommand(mappedInputs, callEngineFunction, valueMapper, runtimeEnvironment) + jobDescriptor.taskCall.callable.instantiateCommand(mappedInputs, + callEngineFunction, + valueMapper, + runtimeEnvironment + ) } - } } diff --git a/backend/src/main/scala/cromwell/backend/FileSizeTooBig.scala b/backend/src/main/scala/cromwell/backend/FileSizeTooBig.scala index 2580d9218fc..79ccc698b05 100644 --- a/backend/src/main/scala/cromwell/backend/FileSizeTooBig.scala +++ b/backend/src/main/scala/cromwell/backend/FileSizeTooBig.scala @@ -1,4 +1,3 @@ package cromwell.backend case class FileSizeTooBig(override val getMessage: String) extends Exception - diff --git a/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala b/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala index 2bf215fb0fa..8c799e36ad5 100644 --- a/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala +++ b/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala @@ -26,15 +26,20 @@ object OutputEvaluator { def evaluateOutputs(jobDescriptor: BackendJobDescriptor, ioFunctions: IoFunctionSet, - postMapper: WomValue => Try[WomValue] = v => Success(v))(implicit ec: ExecutionContext): Future[EvaluatedJobOutputs] = { + postMapper: WomValue => Try[WomValue] = v => Success(v) + )(implicit ec: ExecutionContext): Future[EvaluatedJobOutputs] = { val taskInputValues: Map[String, WomValue] = jobDescriptor.localInputs - def foldFunction(accumulatedOutputs: Try[ErrorOr[List[(OutputPort, WomValue)]]], output: ExpressionBasedOutputPort) = accumulatedOutputs flatMap { accumulated => + def foldFunction(accumulatedOutputs: Try[ErrorOr[List[(OutputPort, WomValue)]]], + output: ExpressionBasedOutputPort + ) = accumulatedOutputs flatMap { accumulated => // Extract the valid pairs from the job outputs accumulated so far, and add to it the inputs (outputs can also reference inputs) val allKnownValues: Map[String, WomValue] = accumulated match { case Valid(outputs) => // The evaluateValue methods needs a Map[String, WomValue], use the output 
port name for already computed outputs - outputs.toMap[OutputPort, WomValue].map({ case (port, value) => port.internalName -> value }) ++ taskInputValues + outputs.toMap[OutputPort, WomValue].map { case (port, value) => + port.internalName -> value + } ++ taskInputValues case Invalid(_) => taskInputValues } @@ -45,22 +50,24 @@ object OutputEvaluator { } // Attempt to coerce the womValue to the desired output type - def coerceOutputValue(womValue: WomValue, coerceTo: WomType): OutputResult[WomValue] = { + def coerceOutputValue(womValue: WomValue, coerceTo: WomType): OutputResult[WomValue] = fromEither[Try]( // TODO WOM: coerceRawValue should return an ErrorOr - coerceTo.coerceRawValue(womValue).toEither.leftMap(t => NonEmptyList.one(t.getClass.getSimpleName + ": " + t.getMessage)) + coerceTo + .coerceRawValue(womValue) + .toEither + .leftMap(t => NonEmptyList.one(t.getClass.getSimpleName + ": " + t.getMessage)) ) - } /* - * Go through evaluation, coercion and post processing. - * Transform the result to a validated Try[ErrorOr[(String, WomValue)]] with toValidated - * If we have a valid pair, add it to the previously accumulated outputs, otherwise combine the Nels of errors + * Go through evaluation, coercion and post processing. + * Transform the result to a validated Try[ErrorOr[(String, WomValue)]] with toValidated + * If we have a valid pair, add it to the previously accumulated outputs, otherwise combine the Nels of errors */ val evaluated = for { evaluated <- evaluateOutputExpression coerced <- coerceOutputValue(evaluated, output.womType) - postProcessed <- EitherT { postMapper(coerced).map(_.validNelCheck) }: OutputResult[WomValue] + postProcessed <- EitherT(postMapper(coerced).map(_.validNelCheck)): OutputResult[WomValue] pair = output -> postProcessed } yield pair @@ -73,24 +80,27 @@ object OutputEvaluator { val emptyValue = Success(List.empty[(OutputPort, WomValue)].validNel): Try[ErrorOr[List[(OutputPort, WomValue)]]] // Fold over the outputs to evaluate them in order, map the result to an EvaluatedJobOutputs - def fromOutputPorts: EvaluatedJobOutputs = jobDescriptor.taskCall.expressionBasedOutputPorts.foldLeft(emptyValue)(foldFunction) match { - case Success(Valid(outputs)) => ValidJobOutputs(CallOutputs(outputs.toMap)) - case Success(Invalid(errors)) => InvalidJobOutputs(errors) - case Failure(exception) => JobOutputsEvaluationException(exception) - } + def fromOutputPorts: EvaluatedJobOutputs = + jobDescriptor.taskCall.expressionBasedOutputPorts.foldLeft(emptyValue)(foldFunction) match { + case Success(Valid(outputs)) => ValidJobOutputs(CallOutputs(outputs.toMap)) + case Success(Invalid(errors)) => InvalidJobOutputs(errors) + case Failure(exception) => JobOutputsEvaluationException(exception) + } /* - * Because Cromwell doesn't trust anyone, if custom evaluation is provided, - * still make sure that all the output ports have been filled with values + * Because Cromwell doesn't trust anyone, if custom evaluation is provided, + * still make sure that all the output ports have been filled with values */ def validateCustomEvaluation(outputs: Map[OutputPort, WomValue]): EvaluatedJobOutputs = { - def toError(outputPort: OutputPort) = s"Missing output value for ${outputPort.identifier.fullyQualifiedName.value}" + def toError(outputPort: OutputPort) = + s"Missing output value for ${outputPort.identifier.fullyQualifiedName.value}" jobDescriptor.taskCall.expressionBasedOutputPorts.diff(outputs.keySet.toList) match { case Nil => val errorMessagePrefix = "Error applying postMapper in 
short-circuit output evaluation" - TryUtil.sequenceMap(outputs map { case (k, v) => (k, postMapper(v))}, errorMessagePrefix) match { - case Failure(e) => InvalidJobOutputs(NonEmptyList.of(e.getMessage, e.getStackTrace.take(5).toIndexedSeq.map(_.toString):_*)) + TryUtil.sequenceMap(outputs map { case (k, v) => (k, postMapper(v)) }, errorMessagePrefix) match { + case Failure(e) => + InvalidJobOutputs(NonEmptyList.of(e.getMessage, e.getStackTrace.take(5).toIndexedSeq.map(_.toString): _*)) case Success(postMappedOutputs) => ValidJobOutputs(CallOutputs(postMappedOutputs)) } case head :: tail => InvalidJobOutputs(NonEmptyList.of(toError(head), tail.map(toError): _*)) @@ -98,18 +108,20 @@ object OutputEvaluator { } /* - * See if the task definition has "short-circuit" for the default output evaluation. - * In the case of CWL for example, this gives a chance to look for cwl.output.json and use it as the output of the tool, - * instead of the default behavior of going over each output port of the task and evaluates their expression. - * If the "customOutputEvaluation" returns None (which will happen if the cwl.output.json is not there, as well as for all WDL workflows), - * we fallback to the default behavior. + * See if the task definition has "short-circuit" for the default output evaluation. + * In the case of CWL for example, this gives a chance to look for cwl.output.json and use it as the output of the tool, + * instead of the default behavior of going over each output port of the task and evaluates their expression. + * If the "customOutputEvaluation" returns None (which will happen if the cwl.output.json is not there, as well as for all WDL workflows), + * we fallback to the default behavior. */ - jobDescriptor.taskCall.customOutputEvaluation(taskInputValues, ioFunctions, ec).value - .map({ + jobDescriptor.taskCall + .customOutputEvaluation(taskInputValues, ioFunctions, ec) + .value + .map { case Some(Right(outputs)) => validateCustomEvaluation(outputs) case Some(Left(errors)) => InvalidJobOutputs(errors) // If it returns an empty value, fallback to canonical output evaluation case None => fromOutputPorts - }) + } } } diff --git a/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala b/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala index 33e6e2db020..7e73301af71 100644 --- a/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala +++ b/backend/src/main/scala/cromwell/backend/RuntimeAttributeDefinition.scala @@ -22,7 +22,8 @@ object RuntimeAttributeDefinition { def evaluateRuntimeAttributes(unevaluated: RuntimeAttributes, wdlFunctions: IoFunctionSet, - evaluatedInputs: Map[InputDefinition, WomValue]): ErrorOr[Map[String, WomValue]] = { + evaluatedInputs: Map[InputDefinition, WomValue] + ): ErrorOr[Map[String, WomValue]] = { import common.validation.ErrorOr._ val inputsMap = evaluatedInputs map { case (x, y) => x.name -> y } unevaluated.attributes.traverseValues(_.evaluateValue(inputsMap, wdlFunctions)) @@ -32,18 +33,23 @@ object RuntimeAttributeDefinition { val successfulEvaluations = evaluatedDeclarations collect { case (k, v) if v.isSuccess => k.name -> v.get } - successfulEvaluations.getOrElse(identifier, throw new WomExpressionException(s"Could not resolve variable $identifier as a task input")) + successfulEvaluations.getOrElse( + identifier, + throw new WomExpressionException(s"Could not resolve variable $identifier as a task input") + ) } - def addDefaultsToAttributes(runtimeAttributeDefinitions: 
Set[RuntimeAttributeDefinition], workflowOptions: WorkflowOptions) - (specifiedAttributes: Map[LocallyQualifiedName, WomValue]): Map[LocallyQualifiedName, WomValue] = { + def addDefaultsToAttributes(runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition], + workflowOptions: WorkflowOptions + )(specifiedAttributes: Map[LocallyQualifiedName, WomValue]): Map[LocallyQualifiedName, WomValue] = { import WomValueJsonFormatter._ def isUnspecifiedAttribute(name: String) = !specifiedAttributes.contains(name) val missing = runtimeAttributeDefinitions filter { x => isUnspecifiedAttribute(x.name) } val defaults = missing map { x => (x, workflowOptions.getDefaultRuntimeOption(x.name)) } collect { - case (runtimeAttributeDefinition, Success(jsValue)) => runtimeAttributeDefinition.name -> jsValue.convertTo[WomValue] + case (runtimeAttributeDefinition, Success(jsValue)) => + runtimeAttributeDefinition.name -> jsValue.convertTo[WomValue] case (RuntimeAttributeDefinition(name, Some(factoryDefault), _), _) => name -> factoryDefault } specifiedAttributes ++ defaults diff --git a/backend/src/main/scala/cromwell/backend/RuntimeEnvironment.scala b/backend/src/main/scala/cromwell/backend/RuntimeEnvironment.scala index c6c4cd22c21..40f8f1f7b07 100644 --- a/backend/src/main/scala/cromwell/backend/RuntimeEnvironment.scala +++ b/backend/src/main/scala/cromwell/backend/RuntimeEnvironment.scala @@ -15,31 +15,33 @@ import wom.values.WomValue object RuntimeEnvironmentBuilder { - def apply(runtimeAttributes: Map[String, WomValue], callRoot: Path, callExecutionRoot: Path): MinimumRuntimeSettings => RuntimeEnvironment = { - minimums => + def apply(runtimeAttributes: Map[String, WomValue], + callRoot: Path, + callExecutionRoot: Path + ): MinimumRuntimeSettings => RuntimeEnvironment = { minimums => + val outputPath: String = callExecutionRoot.pathAsString - val outputPath: String = callExecutionRoot.pathAsString + val tempPath: String = { + val uuid = UUID.randomUUID().toString + val hash = uuid.substring(0, uuid.indexOf('-')) + callRoot.resolve(s"tmp.$hash").pathAsString + } - val tempPath: String = { - val uuid = UUID.randomUUID().toString - val hash = uuid.substring(0, uuid.indexOf('-')) - callRoot.resolve(s"tmp.$hash").pathAsString - } + val cores: Int Refined Positive = CpuValidation.instanceMin.validate(runtimeAttributes).getOrElse(minimums.cores) - val cores: Int Refined Positive = CpuValidation.instanceMin.validate(runtimeAttributes).getOrElse(minimums.cores) + val memoryInMB: Double = + MemoryValidation + .instance() + .validate(runtimeAttributes) + .map(_.to(MemoryUnit.MB).amount) + .getOrElse(minimums.ram.amount) - val memoryInMB: Double = - MemoryValidation.instance(). - validate(runtimeAttributes). - map(_.to(MemoryUnit.MB).amount). - getOrElse(minimums.ram.amount) + // TODO: Read these from somewhere else + val outputPathSize: Long = minimums.outputPathSize - //TODO: Read these from somewhere else - val outputPathSize: Long = minimums.outputPathSize + val tempPathSize: Long = minimums.outputPathSize - val tempPathSize: Long = minimums.outputPathSize - - RuntimeEnvironment(outputPath, tempPath, cores, memoryInMB, outputPathSize, tempPathSize) + RuntimeEnvironment(outputPath, tempPath, cores, memoryInMB, outputPathSize, tempPathSize) } /** @@ -48,12 +50,14 @@ object RuntimeEnvironmentBuilder { * "For cores, ram, outdirSize and tmpdirSize, if an implementation can't provide the actual number of reserved cores * during the expression evaluation time, it should report back the minimal requested amount." 
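The CWL wording quoted above is what MinimumRuntimeSettings below implements: when the actual reservation is not yet known, evaluation falls back to the declared minimum. A small sketch of that fallback, assuming a plain attribute map (attrs and coresForEvaluation are hypothetical); the Int Refined Positive type comes from the refined library used below:

    import eu.timepit.refined.api.Refined
    import eu.timepit.refined.auto._
    import eu.timepit.refined.numeric.Positive

    // Literal refinement: the compiler proves the default of 1 is Positive.
    val minimumCores: Int Refined Positive = 1

    // Report the minimum whenever no concrete value is available yet.
    def coresForEvaluation(attrs: Map[String, Int]): Int =
      attrs.getOrElse("cpu", minimumCores.value)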
*/ - def apply(runtimeAttributes: Map[String, WomValue], jobPaths: JobPaths): MinimumRuntimeSettings => RuntimeEnvironment = { + def apply(runtimeAttributes: Map[String, WomValue], + jobPaths: JobPaths + ): MinimumRuntimeSettings => RuntimeEnvironment = this.apply(runtimeAttributes, jobPaths.callRoot, jobPaths.callExecutionRoot) - } } case class MinimumRuntimeSettings(cores: Int Refined Positive = refineMV(1), ram: MemorySize = MemorySize(4, MemoryUnit.GB), outputPathSize: Long = Long.MaxValue, - tempPathSize: Long = Long.MaxValue) + tempPathSize: Long = Long.MaxValue +) diff --git a/backend/src/main/scala/cromwell/backend/SlowJobWarning.scala b/backend/src/main/scala/cromwell/backend/SlowJobWarning.scala index 074895e366a..22db6d7f92a 100644 --- a/backend/src/main/scala/cromwell/backend/SlowJobWarning.scala +++ b/backend/src/main/scala/cromwell/backend/SlowJobWarning.scala @@ -12,20 +12,21 @@ trait SlowJobWarning { this: Actor with ActorLogging => def slowJobWarningReceive: Actor.Receive = { case WarnAboutSlownessAfter(jobId, duration) => alreadyWarned = false - warningDetails = Option(WarningDetails(jobId, OffsetDateTime.now(), OffsetDateTime.now().plusSeconds(duration.toSeconds))) + warningDetails = Option( + WarningDetails(jobId, OffsetDateTime.now(), OffsetDateTime.now().plusSeconds(duration.toSeconds)) + ) case WarnAboutSlownessIfNecessary => handleWarnMessage() } var warningDetails: Option[WarningDetails] = None var alreadyWarned: Boolean = false - def warnAboutSlowJobIfNecessary(jobId: String) = { + def warnAboutSlowJobIfNecessary(jobId: String) = // Don't do anything here because we might need to update state. // Instead, send a message and handle this in the receive block. self ! WarnAboutSlownessIfNecessary - } - private def handleWarnMessage(): Unit = { + private def handleWarnMessage(): Unit = if (!alreadyWarned) { warningDetails match { case Some(WarningDetails(jobId, startTime, warningTime)) if OffsetDateTime.now().isAfter(warningTime) => @@ -34,7 +35,6 @@ trait SlowJobWarning { this: Actor with ActorLogging => case _ => // Nothing to do } } - } } diff --git a/backend/src/main/scala/cromwell/backend/WriteFunctions.scala b/backend/src/main/scala/cromwell/backend/WriteFunctions.scala index 572a417c047..5a8224d37a6 100644 --- a/backend/src/main/scala/cromwell/backend/WriteFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/WriteFunctions.scala @@ -24,13 +24,14 @@ trait WriteFunctions extends PathFactory with IoFunctionSet with AsyncIoFunction */ def writeDirectory: Path - private lazy val _writeDirectory = if (isDocker) writeDirectory.createPermissionedDirectories() else writeDirectory.createDirectories() + private lazy val _writeDirectory = + if (isDocker) writeDirectory.createPermissionedDirectories() else writeDirectory.createDirectories() override def createTemporaryDirectory(name: Option[String]) = { val tempDirPath = _writeDirectory / name.getOrElse(UUID.randomUUID().toString) // This is evil, but has the added advantage to work both for cloud and local val tempDirHiddenFile = tempDirPath / ".file" - asyncIo.writeAsync(tempDirHiddenFile, "", OpenOptions.default) as { tempDirPath.pathAsString } + asyncIo.writeAsync(tempDirHiddenFile, "", OpenOptions.default) as tempDirPath.pathAsString } protected def writeAsync(file: Path, content: String) = asyncIo.writeAsync(file, content, OpenOptions.default) @@ -38,21 +39,22 @@ trait WriteFunctions extends PathFactory with IoFunctionSet with AsyncIoFunction override def writeFile(path: String, content: String): 
Future[WomSingleFile] = { val file = _writeDirectory / path asyncIo.existsAsync(file) flatMap { - case false => writeAsync(file, content) as { WomSingleFile(file.pathAsString) } + case false => writeAsync(file, content) as WomSingleFile(file.pathAsString) case true => Future.successful(WomSingleFile(file.pathAsString)) } } private val relativeToLocal = System.getProperty("user.dir").ensureSlashed - def relativeToAbsolutePath(pathFrom: String): String = if (buildPath(pathFrom).isAbsolute) pathFrom else relativeToLocal + pathFrom + def relativeToAbsolutePath(pathFrom: String): String = + if (buildPath(pathFrom).isAbsolute) pathFrom else relativeToLocal + pathFrom override def copyFile(pathFrom: String, targetName: String): Future[WomSingleFile] = { val source = buildPath(relativeToAbsolutePath(pathFrom)) val destination = _writeDirectory / targetName - asyncIo.copyAsync(source, destination).as(WomSingleFile(destination.pathAsString)) recoverWith { - case e => Future.failed(new Exception(s"Could not copy ${source.toAbsolutePath} to ${destination.toAbsolutePath}", e)) + asyncIo.copyAsync(source, destination).as(WomSingleFile(destination.pathAsString)) recoverWith { case e => + Future.failed(new Exception(s"Could not copy ${source.toAbsolutePath} to ${destination.toAbsolutePath}", e)) } } } diff --git a/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala b/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala index 241081f5a02..2872633d9bd 100644 --- a/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/async/AsyncBackendJobExecutionActor.scala @@ -1,6 +1,5 @@ package cromwell.backend.async - import java.util.concurrent.ExecutionException import akka.actor.{Actor, ActorLogging, ActorRef} @@ -18,10 +17,12 @@ import scala.util.{Failure, Success} object AsyncBackendJobExecutionActor { sealed trait AsyncBackendJobExecutionActorMessage - private final case class IssuePollRequest(executionHandle: ExecutionHandle) extends AsyncBackendJobExecutionActorMessage - private final case class PollResponseReceived(executionHandle: ExecutionHandle) extends AsyncBackendJobExecutionActorMessage - private final case class FailAndStop(reason: Throwable) extends AsyncBackendJobExecutionActorMessage - private final case class Finish(executionHandle: ExecutionHandle) extends AsyncBackendJobExecutionActorMessage + final private case class IssuePollRequest(executionHandle: ExecutionHandle) + extends AsyncBackendJobExecutionActorMessage + final private case class PollResponseReceived(executionHandle: ExecutionHandle) + extends AsyncBackendJobExecutionActorMessage + final private case class FailAndStop(reason: Throwable) extends AsyncBackendJobExecutionActorMessage + final private case class Finish(executionHandle: ExecutionHandle) extends AsyncBackendJobExecutionActorMessage trait JobId @@ -57,27 +58,24 @@ trait AsyncBackendJobExecutionActor { this: Actor with ActorLogging with SlowJob def isTransient(throwable: Throwable): Boolean = false - private def withRetry[A](work: () => Future[A], backOff: SimpleExponentialBackoff): Future[A] = { + private def withRetry[A](work: () => Future[A], backOff: SimpleExponentialBackoff): Future[A] = Retry.withRetry(work, isTransient = isTransient, isFatal = isFatal, backoff = backOff)(context.system) - } - private def robustExecuteOrRecover(mode: ExecutionMode) = { + private def robustExecuteOrRecover(mode: ExecutionMode) = withRetry(() => 
executeOrRecover(mode), executeOrRecoverBackOff) onComplete { case Success(h) => self ! IssuePollRequest(h) case Failure(t) => self ! FailAndStop(t) } - } def pollBackOff: SimpleExponentialBackoff def executeOrRecoverBackOff: SimpleExponentialBackoff - private def robustPoll(handle: ExecutionHandle) = { + private def robustPoll(handle: ExecutionHandle) = withRetry(() => poll(handle), pollBackOff) onComplete { case Success(h) => self ! PollResponseReceived(h) case Failure(t) => self ! FailAndStop(t) } - } private def failAndStop(t: Throwable) = { completionPromise.success(JobFailedNonRetryableResponse(jobDescriptor.key, t, None)) @@ -94,7 +92,16 @@ trait AsyncBackendJobExecutionActor { this: Actor with ActorLogging with SlowJob context.system.scheduler.scheduleOnce(pollBackOff.backoffMillis.millis, self, IssuePollRequest(handle)) () case Finish(SuccessfulExecutionHandle(outputs, returnCode, jobDetritusFiles, executionEvents, _)) => - completionPromise.success(JobSucceededResponse(jobDescriptor.key, Some(returnCode), outputs, Option(jobDetritusFiles), executionEvents, dockerImageUsed, resultGenerationMode = RunOnBackend)) + completionPromise.success( + JobSucceededResponse(jobDescriptor.key, + Some(returnCode), + outputs, + Option(jobDetritusFiles), + executionEvents, + dockerImageUsed, + resultGenerationMode = RunOnBackend + ) + ) context.stop(self) case Finish(FailedNonRetryableExecutionHandle(throwable, returnCode, _)) => completionPromise.success(JobFailedNonRetryableResponse(jobDescriptor.key, throwable, returnCode)) @@ -133,5 +140,5 @@ trait AsyncBackendJobExecutionActor { this: Actor with ActorLogging with SlowJob def jobDescriptor: BackendJobDescriptor - protected implicit def ec: ExecutionContext + implicit protected def ec: ExecutionContext } diff --git a/backend/src/main/scala/cromwell/backend/async/ExecutionHandle.scala b/backend/src/main/scala/cromwell/backend/async/ExecutionHandle.scala index ab1354e4323..ff6a088b1f9 100644 --- a/backend/src/main/scala/cromwell/backend/async/ExecutionHandle.scala +++ b/backend/src/main/scala/cromwell/backend/async/ExecutionHandle.scala @@ -15,8 +15,7 @@ sealed trait ExecutionHandle { def result: ExecutionResult } -final case class PendingExecutionHandle[BackendJobId <: JobId, BackendRunInfo, BackendRunState] -( +final case class PendingExecutionHandle[BackendJobId <: JobId, BackendRunInfo, BackendRunState]( jobDescriptor: BackendJobDescriptor, pendingJob: BackendJobId, runInfo: Option[BackendRunInfo], @@ -30,7 +29,8 @@ final case class SuccessfulExecutionHandle(outputs: CallOutputs, returnCode: Int, jobDetritusFiles: Map[String, Path], executionEvents: Seq[ExecutionEvent], - resultsClonedFrom: Option[BackendJobDescriptor] = None) extends ExecutionHandle { + resultsClonedFrom: Option[BackendJobDescriptor] = None +) extends ExecutionHandle { override val isDone = true override val result = SuccessfulExecution(outputs, returnCode, jobDetritusFiles, executionEvents, resultsClonedFrom) } @@ -41,7 +41,8 @@ sealed trait FailedExecutionHandle extends ExecutionHandle { final case class FailedNonRetryableExecutionHandle(throwable: Throwable, returnCode: Option[Int] = None, - override val kvPairsToSave: Option[Seq[KvPair]]) extends FailedExecutionHandle { + override val kvPairsToSave: Option[Seq[KvPair]] +) extends FailedExecutionHandle { override val isDone = true override val result = NonRetryableExecution(throwable, returnCode) @@ -49,7 +50,8 @@ final case class FailedNonRetryableExecutionHandle(throwable: Throwable, final case class 
FailedRetryableExecutionHandle(throwable: Throwable, returnCode: Option[Int] = None, - override val kvPairsToSave: Option[Seq[KvPair]]) extends FailedExecutionHandle { + override val kvPairsToSave: Option[Seq[KvPair]] +) extends FailedExecutionHandle { override val isDone = true override val result = RetryableExecution(throwable, returnCode) diff --git a/backend/src/main/scala/cromwell/backend/async/ExecutionResult.scala b/backend/src/main/scala/cromwell/backend/async/ExecutionResult.scala index fc8f95e049b..b3c4bcda692 100644 --- a/backend/src/main/scala/cromwell/backend/async/ExecutionResult.scala +++ b/backend/src/main/scala/cromwell/backend/async/ExecutionResult.scala @@ -16,7 +16,8 @@ final case class SuccessfulExecution(outputs: CallOutputs, returnCode: Int, jobDetritusFiles: Map[String, Path], executionEvents: Seq[ExecutionEvent], - resultsClonedFrom: Option[BackendJobDescriptor] = None) extends ExecutionResult + resultsClonedFrom: Option[BackendJobDescriptor] = None +) extends ExecutionResult /** * A user-requested abort of the command. diff --git a/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala b/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala index 8dd9aef3544..e106f06fcd2 100644 --- a/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala +++ b/backend/src/main/scala/cromwell/backend/async/KnownJobFailureException.scala @@ -9,61 +9,77 @@ abstract class KnownJobFailureException extends Exception { def stderrPath: Option[Path] } -final case class WrongReturnCode(jobTag: String, returnCode: Int, stderrPath: Option[Path]) extends KnownJobFailureException { - override def getMessage = s"Job $jobTag exited with return code $returnCode which has not been declared as a valid return code. See 'continueOnReturnCode' runtime attribute for more details." +final case class WrongReturnCode(jobTag: String, returnCode: Int, stderrPath: Option[Path]) + extends KnownJobFailureException { + override def getMessage = + s"Job $jobTag exited with return code $returnCode which has not been declared as a valid return code. See 'continueOnReturnCode' runtime attribute for more details." } -final case class ReturnCodeIsNotAnInt(jobTag: String, returnCode: String, stderrPath: Option[Path]) extends KnownJobFailureException { - override def getMessage = { +final case class ReturnCodeIsNotAnInt(jobTag: String, returnCode: String, stderrPath: Option[Path]) + extends KnownJobFailureException { + override def getMessage = if (returnCode.isEmpty) s"The return code file for job $jobTag was empty." else s"Job $jobTag exited with return code $returnCode which couldn't be converted to an Integer." - } } -final case class StderrNonEmpty(jobTag: String, stderrLength: Long, stderrPath: Option[Path]) extends KnownJobFailureException { - override def getMessage = s"stderr for job $jobTag has length $stderrLength and 'failOnStderr' runtime attribute was true." +final case class StderrNonEmpty(jobTag: String, stderrLength: Long, stderrPath: Option[Path]) + extends KnownJobFailureException { + override def getMessage = + s"stderr for job $jobTag has length $stderrLength and 'failOnStderr' runtime attribute was true." 
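Because these failures form a typed hierarchy rather than bare strings, callers can branch on the failure kind directly. A sketch under that assumption (describe is a hypothetical helper, not part of this file):

    // Classify a failure without parsing its message text.
    def describe(e: KnownJobFailureException): String = e match {
      case f: WrongReturnCode      => s"undeclared return code ${f.returnCode}"
      case f: ReturnCodeIsNotAnInt => s"unparseable return code '${f.returnCode}'"
      case f: StderrNonEmpty       => s"stderr had ${f.stderrLength} bytes with failOnStderr set"
      case other                   => other.getMessage
    }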
} final case class RetryWithMoreMemory(jobTag: String, stderrPath: Option[Path], memoryRetryErrorKeys: Option[List[String]], - logger: LoggingAdapter) extends KnownJobFailureException { + logger: LoggingAdapter +) extends KnownJobFailureException { val errorKeysAsString = memoryRetryErrorKeys match { case None => // this should not occur at this point as one would reach this error class only if Cromwell found one of the // `memory-retry-error-keys` in `stderr` of the task, which is only checked if the `memory-retry-error-keys` // are instantiated in Cromwell config - logger.error(s"Programmer error: found one of the `system.memory-retry-error-keys` in the `stderr` of task but " + - s"didn't find the error keys while generating the exception!") + logger.error( + s"Programmer error: found one of the `system.memory-retry-error-keys` in the `stderr` of task but " + + s"didn't find the error keys while generating the exception!" + ) "" case Some(keys) => keys.mkString(": [", ",", "]") } - override def getMessage = s"stderr for job `$jobTag` contained one of the `memory-retry-error-keys${errorKeysAsString}` specified in " + - s"the Cromwell config. Job might have run out of memory." + override def getMessage = + s"stderr for job `$jobTag` contained one of the `memory-retry-error-keys${errorKeysAsString}` specified in " + + s"the Cromwell config. Job might have run out of memory." } - object RuntimeAttributeValidationFailure { def apply(jobTag: String, runtimeAttributeName: String, - runtimeAttributeValue: Option[WomExpression]): RuntimeAttributeValidationFailure = RuntimeAttributeValidationFailure(jobTag, runtimeAttributeName, runtimeAttributeValue, None) + runtimeAttributeValue: Option[WomExpression] + ): RuntimeAttributeValidationFailure = + RuntimeAttributeValidationFailure(jobTag, runtimeAttributeName, runtimeAttributeValue, None) } final case class RuntimeAttributeValidationFailure private (jobTag: String, runtimeAttributeName: String, runtimeAttributeValue: Option[WomExpression], - stderrPath: Option[Path]) extends KnownJobFailureException { - override def getMessage = s"Task $jobTag has an invalid runtime attribute $runtimeAttributeName = ${runtimeAttributeValue map { _.evaluateValue(Map.empty, NoIoFunctionSet)} getOrElse "!! NOT FOUND !!"}" + stderrPath: Option[Path] +) extends KnownJobFailureException { + override def getMessage = + s"Task $jobTag has an invalid runtime attribute $runtimeAttributeName = ${runtimeAttributeValue map { + _.evaluateValue(Map.empty, NoIoFunctionSet) + } getOrElse "!! 
NOT FOUND !!"}" } -final case class RuntimeAttributeValidationFailures(throwables: List[RuntimeAttributeValidationFailure]) extends KnownJobFailureException with ThrowableAggregation { +final case class RuntimeAttributeValidationFailures(throwables: List[RuntimeAttributeValidationFailure]) + extends KnownJobFailureException + with ThrowableAggregation { override def exceptionContext = "Runtime validation failed" override val stderrPath: Option[Path] = None } -final case class JobAlreadyFailedInJobStore(jobTag: String, originalErrorMessage: String) extends KnownJobFailureException { +final case class JobAlreadyFailedInJobStore(jobTag: String, originalErrorMessage: String) + extends KnownJobFailureException { override def stderrPath: Option[Path] = None override def getMessage = originalErrorMessage } diff --git a/backend/src/main/scala/cromwell/backend/async/package.scala b/backend/src/main/scala/cromwell/backend/async/package.scala index b496126ef11..1f7037e711e 100644 --- a/backend/src/main/scala/cromwell/backend/async/package.scala +++ b/backend/src/main/scala/cromwell/backend/async/package.scala @@ -2,7 +2,6 @@ package cromwell.backend import scala.concurrent.{ExecutionContext, Future} - package object async { implicit class EnhancedFutureFuture[A](val ffa: Future[Future[A]])(implicit ec: ExecutionContext) { def flatten: Future[A] = ffa flatMap identity diff --git a/backend/src/main/scala/cromwell/backend/backend.scala b/backend/src/main/scala/cromwell/backend/backend.scala index 33d16f34b5a..ab7cdf602b6 100644 --- a/backend/src/main/scala/cromwell/backend/backend.scala +++ b/backend/src/main/scala/cromwell/backend/backend.scala @@ -26,9 +26,7 @@ import scala.util.Try /** * For uniquely identifying a job which has been or will be sent to the backend. 
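The key's tag, built just below, has the shape "<fully qualified call name>:<shard index or NA>:<attempt>". A dependency-free sketch of that formatting (jobTag is a hypothetical standalone version):

    def jobTag(fullyQualifiedName: String, index: Option[Int], attempt: Int): String = {
      val indexString = index.map(_.toString).getOrElse("NA")
      s"$fullyQualifiedName:$indexString:$attempt"
    }

    // jobTag("wf.task", None, 1)    yields "wf.task:NA:1"
    // jobTag("wf.task", Some(3), 2) yields "wf.task:3:2"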
*/ -case class BackendJobDescriptorKey(call: CommandCallNode, - index: Option[Int], - attempt: Int) extends CallKey { +case class BackendJobDescriptorKey(call: CommandCallNode, index: Option[Int], attempt: Int) extends CallKey { def node = call private val indexString = index map { _.toString } getOrElse "NA" lazy val tag = s"${call.fullyQualifiedName}:$indexString:$attempt" @@ -44,15 +42,19 @@ final case class BackendJobDescriptor(workflowDescriptor: BackendWorkflowDescrip evaluatedTaskInputs: WomEvaluatedCallInputs, maybeCallCachingEligible: MaybeCallCachingEligible, dockerSize: Option[DockerSize], - prefetchedKvStoreEntries: Map[String, KvResponse]) { + prefetchedKvStoreEntries: Map[String, KvResponse] +) { val fullyQualifiedInputs: Map[String, WomValue] = evaluatedTaskInputs map { case (declaration, value) => key.call.identifier.combine(declaration.name).fullyQualifiedName.value -> value } - def findInputFilesByParameterMeta(filter: MetaValueElement => Boolean): Set[WomFile] = evaluatedTaskInputs.collect { - case (declaration, value) if declaration.parameterMeta.exists(filter) => findFiles(value) - }.flatten.toSet + def findInputFilesByParameterMeta(filter: MetaValueElement => Boolean): Set[WomFile] = evaluatedTaskInputs + .collect { + case (declaration, value) if declaration.parameterMeta.exists(filter) => findFiles(value) + } + .flatten + .toSet def findFiles(v: WomValue): Set[WomFile] = v match { case value: WomFile => Set(value) @@ -79,7 +81,8 @@ case class BackendWorkflowDescriptor(id: WorkflowId, customLabels: Labels, hogGroup: HogGroup, breadCrumbs: List[BackendJobBreadCrumb], - outputRuntimeExtractor: Option[WomOutputRuntimeExtractor]) { + outputRuntimeExtractor: Option[WomOutputRuntimeExtractor] +) { val rootWorkflow = breadCrumbs.headOption.map(_.callable).getOrElse(callable) val possiblyNotRootWorkflowId = id.toPossiblyNotRoot @@ -100,33 +103,31 @@ case class BackendConfigurationDescriptor(backendConfig: Config, globalConfig: C Option(backendConfig.getConfig("default-runtime-attributes")) else None - + // So it can be overridden in tests - private [backend] lazy val cromwellFileSystems = CromwellFileSystems.instance + private[backend] lazy val cromwellFileSystems = CromwellFileSystems.instance - lazy val configuredPathBuilderFactories: Map[String, PathBuilderFactory] = { + lazy val configuredPathBuilderFactories: Map[String, PathBuilderFactory] = cromwellFileSystems.factoriesFromConfig(backendConfig).unsafe("Failed to instantiate backend filesystem") - } - private lazy val configuredFactoriesWithDefault = if (configuredPathBuilderFactories.values.exists(_ == DefaultPathBuilderFactory)) { - configuredPathBuilderFactories - } else configuredPathBuilderFactories + DefaultPathBuilderFactory.tuple + private lazy val configuredFactoriesWithDefault = + if (configuredPathBuilderFactories.values.exists(_ == DefaultPathBuilderFactory)) { + configuredPathBuilderFactories + } else configuredPathBuilderFactories + DefaultPathBuilderFactory.tuple /** * Creates path builders using only the configured factories. 
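configuredFactoriesWithDefault above is an "append the default unless one of the configured values already is the default" rule. A generic sketch of that shape (withDefault is hypothetical):

    // Add the default entry only when no configured value already equals it,
    // so an explicitly configured local filesystem is not duplicated.
    def withDefault[K, V](configured: Map[K, V], defaultEntry: (K, V)): Map[K, V] =
      if (configured.values.exists(_ == defaultEntry._2)) configured
      else configured + defaultEntry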
*/ - def pathBuilders(workflowOptions: WorkflowOptions)(implicit as: ActorSystem) = { + def pathBuilders(workflowOptions: WorkflowOptions)(implicit as: ActorSystem) = PathBuilderFactory.instantiatePathBuilders(configuredPathBuilderFactories.values.toList, workflowOptions) - } /** * Creates path builders using only the configured factories + the default factory */ - def pathBuildersWithDefault(workflowOptions: WorkflowOptions)(implicit as: ActorSystem) = { + def pathBuildersWithDefault(workflowOptions: WorkflowOptions)(implicit as: ActorSystem) = PathBuilderFactory.instantiatePathBuilders(configuredFactoriesWithDefault.values.toList, workflowOptions) - } - lazy val slowJobWarningAfter = backendConfig.as[Option[FiniteDuration]](path="slow-job-warning-time") + lazy val slowJobWarningAfter = backendConfig.as[Option[FiniteDuration]](path = "slow-job-warning-time") } object CommonBackendConfigurationAttributes { @@ -147,7 +148,7 @@ object CommonBackendConfigurationAttributes { "dockerhub.token", "dockerhub.auth", "dockerhub.key-name", - "name-for-call-caching-purposes", + "name-for-call-caching-purposes" ) } diff --git a/backend/src/main/scala/cromwell/backend/dummy/DummyAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/dummy/DummyAsyncExecutionActor.scala index 5c491c5a741..b84843fea5e 100644 --- a/backend/src/main/scala/cromwell/backend/dummy/DummyAsyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/dummy/DummyAsyncExecutionActor.scala @@ -9,7 +9,12 @@ import cats.implicits._ import common.exception.AggregatedMessageException import common.validation.ErrorOr.ErrorOr import cromwell.backend.BackendJobLifecycleActor -import cromwell.backend.async.{ExecutionHandle, FailedNonRetryableExecutionHandle, PendingExecutionHandle, SuccessfulExecutionHandle} +import cromwell.backend.async.{ + ExecutionHandle, + FailedNonRetryableExecutionHandle, + PendingExecutionHandle, + SuccessfulExecutionHandle +} import cromwell.backend.standard.{StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} import cromwell.core.CallOutputs import cromwell.core.retry.SimpleExponentialBackoff @@ -22,12 +27,13 @@ import scala.concurrent.Future import scala.concurrent.duration._ class DummyAsyncExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends BackendJobLifecycleActor + extends BackendJobLifecycleActor with StandardAsyncExecutionActor with CromwellInstrumentation { /** The type of the run info when a job is started. */ override type StandardAsyncRunInfo = String + /** The type of the run status returned during each poll. 
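The SimpleExponentialBackoff instances configured just below produce delays that grow geometrically from the initial interval and saturate at the maximum (roughly 1s, 1.1s, 1.21s, and so on, capped at 300s). A dependency-free sketch of that schedule (backoffDelays is hypothetical):

    // Delay sequence for initial=1000ms, multiplier=1.1, max=300000ms:
    // approximately 1000, 1100, 1210, 1331, ... then a constant 300000.
    def backoffDelays(initialMillis: Double, maxMillis: Double, multiplier: Double): Iterator[Double] =
      Iterator.iterate(initialMillis)(d => math.min(d * multiplier, maxMillis))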
*/ override type StandardAsyncRunState = String @@ -46,14 +52,17 @@ class DummyAsyncExecutionActor(override val standardParams: StandardAsyncExecuti override def dockerImageUsed: Option[String] = None - override def pollBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(initialInterval = 1.second, maxInterval = 300.seconds, multiplier = 1.1) + override def pollBackOff: SimpleExponentialBackoff = + SimpleExponentialBackoff(initialInterval = 1.second, maxInterval = 300.seconds, multiplier = 1.1) - override def executeOrRecoverBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(initialInterval = 1.second, maxInterval = 300.seconds, multiplier = 1.1) + override def executeOrRecoverBackOff: SimpleExponentialBackoff = + SimpleExponentialBackoff(initialInterval = 1.second, maxInterval = 300.seconds, multiplier = 1.1) override val logJobIds: Boolean = false val singletonActor = standardParams.backendSingletonActorOption.getOrElse( - throw new RuntimeException("Dummy Backend actor cannot exist without its singleton actor")) + throw new RuntimeException("Dummy Backend actor cannot exist without its singleton actor") + ) var finishTime: Option[OffsetDateTime] = None @@ -71,46 +80,54 @@ class DummyAsyncExecutionActor(override val standardParams: StandardAsyncExecuti ) } - override def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[String] = { + override def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[String] = finishTime match { - case Some(ft) if (ft.isBefore(OffsetDateTime.now)) => Future.successful("done") + case Some(ft) if ft.isBefore(OffsetDateTime.now) => Future.successful("done") case Some(_) => Future.successful("running") - case None => Future.failed(new Exception("Dummy backend polling for status before finishTime is established(!!?)")) + case None => + Future.failed(new Exception("Dummy backend polling for status before finishTime is established(!!?)")) } - } - - override def handlePollSuccess(oldHandle: StandardAsyncPendingExecutionHandle, state: String): Future[ExecutionHandle] = { - + override def handlePollSuccess(oldHandle: StandardAsyncPendingExecutionHandle, + state: String + ): Future[ExecutionHandle] = if (state == "done") { increment(NonEmptyList("jobs", List("dummy", "executing", "done"))) singletonActor ! 
DummySingletonActor.MinusOne - val outputsValidation: ErrorOr[Map[OutputPort, WomValue]] = jobDescriptor.taskCall.outputPorts.toList.traverse { - case expressionBasedOutputPort: ExpressionBasedOutputPort => - expressionBasedOutputPort.expression.evaluateValue(Map.empty, NoIoFunctionSet).map(expressionBasedOutputPort -> _) - case other => s"Unknown output port type for Dummy backend output evaluator: ${other.getClass.getSimpleName}".invalidNel - }.map(_.toMap) + val outputsValidation: ErrorOr[Map[OutputPort, WomValue]] = jobDescriptor.taskCall.outputPorts.toList + .traverse { + case expressionBasedOutputPort: ExpressionBasedOutputPort => + expressionBasedOutputPort.expression + .evaluateValue(Map.empty, NoIoFunctionSet) + .map(expressionBasedOutputPort -> _) + case other => + s"Unknown output port type for Dummy backend output evaluator: ${other.getClass.getSimpleName}".invalidNel + } + .map(_.toMap) outputsValidation match { case Valid(outputs) => - Future.successful(SuccessfulExecutionHandle( - outputs = CallOutputs(outputs.toMap), - returnCode = 0, - jobDetritusFiles = Map.empty, - executionEvents = Seq.empty, - resultsClonedFrom = None - )) + Future.successful( + SuccessfulExecutionHandle( + outputs = CallOutputs(outputs.toMap), + returnCode = 0, + jobDetritusFiles = Map.empty, + executionEvents = Seq.empty, + resultsClonedFrom = None + ) + ) case Invalid(errors) => - Future.successful(FailedNonRetryableExecutionHandle( - throwable = AggregatedMessageException("Evaluate outputs from dummy job", errors.toList), - returnCode = None, - kvPairsToSave = None - )) + Future.successful( + FailedNonRetryableExecutionHandle( + throwable = AggregatedMessageException("Evaluate outputs from dummy job", errors.toList), + returnCode = None, + kvPairsToSave = None + ) + ) } - } - else if (state == "running") { + } else if (state == "running") { Future.successful( PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState]( jobDescriptor = jobDescriptor, @@ -119,9 +136,7 @@ class DummyAsyncExecutionActor(override val standardParams: StandardAsyncExecuti previousState = Option(state) ) ) - } - else { + } else { Future.failed(new Exception(s"Unexpected Dummy state in handlePollSuccess: $state")) } - } } diff --git a/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala b/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala index 34fd2e8bc51..53a310d1dd8 100644 --- a/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala +++ b/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala @@ -2,16 +2,22 @@ package cromwell.backend.dummy import cats.syntax.validated._ import common.validation.ErrorOr.ErrorOr -import cromwell.backend.standard.{StandardInitializationActor, StandardInitializationActorParams, StandardValidatedRuntimeAttributesBuilder} +import cromwell.backend.standard.{ + StandardInitializationActor, + StandardInitializationActorParams, + StandardValidatedRuntimeAttributesBuilder +} import cromwell.backend.validation.RuntimeAttributesValidation import wom.expression.WomExpression import wom.types.{WomStringType, WomType} import wom.values.{WomString, WomValue} class DummyInitializationActor(pipelinesParams: StandardInitializationActorParams) - extends StandardInitializationActor(pipelinesParams) { + extends StandardInitializationActor(pipelinesParams) { - override protected lazy val runtimeAttributeValidators: Map[String, Option[WomExpression] => Boolean] = Map("backend" -> { _ => true } 
) + override protected lazy val runtimeAttributeValidators: Map[String, Option[WomExpression] => Boolean] = Map( + "backend" -> { _ => true } + ) // Specific validator for "backend" to let me specify it in test cases (to avoid accidentally submitting the workflow to real backends!) val backendAttributeValidation: RuntimeAttributesValidation[String] = new RuntimeAttributesValidation[String] { @@ -25,5 +31,6 @@ class DummyInitializationActor(pipelinesParams: StandardInitializationActorParam } } - override def runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder = super.runtimeAttributesBuilder.withValidation(backendAttributeValidation) + override def runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder = + super.runtimeAttributesBuilder.withValidation(backendAttributeValidation) } diff --git a/backend/src/main/scala/cromwell/backend/dummy/DummyLifecycleActorFactory.scala b/backend/src/main/scala/cromwell/backend/dummy/DummyLifecycleActorFactory.scala index ee959513a36..492de4970a9 100644 --- a/backend/src/main/scala/cromwell/backend/dummy/DummyLifecycleActorFactory.scala +++ b/backend/src/main/scala/cromwell/backend/dummy/DummyLifecycleActorFactory.scala @@ -3,9 +3,15 @@ package cromwell.backend.dummy import akka.actor.{ActorRef, Props} import cromwell.backend.BackendConfigurationDescriptor import cromwell.backend.standard.callcaching.{StandardCacheHitCopyingActor, StandardFileHashingActor} -import cromwell.backend.standard.{StandardAsyncExecutionActor, StandardInitializationActor, StandardLifecycleActorFactory} +import cromwell.backend.standard.{ + StandardAsyncExecutionActor, + StandardInitializationActor, + StandardLifecycleActorFactory +} -class DummyLifecycleActorFactory(override val name: String, override val configurationDescriptor: BackendConfigurationDescriptor) extends StandardLifecycleActorFactory { +class DummyLifecycleActorFactory(override val name: String, + override val configurationDescriptor: BackendConfigurationDescriptor +) extends StandardLifecycleActorFactory { /** * @return the key to use for storing and looking up the job id. 
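The factory below wires an optional backend-wide singleton actor through Props, which Cromwell then shares across that backend's job actors. A minimal Akka classic sketch of the pattern (CountingSingleton is hypothetical):

    import akka.actor.{Actor, ActorRef, Props}

    class CountingSingleton extends Actor {
      private var count = 0
      def receive: Receive = { case delta: Int => count += delta }
    }

    // One optional singleton per backend, as in backendSingletonActorProps below.
    def singletonProps(serviceRegistryActor: ActorRef): Option[Props] =
      Option(Props(new CountingSingleton()))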
@@ -23,8 +29,11 @@ class DummyLifecycleActorFactory(override val name: String, override val configu // Don't hash files override lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = None - override def backendSingletonActorProps(serviceRegistryActor: ActorRef): Option[Props] = Option(Props(new DummySingletonActor())) + override def backendSingletonActorProps(serviceRegistryActor: ActorRef): Option[Props] = Option( + Props(new DummySingletonActor()) + ) - override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = classOf[DummyInitializationActor] + override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = + classOf[DummyInitializationActor] } diff --git a/backend/src/main/scala/cromwell/backend/dummy/DummySingletonActor.scala b/backend/src/main/scala/cromwell/backend/dummy/DummySingletonActor.scala index 35c689ae2b4..4ea2cf7951c 100644 --- a/backend/src/main/scala/cromwell/backend/dummy/DummySingletonActor.scala +++ b/backend/src/main/scala/cromwell/backend/dummy/DummySingletonActor.scala @@ -22,7 +22,7 @@ final class DummySingletonActor() extends Actor with StrictLogging { case PlusOne => count = count + 1 case MinusOne => count = count - 1 case PrintCount => - if(countHistory.lastOption.exists(_._2 != count)) { + if (countHistory.lastOption.exists(_._2 != count)) { countHistory = countHistory :+ (OffsetDateTime.now() -> count) logger.info("The current count is now: " + count) if (count == 0) { @@ -52,7 +52,7 @@ final class DummySingletonActor() extends Actor with StrictLogging { bw.close() } - context.system.scheduler.schedule(10.seconds, 1.second) { self ! PrintCount } + context.system.scheduler.schedule(10.seconds, 1.second)(self ! PrintCount) } object DummySingletonActor { @@ -60,4 +60,3 @@ object DummySingletonActor { case object MinusOne case object PrintCount } - diff --git a/backend/src/main/scala/cromwell/backend/io/DirectoryFunctions.scala b/backend/src/main/scala/cromwell/backend/io/DirectoryFunctions.scala index 7b68e2bf723..84d45d1ee61 100644 --- a/backend/src/main/scala/cromwell/backend/io/DirectoryFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/io/DirectoryFunctions.scala @@ -11,7 +11,14 @@ import cromwell.core.path.{Path, PathFactory} import wom.expression.IoFunctionSet.{IoDirectory, IoElement, IoFile} import wom.expression.{IoFunctionSet, IoFunctionSetAdapter} import wom.graph.CommandCallNode -import wom.values.{WomFile, WomGlobFile, WomMaybeListedDirectory, WomMaybePopulatedFile, WomSingleFile, WomUnlistedDirectory} +import wom.values.{ + WomFile, + WomGlobFile, + WomMaybeListedDirectory, + WomMaybePopulatedFile, + WomSingleFile, + WomUnlistedDirectory +} import scala.concurrent.Future import scala.util.Try @@ -21,13 +28,18 @@ trait DirectoryFunctions extends IoFunctionSet with PathFactory with AsyncIoFunc private lazy val evaluateFileFunctions = new IoFunctionSetAdapter(this) with FileEvaluationIoFunctionSet def findDirectoryOutputs(call: CommandCallNode, - jobDescriptor: BackendJobDescriptor): ErrorOr[List[WomUnlistedDirectory]] = { + jobDescriptor: BackendJobDescriptor + ): ErrorOr[List[WomUnlistedDirectory]] = call.callable.outputs.flatTraverse[ErrorOr, WomUnlistedDirectory] { outputDefinition => - outputDefinition.expression.evaluateFiles(jobDescriptor.localInputs, evaluateFileFunctions, outputDefinition.womType) map { - _.toList.flatMap(_.file.flattenFiles) collect { case unlistedDirectory: WomUnlistedDirectory => unlistedDirectory } + 
outputDefinition.expression.evaluateFiles(jobDescriptor.localInputs, + evaluateFileFunctions, + outputDefinition.womType + ) map { + _.toList.flatMap(_.file.flattenFiles) collect { case unlistedDirectory: WomUnlistedDirectory => + unlistedDirectory + } } } - } override def isDirectory(path: String) = asyncIo.isDirectory(buildPath(path)) @@ -39,7 +51,7 @@ trait DirectoryFunctions extends IoFunctionSet with PathFactory with AsyncIoFunc * implementation which lists files and directories children. What we need is the unix behavior, even for cloud filesystems. * 3) It uses the isDirectory function directly on the path, which cannot be trusted for GCS paths. It should use asyncIo.isDirectory instead. */ - override def listDirectory(path: String)(visited: Vector[String] = Vector.empty): Future[Iterator[IoElement]] = { + override def listDirectory(path: String)(visited: Vector[String] = Vector.empty): Future[Iterator[IoElement]] = Future.fromTry(Try { val visitedPaths = visited.map(buildPath) val cromwellPath = buildPath(path.ensureSlashed) @@ -47,21 +59,21 @@ trait DirectoryFunctions extends IoFunctionSet with PathFactory with AsyncIoFunc // To prevent infinite recursion through symbolic links make sure we don't visit the same directory twice def hasBeenVisited(other: Path) = visitedPaths.exists(_.isSameFileAs(other)) - cromwellPath.list.collect({ - case directory if directory.isDirectory && - !cromwellPath.isSamePathAs(directory) && - !hasBeenVisited(directory) => IoDirectory(directory.pathAsString) + cromwellPath.list.collect { + case directory + if directory.isDirectory && + !cromwellPath.isSamePathAs(directory) && + !hasBeenVisited(directory) => + IoDirectory(directory.pathAsString) case file => IoFile(file.pathAsString) - }) + } }) - } - override def listAllFilesUnderDirectory(dirPath: String): Future[Seq[String]] = { + override def listAllFilesUnderDirectory(dirPath: String): Future[Seq[String]] = temporaryImplListPaths(dirPath) - } // TODO: WOM: WOMFILE: This will likely use a Tuple2(tar file, dir list file) for each dirPath. 
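temporaryImplListPaths flattens a recursive listing down to regular files only. A java.nio-only sketch of that behavior (stream closing is elided for brevity; listAllFilesUnder is hypothetical):

    import java.nio.file.{Files, Path}
    import scala.jdk.CollectionConverters._

    // Walk dir recursively and keep only regular files, as string paths.
    def listAllFilesUnder(dir: Path): List[String] =
      Files.walk(dir).iterator().asScala
        .filter(p => Files.isRegularFile(p))
        .map(_.toString)
        .toList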
- private final def temporaryImplListPaths(dirPath: String): Future[Seq[String]] = { + final private def temporaryImplListPaths(dirPath: String): Future[Seq[String]] = { val errorOrPaths = for { dir <- validate(buildPath(dirPath.ensureSlashed)) files <- listFiles(dir) @@ -74,7 +86,7 @@ object DirectoryFunctions { def listFiles(path: Path): ErrorOr[List[Path]] = path.listRecursively.filterNot(_.isDirectory).toList.validNel def listWomSingleFiles(womFile: WomFile, pathFactory: PathFactory): ErrorOr[List[WomSingleFile]] = { - def listWomSingleFiles(womFile: WomFile): ErrorOr[List[WomSingleFile]] = { + def listWomSingleFiles(womFile: WomFile): ErrorOr[List[WomSingleFile]] = womFile match { case womSingleFile: WomSingleFile => List(womSingleFile).valid @@ -99,7 +111,6 @@ object DirectoryFunctions { case _: WomGlobFile => s"Unexpected glob / unable to list glob files at this time: $womFile".invalidNel } - } listWomSingleFiles(womFile) } diff --git a/backend/src/main/scala/cromwell/backend/io/FileEvaluationIoFunctionSet.scala b/backend/src/main/scala/cromwell/backend/io/FileEvaluationIoFunctionSet.scala index f46ff9ce9e2..0408c590cb2 100644 --- a/backend/src/main/scala/cromwell/backend/io/FileEvaluationIoFunctionSet.scala +++ b/backend/src/main/scala/cromwell/backend/io/FileEvaluationIoFunctionSet.scala @@ -5,5 +5,6 @@ import wom.expression.IoFunctionSet import scala.concurrent.Future trait FileEvaluationIoFunctionSet { this: IoFunctionSet => - override def glob(pattern: String) = Future.failed(new IllegalStateException("Cannot perform globing while evaluating files")) + override def glob(pattern: String) = + Future.failed(new IllegalStateException("Cannot perform globing while evaluating files")) } diff --git a/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala b/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala index 680c073bf45..5ed3b4a30ff 100644 --- a/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/io/GlobFunctions.scala @@ -19,7 +19,10 @@ trait GlobFunctions extends IoFunctionSet with AsyncIoFunctions { def findGlobOutputs(call: CommandCallNode, jobDescriptor: BackendJobDescriptor): ErrorOr[List[WomGlobFile]] = { def fromOutputs = call.callable.outputs.flatTraverse[ErrorOr, WomGlobFile] { outputDefinition => - outputDefinition.expression.evaluateFiles(jobDescriptor.localInputs, evaluateFileFunctions, outputDefinition.womType) map { + outputDefinition.expression.evaluateFiles(jobDescriptor.localInputs, + evaluateFileFunctions, + outputDefinition.womType + ) map { _.toList.flatMap(_.file.flattenFiles) collect { case glob: WomGlobFile => glob } } } @@ -40,7 +43,7 @@ trait GlobFunctions extends IoFunctionSet with AsyncIoFunctions { val listFilePath = callContext.root.resolve(s"${globName(pattern)}.list") asyncIo.readLinesAsync(listFilePath.getSymlinkSafePath()) map { lines => lines.toList map { fileName => - (callContext.root / globPatternName / fileName).pathAsString + (callContext.root / globPatternName / fileName).pathAsString } } } diff --git a/backend/src/main/scala/cromwell/backend/io/JobPaths.scala b/backend/src/main/scala/cromwell/backend/io/JobPaths.scala index eb9e9ec31d7..05ad6a56dc1 100644 --- a/backend/src/main/scala/cromwell/backend/io/JobPaths.scala +++ b/backend/src/main/scala/cromwell/backend/io/JobPaths.scala @@ -54,16 +54,14 @@ trait JobPaths { /** * Return a host path corresponding to the specified container path. 
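The container-to-host mapping described above is prefix surgery: strip the in-container root, then re-anchor under the host execution root, exactly as hostPathFromContainerPath does below. A sketch with java.nio types (the parameter names are hypothetical):

    import java.nio.file.Path

    def hostPathFromContainer(containerPath: String, rootWithSlash: String, callExecutionRoot: Path): Path =
      callExecutionRoot.resolve(containerPath.stripPrefix(rootWithSlash))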
*/ - def hostPathFromContainerPath(string: String): Path = { + def hostPathFromContainerPath(string: String): Path = // No container here, just return a Path of the absolute path to the file. callExecutionRoot.resolve(string.stripPrefix(rootWithSlash)) - } def hostPathFromContainerInputs(string: String): Path = // No container here, just return a Path of the absolute path to the file. callExecutionRoot.resolve(string.stripPrefix(rootWithSlash)) - def scriptFilename: String = "script" def dockerCidFilename: String = "docker_cid" diff --git a/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala b/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala index f6d8855f3fa..2fb81a44019 100644 --- a/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala +++ b/backend/src/main/scala/cromwell/backend/io/JobPathsWithDocker.scala @@ -6,18 +6,19 @@ import cromwell.backend.{BackendJobDescriptorKey, BackendWorkflowDescriptor} import cromwell.core.path.Path object JobPathsWithDocker { - def apply(jobKey: BackendJobDescriptorKey, - workflowDescriptor: BackendWorkflowDescriptor, - config: Config) = { + def apply(jobKey: BackendJobDescriptorKey, workflowDescriptor: BackendWorkflowDescriptor, config: Config) = { val workflowPaths = new WorkflowPathsWithDocker(workflowDescriptor, config, WorkflowPaths.DefaultPathBuilders) new JobPathsWithDocker(workflowPaths, jobKey) } } -case class JobPathsWithDocker private[io] (override val workflowPaths: WorkflowPathsWithDocker, jobKey: BackendJobDescriptorKey, override val isCallCacheCopyAttempt: Boolean = false) extends JobPaths { +case class JobPathsWithDocker private[io] (override val workflowPaths: WorkflowPathsWithDocker, + jobKey: BackendJobDescriptorKey, + override val isCallCacheCopyAttempt: Boolean = false +) extends JobPaths { import JobPaths._ - override lazy val callExecutionRoot = { callRoot.resolve("execution") } + override lazy val callExecutionRoot = callRoot.resolve("execution") override def isDocker: Boolean = true val callDockerRoot = callPathBuilder(workflowPaths.dockerWorkflowRoot, jobKey, isCallCacheCopyAttempt) val callExecutionDockerRoot = callDockerRoot.resolve("execution") @@ -29,34 +30,31 @@ case class JobPathsWithDocker private[io] (override val workflowPaths: WorkflowP override def isInExecution(string: String): Boolean = string.startsWith(callExecutionDockerRootWithSlash) - override def hostPathFromContainerPath(string: String): Path = { + override def hostPathFromContainerPath(string: String): Path = callExecutionRoot.resolve(string.stripPrefix(callExecutionDockerRootWithSlash)) - } - override def hostPathFromContainerInputs(string: String): Path = { val stripped = string.stripPrefix(callInputsDockerRootWithSlash) callInputsRoot.resolve(stripped) } - def toDockerPath(path: Path): Path = { + def toDockerPath(path: Path): Path = path.toAbsolutePath match { case p if p.startsWith(workflowPaths.dockerRoot) => p case p => /* For example: - * - * p = /abs/path/to/cromwell-executions/three-step/f00ba4/call-ps/stdout.txt - * localExecutionRoot = /abs/path/to/cromwell-executions - * subpath = three-step/f00ba4/call-ps/stdout.txt - * - * return value = /root/three-step/f00ba4/call-ps/stdout.txt - * - * TODO: this assumes that p.startsWith(localExecutionRoot) - */ + * + * p = /abs/path/to/cromwell-executions/three-step/f00ba4/call-ps/stdout.txt + * localExecutionRoot = /abs/path/to/cromwell-executions + * subpath = three-step/f00ba4/call-ps/stdout.txt + * + * return value = 
/root/three-step/f00ba4/call-ps/stdout.txt + * + * TODO: this assumes that p.startsWith(localExecutionRoot) + */ val subpath = p.subpath(workflowPaths.executionRoot.getNameCount, p.getNameCount) workflowPaths.dockerRoot.resolve(subpath) } - } override def forCallCacheCopyAttempts: JobPaths = this.copy(isCallCacheCopyAttempt = true) } diff --git a/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala b/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala index 8c233717898..228285dbd5d 100644 --- a/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala +++ b/backend/src/main/scala/cromwell/backend/io/WorkflowPaths.scala @@ -20,7 +20,8 @@ trait WorkflowPaths extends PathFactory { /** * Path (as a String) of the root directory Cromwell should use for ALL workflows. */ - protected lazy val executionRootString: String = config.as[Option[String]]("root").getOrElse(WorkflowPaths.DefaultExecutionRootString) + protected lazy val executionRootString: String = + config.as[Option[String]]("root").getOrElse(WorkflowPaths.DefaultExecutionRootString) /** * Implementers of this trait might override this to provide an appropriate prefix corresponding to the execution root @@ -51,18 +52,17 @@ trait WorkflowPaths extends PathFactory { def getPath(url: String): Try[Path] = Try(buildPath(url)) // Rebuild potential intermediate call directories in case of a sub workflow - protected def workflowPathBuilder(root: Path): Path = { - workflowDescriptor.breadCrumbs.foldLeft(root)((acc, breadCrumb) => { - breadCrumb.toPath(acc) - }).resolve(workflowDescriptor.callable.name).resolve(workflowDescriptor.id.toString + "/") - } + protected def workflowPathBuilder(root: Path): Path = + workflowDescriptor.breadCrumbs + .foldLeft(root)((acc, breadCrumb) => breadCrumb.toPath(acc)) + .resolve(workflowDescriptor.callable.name) + .resolve(workflowDescriptor.id.toString + "/") lazy val finalCallLogsPath: Option[Path] = workflowDescriptor.getWorkflowOption(FinalCallLogsDir) map getPath map { _.get } - def toJobPaths(jobDescriptor: BackendJobDescriptor): JobPaths = { + def toJobPaths(jobDescriptor: BackendJobDescriptor): JobPaths = toJobPaths(jobDescriptor.key, jobDescriptor.workflowDescriptor) - } /** * Creates job paths using the key and workflow descriptor. @@ -73,11 +73,10 @@ trait WorkflowPaths extends PathFactory { * @param jobWorkflowDescriptor The workflow descriptor for the job. * @return The paths for the job. 
*/ - def toJobPaths(jobKey: BackendJobDescriptorKey, jobWorkflowDescriptor: BackendWorkflowDescriptor): JobPaths = { + def toJobPaths(jobKey: BackendJobDescriptorKey, jobWorkflowDescriptor: BackendWorkflowDescriptor): JobPaths = // If the descriptors are the same, no need to create a new WorkflowPaths if (workflowDescriptor == jobWorkflowDescriptor) toJobPaths(this, jobKey) else toJobPaths(withDescriptor(jobWorkflowDescriptor), jobKey) - } protected def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): JobPaths diff --git a/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala b/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala index 78dc1ed77e3..38ea17fe61c 100644 --- a/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala +++ b/backend/src/main/scala/cromwell/backend/io/WorkflowPathsWithDocker.scala @@ -9,16 +9,19 @@ object WorkflowPathsWithDocker { val DefaultDockerRoot = "/cromwell-executions" } -final case class WorkflowPathsWithDocker(workflowDescriptor: BackendWorkflowDescriptor, config: Config, pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders) extends WorkflowPaths { +final case class WorkflowPathsWithDocker(workflowDescriptor: BackendWorkflowDescriptor, + config: Config, + pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders +) extends WorkflowPaths { val dockerRoot: Path = DefaultPathBuilder.get( config.getOrElse[String]("dockerRoot", WorkflowPathsWithDocker.DefaultDockerRoot) ) val dockerWorkflowRoot: Path = workflowPathBuilder(dockerRoot) - override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): JobPathsWithDocker = { + override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): JobPathsWithDocker = new JobPathsWithDocker(workflowPaths.asInstanceOf[WorkflowPathsWithDocker], jobKey, isCallCacheCopyAttempt = false) - } - override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = this.copy(workflowDescriptor = workflowDescriptor) + override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = + this.copy(workflowDescriptor = workflowDescriptor) } diff --git a/backend/src/main/scala/cromwell/backend/package.scala b/backend/src/main/scala/cromwell/backend/package.scala index 132fcf57887..ca58701641f 100644 --- a/backend/src/main/scala/cromwell/backend/package.scala +++ b/backend/src/main/scala/cromwell/backend/package.scala @@ -1,6 +1,7 @@ package cromwell package object backend { + /** Represents the jobKeys executed by a (potentially sub-) workflow at a given point in time */ type JobExecutionMap = Map[BackendWorkflowDescriptor, List[BackendJobDescriptorKey]] } diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala index 3b116ae1b99..00b5f37513b 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala @@ -11,7 +11,11 @@ import common.util.TryUtil import common.validation.ErrorOr.{ErrorOr, ShortCircuitingFlatMap} import common.validation.IOChecked._ import common.validation.Validation._ -import cromwell.backend.BackendJobExecutionActor.{BackendJobExecutionResponse, JobAbortedResponse, JobReconnectionNotSupportedException} +import cromwell.backend.BackendJobExecutionActor.{ + 
BackendJobExecutionResponse, + JobAbortedResponse, + JobReconnectionNotSupportedException +} import cromwell.backend.BackendLifecycleActor.AbortJobCommand import cromwell.backend.BackendLifecycleActorFactory.{FailedRetryCountKey, MemoryMultiplierKey} import cromwell.backend.OutputEvaluator._ @@ -44,12 +48,12 @@ import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} trait StandardAsyncExecutionActorParams extends StandardJobExecutionActorParams { + /** The promise that will be completed when the async run is complete. */ def completionPromise: Promise[BackendJobExecutionResponse] } -case class DefaultStandardAsyncExecutionActorParams -( +case class DefaultStandardAsyncExecutionActorParams( override val jobIdKey: String, override val serviceRegistryActor: ActorRef, override val ioActor: ActorRef, @@ -76,7 +80,11 @@ case class ScriptPreambleData(bashString: String, executeInSubshell: Boolean = t * as the common behavior among the backends adjusts in unison. */ trait StandardAsyncExecutionActor - extends AsyncBackendJobExecutionActor with StandardCachingActorHelper with AsyncIoActorClient with KvClient with SlowJobWarning { + extends AsyncBackendJobExecutionActor + with StandardCachingActorHelper + with AsyncIoActorClient + with KvClient + with SlowJobWarning { this: Actor with ActorLogging with BackendJobLifecycleActor => override lazy val ioCommandBuilder: IoCommandBuilder = DefaultIoCommandBuilder @@ -100,7 +108,8 @@ trait StandardAsyncExecutionActor def statusEquivalentTo(thiz: StandardAsyncRunState)(that: StandardAsyncRunState): Boolean /** The pending execution handle for each poll. */ - type StandardAsyncPendingExecutionHandle = PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState] + type StandardAsyncPendingExecutionHandle = + PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState] /** Standard set of parameters passed to the backend. */ def standardParams: StandardAsyncExecutionActorParams @@ -150,39 +159,36 @@ trait StandardAsyncExecutionActor protected def cloudResolveWomFile(womFile: WomFile): WomFile = womFile /** Process files while resolving files that will not be localized to their actual cloud locations. */ - private def mapOrCloudResolve(mapper: WomFile => WomFile): WomValue => Try[WomValue] = { - WomFileMapper.mapWomFiles( - womFile => - if (inputsToNotLocalize.contains(womFile)) { - cloudResolveWomFile(womFile) - } else { - mapper(womFile) - } + private def mapOrCloudResolve(mapper: WomFile => WomFile): WomValue => Try[WomValue] = + WomFileMapper.mapWomFiles(womFile => + if (inputsToNotLocalize.contains(womFile)) { + cloudResolveWomFile(womFile) + } else { + mapper(womFile) + } ) - } /** Process files while ignoring files that will not be localized. */ - private def mapOrNoResolve(mapper: WomFile => WomFile): WomValue => Try[WomValue] = { - WomFileMapper.mapWomFiles( - womFile => - if (inputsToNotLocalize.contains(womFile)) { - womFile - } else { - mapper(womFile) - } + private def mapOrNoResolve(mapper: WomFile => WomFile): WomValue => Try[WomValue] = + WomFileMapper.mapWomFiles(womFile => + if (inputsToNotLocalize.contains(womFile)) { + womFile + } else { + mapper(womFile) + } ) - } /** @see [[Command.instantiate]] */ final def commandLinePreProcessor(inputs: WomEvaluatedCallInputs): Try[WomEvaluatedCallInputs] = { val map = inputs map { case (k, v) => k -> mapOrCloudResolve(preProcessWomFile)(v) } - TryUtil.sequenceMap(map). 
- recoverWith { - case e => Failure(new IOException(e.getMessage) with CromwellFatalExceptionMarker) - } + TryUtil.sequenceMap(map).recoverWith { case e => + Failure(new IOException(e.getMessage) with CromwellFatalExceptionMarker) + } } - final lazy val localizedInputs: Try[WomEvaluatedCallInputs] = commandLinePreProcessor(jobDescriptor.evaluatedTaskInputs) + final lazy val localizedInputs: Try[WomEvaluatedCallInputs] = commandLinePreProcessor( + jobDescriptor.evaluatedTaskInputs + ) /** * Maps WomFile to a local path, for use in the commandLineValueMapper. @@ -205,47 +211,58 @@ trait StandardAsyncExecutionActor def inputsToNotLocalize: Set[WomFile] = Set.empty /** @see [[Command.instantiate]] */ - final lazy val commandLineValueMapper: WomValue => WomValue = { - womValue => mapOrNoResolve(mapCommandLineWomFile)(womValue).get + final lazy val commandLineValueMapper: WomValue => WomValue = { womValue => + mapOrNoResolve(mapCommandLineWomFile)(womValue).get } /** @see [[Command.instantiate]] */ - final lazy val commandLineJobInputValueMapper: WomValue => WomValue = { - womValue => mapOrNoResolve(mapCommandLineJobInputWomFile)(womValue).get + final lazy val commandLineJobInputValueMapper: WomValue => WomValue = { womValue => + mapOrNoResolve(mapCommandLineJobInputWomFile)(womValue).get } - lazy val jobShell: String = configurationDescriptor.backendConfig.getOrElse("job-shell", - configurationDescriptor.globalConfig.getOrElse("system.job-shell", "/bin/bash")) + lazy val jobShell: String = configurationDescriptor.backendConfig.getOrElse( + "job-shell", + configurationDescriptor.globalConfig.getOrElse("system.job-shell", "/bin/bash") + ) - lazy val abbreviateCommandLength: Int = configurationDescriptor.backendConfig.getOrElse("abbreviate-command-length", - configurationDescriptor.globalConfig.getOrElse("system.abbreviate-command-length", 0)) + lazy val abbreviateCommandLength: Int = configurationDescriptor.backendConfig.getOrElse( + "abbreviate-command-length", + configurationDescriptor.globalConfig.getOrElse("system.abbreviate-command-length", 0) + ) /** * The local path where the command will run. */ lazy val commandDirectory: Path = jobPaths.callExecutionRoot - lazy val memoryRetryErrorKeys: Option[List[String]] = configurationDescriptor.globalConfig.as[Option[List[String]]]("system.memory-retry-error-keys") + lazy val memoryRetryErrorKeys: Option[List[String]] = + configurationDescriptor.globalConfig.as[Option[List[String]]]("system.memory-retry-error-keys") - lazy val memoryRetryFactor: Option[MemoryRetryMultiplierRefined] = { + lazy val memoryRetryFactor: Option[MemoryRetryMultiplierRefined] = jobDescriptor.workflowDescriptor.getWorkflowOption(WorkflowOptions.MemoryRetryMultiplier) flatMap { value: String => Try(value.toDouble) match { - case Success(v) => refineV[MemoryRetryMultiplier](v.toDouble) match { - case Left(e) => - // should not happen, this case should have been screened for and fast-failed during workflow materialization. - log.error(e, s"Programmer error: unexpected failure attempting to read value for workflow option " + - s"'${WorkflowOptions.MemoryRetryMultiplier.name}'. Expected value should be in range 1.0 ≤ n ≤ 99.0") - None - case Right(refined) => Option(refined) - } + case Success(v) => + refineV[MemoryRetryMultiplier](v.toDouble) match { + case Left(e) => + // should not happen, this case should have been screened for and fast-failed during workflow materialization. 
+ log.error( + e, + s"Programmer error: unexpected failure attempting to read value for workflow option " + + s"'${WorkflowOptions.MemoryRetryMultiplier.name}'. Expected value should be in range 1.0 ≤ n ≤ 99.0" + ) + None + case Right(refined) => Option(refined) + } case Failure(e) => // should not happen, this case should have been screened for and fast-failed during workflow materialization. - log.error(e, s"Programmer error: unexpected failure attempting to convert value for workflow option " + - s"'${WorkflowOptions.MemoryRetryMultiplier.name}' to Double.") + log.error( + e, + s"Programmer error: unexpected failure attempting to convert value for workflow option " + + s"'${WorkflowOptions.MemoryRetryMultiplier.name}' to Double." + ) None } } - } lazy val memoryRetryRequested: Boolean = memoryRetryFactor.nonEmpty @@ -305,7 +322,8 @@ trait StandardAsyncExecutionActor val globList = parentDirectory./(s"$globDir.list") val controlFileName = "cromwell_glob_control_file" val absoluteGlobValue = commandDirectory.resolve(globFile.value).pathAsString - val globLinkCommand: String = configurationDescriptor.backendConfig.getAs[String]("glob-link-command") + val globLinkCommand: String = configurationDescriptor.backendConfig + .getAs[String]("glob-link-command") .map("( " + _ + " )") .getOrElse("( ln -L GLOB_PATTERN GLOB_DIRECTORY 2> /dev/null ) || ( ln GLOB_PATTERN GLOB_DIRECTORY )") .replaceAll("GLOB_PATTERN", absoluteGlobValue) @@ -350,13 +368,14 @@ trait StandardAsyncExecutionActor // Absolutize any redirect and overridden paths. All of these files must have absolute paths since the command script // references them outside a (cd "execution dir"; ...) subshell. The default names are known to be relative paths, // the names from redirections may or may not be relative. - private def absolutizeContainerPath(path: String): String = { + private def absolutizeContainerPath(path: String): String = if (path.startsWith(cwd.pathAsString)) path else cwd.resolve(path).pathAsString - } def executionStdin: Option[String] = instantiatedCommand.evaluatedStdinRedirection map absolutizeContainerPath - def executionStdout: String = instantiatedCommand.evaluatedStdoutOverride.getOrElse(jobPaths.defaultStdoutFilename) |> absolutizeContainerPath - def executionStderr: String = instantiatedCommand.evaluatedStderrOverride.getOrElse(jobPaths.defaultStderrFilename) |> absolutizeContainerPath + def executionStdout: String = + instantiatedCommand.evaluatedStdoutOverride.getOrElse(jobPaths.defaultStdoutFilename) |> absolutizeContainerPath + def executionStderr: String = + instantiatedCommand.evaluatedStderrOverride.getOrElse(jobPaths.defaultStderrFilename) |> absolutizeContainerPath /* * Ensures the standard paths are correct w.r.t overridden paths. 
This is called in two places: when generating the command and @@ -408,9 +427,11 @@ trait StandardAsyncExecutionActor val errorOrGlobFiles: ErrorOr[List[WomGlobFile]] = backendEngineFunctions.findGlobOutputs(call, jobDescriptor) - lazy val environmentVariables = instantiatedCommand.environmentVariables map { case (k, v) => s"""export $k="$v"""" } mkString("", "\n", "\n") + lazy val environmentVariables = instantiatedCommand.environmentVariables map { case (k, v) => + s"""export $k="$v"""" + } mkString ("", "\n", "\n") - val home = jobDescriptor.taskCall.callable.homeOverride.map { _ (runtimeEnvironment) }.getOrElse("$HOME") + val home = jobDescriptor.taskCall.callable.homeOverride.map(_(runtimeEnvironment)).getOrElse("$HOME") val shortId = jobDescriptor.workflowDescriptor.id.shortString // Give the out and error FIFO variables names that are unlikely to conflict with anything the user is doing. val (out, err) = (s"out$shortId", s"err$shortId") @@ -422,72 +443,73 @@ trait StandardAsyncExecutionActor // Only adjust the temporary directory permissions if this is executing under Docker. val tmpDirPermissionsAdjustment = if (isDockerRun) s"""chmod 777 "$$tmpDir"""" else "" - val emptyDirectoryFillCommand: String = configurationDescriptor.backendConfig.getAs[String]("empty-dir-fill-command") - .getOrElse( - s"""( - |# add a .file in every empty directory to facilitate directory delocalization on the cloud - |cd $cwd - |find . -type d -exec sh -c '[ -z "$$(ls -A '"'"'{}'"'"')" ] && touch '"'"'{}'"'"'/.file' \\; - |)""".stripMargin) - - val errorOrPreamble: ErrorOr[String] = scriptPreamble.map{ preambleData => - preambleData.executeInSubshell match { - case true => - s""" - |( - |cd ${cwd.pathAsString} - |${preambleData.bashString} - |) - |""".stripMargin - case false => - s""" - |cd ${cwd.pathAsString} - |${preambleData.bashString} - |""".stripMargin - } + val emptyDirectoryFillCommand: String = configurationDescriptor.backendConfig + .getAs[String]("empty-dir-fill-command") + .getOrElse(s"""( + |# add a .file in every empty directory to facilitate directory delocalization on the cloud + |cd $cwd + |find . -type d -exec sh -c '[ -z "$$(ls -A '"'"'{}'"'"')" ] && touch '"'"'{}'"'"'/.file' \\; + |)""".stripMargin) + + val errorOrPreamble: ErrorOr[String] = scriptPreamble.map { preambleData => + preambleData.executeInSubshell match { + case true => + s""" + |( + |cd ${cwd.pathAsString} + |${preambleData.bashString} + |) + |""".stripMargin + case false => + s""" + |cd ${cwd.pathAsString} + |${preambleData.bashString} + |""".stripMargin + } } // The `tee` trickery below is to be able to redirect to known filenames for CWL while also streaming // stdout and stderr for PAPI to periodically upload to cloud storage. 
// https://stackoverflow.com/questions/692000/how-do-i-write-stderr-to-a-file-while-using-tee-with-a-pipe (errorOrDirectoryOutputs, errorOrGlobFiles, errorOrPreamble).mapN((directoryOutputs, globFiles, preamble) => - s"""|#!$jobShell - |DOCKER_OUTPUT_DIR_LINK - |cd ${cwd.pathAsString} - |tmpDir=$temporaryDirectory - |$tmpDirPermissionsAdjustment - |export _JAVA_OPTIONS=-Djava.io.tmpdir="$$tmpDir" - |export TMPDIR="$$tmpDir" - |export HOME="$home" - | - |SCRIPT_PREAMBLE - | - |$out="$${tmpDir}/out.$$$$" $err="$${tmpDir}/err.$$$$" - |mkfifo "$$$out" "$$$err" - |trap 'rm "$$$out" "$$$err"' EXIT - |touch $stdoutRedirection $stderrRedirection - |tee $stdoutRedirection < "$$$out" & - |tee $stderrRedirection < "$$$err" >&2 & - |( - |cd ${cwd.pathAsString} - |ENVIRONMENT_VARIABLES - |INSTANTIATED_COMMAND - |) $stdinRedirection > "$$$out" 2> "$$$err" - |echo $$? > $rcTmpPath - |$emptyDirectoryFillCommand - |( - |cd ${cwd.pathAsString} - |SCRIPT_EPILOGUE - |${globScripts(globFiles)} - |${directoryScripts(directoryOutputs)} - |) - |mv $rcTmpPath $rcPath - |""".stripMargin - .replace("SCRIPT_PREAMBLE", preamble) - .replace("ENVIRONMENT_VARIABLES", environmentVariables) - .replace("INSTANTIATED_COMMAND", commandString) - .replace("SCRIPT_EPILOGUE", scriptEpilogue) - .replace("DOCKER_OUTPUT_DIR_LINK", dockerOutputDir)) + s"""|#!$jobShell + |DOCKER_OUTPUT_DIR_LINK + |cd ${cwd.pathAsString} + |tmpDir=$temporaryDirectory + |$tmpDirPermissionsAdjustment + |export _JAVA_OPTIONS=-Djava.io.tmpdir="$$tmpDir" + |export TMPDIR="$$tmpDir" + |export HOME="$home" + | + |SCRIPT_PREAMBLE + | + |$out="$${tmpDir}/out.$$$$" $err="$${tmpDir}/err.$$$$" + |mkfifo "$$$out" "$$$err" + |trap 'rm "$$$out" "$$$err"' EXIT + |touch $stdoutRedirection $stderrRedirection + |tee $stdoutRedirection < "$$$out" & + |tee $stderrRedirection < "$$$err" >&2 & + |( + |cd ${cwd.pathAsString} + |ENVIRONMENT_VARIABLES + |INSTANTIATED_COMMAND + |) $stdinRedirection > "$$$out" 2> "$$$err" + |echo $$? > $rcTmpPath + |$emptyDirectoryFillCommand + |( + |cd ${cwd.pathAsString} + |SCRIPT_EPILOGUE + |${globScripts(globFiles)} + |${directoryScripts(directoryOutputs)} + |) + |mv $rcTmpPath $rcPath + |""".stripMargin + .replace("SCRIPT_PREAMBLE", preamble) + .replace("ENVIRONMENT_VARIABLES", environmentVariables) + .replace("INSTANTIATED_COMMAND", commandString) + .replace("SCRIPT_EPILOGUE", scriptEpilogue) + .replace("DOCKER_OUTPUT_DIR_LINK", dockerOutputDir) + ) } def runtimeEnvironmentPathMapper(env: RuntimeEnvironment): RuntimeEnvironment = { @@ -495,9 +517,10 @@ trait StandardAsyncExecutionActor env.copy(outputPath = env.outputPath |> localize, tempPath = env.tempPath |> localize) } - lazy val runtimeEnvironment: RuntimeEnvironment = { - RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes, jobPaths)(standardParams.minimumRuntimeSettings) |> runtimeEnvironmentPathMapper - } + lazy val runtimeEnvironment: RuntimeEnvironment = + RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes, jobPaths)( + standardParams.minimumRuntimeSettings + ) |> runtimeEnvironmentPathMapper /** * Turns WomFiles into relative paths. These paths are relative to the working disk. 
@@ -507,23 +530,21 @@ trait StandardAsyncExecutionActor * relativeLocalizationPath("gs://some/bucket/foo.txt") -> "some/bucket/foo.txt" * etc */ - protected def relativeLocalizationPath(file: WomFile): WomFile = { + protected def relativeLocalizationPath(file: WomFile): WomFile = file.mapFile(value => getPath(value) match { case Success(path) => path.pathWithoutScheme case _ => value } ) - } - protected def fileName(file: WomFile): WomFile = { + protected def fileName(file: WomFile): WomFile = file.mapFile(value => getPath(value) match { case Success(path) => path.name case _ => value } ) - } protected def localizationPath(f: CommandSetupSideEffectFile): WomFile = { val fileTransformer = if (isAdHocFile(f.file)) fileName _ else relativeLocalizationPath _ @@ -539,7 +560,6 @@ trait StandardAsyncExecutionActor * Maybe this should be the other way around: the default implementation is noop and SFS / TES override it ? */ lazy val localizeAdHocValues: List[AdHocValue] => ErrorOr[List[StandardAdHocValue]] = { adHocValues => - // Localize an adhoc file to the callExecutionRoot as needed val localize: (AdHocValue, Path) => Future[LocalizedAdHocValue] = { (adHocValue, file) => val actualName = adHocValue.alternativeName.getOrElse(file.name) @@ -547,17 +567,19 @@ trait StandardAsyncExecutionActor // First check that it's not already there under execution root asyncIo.existsAsync(finalPath) flatMap { // If it's not then copy it - case false => asyncIo.copyAsync(file, finalPath) as { LocalizedAdHocValue(adHocValue, finalPath) } + case false => asyncIo.copyAsync(file, finalPath) as LocalizedAdHocValue(adHocValue, finalPath) case true => Future.successful(LocalizedAdHocValue(adHocValue, finalPath)) } } - adHocValues.traverse[ErrorOr, (AdHocValue, Path)]({ adHocValue => - // Build an actionable Path from the ad hoc file - getPath(adHocValue.womValue.value).toErrorOr.map(adHocValue -> _) - }) + adHocValues + .traverse[ErrorOr, (AdHocValue, Path)] { adHocValue => + // Build an actionable Path from the ad hoc file + getPath(adHocValue.womValue.value).toErrorOr.map(adHocValue -> _) + } // Localize the values if necessary - .map(_.traverse[Future, LocalizedAdHocValue](localize.tupled)).toEither + .map(_.traverse[Future, LocalizedAdHocValue](localize.tupled)) + .toEither // TODO: Asynchronify // This is obviously sad but turning it into a Future has earth-shattering consequences, so synchronizing it for now .flatMap(future => Try(Await.result(future, 1.hour)).toChecked) @@ -577,37 +599,41 @@ trait StandardAsyncExecutionActor val callable = jobDescriptor.taskCall.callable /* - * Try to map the command line values. - * - * May not work as the commandLineValueMapper was originally meant to modify paths in the later stages of command - * line instantiation. However, due to [[AdHocValue]] support the command line instantiation itself currently needs - * to use the commandLineValue mapper. So the commandLineValueMapper is attempted first, and if that fails then - * returns the original womValue. - */ - def tryCommandLineValueMapper(womValue: WomValue): WomValue = { + * Try to map the command line values. + * + * May not work as the commandLineValueMapper was originally meant to modify paths in the later stages of command + * line instantiation. However, due to [[AdHocValue]] support the command line instantiation itself currently needs + * to use the commandLineValue mapper. So the commandLineValueMapper is attempted first, and if that fails then + * returns the original womValue. 
+ */ + def tryCommandLineValueMapper(womValue: WomValue): WomValue = Try(commandLineJobInputValueMapper(womValue)).getOrElse(womValue) - } - val unmappedInputs: Map[String, WomValue] = jobDescriptor.evaluatedTaskInputs.map({ + val unmappedInputs: Map[String, WomValue] = jobDescriptor.evaluatedTaskInputs.map { case (inputDefinition, womValue) => inputDefinition.localName.value -> womValue - }) - - val mappedInputs: Checked[Map[String, WomValue]] = localizedInputs.toErrorOr.map( - _.map({ - case (inputDefinition, value) => inputDefinition.localName.value -> tryCommandLineValueMapper(value) - }) - ).toEither - - val evaluateAndInitialize = (containerizedInputExpression: ContainerizedInputExpression) => for { - mapped <- mappedInputs - evaluated <- containerizedInputExpression.evaluate(unmappedInputs, mapped, backendEngineFunctions).toChecked - initialized <- evaluated.traverse[IOChecked, AdHocValue]({ adHocValue => - adHocValue.womValue.initialize(backendEngineFunctions).map({ - case file: WomFile => adHocValue.copy(womValue = file) - case _ => adHocValue - }) - }).toChecked - } yield initialized + } + + val mappedInputs: Checked[Map[String, WomValue]] = localizedInputs.toErrorOr + .map( + _.map { case (inputDefinition, value) => + inputDefinition.localName.value -> tryCommandLineValueMapper(value) + } + ) + .toEither + + val evaluateAndInitialize = (containerizedInputExpression: ContainerizedInputExpression) => + for { + mapped <- mappedInputs + evaluated <- containerizedInputExpression.evaluate(unmappedInputs, mapped, backendEngineFunctions).toChecked + initialized <- evaluated + .traverse[IOChecked, AdHocValue] { adHocValue => + adHocValue.womValue.initialize(backendEngineFunctions).map { + case file: WomFile => adHocValue.copy(womValue = file) + case _ => adHocValue + } + } + .toChecked + } yield initialized callable.adHocFileCreation.toList .flatTraverse[ErrorOr, AdHocValue](evaluateAndInitialize.andThen(_.toValidated)) @@ -617,9 +643,10 @@ trait StandardAsyncExecutionActor .flatMap(localizeAdHocValues.andThen(_.toEither)) .toValidated - protected def asAdHocFile(womFile: WomFile): Option[AdHocValue] = evaluatedAdHocFiles map { _.find({ - case AdHocValue(file, _, _) => file.value == womFile.value - }) + protected def asAdHocFile(womFile: WomFile): Option[AdHocValue] = evaluatedAdHocFiles map { + _.find { case AdHocValue(file, _, _) => + file.value == womFile.value + } } getOrElse None protected def isAdHocFile(womFile: WomFile): Boolean = asAdHocFile(womFile).isDefined @@ -629,18 +656,22 @@ trait StandardAsyncExecutionActor val callable = jobDescriptor.taskCall.callable // Replace input files with the ad hoc updated version - def adHocFilePreProcessor(in: WomEvaluatedCallInputs): Try[WomEvaluatedCallInputs] = { + def adHocFilePreProcessor(in: WomEvaluatedCallInputs): Try[WomEvaluatedCallInputs] = localizedAdHocValues.toTry("Error evaluating ad hoc files") map { adHocFiles => - in map { - case (inputDefinition, originalWomValue) => - inputDefinition -> adHocFiles.collectFirst({ - case AsAdHocValue(AdHocValue(originalWomFile, _, Some(inputName))) if inputName == inputDefinition.localName.value => originalWomFile - case AsLocalizedAdHocValue(LocalizedAdHocValue(AdHocValue(originalWomFile, _, Some(inputName)), localizedPath)) if inputName == inputDefinition.localName.value => + in map { case (inputDefinition, originalWomValue) => + inputDefinition -> adHocFiles + .collectFirst { + case AsAdHocValue(AdHocValue(originalWomFile, _, Some(inputName))) + if inputName == 
inputDefinition.localName.value => + originalWomFile + case AsLocalizedAdHocValue( + LocalizedAdHocValue(AdHocValue(originalWomFile, _, Some(inputName)), localizedPath) + ) if inputName == inputDefinition.localName.value => originalWomFile.mapFile(_ => localizedPath.pathAsString) - }).getOrElse(originalWomValue) + } + .getOrElse(originalWomValue) } } - } // Gets the inputs that will be mutated by instantiating the command. val mutatingPreProcessor: WomEvaluatedCallInputs => Try[WomEvaluatedCallInputs] = { _ => @@ -658,12 +689,16 @@ trait StandardAsyncExecutionActor runtimeEnvironment ) - def makeStringKeyedMap(list: List[(LocalName, WomValue)]): Map[String, WomValue] = list.toMap map { case (k, v) => k.value -> v } + def makeStringKeyedMap(list: List[(LocalName, WomValue)]): Map[String, WomValue] = list.toMap map { case (k, v) => + k.value -> v + } val command = instantiatedCommandValidation flatMap { instantiatedCommand => val valueMappedPreprocessedInputs = instantiatedCommand.valueMappedPreprocessedInputs |> makeStringKeyedMap - val adHocFileCreationSideEffectFiles: ErrorOr[List[CommandSetupSideEffectFile]] = localizedAdHocValues map { _.map(adHocValueToCommandSetupSideEffectFile) } + val adHocFileCreationSideEffectFiles: ErrorOr[List[CommandSetupSideEffectFile]] = localizedAdHocValues map { + _.map(adHocValueToCommandSetupSideEffectFile) + } def evaluateEnvironmentExpression(nameAndExpression: (String, WomExpression)): ErrorOr[(String, String)] = { val (name, expression) = nameAndExpression @@ -674,16 +709,25 @@ trait StandardAsyncExecutionActor // Build a list of functions from a CommandTaskDefinition to an Option[WomExpression] representing a possible // redirection or override of the filename of a redirection. Evaluate that expression if present and stringify. - val List(stdinRedirect, stdoutOverride, stderrOverride) = List[CommandTaskDefinition => Option[WomExpression]]( - _.stdinRedirection, _.stdoutOverride, _.stderrOverride) map { - _.apply(callable).traverse[ErrorOr, String] { _.evaluateValue(valueMappedPreprocessedInputs, backendEngineFunctions) map { _.valueString} } - } + val List(stdinRedirect, stdoutOverride, stderrOverride) = + List[CommandTaskDefinition => Option[WomExpression]](_.stdinRedirection, + _.stdoutOverride, + _.stderrOverride + ) map { + _.apply(callable).traverse[ErrorOr, String] { + _.evaluateValue(valueMappedPreprocessedInputs, backendEngineFunctions) map { _.valueString } + } + } (adHocFileCreationSideEffectFiles, environmentVariables, stdinRedirect, stdoutOverride, stderrOverride) mapN { (adHocFiles, env, in, out, err) => instantiatedCommand.copy( - createdFiles = instantiatedCommand.createdFiles ++ adHocFiles, environmentVariables = env.toMap, - evaluatedStdinRedirection = in, evaluatedStdoutOverride = out, evaluatedStderrOverride = err) + createdFiles = instantiatedCommand.createdFiles ++ adHocFiles, + environmentVariables = env.toMap, + evaluatedStdinRedirection = in, + evaluatedStdoutOverride = out, + evaluatedStderrOverride = err + ) }: ErrorOr[InstantiatedCommand] } @@ -700,11 +744,10 @@ trait StandardAsyncExecutionActor * * If the `command` errors for some reason, put a "-1" into the rc file. */ - def redirectOutputs(command: String): String = { + def redirectOutputs(command: String): String = // > 128 is the cutoff for signal-induced process deaths such as might be observed with abort. 
// http://www.tldp.org/LDP/abs/html/exitcodes.html s"""$command < /dev/null || { rc=$$?; if [ "$$rc" -gt "128" ]; then echo $$rc; else echo -1; fi } > ${jobPaths.returnCode}""" - } /** A tag that may be used for logging. */ lazy val tag = s"${this.getClass.getSimpleName} [UUID(${workflowIdForLogging.shortString}):${jobDescriptor.key.tag}]" @@ -714,42 +757,47 @@ trait StandardAsyncExecutionActor * * @return True if a non-empty `remoteStdErrPath` should fail the job. */ - lazy val failOnStdErr: Boolean = RuntimeAttributesValidation.extract( - FailOnStderrValidation.instance, validatedRuntimeAttributes) + lazy val failOnStdErr: Boolean = + RuntimeAttributesValidation.extract(FailOnStderrValidation.instance, validatedRuntimeAttributes) /** * Returns the behavior for continuing on the return code, obtained by converting `returnCodeContents` to an Int. * * @return the behavior for continuing on the return code. */ - lazy val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract( - ContinueOnReturnCodeValidation.instance, validatedRuntimeAttributes) + lazy val continueOnReturnCode: ContinueOnReturnCode = + RuntimeAttributesValidation.extract(ContinueOnReturnCodeValidation.instance, validatedRuntimeAttributes) /** * Returns the max number of times that a failed job should be retried, obtained by converting `maxRetries` to an Int. */ - lazy val maxRetries: Int = RuntimeAttributesValidation.extract( - MaxRetriesValidation.instance, validatedRuntimeAttributes) + lazy val maxRetries: Int = + RuntimeAttributesValidation.extract(MaxRetriesValidation.instance, validatedRuntimeAttributes) - - lazy val previousFailedRetries: Int = jobDescriptor.prefetchedKvStoreEntries.get(BackendLifecycleActorFactory.FailedRetryCountKey) match { - case Some(KvPair(_,v)) => v.toInt - case _ => 0 - } + lazy val previousFailedRetries: Int = + jobDescriptor.prefetchedKvStoreEntries.get(BackendLifecycleActorFactory.FailedRetryCountKey) match { + case Some(KvPair(_, v)) => v.toInt + case _ => 0 + } /** * Returns the memory multiplier for previous attempt if available */ - lazy val previousMemoryMultiplier: Option[Double] = jobDescriptor.prefetchedKvStoreEntries.get(BackendLifecycleActorFactory.MemoryMultiplierKey) match { - case Some(KvPair(_,v)) => Try(v.toDouble) match { - case Success(m) => Option(m) - case Failure(e) => - // should not happen as Cromwell itself had written the value as a Double - log.error(e, s"Programmer error: unexpected failure attempting to convert value of MemoryMultiplierKey from JOB_KEY_VALUE_ENTRY table to Double.") - None + lazy val previousMemoryMultiplier: Option[Double] = + jobDescriptor.prefetchedKvStoreEntries.get(BackendLifecycleActorFactory.MemoryMultiplierKey) match { + case Some(KvPair(_, v)) => + Try(v.toDouble) match { + case Success(m) => Option(m) + case Failure(e) => + // should not happen as Cromwell itself had written the value as a Double + log.error( + e, + s"Programmer error: unexpected failure attempting to convert value of MemoryMultiplierKey from JOB_KEY_VALUE_ENTRY table to Double." + ) + None + } + case _ => None } - case _ => None - } /** * Execute the job specified in the params. Should return a `StandardAsyncPendingExecutionHandle`, or a @@ -757,9 +805,8 @@ trait StandardAsyncExecutionActor * * @return the execution handle for the job. 
*/ - def execute(): ExecutionHandle = { + def execute(): ExecutionHandle = throw new UnsupportedOperationException(s"Neither execute() nor executeAsync() implemented by $getClass") - } /** * Async execute the job specified in the params. Should return a `StandardAsyncPendingExecutionHandle`, or a @@ -797,7 +844,8 @@ trait StandardAsyncExecutionActor * @param jobId The previously recorded job id. * @return the execution handle for the job. */ - def reconnectToAbortAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = Future.failed(JobReconnectionNotSupportedException(jobDescriptor.key)) + def reconnectToAbortAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = + Future.failed(JobReconnectionNotSupportedException(jobDescriptor.key)) /** * This is in spirit similar to recover except it does not defaults back to running the job if not implemented. @@ -809,7 +857,8 @@ trait StandardAsyncExecutionActor * @param jobId The previously recorded job id. * @return the execution handle for the job. */ - def reconnectAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = Future.failed(JobReconnectionNotSupportedException(jobDescriptor.key)) + def reconnectAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = + Future.failed(JobReconnectionNotSupportedException(jobDescriptor.key)) /** * Returns the run status for the job. @@ -817,9 +866,8 @@ trait StandardAsyncExecutionActor * @param handle The handle of the running job. * @return The status of the job. */ - def pollStatus(handle: StandardAsyncPendingExecutionHandle): StandardAsyncRunState = { + def pollStatus(handle: StandardAsyncPendingExecutionHandle): StandardAsyncRunState = throw new UnsupportedOperationException(s"Neither pollStatus nor pollStatusAsync implemented by $getClass") - } /** * Returns the async run status for the job. @@ -827,7 +875,8 @@ trait StandardAsyncExecutionActor * @param handle The handle of the running job. * @return The status of the job. */ - def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[StandardAsyncRunState] = Future.fromTry(Try(pollStatus(handle))) + def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[StandardAsyncRunState] = + Future.fromTry(Try(pollStatus(handle))) /** * Adds custom behavior invoked when polling fails due to some exception. By default adds nothing. @@ -838,9 +887,8 @@ trait StandardAsyncExecutionActor * * @return A partial function handler for exceptions after polling. */ - def customPollStatusFailure: PartialFunction[(ExecutionHandle, Exception), ExecutionHandle] = { + def customPollStatusFailure: PartialFunction[(ExecutionHandle, Exception), ExecutionHandle] = PartialFunction.empty - } /** * Returns true when a job is complete, either successfully or unsuccessfully. @@ -911,13 +959,12 @@ trait StandardAsyncExecutionActor * * By default handles the behavior of `requestsAbortAndDiesImmediately`. */ - def postAbort(): Unit = { + def postAbort(): Unit = if (requestsAbortAndDiesImmediately) { tellMetadata(Map(CallMetadataKeys.BackendStatus -> "Aborted")) context.parent ! JobAbortedResponse(jobDescriptor.key) context.stop(self) } - } /** * Output value mapper. @@ -925,19 +972,17 @@ trait StandardAsyncExecutionActor * @param womValue The original WOM value. * @return The Try wrapped and mapped WOM value. */ - final def outputValueMapper(womValue: WomValue): Try[WomValue] = { + final def outputValueMapper(womValue: WomValue): Try[WomValue] = WomFileMapper.mapWomFiles(mapOutputWomFile)(womValue) - } /** * Used to convert to output paths. 
* */ def mapOutputWomFile(womFile: WomFile): WomFile = - womFile.mapFile{ - path => - val pathFromContainerInputs = jobPaths.hostPathFromContainerInputs(path) - pathFromContainerInputs.toAbsolutePath.toString + womFile.mapFile { path => + val pathFromContainerInputs = jobPaths.hostPathFromContainerInputs(path) + pathFromContainerInputs.toAbsolutePath.toString } /** @@ -947,9 +992,8 @@ trait StandardAsyncExecutionActor * * @return A Try wrapping evaluated outputs. */ - def evaluateOutputs()(implicit ec: ExecutionContext): Future[EvaluatedJobOutputs] = { + def evaluateOutputs()(implicit ec: ExecutionContext): Future[EvaluatedJobOutputs] = OutputEvaluator.evaluateOutputs(jobDescriptor, backendEngineFunctions, outputValueMapper) - } /** * Tests whether an attempted result of evaluateOutputs should possibly be retried. @@ -965,7 +1009,7 @@ trait StandardAsyncExecutionActor * @param exception The exception, possibly an CromwellAggregatedException. * @return True if evaluateOutputs should be retried later. */ - final def retryEvaluateOutputsAggregated(exception: Exception): Boolean = { + final def retryEvaluateOutputsAggregated(exception: Exception): Boolean = exception match { case aggregated: CromwellAggregatedException => aggregated.throwables.collectFirst { @@ -973,7 +1017,6 @@ trait StandardAsyncExecutionActor }.isDefined case _ => retryEvaluateOutputs(exception) } - } /** * Tests whether an attempted result of evaluateOutputs should possibly be retried. @@ -999,7 +1042,8 @@ trait StandardAsyncExecutionActor */ def handleExecutionSuccess(runStatus: StandardAsyncRunState, handle: StandardAsyncPendingExecutionHandle, - returnCode: Int)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { + returnCode: Int + )(implicit ec: ExecutionContext): Future[ExecutionHandle] = evaluateOutputs() map { case ValidJobOutputs(outputs) => // Need to make sure the paths are up to date before sending the detritus back in the response @@ -1016,7 +1060,6 @@ trait StandardAsyncExecutionActor handle case JobOutputsEvaluationException(ex) => FailedNonRetryableExecutionHandle(ex, kvPairsToSave = None) } - } /** * Process an unsuccessful run, as interpreted by `handleExecutionFailure`. @@ -1024,18 +1067,18 @@ trait StandardAsyncExecutionActor * @return The execution handle. 
*/ def retryElseFail(backendExecutionStatus: Future[ExecutionHandle], - retryWithMoreMemory: Boolean = false): Future[ExecutionHandle] = { - + retryWithMoreMemory: Boolean = false + ): Future[ExecutionHandle] = backendExecutionStatus flatMap { case failedRetryableOrNonRetryable: FailedExecutionHandle => - val kvsFromPreviousAttempt = jobDescriptor.prefetchedKvStoreEntries.collect { case (key: String, kvPair: KvPair) => key -> kvPair } val kvsForNextAttempt = failedRetryableOrNonRetryable.kvPairsToSave match { - case Some(kvPairs) => kvPairs.map { - case kvPair@KvPair(ScopedKey(_, _, key), _) => key -> kvPair - }.toMap + case Some(kvPairs) => + kvPairs.map { case kvPair @ KvPair(ScopedKey(_, _, key), _) => + key -> kvPair + }.toMap case None => Map.empty[String, KvPair] } @@ -1045,23 +1088,35 @@ trait StandardAsyncExecutionActor (retryWithMoreMemory, memoryRetryFactor, previousMemoryMultiplier) match { case (true, Some(retryFactor), Some(previousMultiplier)) => val nextMemoryMultiplier = previousMultiplier * retryFactor.value - saveAttrsAndRetry(failed, kvsFromPreviousAttempt, kvsForNextAttempt, incFailedCount = true, Option(nextMemoryMultiplier)) + saveAttrsAndRetry(failed, + kvsFromPreviousAttempt, + kvsForNextAttempt, + incFailedCount = true, + Option(nextMemoryMultiplier) + ) case (true, Some(retryFactor), None) => - saveAttrsAndRetry(failed, kvsFromPreviousAttempt, kvsForNextAttempt, incFailedCount = true, Option(retryFactor.value)) - case (_, _, _) => saveAttrsAndRetry(failed, kvsFromPreviousAttempt, kvsForNextAttempt, incFailedCount = true) + saveAttrsAndRetry(failed, + kvsFromPreviousAttempt, + kvsForNextAttempt, + incFailedCount = true, + Option(retryFactor.value) + ) + case (_, _, _) => + saveAttrsAndRetry(failed, kvsFromPreviousAttempt, kvsForNextAttempt, incFailedCount = true) } case failedNonRetryable: FailedNonRetryableExecutionHandle => Future.successful(failedNonRetryable) - case failedRetryable: FailedRetryableExecutionHandle => saveAttrsAndRetry(failedRetryable, kvsFromPreviousAttempt, kvsForNextAttempt, incFailedCount = false) + case failedRetryable: FailedRetryableExecutionHandle => + saveAttrsAndRetry(failedRetryable, kvsFromPreviousAttempt, kvsForNextAttempt, incFailedCount = false) } case _ => backendExecutionStatus } - } private def saveAttrsAndRetry(failedExecHandle: FailedExecutionHandle, kvPrev: Map[String, KvPair], kvNext: Map[String, KvPair], incFailedCount: Boolean, - nextMemoryMultiplier: Option[Double] = None): Future[FailedRetryableExecutionHandle] = { + nextMemoryMultiplier: Option[Double] = None + ): Future[FailedRetryableExecutionHandle] = failedExecHandle match { case failedNonRetryable: FailedNonRetryableExecutionHandle => saveKvPairsForNextAttempt(kvPrev, kvNext, incFailedCount, nextMemoryMultiplier) map { _ => @@ -1070,7 +1125,6 @@ trait StandardAsyncExecutionActor case failedRetryable: FailedRetryableExecutionHandle => saveKvPairsForNextAttempt(kvPrev, kvNext, incFailedCount, nextMemoryMultiplier) map (_ => failedRetryable) } - } /** * Merge key-value pairs from previous job execution attempt with incoming pairs from current attempt, which has just @@ -1081,8 +1135,10 @@ trait StandardAsyncExecutionActor private def saveKvPairsForNextAttempt(kvsFromPreviousAttempt: Map[String, KvPair], kvsForNextAttempt: Map[String, KvPair], incrementFailedRetryCount: Boolean, - nextMemoryMultiplierOption: Option[Double]): Future[Seq[KvResponse]] = { - val nextKvJobKey = KvJobKey(jobDescriptor.key.call.fullyQualifiedName, jobDescriptor.key.index, 
jobDescriptor.key.attempt + 1) + nextMemoryMultiplierOption: Option[Double] + ): Future[Seq[KvResponse]] = { + val nextKvJobKey = + KvJobKey(jobDescriptor.key.call.fullyQualifiedName, jobDescriptor.key.index, jobDescriptor.key.attempt + 1) def getNextKvPair[A](key: String, value: String): Map[String, KvPair] = { val nextScopedKey = ScopedKey(jobDescriptor.workflowDescriptor.id, nextKvJobKey, key) @@ -1090,7 +1146,8 @@ trait StandardAsyncExecutionActor Map(key -> nextKvPair) } - val kvsFromPreviousAttemptUpd = kvsFromPreviousAttempt.view.mapValues(kvPair => kvPair.copy(key = kvPair.key.copy(jobKey = nextKvJobKey))) + val kvsFromPreviousAttemptUpd = + kvsFromPreviousAttempt.view.mapValues(kvPair => kvPair.copy(key = kvPair.key.copy(jobKey = nextKvJobKey))) val failedRetryCountKvPair: Map[String, KvPair] = if (incrementFailedRetryCount) getNextKvPair(FailedRetryCountKey, (previousFailedRetries + 1).toString) @@ -1108,8 +1165,10 @@ trait StandardAsyncExecutionActor if (failures.isEmpty) { respSeq } else { - throw new RuntimeException("Failed to save one or more job execution attributes to the database between " + - "attempts:\n " + failures.mkString("\n")) + throw new RuntimeException( + "Failed to save one or more job execution attributes to the database between " + + "attempts:\n " + failures.mkString("\n") + ) } } } @@ -1120,8 +1179,7 @@ trait StandardAsyncExecutionActor * @param runStatus The run status. * @return The execution handle. */ - def handleExecutionFailure(runStatus: StandardAsyncRunState, - returnCode: Option[Int]): Future[ExecutionHandle] = { + def handleExecutionFailure(runStatus: StandardAsyncRunState, returnCode: Option[Int]): Future[ExecutionHandle] = { val exception = new RuntimeException(s"Task ${jobDescriptor.key.tag} failed for unknown reason: $runStatus") Future.successful(FailedNonRetryableExecutionHandle(exception, returnCode, None)) } @@ -1150,30 +1208,33 @@ trait StandardAsyncExecutionActor } override def executeOrRecover(mode: ExecutionMode)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { - val executeOrRecoverFuture = { + val executeOrRecoverFuture = mode match { - case Reconnect(jobId: StandardAsyncJob@unchecked) => reconnectAsync(jobId) - case ReconnectToAbort(jobId: StandardAsyncJob@unchecked) => reconnectToAbortAsync(jobId) - case Recover(jobId: StandardAsyncJob@unchecked) => recoverAsync(jobId) + case Reconnect(jobId: StandardAsyncJob @unchecked) => reconnectAsync(jobId) + case ReconnectToAbort(jobId: StandardAsyncJob @unchecked) => reconnectToAbortAsync(jobId) + case Recover(jobId: StandardAsyncJob @unchecked) => recoverAsync(jobId) case _ => tellMetadata(startMetadataKeyValues) executeAsync() } - } - executeOrRecoverFuture flatMap executeOrRecoverSuccess recoverWith { - case throwable: Throwable => Future failed { + executeOrRecoverFuture flatMap executeOrRecoverSuccess recoverWith { case throwable: Throwable => + Future failed { jobLogger.error(s"Error attempting to $mode", throwable) throwable } } } - private def executeOrRecoverSuccess(executionHandle: ExecutionHandle): Future[ExecutionHandle] = { + private def executeOrRecoverSuccess(executionHandle: ExecutionHandle): Future[ExecutionHandle] = executionHandle match { - case handle: PendingExecutionHandle[StandardAsyncJob@unchecked, StandardAsyncRunInfo@unchecked, StandardAsyncRunState@unchecked] => - - configurationDescriptor.slowJobWarningAfter foreach { duration => self ! 
WarnAboutSlownessAfter(handle.pendingJob.jobId, duration) } + case handle: PendingExecutionHandle[StandardAsyncJob @unchecked, + StandardAsyncRunInfo @unchecked, + StandardAsyncRunState @unchecked + ] => + configurationDescriptor.slowJobWarningAfter foreach { duration => + self ! WarnAboutSlownessAfter(handle.pendingJob.jobId, duration) + } tellKvJobId(handle.pendingJob) map { _ => if (logJobIds) jobLogger.info(s"job id: ${handle.pendingJob.jobId}") @@ -1183,34 +1244,31 @@ trait StandardAsyncExecutionActor the prior runnable to the thread pool this actor doesn't know the job id for aborting. These runnables are queued up and may still be run by the thread pool anytime in the future. Issue #1218 may address this inconsistency at a later time. For now, just go back and check if we missed the abort command. - */ + */ self ! CheckMissedAbort(handle.pendingJob) executionHandle } case _ => Future.successful(executionHandle) } - } - override def poll(previous: ExecutionHandle)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { + override def poll(previous: ExecutionHandle)(implicit ec: ExecutionContext): Future[ExecutionHandle] = previous match { - case handle: PendingExecutionHandle[ - StandardAsyncJob@unchecked, StandardAsyncRunInfo@unchecked, StandardAsyncRunState@unchecked] => - + case handle: PendingExecutionHandle[StandardAsyncJob @unchecked, + StandardAsyncRunInfo @unchecked, + StandardAsyncRunState @unchecked + ] => jobLogger.debug(s"$tag Polling Job ${handle.pendingJob}") - pollStatusAsync(handle) flatMap { - backendRunStatus => - self ! WarnAboutSlownessIfNecessary - handlePollSuccess(handle, backendRunStatus) - } recover { - case throwable => - handlePollFailure(handle, throwable) + pollStatusAsync(handle) flatMap { backendRunStatus => + self ! WarnAboutSlownessIfNecessary + handlePollSuccess(handle, backendRunStatus) + } recover { case throwable => + handlePollFailure(handle, throwable) } case successful: SuccessfulExecutionHandle => Future.successful(successful) case failed: FailedNonRetryableExecutionHandle => Future.successful(failed) case failedRetryable: FailedRetryableExecutionHandle => Future.successful(failedRetryable) case badHandle => Future.failed(new IllegalArgumentException(s"Unexpected execution handle: $badHandle")) } - } /** * Process a poll success. @@ -1220,7 +1278,8 @@ trait StandardAsyncExecutionActor * @return The updated execution handle. */ def handlePollSuccess(oldHandle: StandardAsyncPendingExecutionHandle, - state: StandardAsyncRunState): Future[ExecutionHandle] = { + state: StandardAsyncRunState + ): Future[ExecutionHandle] = { val previousState = oldHandle.previousState if (!(previousState exists statusEquivalentTo(state))) { // If this is the first time checking the status, we log the transition as '-' to 'currentStatus'. Otherwise just use @@ -1236,7 +1295,10 @@ trait StandardAsyncExecutionActor val metadata = getTerminalMetadata(state) tellMetadata(metadata) handleExecutionResult(state, oldHandle) - case s => Future.successful(oldHandle.copy(previousState = Option(s))) // Copy the current handle with updated previous status. + case s => + Future.successful( + oldHandle.copy(previousState = Option(s)) + ) // Copy the current handle with updated previous status. } } @@ -1247,8 +1309,7 @@ trait StandardAsyncExecutionActor * @param throwable The cause of the polling failure. * @return The updated execution handle. 
*/ - def handlePollFailure(oldHandle: StandardAsyncPendingExecutionHandle, - throwable: Throwable): ExecutionHandle = { + def handlePollFailure(oldHandle: StandardAsyncPendingExecutionHandle, throwable: Throwable): ExecutionHandle = throwable match { case exception: Exception => val handler: PartialFunction[(ExecutionHandle, Exception), ExecutionHandle] = @@ -1259,7 +1320,9 @@ trait StandardAsyncExecutionActor FailedNonRetryableExecutionHandle(exception, kvPairsToSave = None) case (handle: ExecutionHandle, exception: Exception) => // Log exceptions and return the original handle to try again. - jobLogger.warn(s"Caught non-fatal ${exception.getClass.getSimpleName} exception trying to poll, retrying", exception) + jobLogger.warn(s"Caught non-fatal ${exception.getClass.getSimpleName} exception trying to poll, retrying", + exception + ) handle } handler((oldHandle, exception)) @@ -1268,7 +1331,6 @@ trait StandardAsyncExecutionActor // Someone has subclassed or instantiated Throwable directly. Kill the job. They should be using an Exception. FailedNonRetryableExecutionHandle(throwable, kvPairsToSave = None) } - } /** * Process an execution result. @@ -1278,33 +1340,35 @@ trait StandardAsyncExecutionActor * @return The updated execution handle. */ def handleExecutionResult(status: StandardAsyncRunState, - oldHandle: StandardAsyncPendingExecutionHandle): Future[ExecutionHandle] = { + oldHandle: StandardAsyncPendingExecutionHandle + ): Future[ExecutionHandle] = { // Returns true if the task has written an RC file that indicates OOM, false otherwise def memoryRetryRC: Future[Boolean] = { - def returnCodeAsBoolean(codeAsOption: Option[String]): Boolean = { + def returnCodeAsBoolean(codeAsOption: Option[String]): Boolean = codeAsOption match { case Some(codeAsString) => Try(codeAsString.trim.toInt) match { - case Success(code) => code match { - case StderrContainsRetryKeysCode => true - case _ => false - } + case Success(code) => + code match { + case StderrContainsRetryKeysCode => true + case _ => false + } case Failure(e) => - log.error(s"'CheckingForMemoryRetry' action exited with code '$codeAsString' which couldn't be " + - s"converted to an Integer. Task will not be retried with more memory. Error: ${ExceptionUtils.getMessage(e)}") + log.error( + s"'CheckingForMemoryRetry' action exited with code '$codeAsString' which couldn't be " + + s"converted to an Integer. Task will not be retried with more memory. 
Error: ${ExceptionUtils.getMessage(e)}" + ) false } case None => false } - } - def readMemoryRetryRCFile(fileExists: Boolean): Future[Option[String]] = { + def readMemoryRetryRCFile(fileExists: Boolean): Future[Option[String]] = if (fileExists) asyncIo.contentAsStringAsync(jobPaths.memoryRetryRC, None, failOnOverflow = false).map(Option(_)) else Future.successful(None) - } for { fileExists <- asyncIo.existsAsync(jobPaths.memoryRetryRC) @@ -1324,47 +1388,73 @@ trait StandardAsyncExecutionActor outOfMemoryDetected <- memoryRetryRC } yield (stderrSize, returnCodeAsString, outOfMemoryDetected) - stderrSizeAndReturnCodeAndMemoryRetry flatMap { - case (stderrSize, returnCodeAsString, outOfMemoryDetected) => - val tryReturnCodeAsInt = Try(returnCodeAsString.trim.toInt) - - if (isDone(status)) { - tryReturnCodeAsInt match { - case Success(returnCodeAsInt) if failOnStdErr && stderrSize.intValue > 0 => - val executionHandle = Future.successful(FailedNonRetryableExecutionHandle(StderrNonEmpty(jobDescriptor.key.tag, stderrSize, stderrAsOption), Option(returnCodeAsInt), None)) - retryElseFail(executionHandle) - case Success(returnCodeAsInt) if continueOnReturnCode.continueFor(returnCodeAsInt) => - handleExecutionSuccess(status, oldHandle, returnCodeAsInt) - // It's important that we check retryWithMoreMemory case before isAbort. RC could be 137 in either case; - // if it was caused by OOM killer, want to handle as OOM and not job abort. - case Success(returnCodeAsInt) if outOfMemoryDetected && memoryRetryRequested => - val executionHandle = Future.successful(FailedNonRetryableExecutionHandle(RetryWithMoreMemory(jobDescriptor.key.tag, stderrAsOption, memoryRetryErrorKeys, log), Option(returnCodeAsInt), None)) - retryElseFail(executionHandle, outOfMemoryDetected) - case Success(returnCodeAsInt) if isAbort(returnCodeAsInt) => - Future.successful(AbortedExecutionHandle) - case Success(returnCodeAsInt) => - val executionHandle = Future.successful(FailedNonRetryableExecutionHandle(WrongReturnCode(jobDescriptor.key.tag, returnCodeAsInt, stderrAsOption), Option(returnCodeAsInt), None)) - retryElseFail(executionHandle) - case Failure(_) => - Future.successful(FailedNonRetryableExecutionHandle(ReturnCodeIsNotAnInt(jobDescriptor.key.tag, returnCodeAsString, stderrAsOption), kvPairsToSave = None)) - } - } else { - tryReturnCodeAsInt match { - case Success(returnCodeAsInt) if outOfMemoryDetected && memoryRetryRequested && !continueOnReturnCode.continueFor(returnCodeAsInt) => - val executionHandle = Future.successful(FailedNonRetryableExecutionHandle(RetryWithMoreMemory(jobDescriptor.key.tag, stderrAsOption, memoryRetryErrorKeys, log), Option(returnCodeAsInt), None)) - retryElseFail(executionHandle, outOfMemoryDetected) - case _ => - val failureStatus = handleExecutionFailure(status, tryReturnCodeAsInt.toOption) - retryElseFail(failureStatus) - } + stderrSizeAndReturnCodeAndMemoryRetry flatMap { case (stderrSize, returnCodeAsString, outOfMemoryDetected) => + val tryReturnCodeAsInt = Try(returnCodeAsString.trim.toInt) + + if (isDone(status)) { + tryReturnCodeAsInt match { + case Success(returnCodeAsInt) if failOnStdErr && stderrSize.intValue > 0 => + val executionHandle = Future.successful( + FailedNonRetryableExecutionHandle(StderrNonEmpty(jobDescriptor.key.tag, stderrSize, stderrAsOption), + Option(returnCodeAsInt), + None + ) + ) + retryElseFail(executionHandle) + case Success(returnCodeAsInt) if continueOnReturnCode.continueFor(returnCodeAsInt) => + handleExecutionSuccess(status, oldHandle, returnCodeAsInt) + // 
It's important that we check retryWithMoreMemory case before isAbort. RC could be 137 in either case; + // if it was caused by OOM killer, want to handle as OOM and not job abort. + case Success(returnCodeAsInt) if outOfMemoryDetected && memoryRetryRequested => + val executionHandle = Future.successful( + FailedNonRetryableExecutionHandle( + RetryWithMoreMemory(jobDescriptor.key.tag, stderrAsOption, memoryRetryErrorKeys, log), + Option(returnCodeAsInt), + None + ) + ) + retryElseFail(executionHandle, outOfMemoryDetected) + case Success(returnCodeAsInt) if isAbort(returnCodeAsInt) => + Future.successful(AbortedExecutionHandle) + case Success(returnCodeAsInt) => + val executionHandle = Future.successful( + FailedNonRetryableExecutionHandle(WrongReturnCode(jobDescriptor.key.tag, returnCodeAsInt, stderrAsOption), + Option(returnCodeAsInt), + None + ) + ) + retryElseFail(executionHandle) + case Failure(_) => + Future.successful( + FailedNonRetryableExecutionHandle( + ReturnCodeIsNotAnInt(jobDescriptor.key.tag, returnCodeAsString, stderrAsOption), + kvPairsToSave = None + ) + ) } - } recoverWith { - case exception => - if (isDone(status)) Future.successful(FailedNonRetryableExecutionHandle(exception, kvPairsToSave = None)) - else { - val failureStatus = handleExecutionFailure(status, None) - retryElseFail(failureStatus) + } else { + tryReturnCodeAsInt match { + case Success(returnCodeAsInt) + if outOfMemoryDetected && memoryRetryRequested && !continueOnReturnCode.continueFor(returnCodeAsInt) => + val executionHandle = Future.successful( + FailedNonRetryableExecutionHandle( + RetryWithMoreMemory(jobDescriptor.key.tag, stderrAsOption, memoryRetryErrorKeys, log), + Option(returnCodeAsInt), + None + ) + ) + retryElseFail(executionHandle, outOfMemoryDetected) + case _ => + val failureStatus = handleExecutionFailure(status, tryReturnCodeAsInt.toOption) + retryElseFail(failureStatus) } + } + } recoverWith { case exception => + if (isDone(status)) Future.successful(FailedNonRetryableExecutionHandle(exception, kvPairsToSave = None)) + else { + val failureStatus = handleExecutionFailure(status, None) + retryElseFail(failureStatus) + } } } @@ -1393,7 +1483,7 @@ trait StandardAsyncExecutionActor serviceRegistryActor.putMetadata(jobDescriptor.workflowDescriptor.id, Option(jobDescriptor.key), metadataKeyValues) } - override protected implicit lazy val ec: ExecutionContextExecutor = context.dispatcher + implicit override protected lazy val ec: ExecutionContextExecutor = context.dispatcher } /** diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala b/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala index a64c6a5439c..9d049ee8fff 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardCachingActorHelper.scala @@ -48,16 +48,16 @@ trait StandardCachingActorHelper extends JobCachingActorHelper { lazy val call: CommandCallNode = jobDescriptor.key.call - lazy val standardInitializationData: StandardInitializationData = BackendInitializationData. 
- as[StandardInitializationData](backendInitializationDataOption) + lazy val standardInitializationData: StandardInitializationData = + BackendInitializationData.as[StandardInitializationData](backendInitializationDataOption) lazy val validatedRuntimeAttributes: ValidatedRuntimeAttributes = { val builder = standardInitializationData.runtimeAttributesBuilder builder.build(jobDescriptor.runtimeAttributes, jobLogger) } - lazy val isDockerRun: Boolean = RuntimeAttributesValidation.extractOption( - DockerValidation.instance, validatedRuntimeAttributes).isDefined + lazy val isDockerRun: Boolean = + RuntimeAttributesValidation.extractOption(DockerValidation.instance, validatedRuntimeAttributes).isDefined /** * Returns the paths to the job. diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala b/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala index 7782c2da901..ac4c39fc8d7 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardExpressionFunctions.scala @@ -24,11 +24,15 @@ case class DefaultStandardExpressionFunctionsParams(override val pathBuilders: P override val callContext: CallContext, override val ioActorProxy: ActorRef, override val executionContext: ExecutionContext - ) extends StandardExpressionFunctionsParams +) extends StandardExpressionFunctionsParams // TODO: Once we figure out premapping and postmapping, maybe we can standardize that behavior. Currently that's the most important feature that subclasses override. class StandardExpressionFunctions(val standardParams: StandardExpressionFunctionsParams) - extends GlobFunctions with DirectoryFunctions with ReadLikeFunctions with WriteFunctions with CallCorePathFunctions { + extends GlobFunctions + with DirectoryFunctions + with ReadLikeFunctions + with WriteFunctions + with CallCorePathFunctions { override lazy val ec = standardParams.executionContext @@ -41,6 +45,6 @@ class StandardExpressionFunctions(val standardParams: StandardExpressionFunction val callContext: CallContext = standardParams.callContext val writeDirectory: Path = callContext.root - + val isDocker: Boolean = callContext.isDocker } diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala index 22eb6763b53..4bac258312e 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardFinalizationActor.scala @@ -24,8 +24,7 @@ trait StandardFinalizationActorParams { def configurationDescriptor: BackendConfigurationDescriptor } -case class DefaultStandardFinalizationActorParams -( +case class DefaultStandardFinalizationActorParams( workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], jobExecutionMap: JobExecutionMap, @@ -42,7 +41,7 @@ case class DefaultStandardFinalizationActorParams * @param standardParams Standard parameters. 
*/ class StandardFinalizationActor(val standardParams: StandardFinalizationActorParams) - extends BackendWorkflowFinalizationActor { + extends BackendWorkflowFinalizationActor { override lazy val workflowDescriptor: BackendWorkflowDescriptor = standardParams.workflowDescriptor override lazy val calls: Set[CommandCallNode] = standardParams.calls @@ -59,7 +58,7 @@ class StandardFinalizationActor(val standardParams: StandardFinalizationActorPar override def afterAll(): Future[Unit] = copyCallLogs() - lazy val logPaths: Seq[Path] = { + lazy val logPaths: Seq[Path] = for { actualWorkflowPath <- workflowPaths.toSeq (backendWorkflowDescriptor, keys) <- jobExecutionMap.toSeq @@ -67,9 +66,8 @@ class StandardFinalizationActor(val standardParams: StandardFinalizationActorPar jobPaths = actualWorkflowPath.toJobPaths(key, backendWorkflowDescriptor) logPath <- jobPaths.logPaths.values } yield logPath - } - protected def copyCallLogs(): Future[Unit] = { + protected def copyCallLogs(): Future[Unit] = /* NOTE: Only using one thread pool slot here to upload all the files for all the calls. Using the io-dispatcher defined in application.conf because this might take a while. @@ -77,21 +75,18 @@ class StandardFinalizationActor(val standardParams: StandardFinalizationActorPar pool for parallel uploads. Measure and optimize as necessary. Will likely need retry code at some level as well. - */ + */ workflowPaths match { case Some(paths) => Future(paths.finalCallLogsPath foreach copyCallLogs)(ioExecutionContext) case _ => Future.successful(()) } - } - private def copyCallLogs(callLogsPath: Path): Unit = { + private def copyCallLogs(callLogsPath: Path): Unit = copyLogs(callLogsPath, logPaths) - } - private def copyLogs(callLogsDirPath: Path, logPaths: Seq[Path]): Unit = { + private def copyLogs(callLogsDirPath: Path, logPaths: Seq[Path]): Unit = workflowPaths match { case Some(paths) => logPaths.foreach(PathCopier.copy(paths.executionRoot, _, callLogsDirPath)) case None => } - } } diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala index 95e898d6711..c4adba3cd59 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationActor.scala @@ -4,7 +4,12 @@ import akka.actor.ActorRef import cromwell.backend.io.WorkflowPaths import cromwell.backend.validation.RuntimeAttributesDefault import cromwell.backend.wfs.WorkflowPathBuilder -import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor, BackendWorkflowInitializationActor} +import cromwell.backend.{ + BackendConfigurationDescriptor, + BackendInitializationData, + BackendWorkflowDescriptor, + BackendWorkflowInitializationActor +} import cromwell.core.WorkflowOptions import cromwell.core.path.PathBuilder import wom.expression.WomExpression @@ -24,8 +29,7 @@ trait StandardInitializationActorParams { def configurationDescriptor: BackendConfigurationDescriptor } -case class DefaultInitializationActorParams -( +case class DefaultInitializationActorParams( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], @@ -42,7 +46,7 @@ case class DefaultInitializationActorParams * @param standardParams Standard parameters */ class StandardInitializationActor(val standardParams: StandardInitializationActorParams) - extends BackendWorkflowInitializationActor { 
+ extends BackendWorkflowInitializationActor { implicit protected val system = context.system @@ -50,16 +54,18 @@ class StandardInitializationActor(val standardParams: StandardInitializationActo override lazy val calls: Set[CommandCallNode] = standardParams.calls - override def beforeAll(): Future[Option[BackendInitializationData]] = { + override def beforeAll(): Future[Option[BackendInitializationData]] = initializationData map Option.apply - } lazy val initializationData: Future[StandardInitializationData] = - workflowPaths map { new StandardInitializationData(_, runtimeAttributesBuilder, classOf[StandardExpressionFunctions]) } + workflowPaths map { + new StandardInitializationData(_, runtimeAttributesBuilder, classOf[StandardExpressionFunctions]) + } lazy val expressionFunctions: Class[_ <: StandardExpressionFunctions] = classOf[StandardExpressionFunctions] - lazy val pathBuilders: Future[List[PathBuilder]] = standardParams.configurationDescriptor.pathBuilders(workflowDescriptor.workflowOptions) + lazy val pathBuilders: Future[List[PathBuilder]] = + standardParams.configurationDescriptor.pathBuilders(workflowDescriptor.workflowOptions) lazy val workflowPaths: Future[WorkflowPaths] = pathBuilders map { WorkflowPathBuilder.workflowPaths(configurationDescriptor, workflowDescriptor, _) } @@ -74,13 +80,11 @@ class StandardInitializationActor(val standardParams: StandardInitializationActo def runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder = StandardValidatedRuntimeAttributesBuilder.default(configurationDescriptor.backendRuntimeAttributesConfig) - override protected lazy val runtimeAttributeValidators: Map[String, (Option[WomExpression]) => Boolean] = { + override protected lazy val runtimeAttributeValidators: Map[String, (Option[WomExpression]) => Boolean] = runtimeAttributesBuilder.validatorMap - } - override protected def coerceDefaultRuntimeAttributes(options: WorkflowOptions): Try[Map[String, WomValue]] = { + override protected def coerceDefaultRuntimeAttributes(options: WorkflowOptions): Try[Map[String, WomValue]] = RuntimeAttributesDefault.workflowOptionsDefault(options, runtimeAttributesBuilder.coercionMap) - } def validateWorkflowOptions(): Try[Unit] = Success(()) @@ -93,19 +97,19 @@ class StandardInitializationActor(val standardParams: StandardInitializationActo val notSupportedAttrString = notSupportedAttributes mkString ", " workflowLogger.warn( s"Key/s [$notSupportedAttrString] is/are not supported by backend. " + - s"Unsupported attributes will not be part of job executions.") + s"Unsupported attributes will not be part of job executions." 
+ ) } } } - override def validate(): Future[Unit] = { + override def validate(): Future[Unit] = Future.fromTry( for { _ <- validateWorkflowOptions() _ <- checkForUnsupportedRuntimeAttributes() } yield () ) - } override protected lazy val workflowDescriptor: BackendWorkflowDescriptor = standardParams.workflowDescriptor diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala index e2618818e50..d317af2ada2 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardInitializationData.scala @@ -6,8 +6,7 @@ import cromwell.backend.io.{JobPaths, WorkflowPaths} import scala.concurrent.ExecutionContext -class StandardInitializationData -( +class StandardInitializationData( val workflowPaths: WorkflowPaths, val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, val standardExpressionFunctionsClass: Class[_ <: StandardExpressionFunctions] @@ -17,7 +16,10 @@ class StandardInitializationData private lazy val standardExpressionFunctionsConstructor = standardExpressionFunctionsClass.getConstructor(classOf[StandardExpressionFunctionsParams]) - def expressionFunctions(jobPaths: JobPaths, ioActorProxy: ActorRef, ec: ExecutionContext): StandardExpressionFunctions = { + def expressionFunctions(jobPaths: JobPaths, + ioActorProxy: ActorRef, + ec: ExecutionContext + ): StandardExpressionFunctions = { val pathBuilders = jobPaths.workflowPaths.pathBuilders val standardParams = DefaultStandardExpressionFunctionsParams(pathBuilders, jobPaths.callContext, ioActorProxy, ec) standardExpressionFunctionsConstructor.newInstance(standardParams) diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardJobExecutionActorParams.scala b/backend/src/main/scala/cromwell/backend/standard/StandardJobExecutionActorParams.scala index ac670617bb9..a595f589647 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardJobExecutionActorParams.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardJobExecutionActorParams.scala @@ -1,12 +1,18 @@ package cromwell.backend.standard import akka.actor.ActorRef -import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendJobDescriptor, MinimumRuntimeSettings} +import cromwell.backend.{ + BackendConfigurationDescriptor, + BackendInitializationData, + BackendJobDescriptor, + MinimumRuntimeSettings +} /** * Base trait for params passed to both the sync and async backend actors. */ trait StandardJobExecutionActorParams { + /** The service registry actor for key/value and metadata. */ def serviceRegistryActor: ActorRef diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala b/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala index 64f79f5ad7d..aec03977ad9 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardLifecycleActorFactory.scala @@ -16,6 +16,7 @@ import scala.concurrent.ExecutionContext * May be extended for using the standard sync/async backend pattern. */ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { + /** * Config values for the backend, and a pointer to the global config. 
* @@ -59,41 +60,65 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { * * @return the cache hit copying class. */ - lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = Option(classOf[DefaultStandardCacheHitCopyingActor]) + lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = Option( + classOf[DefaultStandardCacheHitCopyingActor] + ) /** * Returns the cache hit copying class. * * @return the cache hit copying class. */ - lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option(classOf[DefaultStandardFileHashingActor]) + lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option( + classOf[DefaultStandardFileHashingActor] + ) /** * Returns the finalization class. * * @return the finalization class. */ - lazy val finalizationActorClassOption: Option[Class[_ <: StandardFinalizationActor]] = Option(classOf[StandardFinalizationActor]) + lazy val finalizationActorClassOption: Option[Class[_ <: StandardFinalizationActor]] = Option( + classOf[StandardFinalizationActor] + ) - override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - serviceRegistryActor: ActorRef, restart: Boolean): Option[Props] = { + override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + serviceRegistryActor: ActorRef, + restart: Boolean + ): Option[Props] = { val params = workflowInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, restart) val props = Props(initializationActorClass, params).withDispatcher(Dispatcher.BackendDispatcher) Option(props) } - def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - serviceRegistryActor: ActorRef, restarting: Boolean): StandardInitializationActorParams = { - DefaultInitializationActorParams(workflowDescriptor, ioActor, calls, serviceRegistryActor, configurationDescriptor, restarting) - } + def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + serviceRegistryActor: ActorRef, + restarting: Boolean + ): StandardInitializationActorParams = + DefaultInitializationActorParams(workflowDescriptor, + ioActor, + calls, + serviceRegistryActor, + configurationDescriptor, + restarting + ) override def jobExecutionActorProps(jobDescriptor: BackendJobDescriptor, initializationDataOption: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActorOption: Option[ActorRef]): Props = { - val params = jobExecutionActorParams(jobDescriptor, initializationDataOption, serviceRegistryActor, - ioActor, backendSingletonActorOption) + backendSingletonActorOption: Option[ActorRef] + ): Props = { + val params = jobExecutionActorParams(jobDescriptor, + initializationDataOption, + serviceRegistryActor, + ioActor, + backendSingletonActorOption + ) Props(new StandardSyncExecutionActor(params)).withDispatcher(Dispatcher.BackendDispatcher) } @@ -101,25 +126,35 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { initializationDataOption: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActorOption: Option[ActorRef]): StandardSyncExecutionActorParams = { - 
DefaultStandardSyncExecutionActorParams(jobIdKey, serviceRegistryActor, ioActor, jobDescriptor, configurationDescriptor, - initializationDataOption, backendSingletonActorOption, asyncExecutionActorClass, MinimumRuntimeSettings()) - } + backendSingletonActorOption: Option[ActorRef] + ): StandardSyncExecutionActorParams = + DefaultStandardSyncExecutionActorParams( + jobIdKey, + serviceRegistryActor, + ioActor, + jobDescriptor, + configurationDescriptor, + initializationDataOption, + backendSingletonActorOption, + asyncExecutionActorClass, + MinimumRuntimeSettings() + ) - override def fileHashingActorProps: - Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Option[ActorRef]) => Props] = { - fileHashingActorClassOption map { - standardFileHashingActor => fileHashingActorInner(standardFileHashingActor) _ + override def fileHashingActorProps + : Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Option[ActorRef]) => Props] = + fileHashingActorClassOption map { standardFileHashingActor => + fileHashingActorInner(standardFileHashingActor) _ } - } - - def fileHashingActorInner(standardFileHashingActor: Class[_ <: StandardFileHashingActor]) - (jobDescriptor: BackendJobDescriptor, - initializationDataOption: Option[BackendInitializationData], - serviceRegistryActor: ActorRef, - ioActor: ActorRef, - fileHashCacheActor: Option[ActorRef]): Props = { - val params = fileHashingActorParams(jobDescriptor, initializationDataOption, serviceRegistryActor, ioActor, fileHashCacheActor) + + def fileHashingActorInner(standardFileHashingActor: Class[_ <: StandardFileHashingActor])( + jobDescriptor: BackendJobDescriptor, + initializationDataOption: Option[BackendInitializationData], + serviceRegistryActor: ActorRef, + ioActor: ActorRef, + fileHashCacheActor: Option[ActorRef] + ): Props = { + val params = + fileHashingActorParams(jobDescriptor, initializationDataOption, serviceRegistryActor, ioActor, fileHashCacheActor) Props(standardFileHashingActor, params).withDispatcher(BackendDispatcher) } @@ -127,26 +162,38 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { initializationDataOption: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - fileHashCacheActor: Option[ActorRef]): StandardFileHashingActorParams = { - DefaultStandardFileHashingActorParams( - jobDescriptor, initializationDataOption, serviceRegistryActor, ioActor, configurationDescriptor, fileHashCacheActor) - } + fileHashCacheActor: Option[ActorRef] + ): StandardFileHashingActorParams = + DefaultStandardFileHashingActorParams(jobDescriptor, + initializationDataOption, + serviceRegistryActor, + ioActor, + configurationDescriptor, + fileHashCacheActor + ) - override def cacheHitCopyingActorProps: - Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Int, Option[BlacklistCache]) => Props] = { - cacheHitCopyingActorClassOption map { - standardCacheHitCopyingActor => cacheHitCopyingActorInner(standardCacheHitCopyingActor) _ + override def cacheHitCopyingActorProps: Option[ + (BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Int, Option[BlacklistCache]) => Props + ] = + cacheHitCopyingActorClassOption map { standardCacheHitCopyingActor => + cacheHitCopyingActorInner(standardCacheHitCopyingActor) _ } - } - def cacheHitCopyingActorInner(standardCacheHitCopyingActor: Class[_ <: StandardCacheHitCopyingActor]) - (jobDescriptor: BackendJobDescriptor, - 
initializationDataOption: Option[BackendInitializationData], - serviceRegistryActor: ActorRef, - ioActor: ActorRef, - cacheCopyAttempt: Int, - blacklistCache: Option[BlacklistCache]): Props = { - val params = cacheHitCopyingActorParams(jobDescriptor, initializationDataOption, serviceRegistryActor, ioActor, cacheCopyAttempt, blacklistCache) + def cacheHitCopyingActorInner(standardCacheHitCopyingActor: Class[_ <: StandardCacheHitCopyingActor])( + jobDescriptor: BackendJobDescriptor, + initializationDataOption: Option[BackendInitializationData], + serviceRegistryActor: ActorRef, + ioActor: ActorRef, + cacheCopyAttempt: Int, + blacklistCache: Option[BlacklistCache] + ): Props = { + val params = cacheHitCopyingActorParams(jobDescriptor, + initializationDataOption, + serviceRegistryActor, + ioActor, + cacheCopyAttempt, + blacklistCache + ) Props(standardCacheHitCopyingActor, params).withDispatcher(BackendDispatcher) } @@ -155,71 +202,94 @@ trait StandardLifecycleActorFactory extends BackendLifecycleActorFactory { serviceRegistryActor: ActorRef, ioActor: ActorRef, cacheCopyAttempt: Int, - blacklistCache: Option[BlacklistCache]): StandardCacheHitCopyingActorParams = { - DefaultStandardCacheHitCopyingActorParams( - jobDescriptor, initializationDataOption, serviceRegistryActor, ioActor, configurationDescriptor, cacheCopyAttempt, blacklistCache) - } + blacklistCache: Option[BlacklistCache] + ): StandardCacheHitCopyingActorParams = + DefaultStandardCacheHitCopyingActorParams(jobDescriptor, + initializationDataOption, + serviceRegistryActor, + ioActor, + configurationDescriptor, + cacheCopyAttempt, + blacklistCache + ) - override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, - initializationData: Option[BackendInitializationData]): Option[Props] = { + override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + jobExecutionMap: JobExecutionMap, + workflowOutputs: CallOutputs, + initializationData: Option[BackendInitializationData] + ): Option[Props] = finalizationActorClassOption map { finalizationActorClass => - val params = workflowFinalizationActorParams(workflowDescriptor, ioActor, calls, jobExecutionMap, workflowOutputs, - initializationData) + val params = workflowFinalizationActorParams(workflowDescriptor, + ioActor, + calls, + jobExecutionMap, + workflowOutputs, + initializationData + ) Props(finalizationActorClass, params).withDispatcher(BackendDispatcher) } - } - def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, - initializationDataOption: Option[BackendInitializationData]): - StandardFinalizationActorParams = { - DefaultStandardFinalizationActorParams(workflowDescriptor, calls, jobExecutionMap, workflowOutputs, - initializationDataOption, configurationDescriptor) - } + def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + jobExecutionMap: JobExecutionMap, + workflowOutputs: CallOutputs, + initializationDataOption: Option[BackendInitializationData] + ): StandardFinalizationActorParams = + DefaultStandardFinalizationActorParams(workflowDescriptor, + calls, + jobExecutionMap, + workflowOutputs, + initializationDataOption, + 
configurationDescriptor + ) override def expressionLanguageFunctions(workflowDescriptor: BackendWorkflowDescriptor, jobKey: BackendJobDescriptorKey, initializationDataOption: Option[BackendInitializationData], ioActorProxy: ActorRef, - ec: ExecutionContext): - IoFunctionSet = { + ec: ExecutionContext + ): IoFunctionSet = { val standardInitializationData = BackendInitializationData.as[StandardInitializationData](initializationDataOption) val jobPaths = standardInitializationData.workflowPaths.toJobPaths(jobKey, workflowDescriptor) standardInitializationData.expressionFunctions(jobPaths, ioActorProxy, ec) } - + override def pathBuilders(initializationDataOption: Option[BackendInitializationData]) = { val standardInitializationData = BackendInitializationData.as[StandardInitializationData](initializationDataOption) standardInitializationData.workflowPaths.pathBuilders - } + } - override def getExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, backendConfig: Config, - initializationData: Option[BackendInitializationData]): Path = { + override def getExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, + backendConfig: Config, + initializationData: Option[BackendInitializationData] + ): Path = initializationData match { case Some(data) => data.asInstanceOf[StandardInitializationData].workflowPaths.executionRoot case None => super.getExecutionRootPath(workflowDescriptor, backendConfig, initializationData) } - } - override def getWorkflowExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, backendConfig: Config, - initializationData: Option[BackendInitializationData]): Path = { + override def getWorkflowExecutionRootPath(workflowDescriptor: BackendWorkflowDescriptor, + backendConfig: Config, + initializationData: Option[BackendInitializationData] + ): Path = initializationData match { case Some(data) => data.asInstanceOf[StandardInitializationData].workflowPaths.workflowRoot case None => super.getWorkflowExecutionRootPath(workflowDescriptor, backendConfig, initializationData) } - } - override def runtimeAttributeDefinitions(initializationDataOption: Option[BackendInitializationData]): Set[RuntimeAttributeDefinition] = { - val initializationData = BackendInitializationData. 
- as[StandardInitializationData](initializationDataOption) + override def runtimeAttributeDefinitions( + initializationDataOption: Option[BackendInitializationData] + ): Set[RuntimeAttributeDefinition] = { + val initializationData = BackendInitializationData.as[StandardInitializationData](initializationDataOption) initializationData.runtimeAttributesBuilder.definitions.toSet } override def dockerHashCredentials(workflowDescriptor: BackendWorkflowDescriptor, - initializationDataOption: Option[BackendInitializationData], - ): List[Any] = { + initializationDataOption: Option[BackendInitializationData] + ): List[Any] = BackendDockerConfiguration.build(configurationDescriptor.backendConfig).dockerCredentials.toList - } } diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala index 99e81fbf8b2..794602887e4 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardSyncExecutionActor.scala @@ -13,12 +13,12 @@ import scala.concurrent.{Future, Promise} import scala.util.control.NoStackTrace trait StandardSyncExecutionActorParams extends StandardJobExecutionActorParams { + /** The class for creating an async backend. */ def asyncJobExecutionActorClass: Class[_ <: StandardAsyncExecutionActor] } -case class DefaultStandardSyncExecutionActorParams -( +case class DefaultStandardSyncExecutionActorParams( override val jobIdKey: String, override val serviceRegistryActor: ActorRef, override val ioActor: ActorRef, @@ -52,7 +52,7 @@ case class DefaultStandardSyncExecutionActorParams * - Asynchronous actor completes the promise with a success or failure. */ class StandardSyncExecutionActor(val standardParams: StandardSyncExecutionActorParams) - extends BackendJobExecutionActor { + extends BackendJobExecutionActor { override val jobDescriptor: BackendJobDescriptor = standardParams.jobDescriptor override val configurationDescriptor: BackendConfigurationDescriptor = standardParams.configurationDescriptor @@ -61,10 +61,9 @@ class StandardSyncExecutionActor(val standardParams: StandardSyncExecutionActorP context.become(startup orElse receive) - private def startup: Receive = { - case AbortJobCommand => - context.parent ! JobAbortedResponse(jobDescriptor.key) - context.stop(self) + private def startup: Receive = { case AbortJobCommand => + context.parent ! JobAbortedResponse(jobDescriptor.key) + context.stop(self) } private def running(executor: ActorRef): Receive = { @@ -78,7 +77,7 @@ class StandardSyncExecutionActor(val standardParams: StandardSyncExecutionActorP completionPromise.tryFailure(e) throw new RuntimeException(s"Failure attempting to look up job id for key ${jobDescriptor.key}", e) } - + private def recovering(executor: ActorRef): Receive = running(executor).orElse { case KvPair(key, jobId) if key.key == jobIdKey => // Successful operation ID lookup. @@ -131,20 +130,17 @@ class StandardSyncExecutionActor(val standardParams: StandardSyncExecutionActorP serviceRegistryActor ! 
kvGet completionPromise.future } - - override def recover: Future[BackendJobExecutionResponse] = { + + override def recover: Future[BackendJobExecutionResponse] = onRestart(recovering) - } - - override def reconnectToAborting: Future[BackendJobExecutionResponse] = { + + override def reconnectToAborting: Future[BackendJobExecutionResponse] = onRestart(reconnectingToAbort) - } - override def reconnect: Future[BackendJobExecutionResponse] = { + override def reconnect: Future[BackendJobExecutionResponse] = onRestart(reconnecting) - } - def createAsyncParams(): StandardAsyncExecutionActorParams = { + def createAsyncParams(): StandardAsyncExecutionActorParams = DefaultStandardAsyncExecutionActorParams( standardParams.jobIdKey, standardParams.serviceRegistryActor, @@ -156,16 +152,14 @@ class StandardSyncExecutionActor(val standardParams: StandardSyncExecutionActorP completionPromise, standardParams.minimumRuntimeSettings ) - } def createAsyncProps(): Props = { val asyncParams = createAsyncParams() Props(standardParams.asyncJobExecutionActorClass, asyncParams) } - def createAsyncRefName(): String = { + def createAsyncRefName(): String = standardParams.asyncJobExecutionActorClass.getSimpleName - } def createAsyncRef(): ActorRef = { val props = createAsyncProps().withDispatcher(Dispatcher.BackendDispatcher) @@ -173,16 +167,18 @@ class StandardSyncExecutionActor(val standardParams: StandardSyncExecutionActorP context.actorOf(props, name) } - override def abort(): Unit = { + override def abort(): Unit = throw new UnsupportedOperationException("Abort is implemented via a custom receive of the message AbortJobCommand.") - } // Supervision strategy: if the async actor throws an exception, stop the actor and fail the job. - def jobFailingDecider: Decider = { - case exception: Exception => - completionPromise.tryFailure( - new RuntimeException(s"${createAsyncRefName()} failed and didn't catch its exception. This condition has been handled and the job will be marked as failed.", exception) with NoStackTrace) - Stop + def jobFailingDecider: Decider = { case exception: Exception => + completionPromise.tryFailure( + new RuntimeException( + s"${createAsyncRefName()} failed and didn't catch its exception. 
This condition has been handled and the job will be marked as failed.", + exception + ) with NoStackTrace + ) + Stop } override val supervisorStrategy: OneForOneStrategy = OneForOneStrategy()(jobFailingDecider) diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala b/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala index 4160d9a8522..77f890b4a19 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilder.scala @@ -17,8 +17,7 @@ import cromwell.backend.validation._ */ object StandardValidatedRuntimeAttributesBuilder { - private case class StandardValidatedRuntimeAttributesBuilderImpl - ( + private case class StandardValidatedRuntimeAttributesBuilderImpl( override val requiredValidations: Seq[RuntimeAttributesValidation[_]], override val customValidations: Seq[RuntimeAttributesValidation[_]] ) extends StandardValidatedRuntimeAttributesBuilder @@ -41,8 +40,8 @@ object StandardValidatedRuntimeAttributesBuilder { } private def withValidations(builder: StandardValidatedRuntimeAttributesBuilder, - customValidations: Seq[RuntimeAttributesValidation[_]]): - StandardValidatedRuntimeAttributesBuilder = { + customValidations: Seq[RuntimeAttributesValidation[_]] + ): StandardValidatedRuntimeAttributesBuilder = { val required = builder.requiredValidations val custom = builder.customValidations ++ customValidations StandardValidatedRuntimeAttributesBuilderImpl(custom, required) @@ -50,19 +49,18 @@ object StandardValidatedRuntimeAttributesBuilder { } sealed trait StandardValidatedRuntimeAttributesBuilder extends ValidatedRuntimeAttributesBuilder { + /** * Returns a new builder with the additional validation(s). * * @param validation Additional validation. * @return New builder with the validation. */ - final def withValidation(validation: RuntimeAttributesValidation[_]*): - StandardValidatedRuntimeAttributesBuilder = { + final def withValidation(validation: RuntimeAttributesValidation[_]*): StandardValidatedRuntimeAttributesBuilder = StandardValidatedRuntimeAttributesBuilder.withValidations(this, validation) - } /** Returns all the validations, those required for the standard backend, plus custom addons for the subclass. */ - override final lazy val validations: Seq[RuntimeAttributesValidation[_]] = requiredValidations ++ customValidations + final override lazy val validations: Seq[RuntimeAttributesValidation[_]] = requiredValidations ++ customValidations private[standard] def requiredValidations: Seq[RuntimeAttributesValidation[_]] diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/BlacklistCache.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/BlacklistCache.scala index ff3248123ff..fa5207c99c6 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/BlacklistCache.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/BlacklistCache.scala @@ -11,19 +11,20 @@ case object UntestedCacheResult extends BlacklistStatus sealed abstract class BlacklistCache(bucketCacheConfig: CacheConfig, hitCacheConfig: CacheConfig, - val name: Option[String]) { + val name: Option[String] +) { val bucketCache = { // Queries to the bucket blacklist cache return UntestedCacheResult by default. 
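    // (That default comes from the loader below: Guava's LoadingCache invokes the
    // CacheLoader on a miss, so get() always succeeds for unseen keys, and put()
    // later overwrites the entry once a copy attempt yields a real
    // GoodCacheResult or BadCacheResult status.)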
val unknownLoader = new CacheLoader[String, BlacklistStatus]() { override def load(key: String): BlacklistStatus = UntestedCacheResult } - CacheBuilder. - newBuilder(). - concurrencyLevel(bucketCacheConfig.concurrency). - maximumSize(bucketCacheConfig.size). - expireAfterWrite(bucketCacheConfig.ttl.length, bucketCacheConfig.ttl.unit). - build[String, BlacklistStatus](unknownLoader) + CacheBuilder + .newBuilder() + .concurrencyLevel(bucketCacheConfig.concurrency) + .maximumSize(bucketCacheConfig.size) + .expireAfterWrite(bucketCacheConfig.ttl.length, bucketCacheConfig.ttl.unit) + .build[String, BlacklistStatus](unknownLoader) } val hitCache = { @@ -32,12 +33,12 @@ sealed abstract class BlacklistCache(bucketCacheConfig: CacheConfig, override def load(key: CallCachingEntryId): BlacklistStatus = UntestedCacheResult } - CacheBuilder. - newBuilder(). - concurrencyLevel(hitCacheConfig.concurrency). - maximumSize(hitCacheConfig.size). - expireAfterWrite(hitCacheConfig.ttl.length, hitCacheConfig.ttl.unit). - build[CallCachingEntryId, BlacklistStatus](unknownLoader) + CacheBuilder + .newBuilder() + .concurrencyLevel(hitCacheConfig.concurrency) + .maximumSize(hitCacheConfig.size) + .expireAfterWrite(hitCacheConfig.ttl.length, hitCacheConfig.ttl.unit) + .build[CallCachingEntryId, BlacklistStatus](unknownLoader) } def getBlacklistStatus(hit: CallCachingEntryId): BlacklistStatus = hitCache.get(hit) @@ -53,8 +54,8 @@ sealed abstract class BlacklistCache(bucketCacheConfig: CacheConfig, def whitelist(bucket: String): Unit = bucketCache.put(bucket, GoodCacheResult) } -class RootWorkflowBlacklistCache(bucketCacheConfig: CacheConfig, hitCacheConfig: CacheConfig) extends - BlacklistCache(bucketCacheConfig = bucketCacheConfig, hitCacheConfig = hitCacheConfig, name = None) +class RootWorkflowBlacklistCache(bucketCacheConfig: CacheConfig, hitCacheConfig: CacheConfig) + extends BlacklistCache(bucketCacheConfig = bucketCacheConfig, hitCacheConfig = hitCacheConfig, name = None) -class GroupingBlacklistCache(bucketCacheConfig: CacheConfig, hitCacheConfig: CacheConfig, val group: String) extends - BlacklistCache(bucketCacheConfig = bucketCacheConfig, hitCacheConfig = hitCacheConfig, name = Option(group)) +class GroupingBlacklistCache(bucketCacheConfig: CacheConfig, hitCacheConfig: CacheConfig, val group: String) + extends BlacklistCache(bucketCacheConfig = bucketCacheConfig, hitCacheConfig = hitCacheConfig, name = Option(group)) diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManager.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManager.scala index a22631aeeb4..f1c6a988021 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManager.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManager.scala @@ -47,7 +47,7 @@ class CallCachingBlacklistManager(rootConfig: Config, logger: LoggingAdapter) { import CallCachingBlacklistManager.Defaults.Groupings._ for { _ <- blacklistGroupingWorkflowOptionKey - groupingsOption = rootConfig.as[Option[Config]] ("call-caching.blacklist-cache.groupings") + groupingsOption = rootConfig.as[Option[Config]]("call-caching.blacklist-cache.groupings") conf = CacheConfig.config(groupingsOption, defaultConcurrency = Concurrency, defaultSize = Size, defaultTtl = Ttl) } yield conf } @@ -74,7 +74,10 @@ class CallCachingBlacklistManager(rootConfig: Config, logger: LoggingAdapter) { // If configuration allows, build a cache of 
blacklist groupings to BlacklistCaches. private val blacklistGroupingsCache: Option[LoadingCache[String, BlacklistCache]] = { - def buildBlacklistGroupingsCache(groupingConfig: CacheConfig, bucketConfig: CacheConfig, hitConfig: CacheConfig): LoadingCache[String, BlacklistCache] = { + def buildBlacklistGroupingsCache(groupingConfig: CacheConfig, + bucketConfig: CacheConfig, + hitConfig: CacheConfig + ): LoadingCache[String, BlacklistCache] = { val emptyBlacklistCacheLoader = new CacheLoader[String, BlacklistCache]() { override def load(key: String): BlacklistCache = new GroupingBlacklistCache( bucketCacheConfig = bucketConfig, @@ -83,12 +86,12 @@ class CallCachingBlacklistManager(rootConfig: Config, logger: LoggingAdapter) { ) } - CacheBuilder. - newBuilder(). - concurrencyLevel(groupingConfig.concurrency). - maximumSize(groupingConfig.size). - expireAfterWrite(groupingConfig.ttl.length, groupingConfig.ttl.unit). - build[String, BlacklistCache](emptyBlacklistCacheLoader) + CacheBuilder + .newBuilder() + .concurrencyLevel(groupingConfig.concurrency) + .maximumSize(groupingConfig.size) + .expireAfterWrite(groupingConfig.ttl.length, groupingConfig.ttl.unit) + .build[String, BlacklistCache](emptyBlacklistCacheLoader) } for { @@ -121,8 +124,13 @@ class CallCachingBlacklistManager(rootConfig: Config, logger: LoggingAdapter) { val maybeCache = groupBlacklistCache orElse rootWorkflowBlacklistCache maybeCache collect { case group: GroupingBlacklistCache => - logger.info("Workflow {} using group blacklist cache '{}' containing blacklist status for {} hits and {} buckets.", - workflow.id, group.group, group.hitCache.size(), group.bucketCache.size()) + logger.info( + "Workflow {} using group blacklist cache '{}' containing blacklist status for {} hits and {} buckets.", + workflow.id, + group.group, + group.hitCache.size(), + group.bucketCache.size() + ) case _: RootWorkflowBlacklistCache => logger.info("Workflow {} using root workflow blacklist cache.", workflow.id) } diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/CopyingActorBlacklistCacheSupport.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/CopyingActorBlacklistCacheSupport.scala index 8ad88ae4f49..d90bba7ee90 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/CopyingActorBlacklistCacheSupport.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/CopyingActorBlacklistCacheSupport.scala @@ -4,7 +4,6 @@ import cromwell.backend.BackendCacheHitCopyingActor.CopyOutputsCommand import cromwell.core.io.{IoCommand, IoCopyCommand} import cromwell.services.CallCaching.CallCachingEntryId - object CopyingActorBlacklistCacheSupport { trait HasFormatting { def metricFormat: String = getClass.getName.toLowerCase.split('$').last @@ -52,13 +51,12 @@ trait CopyingActorBlacklistCacheSupport { } def publishBlacklistMetric(verb: Verb, entityType: EntityType, value: BlacklistStatus): Unit = { - val metricPath = NonEmptyList.of( - "job", - "callcaching", "blacklist", verb.metricFormat, entityType.metricFormat, value.toString) + val metricPath = + NonEmptyList.of("job", "callcaching", "blacklist", verb.metricFormat, entityType.metricFormat, value.toString) increment(metricPath) } - def blacklistAndMetricHit(blacklistCache: BlacklistCache, hit: CallCachingEntryId): Unit = { + def blacklistAndMetricHit(blacklistCache: BlacklistCache, hit: CallCachingEntryId): Unit = blacklistCache.getBlacklistStatus(hit) match { case UntestedCacheResult => blacklistCache.blacklist(hit) @@ -71,13 +69,13 
@@ trait CopyingActorBlacklistCacheSupport { // mark the hit as BadCacheResult and log this strangeness. log.warning( "Cache hit {} found in GoodCacheResult blacklist state, but cache hit copying has failed for permissions reasons. Overwriting status to BadCacheResult state.", - hit.id) + hit.id + ) blacklistCache.blacklist(hit) publishBlacklistMetric(Write, Hit, value = BadCacheResult) } - } - def blacklistAndMetricBucket(blacklistCache: BlacklistCache, bucket: String): Unit = { + def blacklistAndMetricBucket(blacklistCache: BlacklistCache, bucket: String): Unit = blacklistCache.getBlacklistStatus(bucket) match { case UntestedCacheResult => blacklistCache.blacklist(bucket) @@ -90,13 +88,13 @@ trait CopyingActorBlacklistCacheSupport { // mark the bucket as BadCacheResult and log this strangeness. log.warning( "Bucket {} found in GoodCacheResult blacklist state, but cache hit copying has failed for permissions reasons. Overwriting status to BadCacheResult state.", - bucket) + bucket + ) blacklistCache.blacklist(bucket) publishBlacklistMetric(Write, Bucket, value = BadCacheResult) } - } - def whitelistAndMetricHit(blacklistCache: BlacklistCache, hit: CallCachingEntryId): Unit = { + def whitelistAndMetricHit(blacklistCache: BlacklistCache, hit: CallCachingEntryId): Unit = blacklistCache.getBlacklistStatus(hit) match { case UntestedCacheResult => blacklistCache.whitelist(hit) @@ -107,11 +105,11 @@ trait CopyingActorBlacklistCacheSupport { // Don't overwrite this to GoodCacheResult, hopefully there are less weird cache hits out there. log.warning( "Cache hit {} found in BadCacheResult blacklist state, not overwriting to GoodCacheResult despite successful copy.", - hit.id) + hit.id + ) } - } - def whitelistAndMetricBucket(blacklistCache: BlacklistCache, bucket: String): Unit = { + def whitelistAndMetricBucket(blacklistCache: BlacklistCache, bucket: String): Unit = blacklistCache.getBlacklistStatus(bucket) match { case UntestedCacheResult => blacklistCache.whitelist(bucket) @@ -122,11 +120,11 @@ trait CopyingActorBlacklistCacheSupport { // of a successful copy. Don't overwrite this to GoodCacheResult, hopefully there are less weird cache hits out there. 
log.warning( "Bucket {} found in BadCacheResult blacklist state, not overwriting to GoodCacheResult despite successful copy.", - bucket) + bucket + ) } - } - def publishBlacklistReadMetrics(command: CopyOutputsCommand, cacheHit: CallCachingEntryId, cacheReadType: Product) = { + def publishBlacklistReadMetrics(command: CopyOutputsCommand, cacheHit: CallCachingEntryId, cacheReadType: Product) = for { c <- standardParams.blacklistCache hitBlacklistStatus = c.getBlacklistStatus(cacheHit) @@ -139,7 +137,6 @@ trait CopyingActorBlacklistCacheSupport { bucketBlacklistStatus = c.getBlacklistStatus(prefix) _ = publishBlacklistMetric(Read, Bucket, bucketBlacklistStatus) } yield () - } def isSourceBlacklisted(command: CopyOutputsCommand): Boolean = { val path = sourcePathFromCopyOutputsCommand(command) @@ -150,10 +147,9 @@ trait CopyingActorBlacklistCacheSupport { } yield value == BadCacheResult).getOrElse(false) } - def isSourceBlacklisted(hit: CallCachingEntryId): Boolean = { + def isSourceBlacklisted(hit: CallCachingEntryId): Boolean = (for { cache <- standardParams.blacklistCache value = cache.getBlacklistStatus(hit) } yield value == BadCacheResult).getOrElse(false) - } } diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala index c3d1452660e..3ab77bdf778 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala @@ -11,8 +11,10 @@ import cromwell.core.WorkflowId import cromwell.core.actor.RobustClientHelper.RequestTimeout import cromwell.core.io._ - -class RootWorkflowFileHashCacheActor private[callcaching](override val ioActor: ActorRef, workflowId: WorkflowId) extends Actor with ActorLogging with IoClientHelper { +class RootWorkflowFileHashCacheActor private[callcaching] (override val ioActor: ActorRef, workflowId: WorkflowId) + extends Actor + with ActorLogging + with IoClientHelper { case class FileHashRequester(replyTo: ActorRef, fileHashContext: FileHashContext, ioCommand: IoCommand[_]) sealed trait FileHashValue @@ -25,8 +27,9 @@ class RootWorkflowFileHashCacheActor private[callcaching](override val ioActor: // Hashing failed. case class FileHashFailure(error: String) extends FileHashValue - val cache: LoadingCache[String, FileHashValue] = CacheBuilder.newBuilder().build( - new CacheLoader[String, FileHashValue] { + val cache: LoadingCache[String, FileHashValue] = CacheBuilder + .newBuilder() + .build(new CacheLoader[String, FileHashValue] { override def load(key: String): FileHashValue = FileHashValueNotRequested }) @@ -66,44 +69,49 @@ class RootWorkflowFileHashCacheActor private[callcaching](override val ioActor: requesters foreach { case FileHashRequester(replyTo, fileHashContext, ioCommand) => replyTo ! Tuple2(fileHashContext, IoFailure(ioCommand, failure.failure)) } - cache.put(hashContext.file, FileHashFailure(s"Error hashing file '${hashContext.file}': ${failure.failure.getMessage}")) + cache.put(hashContext.file, + FileHashFailure(s"Error hashing file '${hashContext.file}': ${failure.failure.getMessage}") + ) } case other => log.warning(s"Root workflow file hash caching actor received unexpected message: $other") } // Invoke the supplied block on the happy path, handle unexpected states for IoSuccess and IoFailure with common code. 
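  // (handleHashResult below distinguishes three cache states: FileHashValueRequested
  // notifies the recorded requesters, FileHashValueNotRequested logs that an IoAck
  // arrived with no requesters on file, and any already-stored value is the result
  // of a benign race and needs no further action.)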
- private def handleHashResult(ioAck: IoAck[_], fileHashContext: FileHashContext) - (notifyRequestersAndCacheValue: List[FileHashRequester] => Unit): Unit = { + private def handleHashResult(ioAck: IoAck[_], fileHashContext: FileHashContext)( + notifyRequestersAndCacheValue: List[FileHashRequester] => Unit + ): Unit = cache.get(fileHashContext.file) match { case FileHashValueRequested(requesters) => notifyRequestersAndCacheValue(requesters.toList) case FileHashValueNotRequested => log.info(msgIoAckWithNoRequesters.format(fileHashContext.file)) notifyRequestersAndCacheValue(List.empty[FileHashRequester]) case _ => - // IoAck arrived when hash result is already saved in cache. This is a result of benign race condition. - // No further action is required. + // IoAck arrived when hash result is already saved in cache. This is a result of benign race condition. + // No further action is required. } - } - override protected def onTimeout(message: Any, to: ActorRef): Unit = { + override protected def onTimeout(message: Any, to: ActorRef): Unit = message match { case (fileHashContext: FileHashContext, _) => // Send this message to all requestors. cache.get(fileHashContext.file) match { case FileHashValueRequested(requesters) => - requesters.toList foreach { case FileHashRequester(replyTo, requestContext, ioCommand) => replyTo ! RequestTimeout(Tuple2(requestContext, ioCommand), replyTo) } + requesters.toList foreach { case FileHashRequester(replyTo, requestContext, ioCommand) => + replyTo ! RequestTimeout(Tuple2(requestContext, ioCommand), replyTo) + } // Allow for the possibility of trying again on a timeout. cache.put(fileHashContext.file, FileHashValueNotRequested) case FileHashValueNotRequested => - // Due to race condition, timeout came after the actual response. This is fine and no further action required. + // Due to race condition, timeout came after the actual response. This is fine and no further action required. case v => log.info(msgTimeoutAfterIoAck.format(v, fileHashContext.file)) } case other => - log.error(s"Programmer error! Root workflow file hash caching actor received unexpected timeout message: $other") + log.error( + s"Programmer error! 
Root workflow file hash caching actor received unexpected timeout message: $other" + ) } - } override def preRestart(reason: Throwable, message: Option[Any]): Unit = { log.error(reason, s"RootWorkflowFileHashCacheActor for workflow '$workflowId' is unexpectedly being restarted") @@ -121,5 +129,7 @@ object RootWorkflowFileHashCacheActor { case class IoHashCommandWithContext(ioHashCommand: IoHashCommand, fileHashContext: FileHashContext) - def props(ioActor: ActorRef, workflowId: WorkflowId): Props = Props(new RootWorkflowFileHashCacheActor(ioActor, workflowId)) + def props(ioActor: ActorRef, workflowId: WorkflowId): Props = Props( + new RootWorkflowFileHashCacheActor(ioActor, workflowId) + ) } diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala index f1662db858b..3c052f69f8e 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardCacheHitCopyingActor.scala @@ -9,7 +9,12 @@ import cromwell.backend.io.JobPaths import cromwell.backend.standard.StandardCachingActorHelper import cromwell.backend.standard.callcaching.CopyingActorBlacklistCacheSupport._ import cromwell.backend.standard.callcaching.StandardCacheHitCopyingActor._ -import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendJobDescriptor, MetricableCacheCopyErrorCategory} +import cromwell.backend.{ + BackendConfigurationDescriptor, + BackendInitializationData, + BackendJobDescriptor, + MetricableCacheCopyErrorCategory +} import cromwell.core.CallOutputs import cromwell.core.io._ import cromwell.core.logging.JobLogging @@ -46,8 +51,7 @@ trait StandardCacheHitCopyingActorParams { } /** A default implementation of the cache hit copying params. 
*/
-case class DefaultStandardCacheHitCopyingActorParams
-(
+case class DefaultStandardCacheHitCopyingActorParams(
  override val jobDescriptor: BackendJobDescriptor,
  override val backendInitializationDataOption: Option[BackendInitializationData],
  override val serviceRegistryActor: ActorRef,
@@ -81,32 +85,33 @@ object StandardCacheHitCopyingActor {
                                                 newDetritus: DetritusMap,
                                                 cacheHit: CallCachingEntryId,
                                                 returnCode: Option[Int]
-                                              ) {
+  ) {

    /**
      * Removes the command from commandsToWaitFor.
      * Returns a pair of the new state data and a CommandSetState indicating what to do next.
      */
-    def commandComplete(command: IoCommand[_]): (StandardCacheHitCopyingActorData, CommandSetState) = commandsToWaitFor match {
-      // If everything was already done send back current data and AllCommandsDone
-      case Nil => (this, AllCommandsDone)
-      case lastSubset :: Nil =>
-        val updatedSubset = lastSubset - command
-        // If the last subset is now empty, we're done
-        if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = List.empty), AllCommandsDone)
-        // otherwise update commandsToWaitFor and keep waiting
-        else (this.copy(commandsToWaitFor = List(updatedSubset)), StillWaiting)
-      case currentSubset :: otherSubsets =>
-        val updatedSubset = currentSubset - command
-        // This subset is done but there are other ones, remove it from commandsToWaitFor and return the next round of commands
-        if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = otherSubsets), NextSubSet(otherSubsets.head))
-        // otherwise update the head subset and keep waiting
-        else (this.copy(commandsToWaitFor = List(updatedSubset) ++ otherSubsets), StillWaiting)
-    }
+    def commandComplete(command: IoCommand[_]): (StandardCacheHitCopyingActorData, CommandSetState) =
+      commandsToWaitFor match {
+        // If everything was already done send back current data and AllCommandsDone
+        case Nil => (this, AllCommandsDone)
+        case lastSubset :: Nil =>
+          val updatedSubset = lastSubset - command
+          // If the last subset is now empty, we're done
+          if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = List.empty), AllCommandsDone)
+          // otherwise update commandsToWaitFor and keep waiting
+          else (this.copy(commandsToWaitFor = List(updatedSubset)), StillWaiting)
+        case currentSubset :: otherSubsets =>
+          val updatedSubset = currentSubset - command
+          // This subset is done but there are other ones; remove it from commandsToWaitFor and return the next round of commands
+          if (updatedSubset.isEmpty) (this.copy(commandsToWaitFor = otherSubsets), NextSubSet(otherSubsets.head))
+          // otherwise update the head subset and keep waiting
+          else (this.copy(commandsToWaitFor = List(updatedSubset) ++ otherSubsets), StillWaiting)
+      }
  }

  // Internal ADT to keep track of command set states
-  private[callcaching] sealed trait CommandSetState
+  sealed private[callcaching] trait CommandSetState
  private[callcaching] case object StillWaiting extends CommandSetState
  private[callcaching] case object AllCommandsDone extends CommandSetState
  private[callcaching] case class NextSubSet(commands: Set[IoCommand[_]]) extends CommandSetState
@@ -114,17 +119,23 @@ object StandardCacheHitCopyingActor {
  private val BucketRegex: Regex = "^gs://([^/]+).*".r
}

-class DefaultStandardCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) extends StandardCacheHitCopyingActor(standardParams)
+class DefaultStandardCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams)
+    extends StandardCacheHitCopyingActor(standardParams)

/**
  * Standard implementation of a 
BackendCacheHitCopyingActor. */ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHitCopyingActorParams) - extends FSM[StandardCacheHitCopyingActorState, Option[StandardCacheHitCopyingActorData]] - with JobLogging with StandardCachingActorHelper with IoClientHelper with CromwellInstrumentationActor with CopyingActorBlacklistCacheSupport { + extends FSM[StandardCacheHitCopyingActorState, Option[StandardCacheHitCopyingActorData]] + with JobLogging + with StandardCachingActorHelper + with IoClientHelper + with CromwellInstrumentationActor + with CopyingActorBlacklistCacheSupport { override lazy val jobDescriptor: BackendJobDescriptor = standardParams.jobDescriptor - override lazy val backendInitializationDataOption: Option[BackendInitializationData] = standardParams.backendInitializationDataOption + override lazy val backendInitializationDataOption: Option[BackendInitializationData] = + standardParams.backendInitializationDataOption override lazy val serviceRegistryActor: ActorRef = standardParams.serviceRegistryActor override lazy val configurationDescriptor: BackendConfigurationDescriptor = standardParams.configurationDescriptor protected val commandBuilder: IoCommandBuilder = DefaultIoCommandBuilder @@ -142,78 +153,78 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit /** Override this method if you want to provide an alternative way to duplicate files than copying them. */ protected def duplicate(copyPairs: Set[PathPair]): Option[Try[Unit]] = None - when(Idle) { - case Event(command @ CopyOutputsCommand(simpletons, jobDetritus, cacheHit, returnCode), None) => - val (nextState, cacheReadType) = - if (isSourceBlacklisted(cacheHit)) { - // We don't want to log this because blacklisting is a common and expected occurrence. - (failAndStop(BlacklistSkip(MetricableCacheCopyErrorCategory.HitBlacklisted)), ReadHitOnly) - } else if (isSourceBlacklisted(command)) { - // We don't want to log this because blacklisting is a common and expected occurrence. - (failAndStop(BlacklistSkip(MetricableCacheCopyErrorCategory.BucketBlacklisted)), ReadHitAndBucket) - } else { - // Try to make a Path of the callRootPath from the detritus - val next = lookupSourceCallRootPath(jobDetritus) match { - case Success(sourceCallRootPath) => - - // process simpletons and detritus to get updated paths and corresponding IoCommands - val processed = for { - (destinationCallOutputs, simpletonIoCommands) <- processSimpletons(simpletons, sourceCallRootPath) - (destinationDetritus, detritusIoCommands) <- processDetritus(jobDetritus) - } yield (destinationCallOutputs, destinationDetritus, simpletonIoCommands ++ detritusIoCommands) - - processed match { - case Success((destinationCallOutputs, destinationDetritus, detritusAndOutputsIoCommands)) => - duplicate(ioCommandsToCopyPairs(detritusAndOutputsIoCommands)) match { - // Use the duplicate override if exists - case Some(Success(_)) => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus) - case Some(Failure(failure)) => - // Something went wrong in the custom duplication code. 
We consider this loggable because it's most likely a user-permission error: - failAndStop(CopyAttemptError(failure)) - // Otherwise send the first round of IoCommands (file outputs and detritus) if any - case None if detritusAndOutputsIoCommands.nonEmpty => - detritusAndOutputsIoCommands foreach sendIoCommand - - // Add potential additional commands to the list - val additionalCommandsTry = - additionalIoCommands( - sourceCallRootPath = sourceCallRootPath, - originalSimpletons = simpletons, - newOutputs = destinationCallOutputs, - originalDetritus = jobDetritus, - newDetritus = destinationDetritus, - ) - additionalCommandsTry match { - case Success(additionalCommands) => - val allCommands = List(detritusAndOutputsIoCommands) ++ additionalCommands - goto(WaitingForIoResponses) using - Option(StandardCacheHitCopyingActorData( + when(Idle) { case Event(command @ CopyOutputsCommand(simpletons, jobDetritus, cacheHit, returnCode), None) => + val (nextState, cacheReadType) = + if (isSourceBlacklisted(cacheHit)) { + // We don't want to log this because blacklisting is a common and expected occurrence. + (failAndStop(BlacklistSkip(MetricableCacheCopyErrorCategory.HitBlacklisted)), ReadHitOnly) + } else if (isSourceBlacklisted(command)) { + // We don't want to log this because blacklisting is a common and expected occurrence. + (failAndStop(BlacklistSkip(MetricableCacheCopyErrorCategory.BucketBlacklisted)), ReadHitAndBucket) + } else { + // Try to make a Path of the callRootPath from the detritus + val next = lookupSourceCallRootPath(jobDetritus) match { + case Success(sourceCallRootPath) => + // process simpletons and detritus to get updated paths and corresponding IoCommands + val processed = for { + (destinationCallOutputs, simpletonIoCommands) <- processSimpletons(simpletons, sourceCallRootPath) + (destinationDetritus, detritusIoCommands) <- processDetritus(jobDetritus) + } yield (destinationCallOutputs, destinationDetritus, simpletonIoCommands ++ detritusIoCommands) + + processed match { + case Success((destinationCallOutputs, destinationDetritus, detritusAndOutputsIoCommands)) => + duplicate(ioCommandsToCopyPairs(detritusAndOutputsIoCommands)) match { + // Use the duplicate override if exists + case Some(Success(_)) => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus) + case Some(Failure(failure)) => + // Something went wrong in the custom duplication code. We consider this loggable because it's most likely a user-permission error: + failAndStop(CopyAttemptError(failure)) + // Otherwise send the first round of IoCommands (file outputs and detritus) if any + case None if detritusAndOutputsIoCommands.nonEmpty => + detritusAndOutputsIoCommands foreach sendIoCommand + + // Add potential additional commands to the list + val additionalCommandsTry = + additionalIoCommands( + sourceCallRootPath = sourceCallRootPath, + originalSimpletons = simpletons, + newOutputs = destinationCallOutputs, + originalDetritus = jobDetritus, + newDetritus = destinationDetritus + ) + additionalCommandsTry match { + case Success(additionalCommands) => + val allCommands = List(detritusAndOutputsIoCommands) ++ additionalCommands + goto(WaitingForIoResponses) using + Option( + StandardCacheHitCopyingActorData( commandsToWaitFor = allCommands, newJobOutputs = destinationCallOutputs, newDetritus = destinationDetritus, cacheHit = cacheHit, - returnCode = returnCode, - )) - // Something went wrong in generating duplication commands. 
-                          // We consider this a loggable error because we don't expect this to happen:
-                          case Failure(failure) => failAndStop(CopyAttemptError(failure))
-                      }
-                    case _ => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus)
-                  }
-
-                // Something went wrong in generating duplication commands. We consider this loggable error because we don't expect this to happen:
-                case Failure(failure) => failAndStop(CopyAttemptError(failure))
-              }
-
-            // Something went wrong in looking up the call root... loggable because we don't expect this to happen:
-            case Failure(failure) => failAndStop(CopyAttemptError(failure))
-          }
-          (next, ReadHitAndBucket)
+                            returnCode = returnCode
+                          )
+                        )
+                    // Something went wrong in generating duplication commands.
+                    // We consider this a loggable error because we don't expect this to happen:
+                    case Failure(failure) => failAndStop(CopyAttemptError(failure))
+                }
+              case _ => succeedAndStop(returnCode, destinationCallOutputs, destinationDetritus)
+            }
+
+          // Something went wrong in generating duplication commands. We consider this a loggable error because we don't expect this to happen:
+          case Failure(failure) => failAndStop(CopyAttemptError(failure))
+        }
+
+      // Something went wrong in looking up the call root... loggable because we don't expect this to happen:
+      case Failure(failure) => failAndStop(CopyAttemptError(failure))
      }
+      (next, ReadHitAndBucket)
+    }

-      publishBlacklistReadMetrics(command, cacheHit, cacheReadType)
+    publishBlacklistReadMetrics(command, cacheHit, cacheReadType)

-      nextState
+    nextState
  }

  when(WaitingForIoResponses) {
@@ -293,7 +304,7 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit
        _ = blacklistAndMetricHit(cache, data.cacheHit)
        prefix <- extractBlacklistPrefix(path)
        _ = blacklistAndMetricBucket(cache, prefix)
-      } yield()
+      } yield ()
      andThen
  }
@@ -309,8 +320,18 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit
  def succeedAndStop(returnCode: Option[Int], copiedJobOutputs: CallOutputs, detritusMap: DetritusMap): State = {
    import cromwell.services.metadata.MetadataService.implicits.MetadataAutoPutter
-    serviceRegistryActor.putMetadata(jobDescriptor.workflowDescriptor.id, Option(jobDescriptor.key), startMetadataKeyValues)
-    context.parent ! JobSucceededResponse(jobDescriptor.key, returnCode, copiedJobOutputs, Option(detritusMap), Seq.empty, None, resultGenerationMode = CallCached)
+    serviceRegistryActor.putMetadata(jobDescriptor.workflowDescriptor.id,
+                                     Option(jobDescriptor.key),
+                                     startMetadataKeyValues
+    )
+    context.parent ! JobSucceededResponse(jobDescriptor.key,
+                                          returnCode,
+                                          copiedJobOutputs,
+                                          Option(detritusMap),
+                                          Seq.empty,
+                                          None,
+                                          resultGenerationMode = CallCached
+    )
    context stop self
    stay()
  }
@@ -323,7 +344,10 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit
  /** If there are no responses pending this behaves like `failAndStop`, otherwise this goes to `FailedState` and waits
    * for all the pending responses to come back before stopping.
    */
-  def failAndAwaitPendingResponses(failure: CacheCopyFailure, command: IoCommand[_], data: StandardCacheHitCopyingActorData): State = {
+  def failAndAwaitPendingResponses(failure: CacheCopyFailure,
+                                   command: IoCommand[_],
+                                   data: StandardCacheHitCopyingActorData
+  ): State = {
    context.parent ! 
CopyingOutputsFailedResponse(jobDescriptor.key, standardParams.cacheCopyAttempt, failure) val (newData, commandState) = data.commandComplete(command) @@ -344,12 +368,16 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit stay() } - protected def lookupSourceCallRootPath(sourceJobDetritusFiles: Map[String, String]): Try[Path] = { + protected def lookupSourceCallRootPath(sourceJobDetritusFiles: Map[String, String]): Try[Path] = sourceJobDetritusFiles.get(JobPaths.CallRootPathKey) match { case Some(source) => getPath(source) - case None => Failure(new RuntimeException(s"${JobPaths.CallRootPathKey} wasn't found for call ${jobDescriptor.taskCall.fullyQualifiedName}")) + case None => + Failure( + new RuntimeException( + s"${JobPaths.CallRootPathKey} wasn't found for call ${jobDescriptor.taskCall.fullyQualifiedName}" + ) + ) } - } private def ioCommandsToCopyPairs(commands: Set[IoCommand[_]]): Set[PathPair] = commands collect { case copyCommand: IoCopyCommand => copyCommand.source -> copyCommand.destination @@ -358,18 +386,22 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit /** * Returns a pair of the list of simpletons with copied paths, and copy commands necessary to perform those copies. */ - protected def processSimpletons(womValueSimpletons: Seq[WomValueSimpleton], sourceCallRootPath: Path): Try[(CallOutputs, Set[IoCommand[_]])] = Try { - val (destinationSimpletons, ioCommands): (List[WomValueSimpleton], Set[IoCommand[_]]) = womValueSimpletons.toList.foldMap({ - case WomValueSimpleton(key, wdlFile: WomSingleFile) => - val sourcePath = getPath(wdlFile.value).get - val destinationPath = PathCopier.getDestinationFilePath(sourceCallRootPath, sourcePath, destinationCallRootPath) - - val destinationSimpleton = WomValueSimpleton(key, WomSingleFile(destinationPath.pathAsString)) - - // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap - List(destinationSimpleton) -> Set(commandBuilder.copyCommand(sourcePath, destinationPath).get) - case nonFileSimpleton => (List(nonFileSimpleton), Set.empty[IoCommand[_]]) - }) + protected def processSimpletons(womValueSimpletons: Seq[WomValueSimpleton], + sourceCallRootPath: Path + ): Try[(CallOutputs, Set[IoCommand[_]])] = Try { + val (destinationSimpletons, ioCommands): (List[WomValueSimpleton], Set[IoCommand[_]]) = + womValueSimpletons.toList.foldMap { + case WomValueSimpleton(key, wdlFile: WomSingleFile) => + val sourcePath = getPath(wdlFile.value).get + val destinationPath = + PathCopier.getDestinationFilePath(sourceCallRootPath, sourcePath, destinationCallRootPath) + + val destinationSimpleton = WomValueSimpleton(key, WomSingleFile(destinationPath.pathAsString)) + + // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap + List(destinationSimpleton) -> Set(commandBuilder.copyCommand(sourcePath, destinationPath).get) + case nonFileSimpleton => (List(nonFileSimpleton), Set.empty[IoCommand[_]]) + } (WomValueBuilder.toJobOutputs(jobDescriptor.taskCall.outputPorts, destinationSimpletons), ioCommands) } @@ -377,7 +409,7 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit /** * Returns the file (and ONLY the file detritus) intersection between the cache hit and this call. 
*/ - protected final def detritusFileKeys(sourceJobDetritusFiles: Map[String, String]): Set[String] = { + final protected def detritusFileKeys(sourceJobDetritusFiles: Map[String, String]): Set[String] = { val sourceKeys = sourceJobDetritusFiles.keySet val destinationKeys = destinationJobDetritusPaths.keySet sourceKeys.intersect(destinationKeys).filterNot(_ == JobPaths.CallRootPathKey) @@ -386,21 +418,22 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit /** * Returns a pair of the detritus with copied paths, and copy commands necessary to perform those copies. */ - protected def processDetritus(sourceJobDetritusFiles: Map[String, String]): Try[(Map[String, Path], Set[IoCommand[_]])] = Try { + protected def processDetritus( + sourceJobDetritusFiles: Map[String, String] + ): Try[(Map[String, Path], Set[IoCommand[_]])] = Try { val fileKeys = detritusFileKeys(sourceJobDetritusFiles) val zero = (Map.empty[String, Path], Set.empty[IoCommand[_]]) - val (destinationDetritus, ioCommands) = fileKeys.foldLeft(zero)({ - case ((detrituses, commands), detritus) => - val sourcePath = getPath(sourceJobDetritusFiles(detritus)).get - val destinationPath = destinationJobDetritusPaths(detritus) + val (destinationDetritus, ioCommands) = fileKeys.foldLeft(zero) { case ((detrituses, commands), detritus) => + val sourcePath = getPath(sourceJobDetritusFiles(detritus)).get + val destinationPath = destinationJobDetritusPaths(detritus) - val newDetrituses = detrituses + (detritus -> destinationPath) + val newDetrituses = detrituses + (detritus -> destinationPath) // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap (newDetrituses, commands + commandBuilder.copyCommand(sourcePath, destinationPath).get) - }) + } (destinationDetritus + (JobPaths.CallRootPathKey -> destinationCallRootPath), ioCommands) } @@ -412,13 +445,16 @@ abstract class StandardCacheHitCopyingActor(val standardParams: StandardCacheHit protected def additionalIoCommands(sourceCallRootPath: Path, originalSimpletons: Seq[WomValueSimpleton], newOutputs: CallOutputs, - originalDetritus: Map[String, String], - newDetritus: Map[String, Path]): Try[List[Set[IoCommand[_]]]] = Success(Nil) + originalDetritus: Map[String, String], + newDetritus: Map[String, Path] + ): Try[List[Set[IoCommand[_]]]] = Success(Nil) override protected def onTimeout(message: Any, to: ActorRef): Unit = { val exceptionMessage = message match { - case copyCommand: IoCopyCommand => s"The Cache hit copying actor timed out waiting for a response to copy ${copyCommand.source.pathAsString} to ${copyCommand.destination.pathAsString}" - case touchCommand: IoTouchCommand => s"The Cache hit copying actor timed out waiting for a response to touch ${touchCommand.file.pathAsString}" + case copyCommand: IoCopyCommand => + s"The Cache hit copying actor timed out waiting for a response to copy ${copyCommand.source.pathAsString} to ${copyCommand.destination.pathAsString}" + case touchCommand: IoTouchCommand => + s"The Cache hit copying actor timed out waiting for a response to touch ${touchCommand.file.pathAsString}" case other => s"The Cache hit copying actor timed out waiting for an unknown I/O operation: $other" } diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala index 78fdc078cb9..67199a7a062 100644 --- 
a/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/StandardFileHashingActor.scala
@@ -34,8 +34,7 @@ trait StandardFileHashingActorParams {
 }

 /** A default implementation of the file hashing params. */
-case class DefaultStandardFileHashingActorParams
-(
+case class DefaultStandardFileHashingActorParams(
   override val jobDescriptor: BackendJobDescriptor,
   override val backendInitializationDataOption: Option[BackendInitializationData],
   override val serviceRegistryActor: ActorRef,
@@ -46,7 +45,8 @@ case class DefaultStandardFileHashingActorParams

 case class FileHashContext(hashKey: HashKey, file: String)

-class DefaultStandardFileHashingActor(standardParams: StandardFileHashingActorParams) extends StandardFileHashingActor(standardParams) {
+class DefaultStandardFileHashingActor(standardParams: StandardFileHashingActorParams)
+    extends StandardFileHashingActor(standardParams) {
   override val ioCommandBuilder: IoCommandBuilder = DefaultIoCommandBuilder
 }

@@ -54,14 +54,20 @@ object StandardFileHashingActor {
   case class FileHashingFunction(work: (SingleFileHashRequest, LoggingAdapter) => Try[String])

   sealed trait BackendSpecificHasherCommand { def jobKey: JobKey }
-  final case class SingleFileHashRequest(jobKey: JobKey, hashKey: HashKey, file: WomFile, initializationData: Option[BackendInitializationData]) extends BackendSpecificHasherCommand
+  final case class SingleFileHashRequest(jobKey: JobKey,
+                                         hashKey: HashKey,
+                                         file: WomFile,
+                                         initializationData: Option[BackendInitializationData]
+  ) extends BackendSpecificHasherCommand

   sealed trait BackendSpecificHasherResponse extends SuccessfulHashResultMessage
-  case class FileHashResponse(hashResult: HashResult) extends BackendSpecificHasherResponse { override def hashes = Set(hashResult) }
+  case class FileHashResponse(hashResult: HashResult) extends BackendSpecificHasherResponse {
+    override def hashes = Set(hashResult)
+  }
 }

 abstract class StandardFileHashingActor(standardParams: StandardFileHashingActorParams)
-  extends Actor
+    extends Actor
     with ActorLogging
     with JobLogging
     with IoClientHelper
@@ -69,19 +75,21 @@ abstract class StandardFileHashingActor(standardParams: StandardFileHashingActor
     with Timers {
   override lazy val ioActor: ActorRef = standardParams.ioActor
   override lazy val jobDescriptor: BackendJobDescriptor = standardParams.jobDescriptor
-  override lazy val backendInitializationDataOption: Option[BackendInitializationData] = standardParams.backendInitializationDataOption
+  override lazy val backendInitializationDataOption: Option[BackendInitializationData] =
+    standardParams.backendInitializationDataOption
   override lazy val serviceRegistryActor: ActorRef = standardParams.serviceRegistryActor
   override lazy val configurationDescriptor: BackendConfigurationDescriptor = standardParams.configurationDescriptor

   protected def ioCommandBuilder: IoCommandBuilder = DefaultIoCommandBuilder

   def customHashStrategy(fileRequest: SingleFileHashRequest): Option[Try[String]] = None
-  
+
   def fileHashingReceive: Receive = {
     // Hash Request
     case fileRequest: SingleFileHashRequest =>
       customHashStrategy(fileRequest) match {
-        case Some(Success(result)) => context.parent ! FileHashResponse(HashResult(fileRequest.hashKey, HashValue(result)))
+        case Some(Success(result)) =>
+          context.parent ! FileHashResponse(HashResult(fileRequest.hashKey, HashValue(result)))
         case Some(Failure(failure)) => context.parent ! 
HashingFailedMessage(fileRequest.file.value, failure) case None => asyncHashing(fileRequest, context.parent) } @@ -93,7 +101,7 @@ abstract class StandardFileHashingActor(standardParams: StandardFileHashingActor case (fileHashRequest: FileHashContext, IoSuccess(_, other)) => context.parent ! HashingFailedMessage( fileHashRequest.file, - new Exception(s"Hash function supposedly succeeded but responded with '$other' instead of a string hash"), + new Exception(s"Hash function supposedly succeeded but responded with '$other' instead of a string hash") ) // Hash Failure @@ -124,9 +132,9 @@ abstract class StandardFileHashingActor(standardParams: StandardFileHashingActor } } - override def receive: Receive = ioReceive orElse fileHashingReceive + override def receive: Receive = ioReceive orElse fileHashingReceive - override protected def onTimeout(message: Any, to: ActorRef): Unit = { + override protected def onTimeout(message: Any, to: ActorRef): Unit = message match { case (_, ioHashCommand: IoHashCommand) => val fileAsString = ioHashCommand.file.pathAsString @@ -137,5 +145,4 @@ abstract class StandardFileHashingActor(standardParams: StandardFileHashingActor log.warning(s"Async File hashing actor received unexpected timeout message: $other") context.parent ! HashingServiceUnvailable } - } } diff --git a/backend/src/main/scala/cromwell/backend/standard/package.scala b/backend/src/main/scala/cromwell/backend/standard/package.scala index 089efd4f614..b25d74ec8e7 100644 --- a/backend/src/main/scala/cromwell/backend/standard/package.scala +++ b/backend/src/main/scala/cromwell/backend/standard/package.scala @@ -12,7 +12,7 @@ package object standard { def unapply(arg: StandardAdHocValue): Option[LocalizedAdHocValue] = arg.select[LocalizedAdHocValue] } } - + // This is used to represent an AdHocValue that might have been localized type StandardAdHocValue = AdHocValue :+: LocalizedAdHocValue :+: CNil } diff --git a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala index 06bc3d56c0a..c314a660122 100644 --- a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala +++ b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCode.scala @@ -10,18 +10,18 @@ object ContinueOnReturnCode { * Decides if a call/job continues upon a specific return code. */ sealed trait ContinueOnReturnCode { + /** * Returns true if the call is a success based on the return code. * * @param returnCode Return code from the process / script. * @return True if the call is a success. 
*/ - final def continueFor(returnCode: Int): Boolean = { + final def continueFor(returnCode: Int): Boolean = this match { case ContinueOnReturnCodeFlag(continue) => continue || returnCode == 0 case ContinueOnReturnCodeSet(returnCodes) => returnCodes.contains(returnCode) } - } } /** diff --git a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala index aaa2754f3e1..14dfb369d77 100644 --- a/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/ContinueOnReturnCodeValidation.scala @@ -24,9 +24,10 @@ import scala.util.Try */ object ContinueOnReturnCodeValidation { lazy val instance: RuntimeAttributesValidation[ContinueOnReturnCode] = new ContinueOnReturnCodeValidation - def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[ContinueOnReturnCode] = instance.withDefault( - configDefaultWdlValue(runtimeConfig) getOrElse WomInteger(0)) - def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WomValue] = instance.configDefaultWomValue(runtimeConfig) + def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[ContinueOnReturnCode] = + instance.withDefault(configDefaultWdlValue(runtimeConfig) getOrElse WomInteger(0)) + def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WomValue] = + instance.configDefaultWomValue(runtimeConfig) } class ContinueOnReturnCodeValidation extends RuntimeAttributesValidation[ContinueOnReturnCode] { @@ -40,7 +41,7 @@ class ContinueOnReturnCodeValidation extends RuntimeAttributesValidation[Continu case WomString(value) if Try(value.toBoolean).isSuccess => ContinueOnReturnCodeFlag(value.toBoolean).validNel case WomString(value) if Try(value.toInt).isSuccess => ContinueOnReturnCodeSet(Set(value.toInt)).validNel case WomInteger(value) => ContinueOnReturnCodeSet(Set(value)).validNel - case value@WomArray(_, seq) => + case value @ WomArray(_, seq) => val errorOrInts: ErrorOr[List[Int]] = (seq.toList map validateInt).sequence[ErrorOr, Int] errorOrInts match { case Valid(ints) => ContinueOnReturnCodeSet(ints.toSet).validNel @@ -54,8 +55,8 @@ class ContinueOnReturnCodeValidation extends RuntimeAttributesValidation[Continu case WomString(value) if Try(value.toBoolean).isSuccess => true case WomInteger(_) => true case WomArray(WomArrayType(WomStringType), elements) => - elements forall { - value => Try(value.valueString.toInt).isSuccess + elements forall { value => + Try(value.valueString.toInt).isSuccess } case WomArray(WomArrayType(WomIntegerType), _) => true } diff --git a/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala b/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala index b6c3fe0112b..eb8afc17c12 100644 --- a/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/DockerValidation.scala @@ -26,7 +26,7 @@ class DockerValidation extends StringRuntimeAttributesValidation(RuntimeAttribut override protected def invalidValueMessage(value: WomValue): String = super.missingValueMessage // NOTE: Docker's current test specs don't like WdlInteger, etc. auto converted to WdlString. 
- override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { - case WomString(value) => value.validNel + override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { case WomString(value) => + value.validNel } } diff --git a/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala b/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala index 8ac39e50d16..7b55f9657fa 100644 --- a/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/FailOnStderrValidation.scala @@ -18,9 +18,10 @@ import wom.values._ object FailOnStderrValidation { lazy val instance: RuntimeAttributesValidation[Boolean] = new FailOnStderrValidation - def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = instance.withDefault( - configDefaultWdlValue(runtimeConfig) getOrElse WomBoolean(false)) - def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WomValue] = instance.configDefaultWomValue(runtimeConfig) + def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = + instance.withDefault(configDefaultWdlValue(runtimeConfig) getOrElse WomBoolean(false)) + def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WomValue] = + instance.configDefaultWomValue(runtimeConfig) } class FailOnStderrValidation extends BooleanRuntimeAttributesValidation(RuntimeAttributesKeys.FailOnStderrKey) { diff --git a/backend/src/main/scala/cromwell/backend/validation/InformationValidation.scala b/backend/src/main/scala/cromwell/backend/validation/InformationValidation.scala index 39b9f0b3031..e8c9dd1d7b3 100644 --- a/backend/src/main/scala/cromwell/backend/validation/InformationValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/InformationValidation.scala @@ -25,18 +25,32 @@ import scala.util.{Failure, Success} * `withDefault` can be used to create a validation that defaults to a particular size. 
*/ object InformationValidation { - def instance(attributeName: String = RuntimeAttributesKeys.MemoryKey, defaultUnit: MemoryUnit, allowZero: Boolean = false): RuntimeAttributesValidation[MemorySize] = + def instance(attributeName: String = RuntimeAttributesKeys.MemoryKey, + defaultUnit: MemoryUnit, + allowZero: Boolean = false + ): RuntimeAttributesValidation[MemorySize] = new InformationValidation(attributeName, defaultUnit, allowZero) - def optional(attributeName: String = RuntimeAttributesKeys.MemoryKey, defaultUnit: MemoryUnit, allowZero: Boolean = false): OptionalRuntimeAttributesValidation[MemorySize] = + def optional(attributeName: String = RuntimeAttributesKeys.MemoryKey, + defaultUnit: MemoryUnit, + allowZero: Boolean = false + ): OptionalRuntimeAttributesValidation[MemorySize] = instance(attributeName, defaultUnit, allowZero).optional - def configDefaultString(attributeName: String = RuntimeAttributesKeys.MemoryKey, config: Option[Config], defaultUnit: MemoryUnit, allowZero: Boolean = false): Option[String] = + def configDefaultString(attributeName: String = RuntimeAttributesKeys.MemoryKey, + config: Option[Config], + defaultUnit: MemoryUnit, + allowZero: Boolean = false + ): Option[String] = instance(attributeName, defaultUnit, allowZero).configDefaultValue(config) - def withDefaultMemory(attributeName: String = RuntimeAttributesKeys.MemoryKey, memorySize: String, defaultUnit: MemoryUnit, allowZero: Boolean = false): RuntimeAttributesValidation[MemorySize] = { + def withDefaultMemory(attributeName: String = RuntimeAttributesKeys.MemoryKey, + memorySize: String, + defaultUnit: MemoryUnit, + allowZero: Boolean = false + ): RuntimeAttributesValidation[MemorySize] = MemorySize.parse(memorySize) match { case Success(memory) => instance(attributeName, defaultUnit, allowZero).withDefault(WomLong(memory.bytes.toLong)) - case Failure(_) => instance(attributeName, defaultUnit, allowZero).withDefault(BadDefaultAttribute(WomString(memorySize.toString))) + case Failure(_) => + instance(attributeName, defaultUnit, allowZero).withDefault(BadDefaultAttribute(WomString(memorySize.toString))) } - } private[validation] val wrongAmountFormat = "Expecting %s runtime attribute value greater than 0 but got %s" @@ -44,39 +58,56 @@ object InformationValidation { "Expecting %s runtime attribute to be an Integer or String with format '8 GB'." 
+ " Exception: %s" - private[validation] def validateString(attributeName: String, wdlString: WomString, allowZero: Boolean): ErrorOr[MemorySize] = + private[validation] def validateString(attributeName: String, + wdlString: WomString, + allowZero: Boolean + ): ErrorOr[MemorySize] = validateString(attributeName, wdlString.value, allowZero) - private[validation] def validateString(attributeName: String, value: String, allowZero: Boolean): ErrorOr[MemorySize] = { + private[validation] def validateString(attributeName: String, + value: String, + allowZero: Boolean + ): ErrorOr[MemorySize] = MemorySize.parse(value) match { - case scala.util.Success(memorySize: MemorySize) if memorySize.amount > 0 || (memorySize.amount == 0 && allowZero) => + case scala.util.Success(memorySize: MemorySize) + if memorySize.amount > 0 || (memorySize.amount == 0 && allowZero) => memorySize.to(MemoryUnit.GB).validNel case scala.util.Success(memorySize: MemorySize) => wrongAmountFormat.format(attributeName, memorySize.amount).invalidNel case scala.util.Failure(throwable) => wrongTypeFormat.format(attributeName, throwable.getMessage).invalidNel } - } - private[validation] def validateInteger(attributeName: String, wdlInteger: WomInteger, defaultUnit: MemoryUnit, allowZero: Boolean): ErrorOr[MemorySize] = + private[validation] def validateInteger(attributeName: String, + wdlInteger: WomInteger, + defaultUnit: MemoryUnit, + allowZero: Boolean + ): ErrorOr[MemorySize] = validateInteger(attributeName, wdlInteger.value, defaultUnit, allowZero) - private[validation] def validateInteger(attributeName: String, value: Int, defaultUnit: MemoryUnit, allowZero: Boolean): ErrorOr[MemorySize] = { + private[validation] def validateInteger(attributeName: String, + value: Int, + defaultUnit: MemoryUnit, + allowZero: Boolean + ): ErrorOr[MemorySize] = if (value < 0 || (value == 0 && !allowZero)) wrongAmountFormat.format(attributeName, value).invalidNel else MemorySize(value.toDouble, defaultUnit).to(MemoryUnit.GB).validNel - } - def validateLong(attributeName: String, value: Long, defaultUnit: MemoryUnit, allowZero: Boolean): ErrorOr[MemorySize] = { + def validateLong(attributeName: String, + value: Long, + defaultUnit: MemoryUnit, + allowZero: Boolean + ): ErrorOr[MemorySize] = if (value < 0 || (value == 0 && !allowZero)) wrongAmountFormat.format(attributeName, value).invalidNel else MemorySize(value.toDouble, defaultUnit).to(MemoryUnit.GB).validNel - } } -class InformationValidation(attributeName: String, defaultUnit: MemoryUnit, allowZero: Boolean = false) extends RuntimeAttributesValidation[MemorySize] { +class InformationValidation(attributeName: String, defaultUnit: MemoryUnit, allowZero: Boolean = false) + extends RuntimeAttributesValidation[MemorySize] { import InformationValidation._ diff --git a/backend/src/main/scala/cromwell/backend/validation/MaxRetriesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/MaxRetriesValidation.scala index fdfd6467a85..f2c9ba9522e 100644 --- a/backend/src/main/scala/cromwell/backend/validation/MaxRetriesValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/MaxRetriesValidation.scala @@ -21,8 +21,8 @@ object MaxRetriesValidation { lazy val instance: RuntimeAttributesValidation[Int] = new MaxRetriesValidation(MaxRetriesKey) lazy val optional: OptionalRuntimeAttributesValidation[Int] = instance.optional - def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = instance.withDefault( - configDefaultWomValue(runtimeConfig) getOrElse 
WomInteger(0)) + def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = + instance.withDefault(configDefaultWomValue(runtimeConfig) getOrElse WomInteger(0)) def configDefaultWomValue(config: Option[Config]): Option[WomValue] = instance.configDefaultWomValue(config) } diff --git a/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala b/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala index 32be28026f0..299e043ec8f 100644 --- a/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/MemoryValidation.scala @@ -25,16 +25,22 @@ import scala.util.{Failure, Success} object MemoryValidation { def instance(attributeName: String = RuntimeAttributesKeys.MemoryKey): RuntimeAttributesValidation[MemorySize] = new MemoryValidation(attributeName) - def optional(attributeName: String = RuntimeAttributesKeys.MemoryKey): OptionalRuntimeAttributesValidation[MemorySize] = + def optional( + attributeName: String = RuntimeAttributesKeys.MemoryKey + ): OptionalRuntimeAttributesValidation[MemorySize] = instance(attributeName).optional - def configDefaultString(attributeName: String = RuntimeAttributesKeys.MemoryKey, config: Option[Config]): Option[String] = + def configDefaultString(attributeName: String = RuntimeAttributesKeys.MemoryKey, + config: Option[Config] + ): Option[String] = instance(attributeName).configDefaultValue(config) - def withDefaultMemory(attributeName: String = RuntimeAttributesKeys.MemoryKey, memorySize: String): RuntimeAttributesValidation[MemorySize] = { + def withDefaultMemory(attributeName: String = RuntimeAttributesKeys.MemoryKey, + memorySize: String + ): RuntimeAttributesValidation[MemorySize] = MemorySize.parse(memorySize) match { case Success(memory) => instance(attributeName).withDefault(WomLong(memory.bytes.toLong)) case Failure(_) => instance(attributeName).withDefault(BadDefaultAttribute(WomString(memorySize.toString))) } - } } -class MemoryValidation(attributeName: String = RuntimeAttributesKeys.MemoryKey) extends InformationValidation(attributeName, MemoryUnit.Bytes) +class MemoryValidation(attributeName: String = RuntimeAttributesKeys.MemoryKey) + extends InformationValidation(attributeName, MemoryUnit.Bytes) diff --git a/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala index d4dc78f83b2..af41bf8ad52 100644 --- a/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/PrimitiveRuntimeAttributesValidation.scala @@ -44,24 +44,24 @@ sealed trait PrimitiveRuntimeAttributesValidation[A, B <: WomPrimitive] extends protected def validateCoercedValue(womValue: B): ErrorOr[A] } -class BooleanRuntimeAttributesValidation(override val key: String) extends - PrimitiveRuntimeAttributesValidation[Boolean, WomBoolean] { +class BooleanRuntimeAttributesValidation(override val key: String) + extends PrimitiveRuntimeAttributesValidation[Boolean, WomBoolean] { override val womType = WomBooleanType override protected def validateCoercedValue(womValue: WomBoolean): ErrorOr[Boolean] = womValue.value.validNel } -class FloatRuntimeAttributesValidation(override val key: String) extends - PrimitiveRuntimeAttributesValidation[Double, WomFloat] { +class FloatRuntimeAttributesValidation(override val key: String) + extends 
PrimitiveRuntimeAttributesValidation[Double, WomFloat] { override val womType = WomFloatType override protected def validateCoercedValue(womValue: WomFloat): ErrorOr[Double] = womValue.value.validNel } -class IntRuntimeAttributesValidation(override val key: String) extends - PrimitiveRuntimeAttributesValidation[Int, WomInteger] { +class IntRuntimeAttributesValidation(override val key: String) + extends PrimitiveRuntimeAttributesValidation[Int, WomInteger] { override val womType = WomIntegerType @@ -70,18 +70,19 @@ class IntRuntimeAttributesValidation(override val key: String) extends override protected def typeString: String = "an Integer" } -class PositiveIntRuntimeAttributesValidation(override val key: String) extends - PrimitiveRuntimeAttributesValidation[Int Refined Positive, WomInteger] { +class PositiveIntRuntimeAttributesValidation(override val key: String) + extends PrimitiveRuntimeAttributesValidation[Int Refined Positive, WomInteger] { override val womType = WomIntegerType - override protected def validateCoercedValue(womValue: WomInteger): ErrorOr[Int Refined Positive] = refineV[Positive](womValue.value).leftMap(NonEmptyList.one).toValidated + override protected def validateCoercedValue(womValue: WomInteger): ErrorOr[Int Refined Positive] = + refineV[Positive](womValue.value).leftMap(NonEmptyList.one).toValidated override protected def typeString: String = "an Integer" } -class StringRuntimeAttributesValidation(override val key: String) extends - PrimitiveRuntimeAttributesValidation[String, WomString] { +class StringRuntimeAttributesValidation(override val key: String) + extends PrimitiveRuntimeAttributesValidation[String, WomString] { override val womType = WomStringType diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala index d46bc7a66d5..706cc1336e8 100644 --- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala +++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala @@ -12,29 +12,32 @@ import scala.util.{Failure, Try} object RuntimeAttributesDefault { - def workflowOptionsDefault(options: WorkflowOptions, mapping: Map[String, Iterable[WomType]]): - Try[Map[String, WomValue]] = { + def workflowOptionsDefault(options: WorkflowOptions, + mapping: Map[String, Iterable[WomType]] + ): Try[Map[String, WomValue]] = options.defaultRuntimeOptions flatMap { attrs => - TryUtil.sequenceMap(attrs collect { - case (k, v) if mapping.contains(k) => - val maybeTriedValue = mapping(k) map { _.coerceRawValue(v) } find { _.isSuccess } getOrElse { - Failure(new RuntimeException(s"Could not parse JsonValue $v to valid WomValue for runtime attribute $k")) - } - k -> maybeTriedValue - }, "Failed to coerce default runtime options") - } recover { - case _: OptionNotFoundException => Map.empty[String, WomValue] + TryUtil.sequenceMap( + attrs collect { + case (k, v) if mapping.contains(k) => + val maybeTriedValue = mapping(k) map { _.coerceRawValue(v) } find { _.isSuccess } getOrElse { + Failure(new RuntimeException(s"Could not parse JsonValue $v to valid WomValue for runtime attribute $k")) + } + k -> maybeTriedValue + }, + "Failed to coerce default runtime options" + ) + } recover { case _: OptionNotFoundException => + Map.empty[String, WomValue] } - } /** * Traverse defaultsList in order, and for each of them add the missing (and only missing) runtime attributes. 
*/ - def withDefaults(attrs: EvaluatedRuntimeAttributes, defaultsList: List[EvaluatedRuntimeAttributes]): EvaluatedRuntimeAttributes = { - defaultsList.foldLeft(attrs)((acc, default) => { - acc ++ default.view.filterKeys(!acc.keySet.contains(_)) - }) - } + def withDefaults(attrs: EvaluatedRuntimeAttributes, + defaultsList: List[EvaluatedRuntimeAttributes] + ): EvaluatedRuntimeAttributes = + defaultsList.foldLeft(attrs)((acc, default) => acc ++ default.view.filterKeys(!acc.keySet.contains(_))) - def noValueFoundFor[A](attribute: String): ValidatedNel[String, A] = s"Can't find an attribute value for key $attribute".invalidNel + def noValueFoundFor[A](attribute: String): ValidatedNel[String, A] = + s"Can't find an attribute value for key $attribute".invalidNel } diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala index 65f2119a64b..5fa52dac53a 100644 --- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala @@ -26,61 +26,56 @@ object RuntimeAttributesValidation { if (unrecognized.nonEmpty) logger.warn(s"Unrecognized runtime attribute keys: $unrecognized") } - def validateDocker(docker: Option[WomValue], onMissingKey: => ErrorOr[Option[String]]): ErrorOr[Option[String]] = { + def validateDocker(docker: Option[WomValue], onMissingKey: => ErrorOr[Option[String]]): ErrorOr[Option[String]] = validateWithValidation(docker, DockerValidation.instance.optional, onMissingKey) - } - def validateFailOnStderr(value: Option[WomValue], onMissingKey: => ErrorOr[Boolean]): ErrorOr[Boolean] = { + def validateFailOnStderr(value: Option[WomValue], onMissingKey: => ErrorOr[Boolean]): ErrorOr[Boolean] = validateWithValidation(value, FailOnStderrValidation.instance, onMissingKey) - } def validateContinueOnReturnCode(value: Option[WomValue], - onMissingKey: => ErrorOr[ContinueOnReturnCode]): ErrorOr[ContinueOnReturnCode] = { + onMissingKey: => ErrorOr[ContinueOnReturnCode] + ): ErrorOr[ContinueOnReturnCode] = validateWithValidation(value, ContinueOnReturnCodeValidation.instance, onMissingKey) - } - def validateMemory(value: Option[WomValue], onMissingKey: => ErrorOr[MemorySize]): ErrorOr[MemorySize] = { + def validateMemory(value: Option[WomValue], onMissingKey: => ErrorOr[MemorySize]): ErrorOr[MemorySize] = validateWithValidation(value, MemoryValidation.instance(), onMissingKey) - } - def validateCpu(cpu: Option[WomValue], onMissingKey: => ErrorOr[Int Refined Positive]): ErrorOr[Int Refined Positive] = { + def validateCpu(cpu: Option[WomValue], + onMissingKey: => ErrorOr[Int Refined Positive] + ): ErrorOr[Int Refined Positive] = validateWithValidation(cpu, CpuValidation.instance, onMissingKey) - } - def validateMaxRetries(maxRetries: Option[WomValue], onMissingKey: => ErrorOr[Int]): ErrorOr[Int] = { + def validateMaxRetries(maxRetries: Option[WomValue], onMissingKey: => ErrorOr[Int]): ErrorOr[Int] = validateWithValidation(maxRetries, MaxRetriesValidation.instance, onMissingKey) - } private def validateWithValidation[T](valueOption: Option[WomValue], validation: RuntimeAttributesValidation[T], - onMissingValue: => ErrorOr[T]): ErrorOr[T] = { + onMissingValue: => ErrorOr[T] + ): ErrorOr[T] = valueOption match { case Some(value) => validation.validateValue.applyOrElse(value, (_: Any) => validation.invalidValueFailure(value)) case None => onMissingValue } - } - def 
validateInt(value: WomValue): ErrorOr[Int] = { + def validateInt(value: WomValue): ErrorOr[Int] = WomIntegerType.coerceRawValue(value) match { case scala.util.Success(WomInteger(i)) => i.intValue.validNel case _ => s"Could not coerce ${value.valueString} into an integer".invalidNel } - } - def validateBoolean(value: WomValue): ErrorOr[Boolean] = { + def validateBoolean(value: WomValue): ErrorOr[Boolean] = WomBooleanType.coerceRawValue(value) match { case scala.util.Success(WomBoolean(b)) => b.booleanValue.validNel case _ => s"Could not coerce ${value.valueString} into a boolean".invalidNel } - } - def parseMemoryString(k: String, s: WomString): ErrorOr[MemorySize] = { + def parseMemoryString(k: String, s: WomString): ErrorOr[MemorySize] = InformationValidation.validateString(k, s, allowZero = false) - } def withDefault[ValidatedType](validation: RuntimeAttributesValidation[ValidatedType], - default: WomValue): RuntimeAttributesValidation[ValidatedType] = { + default: WomValue + ): RuntimeAttributesValidation[ValidatedType] = new RuntimeAttributesValidation[ValidatedType] { override def key: String = validation.key @@ -101,10 +96,10 @@ object RuntimeAttributesValidation { override protected def staticDefaultOption = Option(default) } - } def withUsedInCallCaching[ValidatedType](validation: RuntimeAttributesValidation[ValidatedType], - usedInCallCachingValue: Boolean): RuntimeAttributesValidation[ValidatedType] = { + usedInCallCachingValue: Boolean + ): RuntimeAttributesValidation[ValidatedType] = new RuntimeAttributesValidation[ValidatedType] { override def key: String = validation.key @@ -125,10 +120,10 @@ object RuntimeAttributesValidation { override protected def staticDefaultOption = validation.staticDefaultOption } - } - def optional[ValidatedType](validation: RuntimeAttributesValidation[ValidatedType]): - OptionalRuntimeAttributesValidation[ValidatedType] = { + def optional[ValidatedType]( + validation: RuntimeAttributesValidation[ValidatedType] + ): OptionalRuntimeAttributesValidation[ValidatedType] = new OptionalRuntimeAttributesValidation[ValidatedType] { override def key: String = validation.key @@ -149,7 +144,6 @@ object RuntimeAttributesValidation { override protected def staticDefaultOption = validation.staticDefaultOption } - } /** * Returns the value from the attributes, unpacking options, and converting them to string values suitable for @@ -181,9 +175,9 @@ object RuntimeAttributesValidation { * @throws ClassCastException if the validation is called on an optional validation. */ def extract[A](runtimeAttributesValidation: RuntimeAttributesValidation[A], - validatedRuntimeAttributes: ValidatedRuntimeAttributes): A = { + validatedRuntimeAttributes: ValidatedRuntimeAttributes + ): A = extract(runtimeAttributesValidation.key, validatedRuntimeAttributes) - } /** * Returns the value from the attributes matching the key. @@ -192,14 +186,15 @@ object RuntimeAttributesValidation { * @param validatedRuntimeAttributes The values to search. * @return The value matching the key. 
*/ - def extract[A](key: String, - validatedRuntimeAttributes: ValidatedRuntimeAttributes): A = { + def extract[A](key: String, validatedRuntimeAttributes: ValidatedRuntimeAttributes): A = { val value = extractOption(key, validatedRuntimeAttributes) value match { // NOTE: Some(innerValue) aka Some.unapply() throws a `ClassCastException` to `Nothing$` as it can't tell the type case some: Some[_] => some.get.asInstanceOf[A] - case None => throw new RuntimeException( - s"$key not found in runtime attributes ${validatedRuntimeAttributes.attributes.keys}") + case None => + throw new RuntimeException( + s"$key not found in runtime attributes ${validatedRuntimeAttributes.attributes.keys}" + ) } } @@ -211,9 +206,9 @@ object RuntimeAttributesValidation { * @return The Some(value) matching the key or None. */ def extractOption[A](runtimeAttributesValidation: RuntimeAttributesValidation[A], - validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[A] = { + validatedRuntimeAttributes: ValidatedRuntimeAttributes + ): Option[A] = extractOption(runtimeAttributesValidation.key, validatedRuntimeAttributes) - } /** * Returns Some(value) from the attributes matching the key, or None. @@ -234,13 +229,12 @@ object RuntimeAttributesValidation { * @tparam A The type to cast the unpacked value. * @return The Some(value) matching the key or None. */ - final def unpackOption[A](value: Any): Option[A] = { + final def unpackOption[A](value: Any): Option[A] = value match { case None => None case Some(innerValue) => unpackOption(innerValue) case _ => Option(value.asInstanceOf[A]) } - } } /** @@ -251,13 +245,13 @@ case class BadDefaultAttribute(badDefaultValue: WomValue) extends WomValue { val womType = WomStringType } - /** * Performs a validation on a runtime attribute and returns some value. * * @tparam ValidatedType The type of the validated value. */ trait RuntimeAttributesValidation[ValidatedType] { + /** * Returns the key of the runtime attribute. * @@ -297,8 +291,8 @@ trait RuntimeAttributesValidation[ValidatedType] { * * @return true if the value can be validated. */ - protected def validateExpression: PartialFunction[WomValue, Boolean] = { - case womValue => coercion.exists(_ == womValue.womType) + protected def validateExpression: PartialFunction[WomValue, Boolean] = { case womValue => + coercion.exists(_ == womValue.womType) } /** @@ -322,7 +316,7 @@ trait RuntimeAttributesValidation[ValidatedType] { * * @return Wrapped invalidValueMessage. */ - protected final def invalidValueFailure(value: WomValue): ErrorOr[ValidatedType] = + final protected def invalidValueFailure(value: WomValue): ErrorOr[ValidatedType] = invalidValueMessage(value).invalidNel /** @@ -337,7 +331,7 @@ trait RuntimeAttributesValidation[ValidatedType] { * * @return Wrapped missingValueMessage. */ - protected final lazy val missingValueFailure: ErrorOr[ValidatedType] = missingValueMessage.invalidNel + final protected lazy val missingValueFailure: ErrorOr[ValidatedType] = missingValueMessage.invalidNel /** * Runs this validation on the value matching key. @@ -347,12 +341,11 @@ trait RuntimeAttributesValidation[ValidatedType] { * @param values The full set of values. * @return The error or valid value for this key. 
*/ - def validate(values: Map[String, WomValue]): ErrorOr[ValidatedType] = { + def validate(values: Map[String, WomValue]): ErrorOr[ValidatedType] = values.get(key) match { case Some(value) => validateValue.applyOrElse(value, (_: Any) => invalidValueFailure(value)) case None => validateNone } - } /** * Used during initialization, returning true if the expression __may be__ valid. @@ -371,7 +364,7 @@ trait RuntimeAttributesValidation[ValidatedType] { * @param wdlExpressionMaybe The optional expression. * @return True if the expression may be evaluated. */ - def validateOptionalWomValue(wdlExpressionMaybe: Option[WomValue]): Boolean = { + def validateOptionalWomValue(wdlExpressionMaybe: Option[WomValue]): Boolean = wdlExpressionMaybe match { case None => staticDefaultOption.isDefined || validateNone.isValid case Some(wdlExpression: WdlExpression) => @@ -381,9 +374,8 @@ trait RuntimeAttributesValidation[ValidatedType] { } case Some(womValue) => validateExpression.applyOrElse(womValue, (_: Any) => false) } - } - def validateOptionalWomExpression(womExpressionMaybe: Option[WomExpression]): Boolean = { + def validateOptionalWomExpression(womExpressionMaybe: Option[WomExpression]): Boolean = womExpressionMaybe match { case None => staticDefaultOption.isDefined || validateNone.isValid case Some(womExpression) => @@ -392,7 +384,7 @@ trait RuntimeAttributesValidation[ValidatedType] { case Invalid(_) => true // If we can't evaluate it, we'll let it pass for now... } } - } + /** * Indicates whether this runtime attribute should be used in call caching calculations. * @@ -410,7 +402,7 @@ trait RuntimeAttributesValidation[ValidatedType] { * Returns an optional version of this validation. */ final lazy val optional: OptionalRuntimeAttributesValidation[ValidatedType] = - RuntimeAttributesValidation.optional(this) + RuntimeAttributesValidation.optional(this) /** * Returns a version of this validation with the default value. @@ -432,7 +424,7 @@ trait RuntimeAttributesValidation[ValidatedType] { * @param optionalRuntimeConfig Optional default runtime attributes config of a particular backend. * @return The new version of this validation. */ - final def configDefaultWomValue(optionalRuntimeConfig: Option[Config]): Option[WomValue] = { + final def configDefaultWomValue(optionalRuntimeConfig: Option[Config]): Option[WomValue] = optionalRuntimeConfig collect { case config if config.hasPath(key) => val value = config.getValue(key).unwrapped() @@ -442,13 +434,11 @@ trait RuntimeAttributesValidation[ValidatedType] { BadDefaultAttribute(WomString(value.toString)) } } - } - final def configDefaultValue(optionalRuntimeConfig: Option[Config]): Option[String] = { + final def configDefaultValue(optionalRuntimeConfig: Option[Config]): Option[String] = optionalRuntimeConfig collect { case config if config.hasPath(key) => config.getValue(key).unwrapped().toString } - } /* Methods below provide aliases to expose protected methods to the package. @@ -457,15 +447,15 @@ trait RuntimeAttributesValidation[ValidatedType] { access the protected values, except the `validation` package that uses these back doors. 
*/ - private[validation] final lazy val validateValuePackagePrivate = validateValue + final private[validation] lazy val validateValuePackagePrivate = validateValue - private[validation] final lazy val validateExpressionPackagePrivate = validateExpression + final private[validation] lazy val validateExpressionPackagePrivate = validateExpression - private[validation] final def invalidValueMessagePackagePrivate(value: WomValue) = invalidValueMessage(value) + final private[validation] def invalidValueMessagePackagePrivate(value: WomValue) = invalidValueMessage(value) - private[validation] final lazy val missingValueMessagePackagePrivate = missingValueMessage + final private[validation] lazy val missingValueMessagePackagePrivate = missingValueMessage - private[validation] final lazy val usedInCallCachingPackagePrivate = usedInCallCaching + final private[validation] lazy val usedInCallCachingPackagePrivate = usedInCallCaching } /** @@ -474,6 +464,7 @@ trait RuntimeAttributesValidation[ValidatedType] { * @tparam ValidatedType The type of the validated value. */ trait OptionalRuntimeAttributesValidation[ValidatedType] extends RuntimeAttributesValidation[Option[ValidatedType]] { + /** * Validates the wdl value. * @@ -484,13 +475,12 @@ trait OptionalRuntimeAttributesValidation[ValidatedType] extends RuntimeAttribut */ protected def validateOption: PartialFunction[WomValue, ErrorOr[ValidatedType]] - override final protected lazy val validateValue = new PartialFunction[WomValue, ErrorOr[Option[ValidatedType]]] { + final override protected lazy val validateValue = new PartialFunction[WomValue, ErrorOr[Option[ValidatedType]]] { override def isDefinedAt(womValue: WomValue): Boolean = validateOption.isDefinedAt(womValue) - override def apply(womValue: WomValue): Validated[NonEmptyList[String], Option[ValidatedType]] = { + override def apply(womValue: WomValue): Validated[NonEmptyList[String], Option[ValidatedType]] = validateOption.apply(womValue).map(Option.apply) - } } - override final protected lazy val validateNone: ErrorOr[None.type] = None.validNel[String] + final override protected lazy val validateNone: ErrorOr[None.type] = None.validNel[String] } diff --git a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala index 6e199c4c4fe..d1a6af6f58d 100644 --- a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala +++ b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala @@ -38,18 +38,14 @@ trait ValidatedRuntimeAttributesBuilder { /** * Returns validators suitable for BackendWorkflowInitializationActor.runtimeAttributeValidators. */ - final lazy val validatorMap: Map[String, Option[WomExpression] => Boolean] = { - validations.map(validation => - validation.key -> validation.validateOptionalWomExpression _ - ).toMap - } + final lazy val validatorMap: Map[String, Option[WomExpression] => Boolean] = + validations.map(validation => validation.key -> validation.validateOptionalWomExpression _).toMap /** * Returns a map of coercions suitable for RuntimeAttributesDefault.workflowOptionsDefault. 
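// Sketch: wrapping a validation via `optional` changes the missing-key behaviour from a
// failure to Valid(None), per the validateNone override above. DockerValidation.optional
// (used by the specs later in this patch) is a convenient concrete example:
val dockerOpt: ErrorOr[Option[String]] = DockerValidation.optional.validate(Map.empty)
// -> Valid(None); when the key is present, the underlying String validation still runs.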
*/ - final lazy val coercionMap: Map[String, Iterable[WomType]] = { + final lazy val coercionMap: Map[String, Iterable[WomType]] = validations.map(validation => validation.key -> validation.coercion).toMap - } def unsupportedKeys(keys: Seq[String]): Seq[String] = keys.diff(validationKeys) @@ -61,11 +57,12 @@ trait ValidatedRuntimeAttributesBuilder { val runtimeAttributesErrorOr: ErrorOr[ValidatedRuntimeAttributes] = validate(attrs) runtimeAttributesErrorOr match { case Valid(runtimeAttributes) => runtimeAttributes - case Invalid(nel) => throw new RuntimeException with MessageAggregation with NoStackTrace { - override def exceptionContext: String = "Runtime attribute validation failed" + case Invalid(nel) => + throw new RuntimeException with MessageAggregation with NoStackTrace { + override def exceptionContext: String = "Runtime attribute validation failed" - override def errorMessages: Iterable[String] = nel.toList - } + override def errorMessages: Iterable[String] = nel.toList + } } } @@ -73,8 +70,8 @@ trait ValidatedRuntimeAttributesBuilder { val listOfKeysToErrorOrAnys: List[(String, ErrorOr[Any])] = validations.map(validation => validation.key -> validation.validate(values)).toList - val listOfErrorOrKeysToAnys: List[ErrorOr[(String, Any)]] = listOfKeysToErrorOrAnys map { - case (key, errorOrAny) => errorOrAny map { any => (key, any) } + val listOfErrorOrKeysToAnys: List[ErrorOr[(String, Any)]] = listOfKeysToErrorOrAnys map { case (key, errorOrAny) => + errorOrAny map { any => (key, any) } } import cats.syntax.traverse._ diff --git a/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala b/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala index ec3644674bb..325060aa4f6 100644 --- a/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala +++ b/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala @@ -3,4 +3,5 @@ package cromwell.backend.validation.exception import common.exception.MessageAggregation case class ValidationAggregatedException(override val exceptionContext: String, - override val errorMessages: Iterable[String]) extends MessageAggregation + override val errorMessages: Iterable[String] +) extends MessageAggregation diff --git a/backend/src/main/scala/cromwell/backend/wfs/DefaultWorkflowPathBuilder.scala b/backend/src/main/scala/cromwell/backend/wfs/DefaultWorkflowPathBuilder.scala index 43af11d8732..c112b8d5b6b 100644 --- a/backend/src/main/scala/cromwell/backend/wfs/DefaultWorkflowPathBuilder.scala +++ b/backend/src/main/scala/cromwell/backend/wfs/DefaultWorkflowPathBuilder.scala @@ -2,7 +2,6 @@ package cromwell.backend.wfs import cromwell.core.path.DefaultPathBuilder - object DefaultWorkflowPathBuilder extends WorkflowPathBuilder { override def pathBuilderOption(params: WorkflowFileSystemProviderParams) = Option(DefaultPathBuilder) } diff --git a/backend/src/main/scala/cromwell/backend/wfs/WorkflowPathBuilder.scala b/backend/src/main/scala/cromwell/backend/wfs/WorkflowPathBuilder.scala index bd39ae2c911..c15ac945c12 100644 --- a/backend/src/main/scala/cromwell/backend/wfs/WorkflowPathBuilder.scala +++ b/backend/src/main/scala/cromwell/backend/wfs/WorkflowPathBuilder.scala @@ -1,7 +1,7 @@ package cromwell.backend.wfs import com.typesafe.config.Config -import cromwell.backend.io.{WorkflowPathsWithDocker, WorkflowPaths} +import cromwell.backend.io.{WorkflowPaths, WorkflowPathsWithDocker} import 
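// Sketch, assuming an slf4j `logger` in scope and the default() signature used by the
// specs later in this patch: build folds every registered validation's ErrorOr together,
// and on Invalid throws a single RuntimeException mixing in MessageAggregation, so all
// attribute errors surface at once rather than one at a time.
val validated: ValidatedRuntimeAttributes = StandardValidatedRuntimeAttributesBuilder
  .default(None) // no backend-level default-runtime-attributes config
  .build(Map(RuntimeAttributesKeys.FailOnStderrKey -> WomBoolean(true)), logger)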
cromwell.backend.{BackendConfigurationDescriptor, BackendWorkflowDescriptor} import cromwell.core.WorkflowOptions import cromwell.core.path.PathBuilder @@ -11,14 +11,16 @@ import scala.concurrent.ExecutionContext object WorkflowPathBuilder { def workflowPaths(configurationDescriptor: BackendConfigurationDescriptor, workflowDescriptor: BackendWorkflowDescriptor, - pathBuilders: List[PathBuilder]): WorkflowPaths = { + pathBuilders: List[PathBuilder] + ): WorkflowPaths = new WorkflowPathsWithDocker(workflowDescriptor, configurationDescriptor.backendConfig, pathBuilders) - } } -final case class WorkflowFileSystemProviderParams(fileSystemConfig: Config, globalConfig: Config, +final case class WorkflowFileSystemProviderParams(fileSystemConfig: Config, + globalConfig: Config, workflowOptions: WorkflowOptions, - fileSystemExecutionContext: ExecutionContext) + fileSystemExecutionContext: ExecutionContext +) trait WorkflowPathBuilder { def pathBuilderOption(params: WorkflowFileSystemProviderParams): Option[PathBuilder] diff --git a/backend/src/test/scala/cromwell/backend/BackendSpec.scala b/backend/src/test/scala/cromwell/backend/BackendSpec.scala index 6c4c7a654e5..835abec8eb3 100644 --- a/backend/src/test/scala/cromwell/backend/BackendSpec.scala +++ b/backend/src/test/scala/cromwell/backend/BackendSpec.scala @@ -3,7 +3,12 @@ package cromwell.backend import _root_.wdl.draft2.model._ import _root_.wdl.transforms.draft2.wdlom2wom.WdlDraft2WomExecutableMakers._ import common.exception.AggregatedException -import cromwell.backend.BackendJobExecutionActor.{BackendJobExecutionResponse, JobFailedNonRetryableResponse, JobFailedRetryableResponse, JobSucceededResponse} +import cromwell.backend.BackendJobExecutionActor.{ + BackendJobExecutionResponse, + JobFailedNonRetryableResponse, + JobFailedRetryableResponse, + JobSucceededResponse +} import cromwell.backend.io.TestWorkflows._ import cromwell.core.callcaching.NoDocker import cromwell.core.labels.Labels @@ -25,18 +30,17 @@ trait BackendSpec extends ScalaFutures with Matchers with ScaledTimeSpans { implicit val defaultPatience: PatienceConfig = PatienceConfig(timeout = scaled(Span(10, Seconds)), interval = Span(500, Millis)) - def testWorkflow(workflow: TestWorkflow, - backend: BackendJobExecutionActor): Unit = { + def testWorkflow(workflow: TestWorkflow, backend: BackendJobExecutionActor): Unit = executeJobAndAssertOutputs(backend, workflow.expectedResponse) - } def buildWorkflowDescriptor(workflowSource: WorkflowSource, inputFileAsJson: Option[String], options: WorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])), runtime: String = "", - labels: Labels = Labels.empty): BackendWorkflowDescriptor = { - val wdlNamespace = WdlNamespaceWithWorkflow.load(workflowSource.replaceAll("RUNTIME", runtime), - Seq.empty[Draft2ImportResolver]).get + labels: Labels = Labels.empty + ): BackendWorkflowDescriptor = { + val wdlNamespace = + WdlNamespaceWithWorkflow.load(workflowSource.replaceAll("RUNTIME", runtime), Seq.empty[Draft2ImportResolver]).get val executable = wdlNamespace.toWomExecutable(inputFileAsJson, NoIoFunctionSet, strictValidation = true) match { case Left(errors) => fail(s"Fail to build wom executable: ${errors.toList.mkString(", ")}") case Right(e) => e @@ -45,7 +49,7 @@ trait BackendSpec extends ScalaFutures with Matchers with ScaledTimeSpans { BackendWorkflowDescriptor( WorkflowId.randomId(), executable.entryPoint, - executable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}), + 
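// Sketch, assuming a configurationDescriptor and workflowDescriptor in scope: workflowPaths
// is a thin factory, so every backend that calls it gets the Docker-aware layout
// (WorkflowPathsWithDocker) regardless of which path builders it supplies.
val paths: WorkflowPaths =
  WorkflowPathBuilder.workflowPaths(configurationDescriptor, workflowDescriptor, List(DefaultPathBuilder))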
executable.resolvedExecutableInputs.flatMap { case (port, v) => v.select[WomValue] map { port -> _ } }, options, labels, HogGroup("foo"), @@ -55,68 +59,84 @@ trait BackendSpec extends ScalaFutures with Matchers with ScaledTimeSpans { } def buildWdlWorkflowDescriptor(workflowSource: WorkflowSource, - inputFileAsJson: Option[String] = None, - options: WorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])), - runtime: String = "", - labels: Labels = Labels.empty): BackendWorkflowDescriptor = { - + inputFileAsJson: Option[String] = None, + options: WorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])), + runtime: String = "", + labels: Labels = Labels.empty + ): BackendWorkflowDescriptor = buildWorkflowDescriptor(workflowSource, inputFileAsJson, options, runtime, labels) - } - def fqnWdlMapToDeclarationMap(m: Map[String, WomValue]): Map[InputDefinition, WomValue] = { - m map { - case (fqn, v) => - val mockDeclaration = RequiredInputDefinition(fqn, v.womType) - mockDeclaration -> v + def fqnWdlMapToDeclarationMap(m: Map[String, WomValue]): Map[InputDefinition, WomValue] = + m map { case (fqn, v) => + val mockDeclaration = RequiredInputDefinition(fqn, v.womType) + mockDeclaration -> v } - } - def fqnMapToDeclarationMap(m: Map[OutputPort, WomValue]): Map[InputDefinition, WomValue] = { - m map { - case (outputPort, womValue) => RequiredInputDefinition(outputPort.name, womValue.womType) -> womValue + def fqnMapToDeclarationMap(m: Map[OutputPort, WomValue]): Map[InputDefinition, WomValue] = + m map { case (outputPort, womValue) => + RequiredInputDefinition(outputPort.name, womValue.womType) -> womValue } - } def jobDescriptorFromSingleCallWorkflow(workflowDescriptor: BackendWorkflowDescriptor, inputs: Map[String, WomValue], options: WorkflowOptions, - runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition]): BackendJobDescriptor = { - val call = workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t}).get + runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition] + ): BackendJobDescriptor = { + val call = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => t }.get val jobKey = BackendJobDescriptorKey(call, None, 1) val inputDeclarations: Map[InputDefinition, WomValue] = call.inputDefinitionMappings.map { - case (inputDef, resolved) => inputDef -> - resolved.select[WomValue].orElse( - resolved.select[WomExpression] - .map( - _.evaluateValue(inputs, NoIoFunctionSet).getOrElse(fail("Can't evaluate input")) + case (inputDef, resolved) => + inputDef -> + resolved + .select[WomValue] + .orElse( + resolved + .select[WomExpression] + .map( + _.evaluateValue(inputs, NoIoFunctionSet).getOrElse(fail("Can't evaluate input")) + ) ) - ).orElse( - resolved.select[OutputPort] flatMap { - case known if workflowDescriptor.knownValues.contains(known) => Option(workflowDescriptor.knownValues(known)) - case hasDefault if hasDefault.graphNode.isInstanceOf[OptionalGraphInputNodeWithDefault] => - Option(hasDefault.graphNode.asInstanceOf[OptionalGraphInputNodeWithDefault].default - .evaluateValue(inputs, NoIoFunctionSet).getOrElse(fail("Can't evaluate input"))) - case _ => None - } - ).getOrElse { - inputs(inputDef.name) - } + .orElse( + resolved.select[OutputPort] flatMap { + case known if workflowDescriptor.knownValues.contains(known) => + Option(workflowDescriptor.knownValues(known)) + case hasDefault if hasDefault.graphNode.isInstanceOf[OptionalGraphInputNodeWithDefault] => + Option( + hasDefault.graphNode + 
.asInstanceOf[OptionalGraphInputNodeWithDefault] + .default + .evaluateValue(inputs, NoIoFunctionSet) + .getOrElse(fail("Can't evaluate input")) + ) + case _ => None + } + ) + .getOrElse { + inputs(inputDef.name) + } }.toMap - val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(call.callable.runtimeAttributes, NoIoFunctionSet, Map.empty).getOrElse(fail("Failed to evaluate runtime attributes")) // .get is OK here because this is a test - val runtimeAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes) + val evaluatedAttributes = RuntimeAttributeDefinition + .evaluateRuntimeAttributes(call.callable.runtimeAttributes, NoIoFunctionSet, Map.empty) + .getOrElse(fail("Failed to evaluate runtime attributes")) // .get is OK here because this is a test + val runtimeAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes) BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, None, Map.empty) } def jobDescriptorFromSingleCallWorkflow(wdl: WorkflowSource, options: WorkflowOptions, - runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition]): BackendJobDescriptor = { + runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition] + ): BackendJobDescriptor = { val workflowDescriptor = buildWdlWorkflowDescriptor(wdl) - val call = workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t}).get + val call = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => t }.get val jobKey = BackendJobDescriptorKey(call, None, 1) val inputDeclarations = fqnMapToDeclarationMap(workflowDescriptor.knownValues) - val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(call.callable.runtimeAttributes, NoIoFunctionSet, inputDeclarations).getOrElse(fail("Failed to evaluate runtime attributes")) // .get is OK here because this is a test - val runtimeAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes) + val evaluatedAttributes = RuntimeAttributeDefinition + .evaluateRuntimeAttributes(call.callable.runtimeAttributes, NoIoFunctionSet, inputDeclarations) + .getOrElse(fail("Failed to evaluate runtime attributes")) // .get is OK here because this is a test + val runtimeAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes) BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, None, Map.empty) } @@ -124,25 +144,33 @@ trait BackendSpec extends ScalaFutures with Matchers with ScaledTimeSpans { runtime: String, attempt: Int, options: WorkflowOptions, - runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition]): BackendJobDescriptor = { + runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition] + ): BackendJobDescriptor = { val workflowDescriptor = buildWdlWorkflowDescriptor(wdl, runtime = runtime) - val call = workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t}).get + val call = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => t }.get val jobKey = BackendJobDescriptorKey(call, None, attempt) val inputDeclarations = fqnMapToDeclarationMap(workflowDescriptor.knownValues) - val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(call.callable.runtimeAttributes, NoIoFunctionSet, 
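// Sketch of the two-step pipeline the helpers above lean on: statically evaluate the
// task's runtime section, then layer workflow-option defaults over anything still missing.
// `call`, `definitions` and `options` are assumed in scope, as in the helpers themselves.
val evaluated: Map[String, WomValue] = RuntimeAttributeDefinition
  .evaluateRuntimeAttributes(call.callable.runtimeAttributes, NoIoFunctionSet, Map.empty)
  .getOrElse(Map.empty) // the tests fail() here instead of defaulting
val withDefaults: Map[String, WomValue] =
  RuntimeAttributeDefinition.addDefaultsToAttributes(definitions, options)(evaluated)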
inputDeclarations).getOrElse(fail("Failed to evaluate runtime attributes")) // .get is OK here because this is a test - val runtimeAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes) + val evaluatedAttributes = RuntimeAttributeDefinition + .evaluateRuntimeAttributes(call.callable.runtimeAttributes, NoIoFunctionSet, inputDeclarations) + .getOrElse(fail("Failed to evaluate runtime attributes")) // .get is OK here because this is a test + val runtimeAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, options)(evaluatedAttributes) BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, None, Map.empty) } def assertResponse(executionResponse: BackendJobExecutionResponse, - expectedResponse: BackendJobExecutionResponse): Unit = { + expectedResponse: BackendJobExecutionResponse + ): Unit = { (executionResponse, expectedResponse) match { - case (JobSucceededResponse(_, _, responseOutputs, _, _, _, _), JobSucceededResponse(_, _, expectedOutputs, _, _, _, _)) => + case (JobSucceededResponse(_, _, responseOutputs, _, _, _, _), + JobSucceededResponse(_, _, expectedOutputs, _, _, _, _) + ) => responseOutputs.outputs.size shouldBe expectedOutputs.outputs.size - responseOutputs.outputs foreach { - case (fqn, out) => - val expectedOut = expectedOutputs.outputs.collectFirst({case (p, v) if p.name == fqn.name => v}) - expectedOut.getOrElse(fail(s"Output ${fqn.name} not found in ${expectedOutputs.outputs.map(_._1.name)}")).valueString shouldBe out.valueString + responseOutputs.outputs foreach { case (fqn, out) => + val expectedOut = expectedOutputs.outputs.collectFirst { case (p, v) if p.name == fqn.name => v } + expectedOut + .getOrElse(fail(s"Output ${fqn.name} not found in ${expectedOutputs.outputs.map(_._1.name)}")) + .valueString shouldBe out.valueString } case (JobFailedNonRetryableResponse(_, failure, _), JobFailedNonRetryableResponse(_, expectedFailure, _)) => failure.getClass shouldBe expectedFailure.getClass @@ -157,19 +185,20 @@ trait BackendSpec extends ScalaFutures with Matchers with ScaledTimeSpans { private def concatenateCauseMessages(t: Throwable): String = t match { case null => "" - case ae: AggregatedException => ae.getMessage + " " + ae.throwables.map(innerT => concatenateCauseMessages(innerT.getCause)).mkString("\n") + case ae: AggregatedException => + ae.getMessage + " " + ae.throwables.map(innerT => concatenateCauseMessages(innerT.getCause)).mkString("\n") case other: Throwable => other.getMessage + concatenateCauseMessages(t.getCause) } def executeJobAndAssertOutputs(backend: BackendJobExecutionActor, - expectedResponse: BackendJobExecutionResponse): Unit = { + expectedResponse: BackendJobExecutionResponse + ): Unit = whenReady(backend.execute) { executionResponse => assertResponse(executionResponse, expectedResponse) } - } def firstJobDescriptorKey(workflowDescriptor: BackendWorkflowDescriptor): BackendJobDescriptorKey = { - val call = workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t}).get + val call = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => t }.get BackendJobDescriptorKey(call, None, 1) } } diff --git a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala index 55772d1de90..4fc1efd7381 100644 --- 
a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala @@ -19,23 +19,23 @@ import wom.values._ import scala.concurrent.Future import scala.util.Try - -class BackendWorkflowInitializationActorSpec extends TestKitSuite - with AnyFlatSpecLike with Matchers with TableDrivenPropertyChecks { +class BackendWorkflowInitializationActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with TableDrivenPropertyChecks { behavior of "BackendWorkflowInitializationActorSpec" - val testPredicateBackendWorkflowInitializationActorRef: - TestActorRef[TestPredicateBackendWorkflowInitializationActor] = + val testPredicateBackendWorkflowInitializationActorRef + : TestActorRef[TestPredicateBackendWorkflowInitializationActor] = TestActorRef[TestPredicateBackendWorkflowInitializationActor] - val testPredicateBackendWorkflowInitializationActor: - TestPredicateBackendWorkflowInitializationActor = + val testPredicateBackendWorkflowInitializationActor: TestPredicateBackendWorkflowInitializationActor = testPredicateBackendWorkflowInitializationActorRef.underlyingActor - val testContinueOnReturnCode: Option[WomValue] => Boolean = { + val testContinueOnReturnCode: Option[WomValue] => Boolean = testPredicateBackendWorkflowInitializationActor.continueOnReturnCodePredicate(valueRequired = false) - } val optionalConfig: Option[Config] = Option(TestConfig.optionalRuntimeConfig) @@ -66,18 +66,22 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite "womValue", WomString(""), WomString("z"), - WomFloat(0.0D), + WomFloat(0.0d), WomArray(WomArrayType(WomBooleanType), Seq(WomBoolean(true))), - WomArray(WomArrayType(WomFloatType), Seq(WomFloat(0.0D))) + WomArray(WomArrayType(WomFloatType), Seq(WomFloat(0.0d))) ) forAll(booleanRows) { value => val womValue = WomBoolean(value) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) val valid = - ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) + ContinueOnReturnCodeValidation + .default(optionalConfig) + .validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) valid.toEither.toOption.get should be(ContinueOnReturnCodeFlag(value)) } @@ -86,9 +90,13 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = WomString(value.toString) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) val valid = - ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) + ContinueOnReturnCodeValidation + .default(optionalConfig) + .validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) valid.toEither.toOption.get should be(ContinueOnReturnCodeFlag(value)) } @@ -97,7 +105,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = 
WdlExpression.fromString(value.toString) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) // NOTE: expressions are never valid to validate } @@ -105,9 +115,13 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = WomInteger(value) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) val valid = - ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) + ContinueOnReturnCodeValidation + .default(optionalConfig) + .validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } @@ -116,9 +130,13 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = WomString(value.toString) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) val valid = - ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) + ContinueOnReturnCodeValidation + .default(optionalConfig) + .validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } @@ -127,7 +145,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = WdlExpression.fromString(value.toString) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) // NOTE: expressions are never valid to validate } @@ -135,9 +155,13 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(value))) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) val valid = - ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) + ContinueOnReturnCodeValidation + .default(optionalConfig) + .validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } @@ -146,9 +170,13 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val 
womValue = WomArray(WomArrayType(WomStringType), Seq(WomString(value.toString))) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) val valid = - ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) + ContinueOnReturnCodeValidation + .default(optionalConfig) + .validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } @@ -157,7 +185,9 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = WomArray(WomArrayType(WdlExpressionType), Seq(WdlExpression.fromString(value.toString))) val result = false testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) // NOTE: expressions are never valid to validate } @@ -165,16 +195,22 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val womValue = WdlExpression.fromString(expression) val result = true testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) // NOTE: expressions are never valid to validate } forAll(invalidWdlValueRows) { womValue => val result = false testContinueOnReturnCode(Option(womValue)) should be(result) - ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be(result) + ContinueOnReturnCodeValidation.default(optionalConfig).validateOptionalWomValue(Option(womValue)) should be( + result + ) val valid = - ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) + ContinueOnReturnCodeValidation + .default(optionalConfig) + .validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) valid.toEither.swap.toOption.get.toList should contain theSameElementsAs List( "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" @@ -197,17 +233,18 @@ class TestPredicateBackendWorkflowInitializationActor extends BackendWorkflowIni override protected def coerceDefaultRuntimeAttributes(options: WorkflowOptions): Try[Map[String, WomValue]] = throw new UnsupportedOperationException("coerceDefaultRuntimeAttributes") - override def beforeAll(): Future[Option[BackendInitializationData]] = throw new UnsupportedOperationException("beforeAll") + override def beforeAll(): Future[Option[BackendInitializationData]] = throw new UnsupportedOperationException( + "beforeAll" + ) override def validate(): Future[Unit] = throw new UnsupportedOperationException("validate") override protected def workflowDescriptor: BackendWorkflowDescriptor = throw new UnsupportedOperationException("workflowDescriptor") - override protected def configurationDescriptor: 
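// Sketch: the value shapes the table-driven checks above accept, collected in one place.
val corcKey = RuntimeAttributesKeys.ContinueOnReturnCodeKey
val corc = ContinueOnReturnCodeValidation.default(optionalConfig)
corc.validate(Map(corcKey -> WomBoolean(true)))   // Valid(ContinueOnReturnCodeFlag(true))
corc.validate(Map(corcKey -> WomString("false"))) // Valid(ContinueOnReturnCodeFlag(false))
corc.validate(Map(corcKey -> WomInteger(1)))      // Valid(ContinueOnReturnCodeSet(Set(1)))
corc.validate(Map(corcKey -> WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(2))))) // Set(1, 2)
corc.validate(Map(corcKey -> WomString("value"))) // Invalid: not a Boolean, "true"/"false", or Array[Int]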
BackendConfigurationDescriptor = BackendConfigurationDescriptor(TestConfig.sampleBackendRuntimeConfig, ConfigFactory.empty()) + override protected def configurationDescriptor: BackendConfigurationDescriptor = + BackendConfigurationDescriptor(TestConfig.sampleBackendRuntimeConfig, ConfigFactory.empty()) - override def continueOnReturnCodePredicate(valueRequired: Boolean) - (wdlExpressionMaybe: Option[WomValue]): Boolean = { + override def continueOnReturnCodePredicate(valueRequired: Boolean)(wdlExpressionMaybe: Option[WomValue]): Boolean = super.continueOnReturnCodePredicate(valueRequired)(wdlExpressionMaybe) - } } diff --git a/backend/src/test/scala/cromwell/backend/MemorySizeSpec.scala b/backend/src/test/scala/cromwell/backend/MemorySizeSpec.scala index ac3c591f5df..57acada06d3 100644 --- a/backend/src/test/scala/cromwell/backend/MemorySizeSpec.scala +++ b/backend/src/test/scala/cromwell/backend/MemorySizeSpec.scala @@ -66,7 +66,7 @@ class MemorySizeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers memorySize.to(newUnit) shouldEqual result } } - + it should "round trip" in { List( "2 GB" diff --git a/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala b/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala index 0959f92577b..997fd6f5df5 100644 --- a/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala @@ -24,7 +24,7 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc behavior of "OutputEvaluator" private val FutureTimeout = 20.seconds - final implicit val blockingEc: ExecutionContextExecutor = ExecutionContext.fromExecutor( + implicit final val blockingEc: ExecutionContextExecutor = ExecutionContext.fromExecutor( Executors.newCachedThreadPool() ) @@ -32,50 +32,56 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc private def o1Expression = new WomExpression { override def sourceString: String = "o1" override def inputs: Set[String] = Set("input") - override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { + override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = Validated.fromOption(inputValues.get("input"), NonEmptyList.one("Can't find a value for 'input'")) - } - override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = throw new UnsupportedOperationException - override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = throw new UnsupportedOperationException + override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = + throw new UnsupportedOperationException + override def evaluateFiles(inputTypes: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = throw new UnsupportedOperationException } // Depends on a previous output private def o2Expression = new WomExpression { override def sourceString: String = "o2" override def inputs: Set[String] = Set("o1") - override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { + override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = Validated.fromOption(inputValues.get("o1"), NonEmptyList.one("Can't find a value for 'o1'")) - } - override def 
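// Sketch for the MemorySizeSpec hunk above, assuming the MemorySize(amount, unit)
// constructor and the MemoryUnit values that spec exercises:
val twoGb = MemorySize(2, MemoryUnit.GB)
val asMb = twoGb.to(MemoryUnit.MB) // the same quantity re-expressed in megabytes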
evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = throw new UnsupportedOperationException - override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = throw new UnsupportedOperationException + override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = + throw new UnsupportedOperationException + override def evaluateFiles(inputTypes: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = throw new UnsupportedOperationException } private def invalidWomExpression1 = new WomExpression { override def sourceString: String = "invalid1" override def inputs: Set[String] = Set.empty - override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { + override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = "Invalid expression 1".invalidNel - } - override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = { + override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = "Invalid expression 1".invalidNel - } - override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = { + override def evaluateFiles(inputTypes: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = "Invalid expression 1".invalidNel - } } private def invalidWomExpression2 = new WomExpression { override def sourceString: String = "invalid2" override def inputs: Set[String] = Set.empty - override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { + override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = "Invalid expression 2".invalidNel - } - override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = { + override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = "Invalid expression 2".invalidNel - } - override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = { + override def evaluateFiles(inputTypes: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = "Invalid expression 2".invalidNel - } } val exception = new Exception("Expression evaluation exception") @@ -83,15 +89,15 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc private def throwingWomExpression = new WomExpression { override def sourceString: String = "throwing" override def inputs: Set[String] = Set.empty - override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { + override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = throw exception - } - override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = { + override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = throw exception - } - override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = { + override def evaluateFiles(inputTypes: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + 
): ErrorOr[Set[FileEvaluation]] = throw exception - } } val mockInputs: Map[InputDefinition, WomValue] = Map( @@ -99,7 +105,7 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ) it should "evaluate valid jobs outputs" in { - val mockOutputs = List ( + val mockOutputs = List( OutputDefinition("o1", WomIntegerType, o1Expression), OutputDefinition("o2", WomIntegerType, o2Expression) ) @@ -109,16 +115,19 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val jobDescriptor = BackendJobDescriptor(null, key, null, mockInputs, null, None, null) Await.result(OutputEvaluator.evaluateOutputs(jobDescriptor, NoIoFunctionSet), FutureTimeout) match { - case ValidJobOutputs(outputs) => outputs shouldBe CallOutputs(Map( - jobDescriptor.taskCall.outputPorts.find(_.name == "o1").get -> WomInteger(5), - jobDescriptor.taskCall.outputPorts.find(_.name == "o2").get -> WomInteger(5) - )) + case ValidJobOutputs(outputs) => + outputs shouldBe CallOutputs( + Map( + jobDescriptor.taskCall.outputPorts.find(_.name == "o1").get -> WomInteger(5), + jobDescriptor.taskCall.outputPorts.find(_.name == "o2").get -> WomInteger(5) + ) + ) case _ => fail("Failed to evaluate outputs") } } it should "return an InvalidJobOutputs if the evaluation returns ErrorOrs" in { - val mockOutputs = List ( + val mockOutputs = List( OutputDefinition("o1", WomIntegerType, o1Expression), OutputDefinition("invalid1", WomIntegerType, invalidWomExpression1), OutputDefinition("invalid2", WomIntegerType, invalidWomExpression2) @@ -129,15 +138,17 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val jobDescriptor = BackendJobDescriptor(null, key, null, mockInputs, null, None, null) Await.result(OutputEvaluator.evaluateOutputs(jobDescriptor, NoIoFunctionSet), FutureTimeout) match { - case InvalidJobOutputs(errors) => errors shouldBe NonEmptyList.of( - "Bad output 'invalid1': Invalid expression 1", "Bad output 'invalid2': Invalid expression 2" - ) + case InvalidJobOutputs(errors) => + errors shouldBe NonEmptyList.of( + "Bad output 'invalid1': Invalid expression 1", + "Bad output 'invalid2': Invalid expression 2" + ) case _ => fail("Output evaluation should have failed") } } it should "return an JobOutputsEvaluationException if the evaluation throws an exception" in { - val mockOutputs = List ( + val mockOutputs = List( OutputDefinition("o1", WomIntegerType, o1Expression), OutputDefinition("invalid1", WomIntegerType, throwingWomExpression) ) diff --git a/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala b/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala index 99bec0baebd..91b6ab06617 100644 --- a/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala +++ b/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala @@ -19,12 +19,14 @@ class RuntimeAttributeValidationSpec extends AnyFlatSpec with Matchers with Scal val defaultValue = womValue.asWomExpression val validator: Option[WomExpression] => Boolean = _.contains(defaultValue) assert( - BackendWorkflowInitializationActor.validateRuntimeAttributes( - taskName = taskName, - defaultRuntimeAttributes = defaultRuntimeAttributes, - runtimeAttributes = Map.empty, - runtimeAttributeValidators = Map((attributeName, validator)), - ).isValid + BackendWorkflowInitializationActor + .validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = defaultRuntimeAttributes, + runtimeAttributes = Map.empty, + 
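// Sketch: the three evaluateOutputs outcomes the specs above exercise, assuming the
// single-argument JobOutputsEvaluationException shape.
Await.result(OutputEvaluator.evaluateOutputs(jobDescriptor, NoIoFunctionSet), FutureTimeout) match {
  case ValidJobOutputs(outputs) => // every output expression evaluated and coerced
  case InvalidJobOutputs(errors) => // ErrorOr failures aggregated into a NonEmptyList
  case JobOutputsEvaluationException(throwable) => // an expression threw instead of returning Invalid
}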
runtimeAttributeValidators = Map((attributeName, validator)) + ) + .isValid ) } @@ -33,12 +35,14 @@ class RuntimeAttributeValidationSpec extends AnyFlatSpec with Matchers with Scal val defaultRuntimeAttributes = Map(attributeName -> womValue) assert( - BackendWorkflowInitializationActor.validateRuntimeAttributes( - taskName = taskName, - defaultRuntimeAttributes = defaultRuntimeAttributes, - runtimeAttributes = Map.empty, - runtimeAttributeValidators = Map((attributeName, (_: Option[WomExpression]) => false)), - ).isInvalid + BackendWorkflowInitializationActor + .validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = defaultRuntimeAttributes, + runtimeAttributes = Map.empty, + runtimeAttributeValidators = Map((attributeName, (_: Option[WomExpression]) => false)) + ) + .isInvalid ) } @@ -49,47 +53,51 @@ class RuntimeAttributeValidationSpec extends AnyFlatSpec with Matchers with Scal val validator: Option[WomExpression] => Boolean = _.contains(runtimeWomExpression) assert( - BackendWorkflowInitializationActor.validateRuntimeAttributes( - taskName = taskName, - defaultRuntimeAttributes = defaultRuntimeAttributes, - runtimeAttributes = runtimeAttributes, - runtimeAttributeValidators = Map((attributeName, validator)), - ).isValid + BackendWorkflowInitializationActor + .validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = defaultRuntimeAttributes, + runtimeAttributes = runtimeAttributes, + runtimeAttributeValidators = Map((attributeName, validator)) + ) + .isValid ) } it should "fail validation if no setting is present but it should be" in forAll { (taskName: String, attributeName: String) => - val validator: Option[WomExpression] => Boolean = { case None => false case Some(x) => throw new RuntimeException(s"expecting the runtime validator to receive a None but got $x") } assert( - BackendWorkflowInitializationActor.validateRuntimeAttributes( - taskName = taskName, - defaultRuntimeAttributes = Map.empty, - runtimeAttributes = Map.empty, - runtimeAttributeValidators = Map((attributeName, validator)), - ).isInvalid + BackendWorkflowInitializationActor + .validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = Map.empty, + runtimeAttributes = Map.empty, + runtimeAttributeValidators = Map((attributeName, validator)) + ) + .isInvalid ) } it should "use the taskName and attribute name in correct places for failures" in forAll { (taskName: String, attributeName: String) => - val validator: Option[WomExpression] => Boolean = { case None => false case Some(x) => throw new RuntimeException(s"expecting the runtime validator to receive a None but got $x") } - BackendWorkflowInitializationActor.validateRuntimeAttributes(taskName, Map.empty, Map.empty, Map((attributeName,validator))).fold( - { errors => - val error = errors.toList.head - withClue("attribute name should be set correctly")(error.runtimeAttributeName shouldBe attributeName) - withClue("task name should be set correctly")(error.jobTag shouldBe taskName) - }, - _ => fail("expecting validation to fail!") - ) + BackendWorkflowInitializationActor + .validateRuntimeAttributes(taskName, Map.empty, Map.empty, Map((attributeName, validator))) + .fold( + { errors => + val error = errors.toList.head + withClue("attribute name should be set correctly")(error.runtimeAttributeName shouldBe attributeName) + withClue("task name should be set correctly")(error.jobTag shouldBe taskName) + }, + _ => fail("expecting validation to fail!") + ) } } diff --git 
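// Sketch: on Invalid, each failure carries both the attribute name and the job/task tag —
// exactly the two fields the property above pins down. "memory" is an arbitrary example key.
BackendWorkflowInitializationActor
  .validateRuntimeAttributes("myTask",
                             Map.empty,
                             Map.empty,
                             Map("memory" -> ((_: Option[WomExpression]) => false))
  )
  .fold(
    errors => errors.toList.map(e => s"${e.jobTag}: ${e.runtimeAttributeName}"),
    _ => List.empty[String]
  )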
a/backend/src/test/scala/cromwell/backend/TestConfig.scala b/backend/src/test/scala/cromwell/backend/TestConfig.scala index 42050dbe323..82e81818d13 100644 --- a/backend/src/test/scala/cromwell/backend/TestConfig.scala +++ b/backend/src/test/scala/cromwell/backend/TestConfig.scala @@ -30,7 +30,8 @@ object TestConfig { lazy val sampleBackendRuntimeConfig = ConfigFactory.parseString(sampleBackendRuntimeConfigString) - lazy val allRuntimeAttrsConfig = ConfigFactory.parseString(allBackendRuntimeAttrsString).getConfig("default-runtime-attributes") + lazy val allRuntimeAttrsConfig = + ConfigFactory.parseString(allBackendRuntimeAttrsString).getConfig("default-runtime-attributes") lazy val optionalRuntimeConfig = sampleBackendRuntimeConfig.getConfig("default-runtime-attributes") diff --git a/backend/src/test/scala/cromwell/backend/io/DirectoryFunctionsSpec.scala b/backend/src/test/scala/cromwell/backend/io/DirectoryFunctionsSpec.scala index 1a8a101acf8..94a5b00627c 100644 --- a/backend/src/test/scala/cromwell/backend/io/DirectoryFunctionsSpec.scala +++ b/backend/src/test/scala/cromwell/backend/io/DirectoryFunctionsSpec.scala @@ -18,10 +18,11 @@ class DirectoryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M override def copyFile(source: String, destination: String) = throw new UnsupportedOperationException() override def glob(pattern: String) = throw new UnsupportedOperationException() override def size(path: String) = throw new UnsupportedOperationException() - override def readFile(path: String, maxBytes: Option[Int], failOnOverflow: Boolean) = throw new UnsupportedOperationException() + override def readFile(path: String, maxBytes: Option[Int], failOnOverflow: Boolean) = + throw new UnsupportedOperationException() override def pathFunctions = throw new UnsupportedOperationException() override def writeFile(path: String, content: String) = throw new UnsupportedOperationException() - override implicit def ec = throw new UnsupportedOperationException() + implicit override def ec = throw new UnsupportedOperationException() override def createTemporaryDirectory(name: Option[String]) = throw new UnsupportedOperationException() override def asyncIo = throw new UnsupportedOperationException() } @@ -32,13 +33,12 @@ class DirectoryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M val innerDir = (rootDir / "innerDir").createDirectories() val link = innerDir / "linkToRootDirInInnerDir" link.symbolicLinkTo(rootDir) - - def listRecursively(path: String)(visited: Vector[String] = Vector.empty): Iterator[String] = { + + def listRecursively(path: String)(visited: Vector[String] = Vector.empty): Iterator[String] = Await.result(functions.listDirectory(path)(visited), Duration.Inf) flatMap { case IoFile(v) => List(v) case IoDirectory(v) => List(v) ++ listRecursively(v)(visited :+ path) } - } listRecursively(rootDir.pathAsString)().toList shouldBe List(innerDir, link).map(_.pathAsString) } diff --git a/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala b/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala index 286ed796311..21709bfda58 100644 --- a/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala +++ b/backend/src/test/scala/cromwell/backend/io/JobPathsSpec.scala @@ -26,7 +26,7 @@ class JobPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wi | } """.stripMargin - val backendConfig = ConfigFactory.parseString(configString) + val backendConfig = ConfigFactory.parseString(configString) val defaultBackendConfigDescriptor = 
BackendConfigurationDescriptor(backendConfig, TestConfig.globalConfig) "JobPaths" should "provide correct paths for a job" in { @@ -55,8 +55,9 @@ class JobPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wi fullPath(s"/cromwell-executions/wf_hello/$id/call-hello") jobPaths.callExecutionDockerRoot.pathAsString shouldBe fullPath(s"/cromwell-executions/wf_hello/$id/call-hello/execution") - jobPaths.toDockerPath(DefaultPathBuilder.get( - s"local-cromwell-executions/wf_hello/$id/call-hello/execution/stdout")).pathAsString shouldBe + jobPaths + .toDockerPath(DefaultPathBuilder.get(s"local-cromwell-executions/wf_hello/$id/call-hello/execution/stdout")) + .pathAsString shouldBe fullPath(s"/cromwell-executions/wf_hello/$id/call-hello/execution/stdout") jobPaths.toDockerPath(DefaultPathBuilder.get("/cromwell-executions/dock/path")).pathAsString shouldBe fullPath("/cromwell-executions/dock/path") diff --git a/backend/src/test/scala/cromwell/backend/io/TestWorkflows.scala b/backend/src/test/scala/cromwell/backend/io/TestWorkflows.scala index 8e280eadf4e..f956607a573 100644 --- a/backend/src/test/scala/cromwell/backend/io/TestWorkflows.scala +++ b/backend/src/test/scala/cromwell/backend/io/TestWorkflows.scala @@ -7,25 +7,26 @@ object TestWorkflows { case class TestWorkflow(workflowDescriptor: BackendWorkflowDescriptor, config: BackendConfigurationDescriptor, - expectedResponse: BackendJobExecutionResponse) + expectedResponse: BackendJobExecutionResponse + ) val HelloWorld = s""" - |task hello { - | String addressee = "you " - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | - | RUNTIME - |} - | - |workflow wf_hello { - | call hello - |} + |task hello { + | String addressee = "you " + | command { + | echo "Hello $${addressee}!" 
+ | } + | output { + | String salutation = read_string(stdout()) + | } + | + | RUNTIME + |} + | + |workflow wf_hello { + | call hello + |} """.stripMargin val GoodbyeWorld = @@ -46,25 +47,25 @@ object TestWorkflows { val InputFiles = s""" - |task localize { - | File inputFileFromJson - | File inputFileFromCallInputs - | command { - | cat $${inputFileFromJson} - | echo "" - | cat $${inputFileFromCallInputs} - | } - | output { - | Array[String] out = read_lines(stdout()) - | } - | - | RUNTIME - |} - | - |workflow wf_localize { - | File workflowFile - | call localize { input: inputFileFromCallInputs = workflowFile } - |} + |task localize { + | File inputFileFromJson + | File inputFileFromCallInputs + | command { + | cat $${inputFileFromJson} + | echo "" + | cat $${inputFileFromCallInputs} + | } + | output { + | Array[String] out = read_lines(stdout()) + | } + | + | RUNTIME + |} + | + |workflow wf_localize { + | File workflowFile + | call localize { input: inputFileFromCallInputs = workflowFile } + |} """.stripMargin val Sleep20 = @@ -83,25 +84,25 @@ object TestWorkflows { val Scatter = s""" - |task scattering { - | Int intNumber - | command { - | echo $${intNumber} - | } - | output { - | Int out = read_string(stdout()) - | } - |} - | - |workflow wf_scattering { - | Array[Int] numbers = [1, 2, 3] - | scatter (i in numbers) { - | call scattering { input: intNumber = i } - | } - |} + |task scattering { + | Int intNumber + | command { + | echo $${intNumber} + | } + | output { + | Int out = read_string(stdout()) + | } + |} + | + |workflow wf_scattering { + | Array[Int] numbers = [1, 2, 3] + | scatter (i in numbers) { + | call scattering { input: intNumber = i } + | } + |} """.stripMargin - val OutputProcess = { + val OutputProcess = """ |task localize { | File inputFile @@ -121,9 +122,8 @@ object TestWorkflows { | call localize |} """.stripMargin - } - val MissingOutputProcess = { + val MissingOutputProcess = """ |task localize { | command { @@ -137,5 +137,4 @@ object TestWorkflows { | call localize |} """.stripMargin - } } diff --git a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala index cb53276f499..790d42a7887 100644 --- a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala +++ b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala @@ -16,16 +16,15 @@ class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche def createConfig(values: Map[String, String]): Config = { val config = mock[Config] - values.foreach { - case (key: String, value: String) => - when(config.hasPath(key)).thenReturn(true) - when(config.getString(key)).thenReturn(value) + values.foreach { case (key: String, value: String) => + when(config.hasPath(key)).thenReturn(true) + when(config.getString(key)).thenReturn(value) } config } def rootConfig(root: Option[String], dockerRoot: Option[String]): Config = { - val values: Map[String,String] = root.map("root" -> _).toMap ++ dockerRoot.map("dockerRoot" -> _).toMap + val values: Map[String, String] = root.map("root" -> _).toMap ++ dockerRoot.map("dockerRoot" -> _).toMap createConfig(values) } @@ -82,9 +81,12 @@ class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche val expectedDockerRoot = dockerRoot.getOrElse(WorkflowPathsWithDocker.DefaultDockerRoot) workflowPaths.workflowRoot.pathAsString shouldBe - DefaultPathBuilder.get( - 
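// Sketch: the bare RUNTIME token in the WDL fixtures above is a placeholder, not WDL —
// BackendSpec.buildWorkflowDescriptor substitutes it before parsing, e.g.:
val concreteWdl = TestWorkflows.HelloWorld.replaceAll("RUNTIME", """runtime { docker: "ubuntu:latest" }""")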
s"$expectedRoot/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" - ).toAbsolutePath.pathAsString + DefaultPathBuilder + .get( + s"$expectedRoot/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" + ) + .toAbsolutePath + .pathAsString workflowPaths.dockerWorkflowRoot.pathAsString shouldBe s"$expectedDockerRoot/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" () } diff --git a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala index 0497c8a1ba1..bd3024eda9a 100644 --- a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala @@ -14,37 +14,36 @@ import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsValue} import wom.RuntimeAttributesKeys._ import wom.values._ -class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers - with MockSugar { +class StandardValidatedRuntimeAttributesBuilderSpec + extends AnyWordSpecLike + with CromwellTimeoutSpec + with Matchers + with MockSugar { val HelloWorld: String = s""" - |task hello { - | String addressee = "you" - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | - | RUNTIME - |} - | - |workflow hello { - | call hello - |} + |task hello { + | String addressee = "you" + | command { + | echo "Hello $${addressee}!" + | } + | output { + | String salutation = read_string(stdout()) + | } + | + | RUNTIME + |} + | + |workflow hello { + | call hello + |} """.stripMargin + val defaultRuntimeAttributes: Map[String, Any] = + Map(DockerKey -> None, FailOnStderrKey -> false, ContinueOnReturnCodeKey -> ContinueOnReturnCodeSet(Set(0))) - val defaultRuntimeAttributes: Map[String, Any] = Map( - DockerKey -> None, - FailOnStderrKey -> false, - ContinueOnReturnCodeKey -> ContinueOnReturnCodeSet(Set(0))) - - def workflowOptionsWithDefaultRuntimeAttributes(defaults: Map[String, JsValue]): WorkflowOptions = { + def workflowOptionsWithDefaultRuntimeAttributes(defaults: Map[String, JsValue]): WorkflowOptions = WorkflowOptions(JsObject(Map("default_runtime_attributes" -> JsObject(defaults)))) - } "SharedFileSystemValidatedRuntimeAttributesBuilder" should { "validate when there are no runtime attributes defined" in { @@ -75,8 +74,11 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with var warnings = List.empty[Any] val mockLogger = mock[Logger] mockLogger.warn(anyString).answers((warnings :+= _): Any => Unit) - assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, - includeDockerSupport = false, logger = mockLogger) + assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, + expectedRuntimeAttributes, + includeDockerSupport = false, + logger = mockLogger + ) warnings should contain theSameElementsAs List("Unrecognized runtime attribute keys: docker") } @@ -97,44 +99,58 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with var warnings = List.empty[Any] val mockLogger = mock[Logger] mockLogger.warn(anyString).answers((warnings :+= _): Any => Unit) - assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, - 
includeDockerSupport = false, logger = mockLogger) + assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, + expectedRuntimeAttributes, + includeDockerSupport = false, + logger = mockLogger + ) warnings should contain theSameElementsAs List("Unrecognized runtime attribute keys: docker") } "fail to validate an invalid failOnStderr entry" in { val runtimeAttributes = Map("failOnStderr" -> WomString("yes")) - assertRuntimeAttributesFailedCreation(runtimeAttributes, - "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'") + assertRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'" + ) } "use workflow options as default if failOnStdErr key is missing" in { val expectedRuntimeAttributes = defaultRuntimeAttributes + (FailOnStderrKey -> true) val workflowOptions = workflowOptionsWithDefaultRuntimeAttributes(Map(FailOnStderrKey -> JsBoolean(true))) val runtimeAttributes = Map.empty[String, WomValue] - assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, - workflowOptions = workflowOptions) + assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, + expectedRuntimeAttributes, + workflowOptions = workflowOptions + ) } "validate a valid continueOnReturnCode entry" in { val runtimeAttributes = Map("continueOnReturnCode" -> WomInteger(1)) - val expectedRuntimeAttributes = defaultRuntimeAttributes + (ContinueOnReturnCodeKey -> ContinueOnReturnCodeSet(Set(1))) + val expectedRuntimeAttributes = + defaultRuntimeAttributes + (ContinueOnReturnCodeKey -> ContinueOnReturnCodeSet(Set(1))) assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid continueOnReturnCode entry" in { val runtimeAttributes = Map("continueOnReturnCode" -> WomString("value")) - assertRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") + assertRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" + ) } "use workflow options as default if continueOnReturnCode key is missing" in { val expectedRuntimeAttributes = defaultRuntimeAttributes + (ContinueOnReturnCodeKey -> ContinueOnReturnCodeSet(Set(1, 2))) val workflowOptions = workflowOptionsWithDefaultRuntimeAttributes( - Map(ContinueOnReturnCodeKey -> JsArray(Vector(JsNumber(1), JsNumber(2))))) + Map(ContinueOnReturnCodeKey -> JsArray(Vector(JsNumber(1), JsNumber(2)))) + ) val runtimeAttributes = Map.empty[String, WomValue] - assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, - workflowOptions = workflowOptions) + assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, + expectedRuntimeAttributes, + workflowOptions = workflowOptions + ) } } @@ -148,44 +164,52 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with expectedRuntimeAttributes: Map[String, Any], includeDockerSupport: Boolean = true, workflowOptions: WorkflowOptions = emptyWorkflowOptions, - logger: Logger = defaultLogger): Unit = { + logger: Logger = defaultLogger + ): Unit = { val builder = if (includeDockerSupport) { - StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig).withValidation(DockerValidation.optional) + 
StandardValidatedRuntimeAttributesBuilder + .default(mockBackendRuntimeConfig) + .withValidation(DockerValidation.optional) } else { StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig) } val runtimeAttributeDefinitions = builder.definitions.toSet - val addDefaultsToAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, workflowOptions) _ + val addDefaultsToAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, workflowOptions) _ val validatedRuntimeAttributes = builder.build(addDefaultsToAttributes(runtimeAttributes), logger) - val docker = RuntimeAttributesValidation.extractOption( - DockerValidation.instance, validatedRuntimeAttributes) - val failOnStderr = RuntimeAttributesValidation.extract( - FailOnStderrValidation.instance, validatedRuntimeAttributes) - val continueOnReturnCode = RuntimeAttributesValidation.extract( - ContinueOnReturnCodeValidation.instance, validatedRuntimeAttributes) + val docker = RuntimeAttributesValidation.extractOption(DockerValidation.instance, validatedRuntimeAttributes) + val failOnStderr = RuntimeAttributesValidation.extract(FailOnStderrValidation.instance, validatedRuntimeAttributes) + val continueOnReturnCode = + RuntimeAttributesValidation.extract(ContinueOnReturnCodeValidation.instance, validatedRuntimeAttributes) docker should be(expectedRuntimeAttributes(DockerKey).asInstanceOf[Option[String]]) failOnStderr should be(expectedRuntimeAttributes(FailOnStderrKey).asInstanceOf[Boolean]) continueOnReturnCode should be( - expectedRuntimeAttributes(ContinueOnReturnCodeKey).asInstanceOf[ContinueOnReturnCode]) + expectedRuntimeAttributes(ContinueOnReturnCodeKey).asInstanceOf[ContinueOnReturnCode] + ) () } - private def assertRuntimeAttributesFailedCreation(runtimeAttributes: Map[String, WomValue], exMsg: String, + private def assertRuntimeAttributesFailedCreation(runtimeAttributes: Map[String, WomValue], + exMsg: String, supportsDocker: Boolean = true, workflowOptions: WorkflowOptions = emptyWorkflowOptions, - logger: Logger = defaultLogger): Unit = { + logger: Logger = defaultLogger + ): Unit = { val thrown = the[RuntimeException] thrownBy { val builder = if (supportsDocker) { - StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig).withValidation(DockerValidation.optional) + StandardValidatedRuntimeAttributesBuilder + .default(mockBackendRuntimeConfig) + .withValidation(DockerValidation.optional) } else { StandardValidatedRuntimeAttributesBuilder.default(mockBackendRuntimeConfig) } val runtimeAttributeDefinitions = builder.definitions.toSet - val addDefaultsToAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, workflowOptions) _ + val addDefaultsToAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, workflowOptions) _ builder.build(addDefaultsToAttributes(runtimeAttributes), logger) } diff --git a/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala b/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala index fdfa7f97e04..f50cba5834c 100644 --- a/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/callcaching/CallCachingBlacklistManagerSpec.scala @@ -8,11 +8,10 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import 
spray.json._ - class CallCachingBlacklistManagerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "CallCachingBlacklistManager" - //noinspection RedundantDefaultArgument + // noinspection RedundantDefaultArgument val workflowSourcesNoGrouping = WorkflowSourceFilesWithoutImports( workflowSource = None, workflowUrl = None, diff --git a/backend/src/test/scala/cromwell/backend/standard/callcaching/RootWorkflowHashCacheActorSpec.scala b/backend/src/test/scala/cromwell/backend/standard/callcaching/RootWorkflowHashCacheActorSpec.scala index f546dd63630..0b462c74b81 100644 --- a/backend/src/test/scala/cromwell/backend/standard/callcaching/RootWorkflowHashCacheActorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/callcaching/RootWorkflowHashCacheActorSpec.scala @@ -13,8 +13,7 @@ import org.scalatest.flatspec.AnyFlatSpecLike import scala.concurrent.duration._ -class RootWorkflowHashCacheActorSpec extends TestKitSuite with ImplicitSender - with AnyFlatSpecLike { +class RootWorkflowHashCacheActorSpec extends TestKitSuite with ImplicitSender with AnyFlatSpecLike { private val fakeWorkflowId = WorkflowId.randomId() private val fakeFileName = "fakeFileName" @@ -25,17 +24,24 @@ class RootWorkflowHashCacheActorSpec extends TestKitSuite with ImplicitSender props = Props(new RootWorkflowFileHashCacheActor(ioActorProbe.ref, fakeWorkflowId) { override lazy val defaultIoTimeout: FiniteDuration = 1.second }), - name = "rootWorkflowFileHashCacheActor-without-timer", + name = "rootWorkflowFileHashCacheActor-without-timer" ) - val ioHashCommandWithContext = IoHashCommandWithContext(DefaultIoHashCommand(DefaultPathBuilder.build("").get), FileHashContext(HashKey(checkForHitOrMiss = false, List.empty), fakeFileName)) + val ioHashCommandWithContext = + IoHashCommandWithContext(DefaultIoHashCommand(DefaultPathBuilder.build("").get), + FileHashContext(HashKey(checkForHitOrMiss = false, List.empty), fakeFileName) + ) rootWorkflowFileHashCacheActor ! ioHashCommandWithContext - //wait for timeout + // wait for timeout Thread.sleep(2000) EventFilter.info(msgIoAckWithNoRequesters.format(fakeFileName), occurrences = 1).intercept { - ioActorProbe.send(rootWorkflowFileHashCacheActor, ioHashCommandWithContext.fileHashContext -> IoSuccess(ioHashCommandWithContext.ioHashCommand, "Successful result")) + ioActorProbe.send(rootWorkflowFileHashCacheActor, + ioHashCommandWithContext.fileHashContext -> IoSuccess(ioHashCommandWithContext.ioHashCommand, + "Successful result" + ) + ) } } @@ -46,17 +52,32 @@ class RootWorkflowHashCacheActorSpec extends TestKitSuite with ImplicitSender // Effectively disabling automatic timeout firing here. We'll send RequestTimeout ourselves override lazy val defaultIoTimeout: FiniteDuration = 1.hour }), - "rootWorkflowFileHashCacheActor-with-timer", + "rootWorkflowFileHashCacheActor-with-timer" ) - val ioHashCommandWithContext = IoHashCommandWithContext(DefaultIoHashCommand(DefaultPathBuilder.build("").get), FileHashContext(HashKey(checkForHitOrMiss = false, List.empty), fakeFileName)) + val ioHashCommandWithContext = + IoHashCommandWithContext(DefaultIoHashCommand(DefaultPathBuilder.build("").get), + FileHashContext(HashKey(checkForHitOrMiss = false, List.empty), fakeFileName) + ) rootWorkflowFileHashCacheActor ! 
ioHashCommandWithContext val hashVal = "Success" - EventFilter.info(msgTimeoutAfterIoAck.format(s"FileHashSuccess($hashVal)", ioHashCommandWithContext.fileHashContext.file), occurrences = 1).intercept { - ioActorProbe.send(rootWorkflowFileHashCacheActor, (ioHashCommandWithContext.fileHashContext, IoSuccess(ioHashCommandWithContext.ioHashCommand, hashVal))) - Thread.sleep(2000) // wait for actor to put value into cache - ioActorProbe.send(rootWorkflowFileHashCacheActor, RequestTimeout(ioHashCommandWithContext.fileHashContext -> ioHashCommandWithContext.ioHashCommand, rootWorkflowFileHashCacheActor)) - } + EventFilter + .info(msgTimeoutAfterIoAck.format(s"FileHashSuccess($hashVal)", ioHashCommandWithContext.fileHashContext.file), + occurrences = 1 + ) + .intercept { + ioActorProbe.send( + rootWorkflowFileHashCacheActor, + (ioHashCommandWithContext.fileHashContext, IoSuccess(ioHashCommandWithContext.ioHashCommand, hashVal)) + ) + Thread.sleep(2000) // wait for actor to put value into cache + ioActorProbe.send( + rootWorkflowFileHashCacheActor, + RequestTimeout(ioHashCommandWithContext.fileHashContext -> ioHashCommandWithContext.ioHashCommand, + rootWorkflowFileHashCacheActor + ) + ) + } } } diff --git a/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala b/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala index c7463cc50a0..2c7ec3ba726 100644 --- a/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala @@ -18,8 +18,12 @@ import scala.concurrent.duration._ import scala.util.control.NoStackTrace import scala.util.{Failure, Try} -class StandardFileHashingActorSpec extends TestKitSuite with ImplicitSender - with AnyFlatSpecLike with Matchers with MockSugar { +class StandardFileHashingActorSpec + extends TestKitSuite + with ImplicitSender + with AnyFlatSpecLike + with Matchers + with MockSugar { behavior of "StandardFileHashingActor" @@ -127,20 +131,21 @@ object StandardFileHashingActorSpec { def defaultParams(): StandardFileHashingActorParams = defaultParams(testing, testing, testing, testing, testing) - def ioActorParams(ioActor: ActorRef): StandardFileHashingActorParams = { - defaultParams(withJobDescriptor = testing, + def ioActorParams(ioActor: ActorRef): StandardFileHashingActorParams = + defaultParams( + withJobDescriptor = testing, withConfigurationDescriptor = testing, withIoActor = ioActor, withServiceRegistryActor = testing, - withBackendInitializationDataOption = testing) - } + withBackendInitializationDataOption = testing + ) def defaultParams(withJobDescriptor: => BackendJobDescriptor, withConfigurationDescriptor: => BackendConfigurationDescriptor, withIoActor: => ActorRef, withServiceRegistryActor: => ActorRef, withBackendInitializationDataOption: => Option[BackendInitializationData] - ): StandardFileHashingActorParams = new StandardFileHashingActorParams { + ): StandardFileHashingActorParams = new StandardFileHashingActorParams { override def jobDescriptor: BackendJobDescriptor = withJobDescriptor @@ -150,10 +155,10 @@ object StandardFileHashingActorSpec { override def serviceRegistryActor: ActorRef = withServiceRegistryActor - override def backendInitializationDataOption: Option[BackendInitializationData] = withBackendInitializationDataOption + override def backendInitializationDataOption: Option[BackendInitializationData] = + 
withBackendInitializationDataOption override def fileHashCachingActor: Option[ActorRef] = None } } - diff --git a/backend/src/test/scala/cromwell/backend/validation/ContinueOnReturnCodeSpec.scala b/backend/src/test/scala/cromwell/backend/validation/ContinueOnReturnCodeSpec.scala index faf835c6343..e0ff75d3176 100644 --- a/backend/src/test/scala/cromwell/backend/validation/ContinueOnReturnCodeSpec.scala +++ b/backend/src/test/scala/cromwell/backend/validation/ContinueOnReturnCodeSpec.scala @@ -9,12 +9,12 @@ import org.scalatest.wordspec.AnyWordSpecLike class ContinueOnReturnCodeSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers with BeforeAndAfterAll { "Checking for return codes" should { "continue on expected return code flags" in { - val flagTests = Table( - ("flag", "returnCode", "expectedContinue"), - (true, 0, true), - (true, 1, true), - (false, 0, true), - (false, 1, false)) + val flagTests = Table(("flag", "returnCode", "expectedContinue"), + (true, 0, true), + (true, 1, true), + (false, 0, true), + (false, 1, false) + ) forAll(flagTests) { (flag, returnCode, expectedContinue) => ContinueOnReturnCodeFlag(flag).continueFor(returnCode) should be(expectedContinue) @@ -30,7 +30,8 @@ class ContinueOnReturnCodeSpec extends AnyWordSpecLike with CromwellTimeoutSpec (Set(1), 1, true), (Set(0, 1), 0, true), (Set(0, 1), 1, true), - (Set(0, 1), 2, false)) + (Set(0, 1), 2, false) + ) forAll(setTests) { (set, returnCode, expectedContinue) => ContinueOnReturnCodeSet(set).continueFor(returnCode) should be(expectedContinue) diff --git a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala index 766384b46fd..6123ecfe53b 100644 --- a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala +++ b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesDefaultSpec.scala @@ -21,9 +21,7 @@ class RuntimeAttributesDefaultSpec extends AnyFlatSpec with CromwellTimeoutSpec ) it should "coerce workflow options from Json to WdlValues" in { - val workflowOptions = WorkflowOptions(JsObject( - "default_runtime_attributes" -> JsObject(map)) - ) + val workflowOptions = WorkflowOptions(JsObject("default_runtime_attributes" -> JsObject(map))) val coercionMap: Map[String, Set[WomType]] = Map( "str" -> Set(WomStringType), @@ -43,9 +41,7 @@ class RuntimeAttributesDefaultSpec extends AnyFlatSpec with CromwellTimeoutSpec } it should "only return default values if they're in the coercionMap" in { - val workflowOptions = WorkflowOptions(JsObject( - "default_runtime_attributes" -> JsObject(map)) - ) + val workflowOptions = WorkflowOptions(JsObject("default_runtime_attributes" -> JsObject(map))) val coercionMap: Map[String, Set[WomType]] = Map( "str" -> Set(WomStringType), @@ -69,9 +65,7 @@ class RuntimeAttributesDefaultSpec extends AnyFlatSpec with CromwellTimeoutSpec } it should "throw an exception if a value can't be coerced" in { - val workflowOptions = WorkflowOptions(JsObject( - "default_runtime_attributes" -> JsObject(map)) - ) + val workflowOptions = WorkflowOptions(JsObject("default_runtime_attributes" -> JsObject(map))) val coercionMap: Map[String, Set[WomType]] = Map( "str" -> Set(WomBooleanType), diff --git a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala index 1752da9014b..6d3bcb6bafa 100644 --- 
a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala +++ b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala @@ -12,15 +12,21 @@ import wom.RuntimeAttributesKeys import wom.types._ import wom.values._ -class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers with BeforeAndAfterAll { +class RuntimeAttributesValidationSpec + extends AnyWordSpecLike + with CromwellTimeoutSpec + with Matchers + with BeforeAndAfterAll { val mockBackendRuntimeConfig = TestConfig.allRuntimeAttrsConfig "RuntimeAttributesValidation" should { "return success when tries to validate a valid Docker entry" in { val dockerValue = Some(WomString("someImage")) - val result = RuntimeAttributesValidation.validateDocker(dockerValue, - "Failed to get Docker mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateDocker( + dockerValue, + "Failed to get Docker mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x.get == "someImage") case Invalid(e) => fail(e.toList.mkString(" ")) @@ -38,8 +44,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure (based on defined HoF) when tries to validate a docker entry but it does not contain a value" in { val dockerValue = None - val result = RuntimeAttributesValidation.validateDocker(dockerValue, - "Failed to get Docker mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateDocker( + dockerValue, + "Failed to get Docker mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Failed to get Docker mandatory key from runtime attributes") @@ -48,8 +56,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when there is an invalid docker runtime attribute defined" in { val dockerValue = Some(WomInteger(1)) - val result = RuntimeAttributesValidation.validateDocker(dockerValue, - "Failed to get Docker mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateDocker( + dockerValue, + "Failed to get Docker mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting docker runtime attribute to be a String") @@ -58,8 +68,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a failOnStderr boolean entry" in { val failOnStderrValue = Some(WomBoolean(true)) - val result = RuntimeAttributesValidation.validateFailOnStderr(failOnStderrValue, - "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateFailOnStderr( + failOnStderrValue, + "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -68,8 +80,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a failOnStderr 'true' string entry" in { val failOnStderrValue = Some(WomString("true")) - val result = RuntimeAttributesValidation.validateFailOnStderr(failOnStderrValue, - "Failed to get failOnStderr mandatory key from runtime 
attributes".invalidNel) + val result = RuntimeAttributesValidation.validateFailOnStderr( + failOnStderrValue, + "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -78,8 +92,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a failOnStderr 'false' string entry" in { val failOnStderrValue = Some(WomString("false")) - val result = RuntimeAttributesValidation.validateFailOnStderr(failOnStderrValue, - "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateFailOnStderr( + failOnStderrValue, + "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(!x) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -88,11 +104,16 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when there is an invalid failOnStderr runtime attribute defined" in { val failOnStderrValue = Some(WomInteger(1)) - val result = RuntimeAttributesValidation.validateFailOnStderr(failOnStderrValue, - "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateFailOnStderr( + failOnStderrValue, + "Failed to get failOnStderr mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") - case Invalid(e) => assert(e.head == "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'") + case Invalid(e) => + assert( + e.head == "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'" + ) } } @@ -107,8 +128,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a continueOnReturnCode boolean entry" in { val continueOnReturnCodeValue = Some(WomBoolean(true)) - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, - "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode( + continueOnReturnCodeValue, + "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x == ContinueOnReturnCodeFlag(true)) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -117,8 +140,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a continueOnReturnCode 'true' string entry" in { val continueOnReturnCodeValue = Some(WomString("true")) - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, - "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode( + continueOnReturnCodeValue, + "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x == ContinueOnReturnCodeFlag(true)) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -127,8 +152,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a continueOnReturnCode 'false' string entry" in { 
val continueOnReturnCodeValue = Some(WomString("false")) - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, - "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode( + continueOnReturnCodeValue, + "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x == ContinueOnReturnCodeFlag(false)) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -137,8 +164,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a continueOnReturnCode int entry" in { val continueOnReturnCodeValue = Some(WomInteger(12)) - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, - "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode( + continueOnReturnCodeValue, + "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x == ContinueOnReturnCodeSet(Set(12))) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -147,19 +176,25 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when there is an invalid continueOnReturnCode runtime attribute defined" in { val continueOnReturnCodeValue = Some(WomString("yes")) - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, - "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode( + continueOnReturnCodeValue, + "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => - assert(e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") + assert( + e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" + ) } } "return success when there is a valid integer array in continueOnReturnCode runtime attribute" in { val continueOnReturnCodeValue = Some(WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(2)))) - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, - "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode( + continueOnReturnCodeValue, + "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x == ContinueOnReturnCodeSet(Set(1, 2))) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -167,18 +202,26 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo } "return failure when there is an invalid array in continueOnReturnCode runtime attribute" in { - val continueOnReturnCodeValue = Some(WomArray(WomArrayType(WomStringType), Seq(WomString("one"), WomString("two")))) - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, - "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel) + val 
continueOnReturnCodeValue = + Some(WomArray(WomArrayType(WomStringType), Seq(WomString("one"), WomString("two")))) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode( + continueOnReturnCodeValue, + "Failed to get continueOnReturnCode mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") - case Invalid(e) => assert(e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") + case Invalid(e) => + assert( + e.head == "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" + ) } } "return success (based on defined HoF) when tries to validate a continueOnReturnCode entry but it does not contain a value" in { val continueOnReturnCodeValue = None - val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, ContinueOnReturnCodeFlag(false).validNel) + val result = RuntimeAttributesValidation.validateContinueOnReturnCode(continueOnReturnCodeValue, + ContinueOnReturnCodeFlag(false).validNel + ) result match { case Valid(x) => assert(x == ContinueOnReturnCodeFlag(false)) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -188,8 +231,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a valid Integer memory entry" in { val expectedGb = 1 val memoryValue = Some(WomInteger(1 << 30)) - val result = RuntimeAttributesValidation.validateMemory(memoryValue, - "Failed to get memory mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateMemory( + memoryValue, + "Failed to get memory mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x.amount == expectedGb) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -198,8 +243,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when tries to validate an invalid Integer memory entry" in { val memoryValue = Some(WomInteger(-1)) - val result = RuntimeAttributesValidation.validateMemory(memoryValue, - "Failed to get memory mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateMemory( + memoryValue, + "Failed to get memory mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting memory runtime attribute value greater than 0 but got -1") @@ -209,8 +256,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a valid String memory entry" in { val expectedGb = 2 val memoryValue = Some(WomString("2 GB")) - val result = RuntimeAttributesValidation.validateMemory(memoryValue, - "Failed to get memory mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateMemory( + memoryValue, + "Failed to get memory mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x.amount == expectedGb) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -219,8 +268,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when tries to validate an invalid size in String memory entry" in { val memoryValue = Some(WomString("0 GB")) - val result = 
RuntimeAttributesValidation.validateMemory(memoryValue, - "Failed to get memory mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateMemory( + memoryValue, + "Failed to get memory mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting memory runtime attribute value greater than 0 but got 0.0") @@ -229,28 +280,40 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when tries to validate an invalid String memory entry" in { val memoryValue = Some(WomString("value")) - val result = RuntimeAttributesValidation.validateMemory(memoryValue, - "Failed to get memory mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateMemory( + memoryValue, + "Failed to get memory mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") - case Invalid(e) => assert(e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. Exception: value should be of the form 'X Unit' where X is a number, e.g. 8 GB") + case Invalid(e) => + assert( + e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. Exception: value should be of the form 'X Unit' where X is a number, e.g. 8 GB" + ) } } "return failure when tries to validate an invalid memory entry" in { val memoryValue = Some(WomBoolean(true)) - val result = RuntimeAttributesValidation.validateMemory(memoryValue, - "Failed to get memory mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateMemory( + memoryValue, + "Failed to get memory mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") - case Invalid(e) => assert(e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. Exception: Not supported WDL type value") + case Invalid(e) => + assert( + e.head == "Expecting memory runtime attribute to be an Integer or String with format '8 GB'. 
Exception: Not supported WDL type value" + ) } } "return failure when tries to validate a non-provided memory entry" in { val memoryValue = None - val result = RuntimeAttributesValidation.validateMemory(memoryValue, - "Failed to get memory mandatory key from runtime attributes".invalidNel) + val result = RuntimeAttributesValidation.validateMemory( + memoryValue, + "Failed to get memory mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Failed to get memory mandatory key from runtime attributes") @@ -259,8 +322,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return success when tries to validate a valid cpu entry" in { val cpuValue = Some(WomInteger(1)) - val result = RuntimeAttributesValidation.validateCpu(cpuValue, - "Failed to get cpu mandatory key from runtime attributes".invalidNel) + val result = + RuntimeAttributesValidation.validateCpu(cpuValue, + "Failed to get cpu mandatory key from runtime attributes".invalidNel + ) result match { case Valid(x) => assert(x.value == 1) case Invalid(e) => fail(e.toList.mkString(" ")) @@ -269,8 +334,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when tries to validate an invalid cpu entry" in { val cpuValue = Some(WomInteger(-1)) - val result = RuntimeAttributesValidation.validateCpu(cpuValue, - "Failed to get cpu mandatory key from runtime attributes".invalidNel) + val result = + RuntimeAttributesValidation.validateCpu(cpuValue, + "Failed to get cpu mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting cpu runtime attribute value greater than 0") @@ -279,8 +346,10 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo "return failure when tries to validate a non-provided cpu entry" in { val cpuValue = None - val result = RuntimeAttributesValidation.validateCpu(cpuValue, - "Failed to get cpu mandatory key from runtime attributes".invalidNel) + val result = + RuntimeAttributesValidation.validateCpu(cpuValue, + "Failed to get cpu mandatory key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Failed to get cpu mandatory key from runtime attributes") @@ -306,26 +375,25 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo } "return default values as BadDefaultAttribute when they can't be coerced to expected WdlTypes" in { - val optionalInvalidAttrsConfig = Option(ConfigFactory.parseString( - """ - |cpu = 1.4 - |failOnStderr = "notReal" - |continueOnReturnCode = 0 + val optionalInvalidAttrsConfig = Option(ConfigFactory.parseString(""" + |cpu = 1.4 + |failOnStderr = "notReal" + |continueOnReturnCode = 0 """.stripMargin)) - val defaultVals = Map( - "cpu" -> CpuValidation.configDefaultWomValue(optionalInvalidAttrsConfig).get, - "failOnStderr" -> FailOnStderrValidation.configDefaultWdlValue(optionalInvalidAttrsConfig).get, - "continueOnReturnCode" -> ContinueOnReturnCodeValidation.configDefaultWdlValue(optionalInvalidAttrsConfig).get - ) + val defaultVals = Map( + "cpu" -> CpuValidation.configDefaultWomValue(optionalInvalidAttrsConfig).get, + "failOnStderr" -> FailOnStderrValidation.configDefaultWdlValue(optionalInvalidAttrsConfig).get, + "continueOnReturnCode" -> 
ContinueOnReturnCodeValidation.configDefaultWdlValue(optionalInvalidAttrsConfig).get + ) - val expectedDefaultVals = Map( - "cpu" -> BadDefaultAttribute(WomString("1.4")), - "failOnStderr" -> BadDefaultAttribute(WomString("notReal")), - "continueOnReturnCode" -> WomInteger(0) - ) + val expectedDefaultVals = Map( + "cpu" -> BadDefaultAttribute(WomString("1.4")), + "failOnStderr" -> BadDefaultAttribute(WomString("notReal")), + "continueOnReturnCode" -> WomInteger(0) + ) - defaultVals shouldBe expectedDefaultVals + defaultVals shouldBe expectedDefaultVals } "should parse memory successfully" in { @@ -338,14 +406,24 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo | } |""".stripMargin - val backendConfig: Config = ConfigFactory.parseString(backendConfigTemplate).getConfig("default-runtime-attributes") + val backendConfig: Config = + ConfigFactory.parseString(backendConfigTemplate).getConfig("default-runtime-attributes") val memoryVal = MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, Some(backendConfig)) val memoryMinVal = MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryMinKey, Some(backendConfig)) val memoryMaxVal = MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryMaxKey, Some(backendConfig)) - MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryKey, memoryVal.get).runtimeAttributeDefinition.factoryDefault shouldBe Some((WomLong(2147483648L))) - MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryMinKey, memoryMinVal.get).runtimeAttributeDefinition.factoryDefault shouldBe Some((WomLong(322122547L))) - MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryMaxKey, memoryMaxVal.get).runtimeAttributeDefinition.factoryDefault shouldBe Some((WomLong(429496729L))) + MemoryValidation + .withDefaultMemory(RuntimeAttributesKeys.MemoryKey, memoryVal.get) + .runtimeAttributeDefinition + .factoryDefault shouldBe Some(WomLong(2147483648L)) + MemoryValidation + .withDefaultMemory(RuntimeAttributesKeys.MemoryMinKey, memoryMinVal.get) + .runtimeAttributeDefinition + .factoryDefault shouldBe Some(WomLong(322122547L)) + MemoryValidation + .withDefaultMemory(RuntimeAttributesKeys.MemoryMaxKey, memoryMaxVal.get) + .runtimeAttributeDefinition + .factoryDefault shouldBe Some(WomLong(429496729L)) } "shouldn't throw up if the value for a default-runtime-attribute key cannot be coerced into an expected WomType" in { @@ -356,25 +434,33 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo | } |""".stripMargin - val backendConfig: Config = ConfigFactory.parseString(backendConfigTemplate).getConfig("default-runtime-attributes") + val backendConfig: Config = + ConfigFactory.parseString(backendConfigTemplate).getConfig("default-runtime-attributes") val memoryVal = MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, Some(backendConfig)) - MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryKey, memoryVal.get).runtimeAttributeDefinition.factoryDefault shouldBe Some(BadDefaultAttribute(WomString("blahblah"))) + MemoryValidation + .withDefaultMemory(RuntimeAttributesKeys.MemoryKey, memoryVal.get) + .runtimeAttributeDefinition + .factoryDefault shouldBe Some(BadDefaultAttribute(WomString("blahblah"))) } "should be able to coerce a list of return codes into an WdlArray" in { - val optinalBackendConfig = Option(ConfigFactory.parseString( - s""" - |continueOnReturnCode = [0,1,2] - |""".stripMargin)) + val optinalBackendConfig = 
Option(ConfigFactory.parseString(s""" + |continueOnReturnCode = [0,1,2] + |""".stripMargin)) - ContinueOnReturnCodeValidation.configDefaultWdlValue(optinalBackendConfig).get shouldBe WomArray(WomArrayType(WomIntegerType), List(WomInteger(0), WomInteger(1), WomInteger(2))) + ContinueOnReturnCodeValidation.configDefaultWdlValue(optinalBackendConfig).get shouldBe WomArray( + WomArrayType(WomIntegerType), + List(WomInteger(0), WomInteger(1), WomInteger(2)) + ) } "return failure when tries to validate an invalid maxRetries entry" in { val maxRetries = Option(WomInteger(-1)) - val result = RuntimeAttributesValidation.validateMaxRetries(maxRetries, - "Failed to get maxRetries key from runtime attributes".invalidNel) + val result = + RuntimeAttributesValidation.validateMaxRetries(maxRetries, + "Failed to get maxRetries key from runtime attributes".invalidNel + ) result match { case Valid(_) => fail("A failure was expected.") case Invalid(e) => assert(e.head == "Expecting maxRetries runtime attribute value greater than or equal to 0") diff --git a/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala b/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala index 1ddebb6a09a..5a6606bce68 100644 --- a/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala @@ -20,7 +20,10 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.Future @DoNotDiscover -abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromwellTracker: Option[CromwellTracker] = None) extends AsyncFlatSpec with Matchers { +abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], + cromwellTracker: Option[CromwellTracker] = None +) extends AsyncFlatSpec + with Matchers { /* NOTE: We need to statically initialize the object so that the exceptions appear here in the class constructor. 
@@ -47,10 +50,12 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw val duplicateTestNames = allTestsCases .map(_.workflow.testName) .groupBy(identity) - .collect({ case (key, values) if values.lengthCompare(1) > 0 => key }) + .collect { case (key, values) if values.lengthCompare(1) > 0 => key } if (duplicateTestNames.nonEmpty) { - throw new RuntimeException("The following test names are duplicated in more than one test file: " + - duplicateTestNames.mkString(", ")) + throw new RuntimeException( + "The following test names are duplicated in more than one test file: " + + duplicateTestNames.mkString(", ") + ) } allTestsCases } @@ -62,7 +67,9 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw } yield submitResponse // Make tags, but enforce lowercase: - val tags = (testCase.testOptions.tags :+ testCase.workflow.testName :+ testCase.testFormat.name) map { x => Tag(x.toLowerCase) } + val tags = (testCase.testOptions.tags :+ testCase.workflow.testName :+ testCase.testFormat.name) map { x => + Tag(x.toLowerCase) + } val isIgnored = testCase.isIgnored(cromwellBackends) val retries = if (testCase.workflow.retryTestFailures) ErrorReporters.retryAttempts else 0 @@ -110,7 +117,10 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw testName = testCase.workflow.testName + " (draft-2 to 1.0 upgrade)", data = testCase.workflow.data.copy( workflowContent = Option(upgradeResult.stdout.get), // this '.get' catches an error if upgrade fails - zippedImports = Option(upgradedImportsDir.zip()))))(cromwellTracker) // An empty zip appears to be completely harmless, so no special handling + zippedImports = Option(upgradedImportsDir.zip()) + ) + ) + )(cromwellTracker) // An empty zip appears to be completely harmless, so no special handling rootWorkflowFile.delete(swallowIOExceptions = true) upgradedImportsDir.delete(swallowIOExceptions = true) @@ -123,14 +133,15 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw tags: List[Tag], ignore: Boolean, retries: Int, - runTest: => IO[SubmitResponse]): Unit = { + runTest: => IO[SubmitResponse] + ): Unit = { val itShould: ItVerbString = it should testCase.name tags match { case Nil => runOrDont(itShould, ignore, testCase, retries, runTest) case head :: Nil => runOrDont(itShould taggedAs head, ignore, testCase, retries, runTest) - case head :: tail => runOrDont(itShould taggedAs(head, tail: _*), ignore, testCase, retries, runTest) + case head :: tail => runOrDont(itShould taggedAs (head, tail: _*), ignore, testCase, retries, runTest) } } @@ -138,26 +149,26 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw ignore: Boolean, testCase: CentaurTestCase, retries: Int, - runTest: => IO[SubmitResponse]): Unit = { + runTest: => IO[SubmitResponse] + ): Unit = if (ignore) { itVerbString ignore Future.successful(succeed) } else { itVerbString in tryTryAgain(testCase, runTest, retries).unsafeToFuture().map(_ => succeed) } - } private def runOrDont(itVerbStringTaggedAs: ItVerbStringTaggedAs, ignore: Boolean, testCase: CentaurTestCase, retries: Int, - runTest: => IO[SubmitResponse]): Unit = { + runTest: => IO[SubmitResponse] + ): Unit = if (ignore) { itVerbStringTaggedAs ignore Future.successful(succeed) } else { itVerbStringTaggedAs in tryTryAgain(testCase, runTest, retries).unsafeToFuture().map(_ => succeed) } - } /** * Returns an IO effect that will recursively try to run a test. 
@@ -168,21 +179,27 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw * @param attempt Current zero based attempt. * @return IO effect that will run the test, possibly retrying. */ - private def tryTryAgain(testCase: CentaurTestCase, runTest: => IO[SubmitResponse], retries: Int, attempt: Int = 0): IO[SubmitResponse] = { + private def tryTryAgain(testCase: CentaurTestCase, + runTest: => IO[SubmitResponse], + retries: Int, + attempt: Int = 0 + ): IO[SubmitResponse] = { def maybeRetry(centaurTestException: CentaurTestException): IO[SubmitResponse] = { - def clearCachedResults(workflowId: WorkflowId): IO[Unit] = CromwellDatabaseCallCaching.clearCachedResults(workflowId.toString) + def clearCachedResults(workflowId: WorkflowId): IO[Unit] = + CromwellDatabaseCallCaching.clearCachedResults(workflowId.toString) val testEnvironment = TestEnvironment(testCase, retries, attempt) for { _ <- ErrorReporters.logFailure(testEnvironment, centaurTestException) - r <- if (attempt < retries) { - testCase.submittedWorkflowTracker.cleanUpBeforeRetry(clearCachedResults) *> - tryTryAgain(testCase, runTest, retries, attempt + 1) - } else { - IO.raiseError(centaurTestException) - } + r <- + if (attempt < retries) { + testCase.submittedWorkflowTracker.cleanUpBeforeRetry(clearCachedResults) *> + tryTryAgain(testCase, runTest, retries, attempt + 1) + } else { + IO.raiseError(centaurTestException) + } } yield r } @@ -207,11 +224,10 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw /** * Clean up temporary zip files created for Imports testing. */ - private def cleanUpImports(wfData: WorkflowData) = { + private def cleanUpImports(wfData: WorkflowData) = wfData.zippedImports match { case Some(zipFile) => zipFile.delete(swallowIOExceptions = true) case None => // } - } } diff --git a/centaur/src/it/scala/centaur/AbstractCromwellEngineOrBackendUpgradeTestCaseSpec.scala b/centaur/src/it/scala/centaur/AbstractCromwellEngineOrBackendUpgradeTestCaseSpec.scala index c55c0c5b17a..04f03d1eacf 100644 --- a/centaur/src/it/scala/centaur/AbstractCromwellEngineOrBackendUpgradeTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/AbstractCromwellEngineOrBackendUpgradeTestCaseSpec.scala @@ -13,7 +13,7 @@ import scala.concurrent.Future @DoNotDiscover abstract class AbstractCromwellEngineOrBackendUpgradeTestCaseSpec(cromwellBackends: List[String]) - extends AbstractCentaurTestCaseSpec(cromwellBackends) + extends AbstractCentaurTestCaseSpec(cromwellBackends) with CentaurTestSuiteShutdown with BeforeAndAfter { @@ -27,15 +27,20 @@ abstract class AbstractCromwellEngineOrBackendUpgradeTestCaseSpec(cromwellBacken override protected def beforeAll(): Unit = { super.beforeAll() val beforeAllIo = for { - _ <- checkIsEmpty(cromwellDatabase.engineDatabase, cromwellDatabase.engineDatabase.existsJobKeyValueEntries(), testType) - _ <- checkIsEmpty(cromwellDatabase.metadataDatabase, cromwellDatabase.metadataDatabase.existsMetadataEntries(), testType) + _ <- checkIsEmpty(cromwellDatabase.engineDatabase, + cromwellDatabase.engineDatabase.existsJobKeyValueEntries(), + testType + ) + _ <- checkIsEmpty(cromwellDatabase.metadataDatabase, + cromwellDatabase.metadataDatabase.existsMetadataEntries(), + testType + ) } yield () beforeAllIo.unsafeRunSync() } - private def failNotSlick(database: SqlDatabase): IO[Unit] = { + private def failNotSlick(database: SqlDatabase): IO[Unit] = IO.raiseError(new RuntimeException(s"Expected a slick database for ${database.connectionDescription}.")) - } after { val 
afterIo = for { @@ -54,26 +59,29 @@ abstract class AbstractCromwellEngineOrBackendUpgradeTestCaseSpec(cromwellBacken def isMatchingUpgradeTest(testCase: CentaurTestCase): Boolean } - object AbstractCromwellEngineOrBackendUpgradeTestCaseSpec { - private def checkIsEmpty(database: SqlDatabase, lookup: => Future[Boolean], testType: => String)(implicit cs: ContextShift[IO]): IO[Unit] = { - IO.fromFuture(IO(lookup)).flatMap(exists => - if (exists) { - IO(Assertions.fail( - s"Database ${database.connectionDescription} contains data. " + - s"$testType tests should only be run on a completely empty database. " + - "You may need to manually drop and recreate the database to continue." - )) - } else { - IO.unit - } - ) - } + private def checkIsEmpty(database: SqlDatabase, lookup: => Future[Boolean], testType: => String)(implicit + cs: ContextShift[IO] + ): IO[Unit] = + IO.fromFuture(IO(lookup)) + .flatMap(exists => + if (exists) { + IO( + Assertions.fail( + s"Database ${database.connectionDescription} contains data. " + + s"$testType tests should only be run on a completely empty database. " + + "You may need to manually drop and recreate the database to continue." + ) + ) + } else { + IO.unit + } + ) private def recreateDatabase(slickDatabase: SlickDatabase)(implicit cs: ContextShift[IO]): IO[Unit] = { import slickDatabase.dataAccess.driver.api._ val schemaName = slickDatabase.databaseConfig.getOrElse("db.cromwell-database-name", "cromwell_test") - //noinspection SqlDialectInspection + // noinspection SqlDialectInspection for { _ <- IO.fromFuture(IO(slickDatabase.database.run(sqlu"""DROP SCHEMA IF EXISTS #$schemaName"""))) _ <- IO.fromFuture(IO(slickDatabase.database.run(sqlu"""CREATE SCHEMA #$schemaName"""))) diff --git a/centaur/src/it/scala/centaur/CentaurTestSuite.scala b/centaur/src/it/scala/centaur/CentaurTestSuite.scala index 1746caee32b..27cb3e0d01b 100644 --- a/centaur/src/it/scala/centaur/CentaurTestSuite.scala +++ b/centaur/src/it/scala/centaur/CentaurTestSuite.scala @@ -16,13 +16,12 @@ object CentaurTestSuite extends StrictLogging { // before we can generate the tests. startCromwell() - def startCromwell(): Unit = { + def startCromwell(): Unit = CentaurConfig.runMode match { case ManagedCromwellServer(preRestart, _, _) => CromwellManager.startCromwell(preRestart) case _ => } - } val cromwellBackends = CentaurCromwellClient.backends.unsafeRunSync().supportedBackends.map(_.toLowerCase) @@ -63,9 +62,8 @@ object CentaurTestSuite extends StrictLogging { trait CentaurTestSuiteShutdown extends Suite with BeforeAndAfterAll { private var shutdownHook: Option[ShutdownHookThread] = _ - override protected def beforeAll() = { - shutdownHook = Option(sys.addShutdownHook { CromwellManager.stopCromwell("JVM Shutdown Hook") }) - } + override protected def beforeAll() = + shutdownHook = Option(sys.addShutdownHook(CromwellManager.stopCromwell("JVM Shutdown Hook"))) override protected def afterAll() = { CromwellManager.stopCromwell("ScalaTest AfterAll") @@ -78,6 +76,6 @@ trait CentaurTestSuiteShutdown extends Suite with BeforeAndAfterAll { * The main centaur test suites, runs sub suites in parallel, but allows better control over the way each nested suite runs. 
*/ class CentaurTestSuite - extends Suites(new SequentialTestCaseSpec(), new ParallelTestCaseSpec()) + extends Suites(new SequentialTestCaseSpec(), new ParallelTestCaseSpec()) with ParallelTestExecution with CentaurTestSuiteShutdown diff --git a/centaur/src/it/scala/centaur/EngineUpgradeTestCaseSpec.scala b/centaur/src/it/scala/centaur/EngineUpgradeTestCaseSpec.scala index a16dda39b49..7a386f9d642 100644 --- a/centaur/src/it/scala/centaur/EngineUpgradeTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/EngineUpgradeTestCaseSpec.scala @@ -4,12 +4,13 @@ import centaur.test.standard.CentaurTestCase import org.scalatest.DoNotDiscover @DoNotDiscover -class EngineUpgradeTestCaseSpec(cromwellBackends: List[String]) extends - AbstractCromwellEngineOrBackendUpgradeTestCaseSpec(cromwellBackends) { +class EngineUpgradeTestCaseSpec(cromwellBackends: List[String]) + extends AbstractCromwellEngineOrBackendUpgradeTestCaseSpec(cromwellBackends) { def this() = this(CentaurTestSuite.cromwellBackends) override def testType: String = "Engine upgrade" - override def isMatchingUpgradeTest(testCase: CentaurTestCase): Boolean = CentaurTestSuite.isEngineUpgradeTest(testCase) + override def isMatchingUpgradeTest(testCase: CentaurTestCase): Boolean = + CentaurTestSuite.isEngineUpgradeTest(testCase) } diff --git a/centaur/src/it/scala/centaur/ExternalTestCaseSpec.scala b/centaur/src/it/scala/centaur/ExternalTestCaseSpec.scala index 529a507301a..84ab3e14d9d 100644 --- a/centaur/src/it/scala/centaur/ExternalTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/ExternalTestCaseSpec.scala @@ -5,7 +5,9 @@ import cats.data.Validated.{Invalid, Valid} import centaur.test.standard.CentaurTestCase import com.typesafe.scalalogging.StrictLogging -class ExternalTestCaseSpec(cromwellBackends: List[String]) extends AbstractCentaurTestCaseSpec(cromwellBackends) with StrictLogging { +class ExternalTestCaseSpec(cromwellBackends: List[String]) + extends AbstractCentaurTestCaseSpec(cromwellBackends) + with StrictLogging { def this() = this(CentaurTestSuite.cromwellBackends) @@ -15,11 +17,10 @@ class ExternalTestCaseSpec(cromwellBackends: List[String]) extends AbstractCenta logger.info("No external test to run") } - def runTestFile(testFile: String) = { + def runTestFile(testFile: String) = CentaurTestCase.fromFile(cromwellTracker = None)(File(testFile)) match { case Valid(testCase) => executeStandardTest(testCase) case Invalid(error) => fail(s"Invalid test case: ${error.toList.mkString(", ")}") } - } } diff --git a/centaur/src/it/scala/centaur/PapiUpgradeTestCaseSpec.scala b/centaur/src/it/scala/centaur/PapiUpgradeTestCaseSpec.scala index 300e50d937c..a63030ba40a 100644 --- a/centaur/src/it/scala/centaur/PapiUpgradeTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/PapiUpgradeTestCaseSpec.scala @@ -5,7 +5,7 @@ import org.scalatest.DoNotDiscover @DoNotDiscover class PapiUpgradeTestCaseSpec(cromwellBackends: List[String]) - extends AbstractCromwellEngineOrBackendUpgradeTestCaseSpec(cromwellBackends) { + extends AbstractCromwellEngineOrBackendUpgradeTestCaseSpec(cromwellBackends) { def this() = this(CentaurTestSuite.cromwellBackends) diff --git a/centaur/src/it/scala/centaur/ParallelTestCaseSpec.scala b/centaur/src/it/scala/centaur/ParallelTestCaseSpec.scala index 456c974537a..a498807b14f 100644 --- a/centaur/src/it/scala/centaur/ParallelTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/ParallelTestCaseSpec.scala @@ -3,15 +3,15 @@ package centaur import centaur.CentaurTestSuite.cromwellTracker import org.scalatest._ - /** * Runs 
test cases in parallel, this should be the default type for tests unless they would otherwise crosstalk in undesirable * ways with other tests and must be made sequential. */ @DoNotDiscover class ParallelTestCaseSpec(cromwellBackends: List[String]) - extends AbstractCentaurTestCaseSpec(cromwellBackends, cromwellTracker = cromwellTracker) with ParallelTestExecution { - + extends AbstractCentaurTestCaseSpec(cromwellBackends, cromwellTracker = cromwellTracker) + with ParallelTestExecution { + def this() = this(CentaurTestSuite.cromwellBackends) allTestCases.filter(_.testFormat.isParallel) foreach executeStandardTest diff --git a/centaur/src/it/scala/centaur/SequentialTestCaseSpec.scala b/centaur/src/it/scala/centaur/SequentialTestCaseSpec.scala index ab350f89fc1..12287d4686c 100644 --- a/centaur/src/it/scala/centaur/SequentialTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/SequentialTestCaseSpec.scala @@ -8,7 +8,9 @@ import org.scalatest.matchers.should.Matchers * such that the restarting tests execute sequentially to avoid a mayhem of Cromwell restarts */ @DoNotDiscover -class SequentialTestCaseSpec(cromwellBackends: List[String]) extends AbstractCentaurTestCaseSpec(cromwellBackends) with Matchers { +class SequentialTestCaseSpec(cromwellBackends: List[String]) + extends AbstractCentaurTestCaseSpec(cromwellBackends) + with Matchers { def this() = this(CentaurTestSuite.cromwellBackends) diff --git a/centaur/src/it/scala/centaur/WdlUpgradeTestCaseSpec.scala b/centaur/src/it/scala/centaur/WdlUpgradeTestCaseSpec.scala index 474bf0a7327..93f4457d050 100644 --- a/centaur/src/it/scala/centaur/WdlUpgradeTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/WdlUpgradeTestCaseSpec.scala @@ -4,7 +4,9 @@ import org.scalatest.{DoNotDiscover, ParallelTestExecution} @DoNotDiscover class WdlUpgradeTestCaseSpec(cromwellBackends: List[String]) - extends AbstractCentaurTestCaseSpec(cromwellBackends) with ParallelTestExecution with CentaurTestSuiteShutdown { + extends AbstractCentaurTestCaseSpec(cromwellBackends) + with ParallelTestExecution + with CentaurTestSuiteShutdown { def this() = this(CentaurTestSuite.cromwellBackends) diff --git a/centaur/src/it/scala/centaur/callcaching/CromwellDatabaseCallCaching.scala b/centaur/src/it/scala/centaur/callcaching/CromwellDatabaseCallCaching.scala index b7976e64d97..6d3888767b9 100644 --- a/centaur/src/it/scala/centaur/callcaching/CromwellDatabaseCallCaching.scala +++ b/centaur/src/it/scala/centaur/callcaching/CromwellDatabaseCallCaching.scala @@ -11,7 +11,6 @@ object CromwellDatabaseCallCaching extends StrictLogging { private val cromwellDatabase = CromwellDatabase.instance - def clearCachedResults(workflowId: String)(implicit executionContext: ExecutionContext): IO[Unit] = { + def clearCachedResults(workflowId: String)(implicit executionContext: ExecutionContext): IO[Unit] = IO.fromFuture(IO(cromwellDatabase.engineDatabase.invalidateCallCacheEntryIdsForWorkflowId(workflowId))) - } } diff --git a/centaur/src/it/scala/centaur/reporting/AggregatedIo.scala b/centaur/src/it/scala/centaur/reporting/AggregatedIo.scala index e9dacd18987..5b8affee887 100644 --- a/centaur/src/it/scala/centaur/reporting/AggregatedIo.scala +++ b/centaur/src/it/scala/centaur/reporting/AggregatedIo.scala @@ -11,6 +11,7 @@ import cats.syntax.traverse._ * Validation that aggregates multiple throwable errors. */ object AggregatedIo { + /** * Similar to common.validation.ErrorOr#ErrorOr, but retains the stack traces. 
*/
@@ -39,16 +40,16 @@ object AggregatedIo {
   /**
    * Creates an aggregated exception for multiple exceptions.
    */
-  class AggregatedException private[reporting](exceptionContext: String, suppressed: List[Throwable])
-    extends RuntimeException(
-      {
-        val suppressedZipped = suppressed.zipWithIndex
-        val messages = suppressedZipped map {
-          case (throwable, index) => s"\n  ${index+1}: ${throwable.getMessage}"
+  class AggregatedException private[reporting] (exceptionContext: String, suppressed: List[Throwable])
+      extends RuntimeException(
+        {
+          val suppressedZipped = suppressed.zipWithIndex
+          val messages = suppressedZipped map { case (throwable, index) =>
+            s"\n  ${index + 1}: ${throwable.getMessage}"
+          }
+          s"$exceptionContext:$messages"
         }
-        s"$exceptionContext:$messages"
-      }
-    ) {
+      ) {
     suppressed foreach addSuppressed
   }
 
diff --git a/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala b/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala
index 9a88a9ac109..b2335e4a794 100644
--- a/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala
+++ b/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala
@@ -14,7 +14,14 @@ import com.google.api.gax.retrying.RetrySettings
 import com.google.api.services.bigquery.BigqueryScopes
 import com.google.auth.Credentials
 import com.google.cloud.bigquery.InsertAllRequest.RowToInsert
-import com.google.cloud.bigquery.{BigQuery, BigQueryError, BigQueryOptions, InsertAllRequest, InsertAllResponse, TableId}
+import com.google.cloud.bigquery.{
+  BigQuery,
+  BigQueryError,
+  BigQueryOptions,
+  InsertAllRequest,
+  InsertAllResponse,
+  TableId
+}
 import common.util.TimeUtil._
 import common.validation.Validation._
 import cromwell.cloudsupport.gcp.GoogleConfiguration
@@ -35,8 +42,9 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe
 
   override lazy val destination: String = bigQueryProjectOption.map(_ + "/").getOrElse("") + bigQueryDataset
 
-  private val retrySettings: RetrySettings = {
-    RetrySettings.newBuilder()
+  private val retrySettings: RetrySettings =
+    RetrySettings
+      .newBuilder()
       .setMaxAttempts(3)
       .setTotalTimeout(Duration.ofSeconds(30))
       .setInitialRetryDelay(Duration.ofMillis(100))
@@ -46,7 +54,6 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe
       .setRpcTimeoutMultiplier(1.1)
       .setMaxRpcTimeout(Duration.ofSeconds(5))
       .build()
-  }
 
   private val bigQueryCredentials: Credentials = GoogleConfiguration
     .apply(params.rootConfig)
@@ -54,7 +61,8 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe
     .unsafe
     .credentials(Set(BigqueryScopes.BIGQUERY_INSERTDATA))
 
-  private val bigQuery: BigQuery = BigQueryOptions.newBuilder()
+  private val bigQuery: BigQuery = BigQueryOptions
+    .newBuilder()
     .setRetrySettings(retrySettings)
     .setCredentials(bigQueryCredentials)
     .build()
@@ -65,21 +73,19 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe
   private val jobKeyValueTableId = bigQueryTable("job_key_value")
   private val metadataTableId = bigQueryTable("metadata")
 
-  def bigQueryTable(table: String): TableId = {
+  def bigQueryTable(table: String): TableId =
     bigQueryProjectOption match {
       case Some(project) => TableId.of(project, bigQueryDataset, table)
      case None => TableId.of(bigQueryDataset, table)
     }
-  }
 
   /**
    * In this ErrorReporter implementation this method will send information about exceptions of type
    * CentaurTestException to BigQuery. Exceptions of other types will be ignored. 
*/
-  override def logFailure(testEnvironment: TestEnvironment,
-                          ciEnvironment: CiEnvironment,
-                          throwable: Throwable)
-                         (implicit executionContext: ExecutionContext): IO[Unit] = {
+  override def logFailure(testEnvironment: TestEnvironment, ciEnvironment: CiEnvironment, throwable: Throwable)(implicit
+    executionContext: ExecutionContext
+  ): IO[Unit] =
     throwable match {
       case centaurTestException: CentaurTestException =>
         for {
@@ -98,57 +104,62 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe
       case _ => IO.unit // this ErrorReporter only supports exceptions of CentaurTestException type
     }
-  }
 
   private def sendBigQueryFailure(testEnvironment: TestEnvironment,
                                   ciEnvironment: CiEnvironment,
                                   centaurTestException: CentaurTestException,
                                   callAttemptFailures: Vector[CallAttemptFailure],
                                   jobKeyValueEntries: Seq[JobKeyValueEntry],
-                                  metadataEntries: Seq[MetadataEntry]): IO[Unit] = {
+                                  metadataEntries: Seq[MetadataEntry]
+  ): IO[Unit] = {
     val metadata: IO[List[BigQueryError]] = {
-      val metadataRows: List[util.List[RowToInsert]] = metadataEntries.map(toMetadataRow).grouped(10000).map(_.asJava).toList
+      val metadataRows: List[util.List[RowToInsert]] =
+        metadataEntries.map(toMetadataRow).grouped(10000).map(_.asJava).toList
       val metadataRequest: List[InsertAllRequest] = metadataRows.map(InsertAllRequest.of(metadataTableId, _))
 
       if (metadataEntries.nonEmpty)
-        metadataRequest.traverse[IO, List[BigQueryError]](req => IO(bigQuery.insertAll(req)).map(_.getErrors)).map(_.flatten)
+        metadataRequest
+          .traverse[IO, List[BigQueryError]](req => IO(bigQuery.insertAll(req)).map(_.getErrors))
+          .map(_.flatten)
      else
-        IO{Nil}
+        IO(Nil)
     }
 
     (IO {
-      val testFailureRow = toTestFailureRow(testEnvironment, ciEnvironment, centaurTestException)
-      val callAttemptFailureRows = callAttemptFailures.map(toCallAttemptFailureRow).asJava
-      val jobKeyValueRows = jobKeyValueEntries.map(toJobKeyValueRow).asJava
-
-      val testFailureRequest = InsertAllRequest.of(testFailureTableId, testFailureRow)
-      val callAttemptFailuresRequest = InsertAllRequest.of(callAttemptFailureTableId, callAttemptFailureRows)
-      val jobKeyValuesRequest = InsertAllRequest.of(jobKeyValueTableId, jobKeyValueRows)
-      val testFailureErrors = bigQuery.insertAll(testFailureRequest).getErrors
-      val callAttemptFailuresErrors =
-        if (callAttemptFailures.nonEmpty) bigQuery.insertAll(callAttemptFailuresRequest).getErrors else Nil
-      val jobKeyValuesErrors =
-        if (jobKeyValueEntries.nonEmpty) bigQuery.insertAll(jobKeyValuesRequest).getErrors else Nil
-
-      testFailureErrors ++ callAttemptFailuresErrors ++ jobKeyValuesErrors
-    }, metadata).mapN(_ ++ _).flatMap {
+       val testFailureRow = toTestFailureRow(testEnvironment, ciEnvironment, centaurTestException)
+       val callAttemptFailureRows = callAttemptFailures.map(toCallAttemptFailureRow).asJava
+       val jobKeyValueRows = jobKeyValueEntries.map(toJobKeyValueRow).asJava
+
+       val testFailureRequest = InsertAllRequest.of(testFailureTableId, testFailureRow)
+       val callAttemptFailuresRequest = InsertAllRequest.of(callAttemptFailureTableId, callAttemptFailureRows)
+       val jobKeyValuesRequest = InsertAllRequest.of(jobKeyValueTableId, jobKeyValueRows)
+       val testFailureErrors = bigQuery.insertAll(testFailureRequest).getErrors
+       val callAttemptFailuresErrors =
+         if (callAttemptFailures.nonEmpty) bigQuery.insertAll(callAttemptFailuresRequest).getErrors else Nil
+       val jobKeyValuesErrors =
+         if (jobKeyValueEntries.nonEmpty) bigQuery.insertAll(jobKeyValuesRequest).getErrors else Nil
+
+       testFailureErrors ++ 
callAttemptFailuresErrors ++ jobKeyValuesErrors
+     },
+     metadata
+    ).mapN(_ ++ _).flatMap {
       case errors if errors.isEmpty => IO.unit
-      case errors => IO.raiseError {
-        val errorCount = errors.size
-        val threeErrors = errors.map(String.valueOf).distinct.sorted.take(3)
-        val continued = if (errorCount > 3) "\n..." else ""
-        val message = threeErrors.mkString(
-          s"$errorCount error(s) occurred uploading to BigQuery: \n",
-          "\n",
-          continued)
-        new RuntimeException(message)
-      }
+      case errors =>
+        IO.raiseError {
+          val errorCount = errors.size
+          val threeErrors = errors.map(String.valueOf).distinct.sorted.take(3)
+          val continued = if (errorCount > 3) "\n..." else ""
+          val message =
+            threeErrors.mkString(s"$errorCount error(s) occurred uploading to BigQuery: \n", "\n", continued)
+          new RuntimeException(message)
+        }
     }
   }
 
   private def toTestFailureRow(testEnvironment: TestEnvironment,
                                ciEnvironment: CiEnvironment,
-                               centaurTestException: CentaurTestException): RowToInsert = {
+                               centaurTestException: CentaurTestException
+  ): RowToInsert =
     RowToInsert of Map(
       "ci_env_branch" -> ciEnvironment.branch,
       "ci_env_event" -> ciEnvironment.event,
@@ -165,13 +176,12 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe
       "test_name" -> Option(testEnvironment.testCase.name),
       "test_stack_trace" -> Option(ExceptionUtils.getStackTrace(centaurTestException)),
       "test_timestamp" -> Option(OffsetDateTime.now.toUtcMilliString),
-      "test_workflow_id" -> centaurTestException.workflowIdOption,
-    ).collect {
-      case (key, Some(value)) => (key, value)
+      "test_workflow_id" -> centaurTestException.workflowIdOption
+    ).collect { case (key, Some(value)) =>
+      (key, value)
     }.asJava
-  }
 
-  private def toCallAttemptFailureRow(callAttemptFailure: CallAttemptFailure): RowToInsert = {
+  private def toCallAttemptFailureRow(callAttemptFailure: CallAttemptFailure): RowToInsert =
     RowToInsert of Map(
       "call_fully_qualified_name" -> Option(callAttemptFailure.callFullyQualifiedName),
       "call_root" -> callAttemptFailure.callRootOption,
@@ -182,24 +192,22 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe
       "start" -> callAttemptFailure.startOption.map(_.toUtcMilliString),
       "stderr" -> callAttemptFailure.stderrOption,
       "stdout" -> callAttemptFailure.stdoutOption,
-      "workflow_id" -> Option(callAttemptFailure.workflowId),
-    ).collect {
-      case (key, Some(value)) => (key, value)
+      "workflow_id" -> Option(callAttemptFailure.workflowId)
+    ).collect { case (key, Some(value)) =>
+      (key, value)
     }.asJava
-  }
 
-  private def toJobKeyValueRow(jobKeyValueEntry: JobKeyValueEntry): RowToInsert = {
+  private def toJobKeyValueRow(jobKeyValueEntry: JobKeyValueEntry): RowToInsert =
     RowToInsert of Map[String, Any](
       "call_fully_qualified_name" -> jobKeyValueEntry.callFullyQualifiedName,
       "job_attempt" -> jobKeyValueEntry.jobAttempt,
       "job_index" -> jobKeyValueEntry.jobIndex,
       "store_key" -> jobKeyValueEntry.storeKey,
       "store_value" -> jobKeyValueEntry.storeValue,
-      "workflow_execution_uuid" -> jobKeyValueEntry.workflowExecutionUuid,
+      "workflow_execution_uuid" -> jobKeyValueEntry.workflowExecutionUuid
     ).asJava
-  }
 
-  private def toMetadataRow(metadataEntry: MetadataEntry): RowToInsert = {
+  private def toMetadataRow(metadataEntry: MetadataEntry): RowToInsert =
     RowToInsert of Map(
       "call_fully_qualified_name" -> metadataEntry.callFullyQualifiedName,
       "job_attempt" -> metadataEntry.jobAttempt,
@@ -208,11 +216,10 @@
       "metadata_timestamp" -> 
Option(metadataEntry.metadataTimestamp.toSystemOffsetDateTime.toUtcMilliString),
       "metadata_value" -> metadataEntry.metadataValue.map(_.toRawString),
       "metadata_value_type" -> metadataEntry.metadataValueType,
-      "workflow_execution_uuid" -> Option(metadataEntry.workflowExecutionUuid),
-    ).collect {
-      case (key, Some(value)) => (key, value)
+      "workflow_execution_uuid" -> Option(metadataEntry.workflowExecutionUuid)
+    ).collect { case (key, Some(value)) =>
+      (key, value)
     }.asJava
-  }
 }
 
 object BigQueryReporter {
diff --git a/centaur/src/it/scala/centaur/reporting/CiEnvironment.scala b/centaur/src/it/scala/centaur/reporting/CiEnvironment.scala
index 6750925ca64..0845d8b8c94 100644
--- a/centaur/src/it/scala/centaur/reporting/CiEnvironment.scala
+++ b/centaur/src/it/scala/centaur/reporting/CiEnvironment.scala
@@ -5,8 +5,7 @@ import scala.util.Try
 /**
  * Scala representation of the CI environment values defined in `test.inc.sh`.
  */
-case class CiEnvironment
-(
+case class CiEnvironment(
   isCi: Option[Boolean],
   `type`: Option[String],
   branch: Option[String],
@@ -20,7 +19,7 @@ case class CiEnvironment
 )
 
 object CiEnvironment {
-  def apply(): CiEnvironment = {
+  def apply(): CiEnvironment =
     new CiEnvironment(
       isCi = sys.env.get("CROMWELL_BUILD_IS_CI").flatMap(tryToBoolean),
       `type` = sys.env.get("CROMWELL_BUILD_TYPE"),
@@ -31,9 +30,8 @@ object CiEnvironment {
       provider = sys.env.get("CROMWELL_BUILD_PROVIDER"),
       os = sys.env.get("CROMWELL_BUILD_OS"),
       url = sys.env.get("CROMWELL_BUILD_URL"),
-      centaurType = sys.env.get("CROMWELL_BUILD_CENTAUR_TYPE"),
+      centaurType = sys.env.get("CROMWELL_BUILD_CENTAUR_TYPE")
     )
-  }
 
   /** Try converting the value to a boolean, or return None. */
   private def tryToBoolean(string: String): Option[Boolean] = Try(string.toBoolean).toOption
diff --git a/centaur/src/it/scala/centaur/reporting/ErrorReporter.scala b/centaur/src/it/scala/centaur/reporting/ErrorReporter.scala
index 8481bf94c51..c654ad3cfc0 100644
--- a/centaur/src/it/scala/centaur/reporting/ErrorReporter.scala
+++ b/centaur/src/it/scala/centaur/reporting/ErrorReporter.scala
@@ -8,6 +8,7 @@ import scala.concurrent.ExecutionContext
  * Reports errors during testing.
  */
 trait ErrorReporter {
+
   /** The various parameters for this reporter. */
   def params: ErrorReporterParams
 
@@ -15,8 +16,7 @@ trait ErrorReporter {
   def destination: String
 
   /** Send a report of a failure. 
*/
-  def logFailure(testEnvironment: TestEnvironment,
-                 ciEnvironment: CiEnvironment,
-                 throwable: Throwable)
-                (implicit executionContext: ExecutionContext): IO[Unit]
+  def logFailure(testEnvironment: TestEnvironment, ciEnvironment: CiEnvironment, throwable: Throwable)(implicit
+    executionContext: ExecutionContext
+  ): IO[Unit]
 }
diff --git a/centaur/src/it/scala/centaur/reporting/ErrorReporterCromwellDatabase.scala b/centaur/src/it/scala/centaur/reporting/ErrorReporterCromwellDatabase.scala
index 89a3e5210b8..79e13c47700 100644
--- a/centaur/src/it/scala/centaur/reporting/ErrorReporterCromwellDatabase.scala
+++ b/centaur/src/it/scala/centaur/reporting/ErrorReporterCromwellDatabase.scala
@@ -11,25 +11,25 @@ class ErrorReporterCromwellDatabase(cromwellDatabase: CromwellDatabase) {
 
   import centaur.TestContext._
 
-  def jobKeyValueEntriesIo(workflowExecutionUuidOption: Option[String])
-                          (implicit executionContext: ExecutionContext): IO[Seq[JobKeyValueEntry]] = {
+  def jobKeyValueEntriesIo(workflowExecutionUuidOption: Option[String])(implicit
+    executionContext: ExecutionContext
+  ): IO[Seq[JobKeyValueEntry]] =
     workflowExecutionUuidOption.map(jobKeyValueEntriesIo).getOrElse(IO.pure(Seq.empty))
-  }
 
-  def jobKeyValueEntriesIo(workflowExecutionUuid: String)
-                          (implicit executionContext: ExecutionContext): IO[Seq[JobKeyValueEntry]] = {
+  def jobKeyValueEntriesIo(workflowExecutionUuid: String)(implicit
+    executionContext: ExecutionContext
+  ): IO[Seq[JobKeyValueEntry]] =
     IO.fromFuture(IO(cromwellDatabase.engineDatabase.queryJobKeyValueEntries(workflowExecutionUuid)))
-  }
 
-  def metadataEntriesIo(workflowExecutionUuidOption: Option[String])
-                       (implicit executionContext: ExecutionContext): IO[Seq[MetadataEntry]] = {
+  def metadataEntriesIo(workflowExecutionUuidOption: Option[String])(implicit
+    executionContext: ExecutionContext
+  ): IO[Seq[MetadataEntry]] =
     workflowExecutionUuidOption.map(metadataEntriesIo).getOrElse(IO.pure(Seq.empty))
-  }
 
-  def metadataEntriesIo(workflowExecutionUuid: String)
-                       (implicit executionContext: ExecutionContext): IO[Seq[MetadataEntry]] = {
+  def metadataEntriesIo(workflowExecutionUuid: String)(implicit
+    executionContext: ExecutionContext
+  ): IO[Seq[MetadataEntry]] =
     // 30 seconds is less than production (60s as of 2018-08) but hopefully high enough to work on a CI machine with contended resources
     IO.fromFuture(IO(cromwellDatabase.metadataDatabase.queryMetadataEntries(workflowExecutionUuid, 30.seconds)))
-  }
 }
diff --git a/centaur/src/it/scala/centaur/reporting/ErrorReporterParams.scala b/centaur/src/it/scala/centaur/reporting/ErrorReporterParams.scala
index 6e716b82942..9a403e88138 100644
--- a/centaur/src/it/scala/centaur/reporting/ErrorReporterParams.scala
+++ b/centaur/src/it/scala/centaur/reporting/ErrorReporterParams.scala
@@ -5,8 +5,7 @@ import com.typesafe.config.Config
 
 /**
  * Collects all of the parameters to pass to a new ErrorReporter. 
*/
-case class ErrorReporterParams
-(
+case class ErrorReporterParams(
   name: String,
   rootConfig: Config,
   reporterConfig: Config,
diff --git a/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala b/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala
index 8f989e3bd1b..2058264db29 100644
--- a/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala
+++ b/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala
@@ -25,9 +25,8 @@ class ErrorReporters(rootConfig: Config) {
     providersConfig.entrySet.asScala.map(_.getKey.split("\\.").toList.head).toList
   }
 
-  private val errorReportersIo: IO[List[ErrorReporter]] = {
+  private val errorReportersIo: IO[List[ErrorReporter]] =
     AggregatedIo.aggregateExceptions("Errors while creating ErrorReporters", errorReporterNames.map(getErrorReporter))
-  }
 
   val errorReporters: List[ErrorReporter] = errorReportersIo.unsafeRunSync()
 
@@ -42,26 +41,27 @@ class ErrorReporters(rootConfig: Config) {
    * @param throwable The exception that occurred while running the test.
    * @return An IO effect that will log the failure.
    */
-  def logFailure(testEnvironment: TestEnvironment,
-                 ciEnvironment: CiEnvironment,
-                 throwable: Throwable)
-                (implicit executionContext: ExecutionContext): IO[Unit] = {
+  def logFailure(testEnvironment: TestEnvironment, ciEnvironment: CiEnvironment, throwable: Throwable)(implicit
+    executionContext: ExecutionContext
+  ): IO[Unit] =
     if (errorReporters.isEmpty) {
       // If the there are no reporters, then just "throw" the exception. Do not retry to run the test.
       IO.raiseError(throwable)
     } else {
       val listIo = errorReporters.map(_.logFailure(testEnvironment, ciEnvironment, throwable))
-      AggregatedIo.aggregateExceptions("Errors while reporting a failure", listIo).handleErrorWith(err => {
-        err.addSuppressed(throwable)
-        IO.raiseError(err)
-      }).void
+      AggregatedIo
+        .aggregateExceptions("Errors while reporting a failure", listIo)
+        .handleErrorWith { err =>
+          err.addSuppressed(throwable)
+          IO.raiseError(err)
+        }
+        .void
     }
-  }
 
   /**
    * Constructs the IO reporter by name. 
*/
-  private def getErrorReporter(errorReporterName: String): IO[ErrorReporter] = {
+  private def getErrorReporter(errorReporterName: String): IO[ErrorReporter] =
     IO {
       val clazz = errorReporterConfig.getString(s"providers.$errorReporterName.class")
       val reporterConfig = errorReporterConfig.getOrElse(s"providers.$errorReporterName.config", ConfigFactory.empty)
@@ -69,7 +69,6 @@ class ErrorReporters(rootConfig: Config) {
       val params = ErrorReporterParams(errorReporterName, rootConfig, reporterConfig, errorReporterCromwellDatabase)
       constructor.newInstance(params).asInstanceOf[ErrorReporter]
     }
-  }
 }
 
 object ErrorReporters extends StrictLogging {
@@ -84,9 +83,8 @@ object ErrorReporters extends StrictLogging {
   if (retryAttempts > 0)
     logger.info("Error retry count: {}", retryAttempts)
 
-  def logFailure(testEnvironment: TestEnvironment,
-                 throwable: Throwable)
-                (implicit executionContext: ExecutionContext): IO[Unit] = {
+  def logFailure(testEnvironment: TestEnvironment, throwable: Throwable)(implicit
+    executionContext: ExecutionContext
+  ): IO[Unit] =
     errorReporters.logFailure(testEnvironment, ciEnvironment, throwable)
-  }
 }
diff --git a/centaur/src/it/scala/centaur/reporting/GcsReporter.scala b/centaur/src/it/scala/centaur/reporting/GcsReporter.scala
index 41e15d80b3b..55b5be270f4 100644
--- a/centaur/src/it/scala/centaur/reporting/GcsReporter.scala
+++ b/centaur/src/it/scala/centaur/reporting/GcsReporter.scala
@@ -9,7 +9,10 @@ import net.ceedubs.ficus.Ficus._
 
 import scala.concurrent.ExecutionContext
 
-class GcsReporter(override val params: ErrorReporterParams) extends ErrorReporter with SuccessReporter with StrictLogging {
+class GcsReporter(override val params: ErrorReporterParams)
+    extends ErrorReporter
+    with SuccessReporter
+    with StrictLogging {
   val storage = StorageOptions.getDefaultInstance.getService
   val reportBucket = params.reporterConfig.as[String]("report-bucket")
   val reportPath = params.reporterConfig.as[String]("report-path")
@@ -21,10 +24,9 @@ class GcsReporter(override val params: ErrorReporterParams) extends ErrorReporte
    * In this ErrorReporter implementation this method will save information about exceptions of type
    * CentaurTestException to GCS. Exceptions of other types will be ignored. 
*/
-  override def logFailure(testEnvironment: TestEnvironment,
-                          ciEnvironment: CiEnvironment,
-                          throwable: Throwable)
-                         (implicit executionContext: ExecutionContext): IO[Unit] = {
+  override def logFailure(testEnvironment: TestEnvironment, ciEnvironment: CiEnvironment, throwable: Throwable)(implicit
+    executionContext: ExecutionContext
+  ): IO[Unit] =
     throwable match {
       case centaurTestException: CentaurTestException =>
         logger.info(s"Reporting failed metadata to gs://$reportBucket/$reportPath")
@@ -32,7 +34,6 @@ class GcsReporter(override val params: ErrorReporterParams) extends ErrorReporte
       case _ => IO.unit // this ErrorReporter only supports exceptions of CentaurTestException type
     }
-  }
 
   override def logSuccessfulRun(submitResponse: SubmitWorkflowResponse): IO[Unit] = {
     logger.info(s"Reporting successful metadata to gs://$reportBucket/$reportPath")
@@ -44,7 +45,8 @@ class GcsReporter(override val params: ErrorReporterParams) extends ErrorReporte
 
   private def pushJsonToGcs(json: String) = IO {
     storage.create(
-      BlobInfo.newBuilder(reportBucket, reportPath)
+      BlobInfo
+        .newBuilder(reportBucket, reportPath)
         .setContentType("application/json")
         .build(),
       json.toArray.map(_.toByte)
diff --git a/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala b/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala
index 4332e01db24..ff5aeb5f7d5 100644
--- a/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala
+++ b/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala
@@ -13,15 +13,13 @@ import scala.concurrent.ExecutionContext
  * Useful as a backup in cases where another reporter is not available, for example in external PRs where secure
  * environment variables are not available.
  */
-class Slf4jReporter(override val params: ErrorReporterParams)
-  extends ErrorReporter with StrictLogging {
+class Slf4jReporter(override val params: ErrorReporterParams) extends ErrorReporter with StrictLogging {
 
   override lazy val destination: String = "error"
 
-  override def logFailure(testEnvironment: TestEnvironment,
-                          ciEnvironment: CiEnvironment,
-                          throwable: Throwable)
-                         (implicit executionContext: ExecutionContext): IO[Unit] = {
+  override def logFailure(testEnvironment: TestEnvironment, ciEnvironment: CiEnvironment, throwable: Throwable)(implicit
+    executionContext: ExecutionContext
+  ): IO[Unit] =
     IO {
       val errorMessage =
         throwable match {
@@ -41,9 +39,9 @@ class Slf4jReporter(override val params: ErrorReporterParams)
       if (testEnvironment.attempt >= testEnvironment.retries) {
         logger.error(message, throwable)
       } else {
-        val messageWithShortExceptionContext = message + " (" + ExceptionUtils.getMessage(throwable).replace("\n", " ").take(150) + "[...])"
+        val messageWithShortExceptionContext =
+          message + " (" + ExceptionUtils.getMessage(throwable).replace("\n", " ").take(150) + "[...])"
         logger.warn(messageWithShortExceptionContext)
       }
     }
-  }
 }
diff --git a/centaur/src/it/scala/centaur/reporting/SuccessReporter.scala b/centaur/src/it/scala/centaur/reporting/SuccessReporter.scala
index 2453ca71898..fb0c6a389e9 100644
--- a/centaur/src/it/scala/centaur/reporting/SuccessReporter.scala
+++ b/centaur/src/it/scala/centaur/reporting/SuccessReporter.scala
@@ -12,13 +12,13 @@ object SuccessReporters {
    * This is gross and piggy backs on the error reporting code but achieves what we need for now without a refactoring
    * of the error reporting code to handle both success and error reporting
    */
-  private val successReporters: List[ErrorReporter with SuccessReporter] = 
ErrorReporters.errorReporters.errorReporters.collect({case s: SuccessReporter => s })
+  private val successReporters: List[ErrorReporter with SuccessReporter] =
+    ErrorReporters.errorReporters.errorReporters.collect { case s: SuccessReporter => s }
 
-  def logSuccessfulRun(submitResponse: SubmitWorkflowResponse): IO[SubmitResponse] = {
-    if (successReporters.isEmpty) IO.pure(submitResponse)
+  def logSuccessfulRun(submitResponse: SubmitWorkflowResponse): IO[SubmitResponse] =
+    if (successReporters.isEmpty) IO.pure(submitResponse)
     else {
       val listIo = successReporters.map(_.logSuccessfulRun(submitResponse))
       AggregatedIo.aggregateExceptions("Errors while reporting centaur success", listIo).map(_ => submitResponse)
     }
-  }
 }
diff --git a/centaur/src/main/scala/centaur/CentaurConfig.scala b/centaur/src/main/scala/centaur/CentaurConfig.scala
index f17c288283a..c15ae31f453 100644
--- a/centaur/src/main/scala/centaur/CentaurConfig.scala
+++ b/centaur/src/main/scala/centaur/CentaurConfig.scala
@@ -41,16 +41,19 @@ sealed trait CentaurRunMode {
   def cromwellUrl: URL
 }
 
-case class UnmanagedCromwellServer(cromwellUrl : URL) extends CentaurRunMode
-case class ManagedCromwellServer(preRestart: CromwellConfiguration, postRestart: CromwellConfiguration, withRestart: Boolean) extends CentaurRunMode {
+case class UnmanagedCromwellServer(cromwellUrl: URL) extends CentaurRunMode
+case class ManagedCromwellServer(preRestart: CromwellConfiguration,
+                                 postRestart: CromwellConfiguration,
+                                 withRestart: Boolean
+) extends CentaurRunMode {
   override val cromwellUrl = new URL(s"http://localhost:${CromwellManager.ManagedCromwellPort}")
 }
 
 object CentaurConfig {
   lazy val conf: Config = ConfigFactory.load().getConfig("centaur")
-  
+
   lazy val runMode: CentaurRunMode = CentaurRunMode(conf)
-  
+
   lazy val cromwellUrl: URL = runMode.cromwellUrl
   lazy val workflowProgressTimeout: FiniteDuration = conf.getDuration("workflow-progress-timeout").toScala
   lazy val sendReceiveTimeout: FiniteDuration = conf.getDuration("sendReceiveTimeout").toScala
diff --git a/centaur/src/main/scala/centaur/CromwellManager.scala b/centaur/src/main/scala/centaur/CromwellManager.scala
index a53416a0ded..55c317b41b3 100644
--- a/centaur/src/main/scala/centaur/CromwellManager.scala
+++ b/centaur/src/main/scala/centaur/CromwellManager.scala
@@ -18,15 +18,15 @@ object CromwellManager extends StrictLogging {
   private var cromwellProcess: Option[CromwellProcess] = None
   private var _ready: Boolean = false
   private var _isManaged: Boolean = false
-  
+
   /**
     * Returns true if Cromwell is ready to be queried, false otherwise
     * In Unmanaged mode, this is irrelevant so always return true.
     * In managed mode return the value of _ready
     */
   def isReady: Boolean = !_isManaged || _ready
-  
-  // Check that we have a cromwellProcess, that this process is alive, and that cromwell is ready to accept requests 
+
+  // Check that we have a cromwellProcess, that this process is alive, and that cromwell is ready to accept requests
   private def isAlive(checkType: String): Boolean = {
     val processAlive = cromwellProcess.exists(_.isAlive)
     logger.info(s"Cromwell process alive $checkType = $processAlive")
@@ -76,14 +76,14 @@ object CromwellManager extends StrictLogging {
   def stopCromwell(reason: String) = {
     _ready = false
     logger.info(s"Stopping Cromwell... 
($reason)") - try { + try cromwellProcess foreach { _.stop() } - } catch { - case e: Exception => + catch { + case e: Exception => logger.error("Caught exception while stopping Cromwell") e.printStackTrace() } - + cromwellProcess = None } } diff --git a/centaur/src/main/scala/centaur/CromwellTracker.scala b/centaur/src/main/scala/centaur/CromwellTracker.scala index 4a89dea302d..1ec6fefe54c 100644 --- a/centaur/src/main/scala/centaur/CromwellTracker.scala +++ b/centaur/src/main/scala/centaur/CromwellTracker.scala @@ -14,7 +14,6 @@ import org.apache.commons.math3.stat.inference.ChiSquareTest import scala.language.postfixOps - case class CromwellTracker(backendCount: Int, configuredSignificance: Double) extends StrictLogging { var counts: Map[String, Int] = Map() def track(metadata: WorkflowMetadata): Unit = { @@ -45,10 +44,15 @@ case class CromwellTracker(backendCount: Int, configuredSignificance: Double) ex val actual: Array[Long] = counts.values map { _.toLong } toArray val observedSignificance = new ChiSquareTest().chiSquareTest(expected, actual) - logger.info(f"configured/observed horicromtal significance levels: $configuredSignificance%.4f/$observedSignificance%.4f", configuredSignificance, observedSignificance) + logger.info( + f"configured/observed horicromtal significance levels: $configuredSignificance%.4f/$observedSignificance%.4f", + configuredSignificance, + observedSignificance + ) if (observedSignificance < configuredSignificance) { - val message = f"Failed horicromtal check: observed significance level $observedSignificance%.4f, minimum of $configuredSignificance%.4f was required" + val message = + f"Failed horicromtal check: observed significance level $observedSignificance%.4f, minimum of $configuredSignificance%.4f was required" throw new RuntimeException(message) } } diff --git a/centaur/src/main/scala/centaur/DockerComposeCromwellConfiguration.scala b/centaur/src/main/scala/centaur/DockerComposeCromwellConfiguration.scala index 6fab385537b..eeb1df1557f 100644 --- a/centaur/src/main/scala/centaur/DockerComposeCromwellConfiguration.scala +++ b/centaur/src/main/scala/centaur/DockerComposeCromwellConfiguration.scala @@ -14,13 +14,17 @@ object DockerComposeCromwellConfiguration { } } -case class DockerComposeCromwellConfiguration(dockerTag: String, dockerComposeFile: String, conf: String, logFile: String) extends CromwellConfiguration { +case class DockerComposeCromwellConfiguration(dockerTag: String, + dockerComposeFile: String, + conf: String, + logFile: String +) extends CromwellConfiguration { override def createProcess: CromwellProcess = { - case class DockerComposeCromwellProcess(override val cromwellConfiguration: DockerComposeCromwellConfiguration) extends CromwellProcess { + case class DockerComposeCromwellProcess(override val cromwellConfiguration: DockerComposeCromwellConfiguration) + extends CromwellProcess { - private def composeCommand(command: String*): Array[String] = { + private def composeCommand(command: String*): Array[String] = Array("docker-compose", "-f", dockerComposeFile) ++ command - } private val startCommand = composeCommand("up", "--abort-on-container-exit") private val logsCommand = composeCommand("logs") @@ -29,14 +33,13 @@ case class DockerComposeCromwellConfiguration(dockerTag: String, dockerComposeFi private val envVariables = Map[String, String]( "CROMWELL_BUILD_CENTAUR_MANAGED_PORT" -> ManagedCromwellPort.toString, "CROMWELL_BUILD_CENTAUR_MANAGED_TAG" -> dockerTag, - "CROMWELL_BUILD_CENTAUR_MANAGED_CONFIG" -> conf, + 
"CROMWELL_BUILD_CENTAUR_MANAGED_CONFIG" -> conf ) private var process: Option[Process] = None - override def start(): Unit = { + override def start(): Unit = process = Option(runProcess(startCommand, envVariables)) - } override def stop(): Unit = { if (!isAlive) { diff --git a/centaur/src/main/scala/centaur/JarCromwellConfiguration.scala b/centaur/src/main/scala/centaur/JarCromwellConfiguration.scala index c95316d1a32..1bf83b16ee1 100644 --- a/centaur/src/main/scala/centaur/JarCromwellConfiguration.scala +++ b/centaur/src/main/scala/centaur/JarCromwellConfiguration.scala @@ -15,20 +15,15 @@ object JarCromwellConfiguration { case class JarCromwellConfiguration(jar: String, conf: String, logFile: String) extends CromwellConfiguration { override def createProcess: CromwellProcess = { - case class JarCromwellProcess(override val cromwellConfiguration: JarCromwellConfiguration) extends CromwellProcess { - private val command = Array( - "java", - s"-Dconfig.file=$conf", - s"-Dwebservice.port=$ManagedCromwellPort", - "-jar", - jar, - "server") + case class JarCromwellProcess(override val cromwellConfiguration: JarCromwellConfiguration) + extends CromwellProcess { + private val command = + Array("java", s"-Dconfig.file=$conf", s"-Dwebservice.port=$ManagedCromwellPort", "-jar", jar, "server") private var process: Option[Process] = None - override def start(): Unit = { + override def start(): Unit = process = Option(runProcess(command, Map.empty)) - } override def stop(): Unit = { process foreach { @@ -37,7 +32,7 @@ case class JarCromwellConfiguration(jar: String, conf: String, logFile: String) process = None } - override def isAlive: Boolean = process.exists { _.isAlive } + override def isAlive: Boolean = process.exists(_.isAlive) override def logFile: String = cromwellConfiguration.logFile } diff --git a/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala b/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala index 33f96974b42..defc30bc108 100644 --- a/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala +++ b/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala @@ -30,20 +30,20 @@ object CentaurCromwellClient extends StrictLogging { // Do not use scala.concurrent.ExecutionContext.Implicits.global as long as this is using Await.result // See https://github.com/akka/akka-http/issues/602 // And https://github.com/viktorklang/blog/blob/master/Futures-in-Scala-2.12-part-7.md - final implicit val blockingEc: ExecutionContextExecutor = ExecutionContext.fromExecutor( - Executors.newFixedThreadPool(100, DaemonizedDefaultThreadFactory)) + implicit final val blockingEc: ExecutionContextExecutor = + ExecutionContext.fromExecutor(Executors.newFixedThreadPool(100, DaemonizedDefaultThreadFactory)) // Akka HTTP needs both the actor system and a materializer - final implicit val system: ActorSystem = ActorSystem("centaur-acting-like-a-system") - final implicit val materializer: ActorMaterializer = ActorMaterializer(ActorMaterializerSettings(system)) + implicit final val system: ActorSystem = ActorSystem("centaur-acting-like-a-system") + implicit final val materializer: ActorMaterializer = ActorMaterializer(ActorMaterializerSettings(system)) final val apiVersion = "v1" val cromwellClient = new CromwellClient(CentaurConfig.cromwellUrl, apiVersion) val defaultMetadataArgs: Option[Map[String, List[String]]] = config.getAs[Map[String, List[String]]]("centaur.metadata-args") - def submit(workflow: Workflow): IO[SubmittedWorkflow] = { - sendReceiveFutureCompletion(() => { + def 
submit(workflow: Workflow): IO[SubmittedWorkflow] =
+    sendReceiveFutureCompletion { () =>
       val submitted = cromwellClient.submit(workflow.toWorkflowSubmission)
       submitted.biSemiflatMap(
         httpResponse =>
@@ -59,44 +59,40 @@ object CentaurCromwellClient extends StrictLogging {
           _ = workflow.submittedWorkflowTracker.add(submittedWorkflow)
         } yield submittedWorkflow
       )
-    })
-  }
+    }
 
-  def describe(workflow: Workflow): IO[WaasDescription] = {
+  def describe(workflow: Workflow): IO[WaasDescription] =
     sendReceiveFutureCompletion(() => cromwellClient.describe(workflow.toWorkflowDescribeRequest))
-  }
 
-  def status(workflow: SubmittedWorkflow): IO[WorkflowStatus] = {
+  def status(workflow: SubmittedWorkflow): IO[WorkflowStatus] =
     sendReceiveFutureCompletion(() => cromwellClient.status(workflow.id))
-  }
 
-  def abort(workflow: SubmittedWorkflow): IO[WorkflowStatus] = {
+  def abort(workflow: SubmittedWorkflow): IO[WorkflowStatus] =
     sendReceiveFutureCompletion(() => cromwellClient.abort(workflow.id))
-  }
 
-  def outputs(workflow: SubmittedWorkflow): IO[WorkflowOutputs] = {
+  def outputs(workflow: SubmittedWorkflow): IO[WorkflowOutputs] =
     sendReceiveFutureCompletion(() => cromwellClient.outputs(workflow.id))
-  }
 
-  def callCacheDiff(workflowA: SubmittedWorkflow, callA: String, workflowB: SubmittedWorkflow, callB: String): IO[CallCacheDiff] = {
-    sendReceiveFutureCompletion(() => cromwellClient.callCacheDiff(workflowA.id, callA, ShardIndex(None), workflowB.id, callB, ShardIndex(None)))
-  }
+  def callCacheDiff(workflowA: SubmittedWorkflow,
+                    callA: String,
+                    workflowB: SubmittedWorkflow,
+                    callB: String
+  ): IO[CallCacheDiff] =
+    sendReceiveFutureCompletion(() =>
+      cromwellClient.callCacheDiff(workflowA.id, callA, ShardIndex(None), workflowB.id, callB, ShardIndex(None))
+    )
 
-  def logs(workflow: SubmittedWorkflow): IO[WorkflowMetadata] = {
+  def logs(workflow: SubmittedWorkflow): IO[WorkflowMetadata] =
     sendReceiveFutureCompletion(() => cromwellClient.logs(workflow.id))
-  }
 
-  def labels(workflow: SubmittedWorkflow): IO[WorkflowLabels] = {
+  def labels(workflow: SubmittedWorkflow): IO[WorkflowLabels] =
     sendReceiveFutureCompletion(() => cromwellClient.labels(workflow.id))
-  }
 
-  def addLabels(workflow: SubmittedWorkflow, newLabels: List[Label]): IO[WorkflowLabels] = {
+  def addLabels(workflow: SubmittedWorkflow, newLabels: List[Label]): IO[WorkflowLabels] =
     sendReceiveFutureCompletion(() => cromwellClient.addLabels(workflow.id, newLabels))
-  }
 
-  def version: IO[CromwellVersion] = {
+  def version: IO[CromwellVersion] =
     sendReceiveFutureCompletion(() => cromwellClient.version)
-  }
 
   /*
     Sends a quick ping to the Cromwell query endpoint. The query endpoint is the only one which both hits the
@@ -104,7 +100,9 @@ object CentaurCromwellClient extends StrictLogging {
     currently does not support query.
    */
   def isAlive: Boolean = {
-    val response = Http().singleRequest(HttpRequest(uri=s"${CentaurConfig.cromwellUrl}/api/workflows/$apiVersion/query?status=Succeeded"))
+    val response = Http().singleRequest(
+      HttpRequest(uri = s"${CentaurConfig.cromwellUrl}/api/workflows/$apiVersion/query?status=Succeeded")
+    )
     // Silence the following warning by discarding the result of a successful query:
     // Response entity was not subscribed after 1 second. Make sure to read the response entity body or call `discardBytes()` on it. 
val successOrFailure = response map { _.entity.discardBytes() }
@@ -113,18 +111,19 @@
 
   def metadata(workflow: SubmittedWorkflow,
                args: Option[Map[String, List[String]]] = defaultMetadataArgs,
-               expandSubworkflows: Boolean = false): IO[WorkflowMetadata] = {
+               expandSubworkflows: Boolean = false
+  ): IO[WorkflowMetadata] = {
     val mandatoryArgs = Map("expandSubWorkflows" -> List(expandSubworkflows.toString))
     metadataWithId(workflow.id, Option(args.getOrElse(Map.empty) ++ mandatoryArgs))
   }
 
-  def metadataWithId(id: WorkflowId, args: Option[Map[String, List[String]]] = defaultMetadataArgs): IO[WorkflowMetadata] = {
+  def metadataWithId(id: WorkflowId,
+                     args: Option[Map[String, List[String]]] = defaultMetadataArgs
+  ): IO[WorkflowMetadata] =
     sendReceiveFutureCompletion(() => cromwellClient.metadata(id, args))
-  }
 
-  def archiveStatus(id: WorkflowId): IO[String] = {
+  def archiveStatus(id: WorkflowId): IO[String] =
     sendReceiveFutureCompletion(() => cromwellClient.query(id)).map(_.results.head.metadataArchiveStatus)
-  }
 
   implicit private val timer: Timer[IO] = IO.timer(blockingEc)
   implicit private val contextShift: ContextShift[IO] = IO.contextShift(blockingEc)
@@ -137,42 +136,43 @@
 
     val stackTraceString = ExceptionUtils.getStackTrace(new Exception)
 
-    ioDelay.flatMap( _ =>
+    ioDelay.flatMap(_ =>
       // Could probably use IO to do the retrying too. For now use a copyport of Retry from cromwell core. Retry 5 times,
       // wait 5 seconds between retries. Timeout the whole thing using the IO timeout.
       // https://github.com/cb372/cats-retry
       // https://typelevel.org/cats-effect/datatypes/io.html#example-retrying-with-exponential-backoff
-      IO.fromFuture(IO(Retry.withRetry(
-        () => func().asIo.unsafeToFuture(),
-        Option(5),
-        5.seconds,
-        isTransient = isTransient,
-        isFatal = isFatal
-      )).timeoutTo(timeout,
-        {
+      IO.fromFuture(
+        IO(
+          Retry.withRetry(
+            () => func().asIo.unsafeToFuture(),
+            Option(5),
+            5.seconds,
+            isTransient = isTransient,
+            isFatal = isFatal
+          )
+        ).timeoutTo(
+          timeout,
           IO.raiseError(new TimeoutException("Timeout from retryRequest " + timeout.toString + ": " + stackTraceString))
-        }
-      )))
+        )
+      )
+    )
   }
 
-  def sendReceiveFutureCompletion[T](x: () => FailureResponseOrT[T]): IO[T] = {
+  def sendReceiveFutureCompletion[T](x: () => FailureResponseOrT[T]): IO[T] =
     retryRequest(x, CentaurConfig.sendReceiveTimeout)
-  }
 
   private def isFatal(f: Throwable) = f match {
     case _: DeserializationException => true
     case _ => false
   }
 
-  private def isTransient(f: Throwable) = {
+  private def isTransient(f: Throwable) =
     f match {
-      case _: StreamTcpException |
-           _: IOException |
-           _: UnsupportedContentTypeException => true
+      case _: StreamTcpException | _: IOException | _: UnsupportedContentTypeException => true
       case BufferOverflowException(message) => message.contains("Please retry the request later.")
       case unsuccessful: UnsuccessfulRequestException => unsuccessful.httpResponse.status == StatusCodes.NotFound
-      case unexpected: RuntimeException => unexpected.getMessage.contains("The http server closed the connection unexpectedly")
+      case unexpected: RuntimeException =>
+        unexpected.getMessage.contains("The http server closed the connection unexpectedly")
       case _ => false
     }
-  }
 }
diff --git a/centaur/src/main/scala/centaur/api/Retry.scala b/centaur/src/main/scala/centaur/api/Retry.scala
index f384bc37684..1cbee9a1901 100644
--- a/centaur/src/main/scala/centaur/api/Retry.scala
+++ 
b/centaur/src/main/scala/centaur/api/Retry.scala
@@ -7,6 +7,7 @@ import scala.concurrent.duration._
 import scala.concurrent.{ExecutionContext, Future}
 
 object Retry {
+
   /**
    * Copied from cromwell.core
    * Replaced the backoff with a fixed retry delay
@@ -16,8 +17,8 @@ object Retry {
                    delay: FiniteDuration,
                    isTransient: Throwable => Boolean = throwableToFalse,
                    isFatal: Throwable => Boolean = throwableToFalse,
-                   onRetry: Throwable => Unit = noopOnRetry)
-                  (implicit actorSystem: ActorSystem): Future[A] = {
+                   onRetry: Throwable => Unit = noopOnRetry
+  )(implicit actorSystem: ActorSystem): Future[A] = {
     // In the future we might want EC passed in separately but at the moment it caused more issues than it solved to do so
     implicit val ec: ExecutionContext = actorSystem.dispatcher
 
@@ -25,10 +26,18 @@ object Retry {
       case throwable if isFatal(throwable) => Future.failed(throwable)
       case throwable if !isFatal(throwable) =>
         val retriesLeft = if (isTransient(throwable)) maxRetries else maxRetries map { _ - 1 }
-        
+
         if (retriesLeft.forall(_ > 0)) {
           onRetry(throwable)
-          after(delay, actorSystem.scheduler)(withRetry(f, delay = delay, maxRetries = retriesLeft, isTransient = isTransient, isFatal = isFatal, onRetry = onRetry))
+          after(delay, actorSystem.scheduler)(
+            withRetry(f,
+                      delay = delay,
+                      maxRetries = retriesLeft,
+                      isTransient = isTransient,
+                      isFatal = isFatal,
+                      onRetry = onRetry
+            )
+          )
         } else {
           Future.failed(throwable)
         }
@@ -38,4 +47,3 @@
   def throwableToFalse(t: Throwable) = false
   def noopOnRetry(t: Throwable) = {}
 }
-
diff --git a/centaur/src/main/scala/centaur/json/JsonUtils.scala b/centaur/src/main/scala/centaur/json/JsonUtils.scala
index 1af725d595b..90e3bd91a98 100644
--- a/centaur/src/main/scala/centaur/json/JsonUtils.scala
+++ b/centaur/src/main/scala/centaur/json/JsonUtils.scala
@@ -8,6 +8,7 @@ object JsonUtils {
   val attemptNumber = "attempt"
 
   implicit class EnhancedJsValue(val jsValue: JsValue) extends AnyVal {
+
     /**
      * Modified from http://stackoverflow.com/a/31592156 - changes were made both to port from using
      * Play-JSON to Spray-JSON as well as to handle some cases specific to Cromwell's metadata response
@@ -32,9 +33,8 @@
      */
     def flatten(prefix: String = ""): JsObject = {
 
-      def flattenShardAndAttempt(k:String, v: JsArray, f: JsObject => String): JsObject = {
-        v.elements.map(_.asJsObject).fold(JsObject.empty) { (x, y) => x ++ y.flatten(s"$k.${f(y)}") }
-      }
+      def flattenShardAndAttempt(k: String, v: JsArray, f: JsObject => String): JsObject =
+        v.elements.map(_.asJsObject).fold(JsObject.empty)((x, y) => x ++ y.flatten(s"$k.${f(y)}"))
 
       jsValue.asJsObject.fields.foldLeft(JsObject.empty) {
         case (acc, (k, v: JsArray)) if v.isSingleCallArray => acc ++ JsObject(k -> v.elements.head).flatten(prefix)
@@ -44,10 +44,13 @@
              to avoid lossy conversion for multiple attempts of the same shard. The older way of flattening shards
              with only shard index in the flattened structure is also kept so that the new structure doesn't fail
              tests that rely on the older flattened structure. 
This should be cleaned up in https://broadworkbench.atlassian.net/browse/BW-483
-           */
+             */
           acc ++
             flattenShardAndAttempt(k, v, (y: JsObject) => y.getField(shardIndex).get) ++
-            flattenShardAndAttempt(k, v, (y: JsObject) => s"${y.getField(shardIndex).get}.${y.getField(attemptNumber).get}")
+            flattenShardAndAttempt(k,
+                                   v,
+                                   (y: JsObject) => s"${y.getField(shardIndex).get}.${y.getField(attemptNumber).get}"
+            )
         case (acc, (k, v: JsArray)) =>
           v.elements.zipWithIndex.foldLeft(acc) { case (accumulator, (element, idx)) =>
             val maybePrefix = if (prefix.isEmpty) "" else s"$prefix."
@@ -76,7 +79,7 @@
     // A couple of helper functions to assist with flattening Cromwell metadata responses
     def hasField(fieldName: String): Boolean = jsObject.fields.keySet contains fieldName
     def getField(fieldName: String): Option[String] = jsObject.fields.get(fieldName) map { _.toString() }
-    def flattenToMap: Map [String, JsValue] = jsObject.flatten().fields map { case (k, v: JsValue) => k -> v}
+    def flattenToMap: Map[String, JsValue] = jsObject.flatten().fields map { case (k, v: JsValue) => k -> v }
   }
 
   /**
@@ -89,9 +92,8 @@
     def nonEmptyObjectArray = jsArray.isObjectArray && jsArray.nonEmpty
     def isSingleCallArray = jsArray.hasField(shardIndex) && jsArray.size == 1
 
-    def hasField(fieldName: String): Boolean = {
+    def hasField(fieldName: String): Boolean =
       if (jsArray.nonEmptyObjectArray) jsArray.elements.map(_.asJsObject) forall { _.hasField(fieldName) } else false
-    }
   }
 }
diff --git a/centaur/src/main/scala/centaur/test/CentaurTestException.scala b/centaur/src/main/scala/centaur/test/CentaurTestException.scala
index fa83a3afb1a..ffc50a8dfdb 100644
--- a/centaur/src/main/scala/centaur/test/CentaurTestException.scala
+++ b/centaur/src/main/scala/centaur/test/CentaurTestException.scala
@@ -12,12 +12,12 @@ import cromwell.api.model.{SubmittedWorkflow, WorkflowMetadata}
  * @param metadataJsonOption The optional metadata.
  * @param causeOption The optional underlying cause.
  */
-case class CentaurTestException private(message: String,
-                                        testName: String,
-                                        workflowIdOption: Option[String],
-                                        metadataJsonOption: Option[String],
-                                        causeOption: Option[Exception])
-  extends RuntimeException(message, causeOption.orNull)
+case class CentaurTestException private (message: String,
+                                         testName: String,
+                                         workflowIdOption: Option[String],
+                                         metadataJsonOption: Option[String],
+                                         causeOption: Option[Exception]
+) extends RuntimeException(message, causeOption.orNull)
 
 object CentaurTestException {
 
@@ -25,7 +25,8 @@ object CentaurTestException {
   def apply(message: String,
             workflowDefinition: Workflow,
             submittedWorkflow: SubmittedWorkflow,
-            actualMetadata: WorkflowMetadata): CentaurTestException = {
+            actualMetadata: WorkflowMetadata
+  ): CentaurTestException =
     new CentaurTestException(
       message,
       workflowDefinition.testName,
@@ -33,12 +34,9 @@ object CentaurTestException {
       Option(actualMetadata.value),
       None
     )
-  }
 
   /** Create a new CentaurTestException for a submitted workflow. */
-  def apply(message: String,
-            workflowDefinition: Workflow,
-            submittedWorkflow: SubmittedWorkflow): CentaurTestException = {
+  def apply(message: String, workflowDefinition: Workflow, submittedWorkflow: SubmittedWorkflow): CentaurTestException =
     new CentaurTestException(
       message,
       workflowDefinition.testName,
@@ -46,10 +44,9 @@ object CentaurTestException {
       None,
       None
     )
-  }
 
   /** Create a new CentaurTestException for only a workflow definition. 
*/
-  def apply(message: String, workflowDefinition: Workflow): CentaurTestException = {
+  def apply(message: String, workflowDefinition: Workflow): CentaurTestException =
     new CentaurTestException(
       message,
       workflowDefinition.testName,
@@ -57,10 +54,9 @@ object CentaurTestException {
       None,
       None
     )
-  }
 
   /** Create a new CentaurTestException for only a workflow definition, including a root cause. */
-  def apply(message: String, workflowDefinition: Workflow, cause: Exception): CentaurTestException = {
+  def apply(message: String, workflowDefinition: Workflow, cause: Exception): CentaurTestException =
     new CentaurTestException(
       message,
       workflowDefinition.testName,
@@ -68,5 +64,4 @@ object CentaurTestException {
       None,
       Option(cause)
     )
-  }
 }
diff --git a/centaur/src/main/scala/centaur/test/ObjectCounter.scala b/centaur/src/main/scala/centaur/test/ObjectCounter.scala
index 124f78e1dc8..a5cb51ea190 100644
--- a/centaur/src/main/scala/centaur/test/ObjectCounter.scala
+++ b/centaur/src/main/scala/centaur/test/ObjectCounter.scala
@@ -40,20 +40,19 @@ object ObjectCounterInstances {
     listObjectsAtPath(_).size
   }
 
-  implicit val blobObjectCounter: ObjectCounter[BlobContainerClient] = (containerClient : BlobContainerClient) => {
+  implicit val blobObjectCounter: ObjectCounter[BlobContainerClient] = (containerClient: BlobContainerClient) => {
     val pathToInt: Path => Int = providedPath => {
 
-      //Our path parsing is somewhat GCP centric. Convert to a blob path starting from the container root.
-      def pathToBlobPath(parsedPath : Path) : String = {
+      // Our path parsing is somewhat GCP centric. Convert to a blob path starting from the container root.
+      def pathToBlobPath(parsedPath: Path): String =
         (Option(parsedPath.bucket), Option(parsedPath.directory)) match {
           case (None, _) => ""
           case (Some(_), None) => parsedPath.bucket
          case (Some(_), Some(_)) => parsedPath.bucket + "/" + parsedPath.directory
         }
-      }
 
       val fullPath = pathToBlobPath(providedPath)
-      val blobsInFolder = containerClient.listBlobsByHierarchy(fullPath)
-      //if something "isPrefix", it's a directory. Otherwise, its a file. We just want to count files.
+      val blobsInFolder = containerClient.listBlobsByHierarchy(fullPath)
+      // if something "isPrefix", it's a directory. Otherwise, it's a file. We just want to count files. 
blobsInFolder.asScala.count(!_.isPrefix)
     }
     pathToInt(_)
@@ -63,7 +62,8 @@ object ObjectCounterInstances {
 
 object ObjectCounterSyntax {
   implicit class ObjectCounterSyntax[A](client: A) {
-    def countObjects(regex: String)(implicit c: ObjectCounter[A]): String => Int = c.parsePath(regex) andThen c.countObjectsAtPath(client)
+    def countObjects(regex: String)(implicit c: ObjectCounter[A]): String => Int =
+      c.parsePath(regex) andThen c.countObjectsAtPath(client)
   }
 }
diff --git a/centaur/src/main/scala/centaur/test/Test.scala b/centaur/src/main/scala/centaur/test/Test.scala
index d0a56a2cbf5..66a4655a107 100644
--- a/centaur/src/main/scala/centaur/test/Test.scala
+++ b/centaur/src/main/scala/centaur/test/Test.scala
@@ -22,9 +22,20 @@ import com.typesafe.scalalogging.StrictLogging
 import common.validation.Validation._
 import configs.syntax._
 import cromwell.api.CromwellClient.UnsuccessfulRequestException
-import cromwell.api.model.{CallCacheDiff, Failed, HashDifference, SubmittedWorkflow, Succeeded, TerminalStatus, WaasDescription, WorkflowId, WorkflowMetadata, WorkflowStatus}
+import cromwell.api.model.{
+  CallCacheDiff,
+  Failed,
+  HashDifference,
+  SubmittedWorkflow,
+  Succeeded,
+  TerminalStatus,
+  WaasDescription,
+  WorkflowId,
+  WorkflowMetadata,
+  WorkflowStatus
+}
 import cromwell.cloudsupport.aws.AwsConfiguration
-import cromwell.cloudsupport.azure.{AzureUtils}
+import cromwell.cloudsupport.azure.AzureUtils
 import cromwell.cloudsupport.gcp.GoogleConfiguration
 import cromwell.cloudsupport.gcp.auth.GoogleAuthMode
 import io.circe.parser._
@@ -53,32 +64,28 @@ sealed abstract class Test[A] {
 object Test {
   def successful[A](value: A): Test[A] = testMonad.pure(value)
 
-  def invalidTestDefinition[A](message: String, workflowDefinition: Workflow): Test[A] = {
+  def invalidTestDefinition[A](message: String, workflowDefinition: Workflow): Test[A] =
     new Test[A] {
       override def run: IO[Nothing] = IO.raiseError(CentaurTestException(message, workflowDefinition))
     }
-  }
 
   implicit val testMonad: Monad[Test] = new Monad[Test] {
-    override def flatMap[A, B](fa: Test[A])(f: A => Test[B]): Test[B] = {
+    override def flatMap[A, B](fa: Test[A])(f: A => Test[B]): Test[B] =
       new Test[B] {
         override def run: IO[B] = fa.run flatMap { f(_).run }
       }
-    }
 
-    override def pure[A](x: A): Test[A] = {
+    override def pure[A](x: A): Test[A] =
       new Test[A] {
         override def run: IO[A] = IO.pure(x)
       }
-    }
 
     /** Call the default non-stack-safe but correct version of this method. 
*/
-    override def tailRecM[A, B](a: A)(f: A => Test[Either[A, B]]): Test[B] = {
+    override def tailRecM[A, B](a: A)(f: A => Test[Either[A, B]]): Test[B] =
       flatMap(f(a)) {
         case Right(b) => pure(b)
         case Left(nextA) => tailRecM(nextA)(f)
       }
-    }
   }
 
   implicit class TestableIO[A](a: IO[A]) {
@@ -103,14 +110,14 @@ object Operations extends StrictLogging {
   lazy val authName: String = googleConf.getString("auth")
   lazy val genomicsEndpointUrl: String = googleConf.getString("genomics.endpoint-url")
   lazy val genomicsAndStorageScopes = List(StorageScopes.CLOUD_PLATFORM_READ_ONLY, GenomicsScopes.GENOMICS)
-  lazy val credentials: Credentials = configuration.auth(authName)
+  lazy val credentials: Credentials = configuration
+    .auth(authName)
     .unsafe
     .credentials(genomicsAndStorageScopes)
-  lazy val credentialsProjectOption: Option[String] = {
-    Option(credentials) collect {
-      case serviceAccountCredentials: ServiceAccountCredentials => serviceAccountCredentials.getProjectId
+  lazy val credentialsProjectOption: Option[String] =
+    Option(credentials) collect { case serviceAccountCredentials: ServiceAccountCredentials =>
+      serviceAccountCredentials.getProjectId
     }
-  }
   lazy val confProjectOption: Option[String] = googleConf.get[Option[String]]("project") valueOrElse None
   // The project from the config or from the credentials. By default the project is read from the system environment.
   lazy val projectOption: Option[String] = confProjectOption orElse credentialsProjectOption
@@ -140,13 +147,14 @@ object Operations extends StrictLogging {
   lazy val awsConfiguration: AwsConfiguration = AwsConfiguration(CentaurConfig.conf)
   lazy val awsConf: Config = CentaurConfig.conf.getConfig("aws")
   lazy val awsAuthName: String = awsConf.getString("auths")
-  lazy val region: String = awsConf.getString("region")
-  lazy val accessKeyId: String = awsConf.getString("access-key")
+  lazy val region: String = awsConf.getString("region")
+  lazy val accessKeyId: String = awsConf.getString("access-key")
   lazy val secretAccessKey: String = awsConf.getString("secret-key")
 
   def buildAmazonS3Client: S3Client = {
     val basicAWSCredentials = AwsBasicCredentials.create(accessKeyId, secretAccessKey)
-    S3Client.builder()
+    S3Client
+      .builder()
       .region(Region.of(region))
       .credentialsProvider(StaticCredentialsProvider.create(basicAWSCredentials))
       .build()
@@ -156,16 +164,16 @@ object Operations extends StrictLogging {
   val azureSubscription = azureConfig.getString("subscription")
   val blobContainer = azureConfig.getString("container")
   val azureEndpoint = azureConfig.getString("endpoint")
-  //NB: Centaur will throw an exception if it isn't able to authenticate with Azure blob storage via the local environment.
-  lazy val blobContainerClient: BlobContainerClient = AzureUtils.buildContainerClientFromLocalEnvironment(blobContainer, azureEndpoint, Option(azureSubscription)).get
+  // NB: Centaur will throw an exception if it isn't able to authenticate with Azure blob storage via the local environment. 
+
+  lazy val blobContainerClient: BlobContainerClient =
+    AzureUtils.buildContainerClientFromLocalEnvironment(blobContainer, azureEndpoint, Option(azureSubscription)).get
 
-  def submitWorkflow(workflow: Workflow): Test[SubmittedWorkflow] = {
+  def submitWorkflow(workflow: Workflow): Test[SubmittedWorkflow] =
     new Test[SubmittedWorkflow] {
       override def run: IO[SubmittedWorkflow] = for {
         id <- CentaurCromwellClient.submit(workflow)
       } yield id
     }
-  }
 
   /**
    * A smoke test of the version endpoint, this confirms that a) nothing explodes and b) the result must be a JSON object
@@ -180,15 +188,15 @@ object Operations extends StrictLogging {
   def checkTimingRequirement(timeRequirement: Option[FiniteDuration]): Test[FiniteDuration] = new Test[FiniteDuration] {
     override def run: IO[FiniteDuration] = timeRequirement match {
       case Some(duration) => IO.pure(duration)
-      case None => IO.raiseError(new Exception("Duration value for 'maximumTime' required but not supplied in test config"))
+      case None =>
+        IO.raiseError(new Exception("Duration value for 'maximumTime' required but not supplied in test config"))
     }
   }
 
   def checkFastEnough(before: Long, after: Long, allowance: FiniteDuration): Test[Unit] = new Test[Unit] {
-    override def run: IO[Unit] = {
+    override def run: IO[Unit] =
       if (after - before < allowance.toSeconds) IO.pure(())
       else IO.raiseError(new Exception(s"Test took too long. Allowance was $allowance. Actual time: ${after - before}"))
-    }
   }
 
   def timingVerificationNotSupported(timingRequirement: Option[FiniteDuration]): Test[Unit] = new Test[Unit] {
@@ -198,7 +206,7 @@
 
   }
 
-  def checkDescription(workflow: Workflow, validityExpectation: Option[Boolean], retries: Int = 3): Test[Unit] = {
+  def checkDescription(workflow: Workflow, validityExpectation: Option[Boolean], retries: Int = 3): Test[Unit] =
     new Test[Unit] {
 
       private val timeout = 60.seconds
@@ -210,77 +218,79 @@
           case None => IO.pure(())
           case Some(d.valid) => IO.pure(())
           case Some(otherExpectation) =>
-            logger.error(s"Unexpected 'valid=${d.valid}' response when expecting $otherExpectation. Full unexpected description:${System.lineSeparator()}$d")
-            IO.raiseError(new Exception(s"Expected this workflow's /describe validity to be '$otherExpectation' but got: '${d.valid}' (errors: ${d.errors.mkString(", ")})"))
+            logger.error(
+              s"Unexpected 'valid=${d.valid}' response when expecting $otherExpectation. Full unexpected description:${System
+                .lineSeparator()}$d"
+            )
+            IO.raiseError(
+              new Exception(
+                s"Expected this workflow's /describe validity to be '$otherExpectation' but got: '${d.valid}' (errors: ${d.errors
+                  .mkString(", ")})"
+              )
+            )
         }
-      }).timeoutTo(timeout, IO {
-        if (alreadyTried + 1 >= retries) {
-          throw new TimeoutException("Timeout from checkDescription 60 seconds: " + timeoutStackTraceString)
-        } else {
-          logger.warn(s"checkDescription timeout on attempt ${alreadyTried + 1}. ")
-          checkDescriptionInner(alreadyTried + 1)
-          ()
+      }).timeoutTo(
+        timeout,
+        IO {
+          if (alreadyTried + 1 >= retries) {
+            throw new TimeoutException("Timeout from checkDescription 60 seconds: " + timeoutStackTraceString)
+          } else {
+            logger.warn(s"checkDescription timeout on attempt ${alreadyTried + 1}. 
") + checkDescriptionInner(alreadyTried + 1) + () + } } - }) + ) } - - override def run: IO[Unit] = { - - + override def run: IO[Unit] = // We can't describe workflows based on zipped imports, so don't try: if (workflow.skipDescribeEndpointValidation || workflow.data.zippedImports.nonEmpty) { IO.pure(()) } else { checkDescriptionInner(0) } - } } - } - def submitInvalidWorkflow(workflow: Workflow): Test[SubmitHttpResponse] = { + def submitInvalidWorkflow(workflow: Workflow): Test[SubmitHttpResponse] = new Test[SubmitHttpResponse] { - override def run: IO[SubmitHttpResponse] = { - CentaurCromwellClient.submit(workflow).redeemWith( - { - case unsuccessfulRequestException: UnsuccessfulRequestException => - val httpResponse = unsuccessfulRequestException.httpResponse - val statusCode = httpResponse.status.intValue() - httpResponse.entity match { - case akka.http.scaladsl.model.HttpEntity.Strict(_, data) => - IO.pure(SubmitHttpResponse(statusCode, data.utf8String)) - case _ => - val message = s"Expected a strict http response entity but got ${httpResponse.entity}" - IO.raiseError(CentaurTestException(message, workflow, unsuccessfulRequestException)) - } - case unexpected: Exception => - val message = s"Unexpected error: ${unexpected.getMessage}" - IO.raiseError(CentaurTestException(message, workflow, unexpected)) - case throwable: Throwable => throw throwable - }, - { - submittedWorkflow => { + override def run: IO[SubmitHttpResponse] = + CentaurCromwellClient + .submit(workflow) + .redeemWith( + { + case unsuccessfulRequestException: UnsuccessfulRequestException => + val httpResponse = unsuccessfulRequestException.httpResponse + val statusCode = httpResponse.status.intValue() + httpResponse.entity match { + case akka.http.scaladsl.model.HttpEntity.Strict(_, data) => + IO.pure(SubmitHttpResponse(statusCode, data.utf8String)) + case _ => + val message = s"Expected a strict http response entity but got ${httpResponse.entity}" + IO.raiseError(CentaurTestException(message, workflow, unsuccessfulRequestException)) + } + case unexpected: Exception => + val message = s"Unexpected error: ${unexpected.getMessage}" + IO.raiseError(CentaurTestException(message, workflow, unexpected)) + case throwable: Throwable => throw throwable + }, + { submittedWorkflow => val message = s"Expected a failure but got a successfully submitted workflow with id ${submittedWorkflow.id}" IO.raiseError(CentaurTestException(message, workflow)) } - } - ) - } + ) } - } - def abortWorkflow(workflow: SubmittedWorkflow): Test[WorkflowStatus] = { + def abortWorkflow(workflow: SubmittedWorkflow): Test[WorkflowStatus] = new Test[WorkflowStatus] { override def run: IO[WorkflowStatus] = CentaurCromwellClient.abort(workflow) } - } - def waitFor(duration: FiniteDuration): Test[Unit] = { + def waitFor(duration: FiniteDuration): Test[Unit] = new Test[Unit] { override def run: IO[Unit] = IO.sleep(duration) } - } /** * Polls until a valid status is reached. 
@@ -290,16 +300,18 @@ object Operations extends StrictLogging { def expectSomeProgress(workflow: SubmittedWorkflow, testDefinition: Workflow, expectedStatuses: Set[WorkflowStatus], - timeout: FiniteDuration): Test[SubmittedWorkflow] = { + timeout: FiniteDuration + ): Test[SubmittedWorkflow] = new Test[SubmittedWorkflow] { - def status(remainingTimeout: FiniteDuration): IO[SubmittedWorkflow] = { + def status(remainingTimeout: FiniteDuration): IO[SubmittedWorkflow] = for { workflowStatus <- CentaurCromwellClient.status(workflow) mappedStatus <- workflowStatus match { case s if expectedStatuses.contains(s) => IO.pure(workflow) case s: TerminalStatus => CentaurCromwellClient.metadata(workflow) flatMap { metadata => - val message = s"Unexpected terminal status $s while waiting for one of [${expectedStatuses.mkString(", ")}] (workflow ID: ${workflow.id})" + val message = + s"Unexpected terminal status $s while waiting for one of [${expectedStatuses.mkString(", ")}] (workflow ID: ${workflow.id})" IO.raiseError(CentaurTestException(message, testDefinition, workflow, metadata)) } case _ if remainingTimeout > 0.seconds => @@ -308,16 +320,15 @@ object Operations extends StrictLogging { s <- status(remainingTimeout - 10.seconds) } yield s case other => - val message = s"Cromwell failed to progress into any of the statuses [${expectedStatuses.mkString(", ")}]. Was still '$other' after $timeout (workflow ID: ${workflow.id})" + val message = + s"Cromwell failed to progress into any of the statuses [${expectedStatuses.mkString(", ")}]. Was still '$other' after $timeout (workflow ID: ${workflow.id})" IO.raiseError(CentaurTestException(message, testDefinition, workflow)) } } yield mappedStatus - } override def run: IO[SubmittedWorkflow] = status(timeout).timeout(CentaurConfig.maxWorkflowLength) } - } /** * Polls until a specific status is reached. 
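  // Operations in this file return Test values, and Test has a Monad instance (see the tailRecM
  // definition earlier in this file), so individual steps compose in a for-comprehension. A hedged
  // sketch of such a formula, assuming cats flatMap/map syntax is in scope as it is in TestFormulas;
  // exampleFormula is an illustrative name only:
  def exampleFormula(workflowDefinition: Workflow): Test[SubmittedWorkflow] =
    for {
      submitted <- submitWorkflow(workflowDefinition)
      _ <- waitFor(30.seconds)
      finished <- pollUntilStatus(submitted, workflowDefinition, Succeeded)
    } yield finished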
@@ -326,9 +337,10 @@ object Operations extends StrictLogging { */ def pollUntilStatus(workflow: SubmittedWorkflow, testDefinition: Workflow, - expectedStatus: WorkflowStatus): Test[SubmittedWorkflow] = { + expectedStatus: WorkflowStatus + ): Test[SubmittedWorkflow] = new Test[SubmittedWorkflow] { - def status: IO[SubmittedWorkflow] = { + def status: IO[SubmittedWorkflow] = for { workflowStatus <- CentaurCromwellClient.status(workflow) mappedStatus <- workflowStatus match { @@ -336,35 +348,38 @@ object Operations extends StrictLogging { case s: TerminalStatus => val reducedMetadataOptions: Map[String, List[String]] = CentaurCromwellClient.defaultMetadataArgs.getOrElse(Map.empty) ++ Map( - "includeKey" -> (List("status") ++ (if (expectedStatus == Succeeded) List("failures") else List.empty)), + "includeKey" -> (List("status") ++ (if (expectedStatus == Succeeded) List("failures") + else List.empty)), "expandSubWorkflows" -> List("false") ) - CentaurCromwellClient.metadata(workflow = workflow, args = Option(reducedMetadataOptions)) flatMap { metadata => - val failuresString = if (expectedStatus == Succeeded) { - (for { - metadataJson <- parse(metadata.value).toOption - asObject <- metadataJson.asObject - failures <- asObject.toMap.get("failures") - } yield s" Metadata 'failures' content: ${failures.spaces2}").getOrElse("No additional failure information found in metadata.") - } else { - "" - } - - val message = s"Unexpected terminal status $s but was waiting for $expectedStatus (workflow ID: ${workflow.id}).$failuresString" - IO.raiseError(CentaurTestException(message, testDefinition, workflow, metadata)) + CentaurCromwellClient.metadata(workflow = workflow, args = Option(reducedMetadataOptions)) flatMap { + metadata => + val failuresString = if (expectedStatus == Succeeded) { + (for { + metadataJson <- parse(metadata.value).toOption + asObject <- metadataJson.asObject + failures <- asObject.toMap.get("failures") + } yield s" Metadata 'failures' content: ${failures.spaces2}") + .getOrElse("No additional failure information found in metadata.") + } else { + "" + } + + val message = + s"Unexpected terminal status $s but was waiting for $expectedStatus (workflow ID: ${workflow.id}).$failuresString" + IO.raiseError(CentaurTestException(message, testDefinition, workflow, metadata)) } - case _ => for { - _ <- IO.sleep(10.seconds) - s <- status - } yield s + case _ => + for { + _ <- IO.sleep(10.seconds) + s <- status + } yield s } } yield mappedStatus - } override def run: IO[SubmittedWorkflow] = status.timeout(CentaurConfig.maxWorkflowLength) } - } /** * Validate that the given jobId matches the one in the metadata @@ -373,7 +388,8 @@ object Operations extends StrictLogging { workflow: SubmittedWorkflow, metadata: WorkflowMetadata, callFqn: String, - formerJobId: String): Test[Unit] = { + formerJobId: String + ): Test[Unit] = new Test[Unit] { override def run: IO[Unit] = CentaurCromwellClient.metadata(workflow) flatMap { s => s.asFlat.value.get(s"calls.$callFqn.jobId") match { @@ -387,56 +403,58 @@ object Operations extends StrictLogging { } } } - } - def validatePAPIAborted(workflowDefinition: Workflow, workflow: SubmittedWorkflow, jobId: String): Test[Unit] = { + def validatePAPIAborted(workflowDefinition: Workflow, workflow: SubmittedWorkflow, jobId: String): Test[Unit] = new Test[Unit] { - def checkPAPIAborted(): IO[Unit] = { + def checkPAPIAborted(): IO[Unit] = for { - operation <- IO { genomics.projects().operations().get(jobId).execute() } + operation <- 
IO(genomics.projects().operations().get(jobId).execute()) done = operation.getDone operationError = Option(operation.getError) - aborted = operationError.exists(_.getCode == 1) && operationError.exists(_.getMessage.startsWith("Operation canceled")) - result <- if (!(done && aborted)) { - CentaurCromwellClient.metadata(workflow) flatMap { metadata => - val message = s"Underlying JES job was not aborted properly. " + - s"Done = $done. Error = ${operationError.map(_.getMessage).getOrElse("N/A")} (workflow ID: ${workflow.id})" - IO.raiseError(CentaurTestException(message, workflowDefinition, workflow, metadata)) - } - } else IO.unit + aborted = operationError.exists(_.getCode == 1) && operationError.exists( + _.getMessage.startsWith("Operation canceled") + ) + result <- + if (!(done && aborted)) { + CentaurCromwellClient.metadata(workflow) flatMap { metadata => + val message = s"Underlying JES job was not aborted properly. " + + s"Done = $done. Error = ${operationError.map(_.getMessage).getOrElse("N/A")} (workflow ID: ${workflow.id})" + IO.raiseError(CentaurTestException(message, workflowDefinition, workflow, metadata)) + } + } else IO.unit } yield result - } override def run: IO[Unit] = if (jobId.startsWith("operations/")) { checkPAPIAborted() } else IO.unit } - } /** * Polls until a specific call is in Running state. Returns the job id. */ - def pollUntilCallIsRunning(workflowDefinition: Workflow, workflow: SubmittedWorkflow, callFqn: String): Test[String] = { + def pollUntilCallIsRunning(workflowDefinition: Workflow, + workflow: SubmittedWorkflow, + callFqn: String + ): Test[String] = { // Special case for sub workflow testing - def findJobIdInSubWorkflow(subWorkflowId: String): IO[Option[String]] = { + def findJobIdInSubWorkflow(subWorkflowId: String): IO[Option[String]] = for { metadata <- CentaurCromwellClient .metadataWithId(WorkflowId.fromString(subWorkflowId)) .redeem(_ => None, Option.apply) jobId <- IO.pure(metadata.flatMap(_.asFlat.value.get("calls.inner_abort.aborted.jobId"))) } yield jobId.map(_.asInstanceOf[JsString].value) - } - def valueAsString(key: String, metadata: WorkflowMetadata) = { + def valueAsString(key: String, metadata: WorkflowMetadata) = metadata.asFlat.value.get(key).map(_.asInstanceOf[JsString].value) - } def findCallStatus(metadata: WorkflowMetadata): IO[Option[(String, String)]] = { val status = metadata.asFlat.value.get(s"calls.$callFqn.executionStatus") val statusString = status.map(_.asInstanceOf[JsString].value) for { - jobId <- valueAsString(s"calls.$callFqn.jobId", metadata).map(jobId => IO.pure(Option(jobId))) + jobId <- valueAsString(s"calls.$callFqn.jobId", metadata) + .map(jobId => IO.pure(Option(jobId))) .orElse(valueAsString(s"calls.$callFqn.subWorkflowId", metadata).map(findJobIdInSubWorkflow)) .getOrElse(IO.pure(None)) pair = (statusString, jobId) match { @@ -447,7 +465,7 @@ object Operations extends StrictLogging { } new Test[String] { - def doPerform(): IO[String] = { + def doPerform(): IO[String] = for { // We don't want to keep going forever if the workflow failed status <- CentaurCromwellClient.status(workflow) @@ -464,13 +482,13 @@ object Operations extends StrictLogging { case Some(("Failed", _)) => val message = s"$callFqn failed" IO.raiseError(CentaurTestException(message, workflowDefinition, workflow, metadata)) - case _ => for { - _ <- IO.sleep(5.seconds) - recurse <- doPerform() - } yield recurse + case _ => + for { + _ <- IO.sleep(5.seconds) + recurse <- doPerform() + } yield recurse } } yield result - } override def run: IO[String] 
= doPerform().timeout(CentaurConfig.maxWorkflowLength) } @@ -483,34 +501,40 @@ object Operations extends StrictLogging { for { md <- CentaurCromwellClient.metadata(workflowB) - calls = md.asFlat.value.keySet.flatMap({ + calls = md.asFlat.value.keySet.flatMap { case callNameRegexp(name) => Option(name) case _ => None - }) - diffs <- calls.toList.traverse[IO, CallCacheDiff]({ callName => + } + diffs <- calls.toList.traverse[IO, CallCacheDiff] { callName => CentaurCromwellClient.callCacheDiff(workflowA, callName, workflowB, callName) - }) + } } yield diffs.flatMap(_.hashDifferential) } - override def run: IO[Unit] = { + override def run: IO[Unit] = hashDiffOfAllCalls map { case diffs if diffs.nonEmpty && CentaurCromwellClient.LogFailures => Console.err.println(s"Hash differential for ${workflowA.id} and ${workflowB.id}") - diffs.map({ diff => - s"For key ${diff.hashKey}:\nCall A: ${diff.callA.getOrElse("N/A")}\nCall B: ${diff.callB.getOrElse("N/A")}" - }).foreach(Console.err.println) + diffs + .map { diff => + s"For key ${diff.hashKey}:\nCall A: ${diff.callA.getOrElse("N/A")}\nCall B: ${diff.callB.getOrElse("N/A")}" + } + .foreach(Console.err.println) case _ => } - } } /* Select only those flat metadata items whose keys begin with the specified prefix, removing the prefix from the keys. Also * perform variable substitutions for UUID and WORKFLOW_ROOT and remove any ~> Centaur metadata expectation metacharacters. */ - private def selectMetadataExpectationSubsetByPrefix(workflow: Workflow, prefix: String, workflowId: WorkflowId, workflowRoot: String): List[(String, JsValue)] = { + private def selectMetadataExpectationSubsetByPrefix(workflow: Workflow, + prefix: String, + workflowId: WorkflowId, + workflowRoot: String + ): List[(String, JsValue)] = { import WorkflowFlatMetadata._ def replaceVariables(value: JsValue): JsValue = value match { - case s: JsString => JsString(s.value.replaceExpectationVariables(workflowId, workflowRoot).replaceFirst("^~>", "")) + case s: JsString => + JsString(s.value.replaceExpectationVariables(workflowId, workflowRoot).replaceFirst("^~>", "")) case o => o } val filterLabels: PartialFunction[(String, JsValue), (String, JsValue)] = { @@ -525,7 +549,8 @@ object Operations extends StrictLogging { def fetchAndValidateOutputs(submittedWorkflow: SubmittedWorkflow, workflow: Workflow, - workflowRoot: String): Test[JsObject] = new Test[JsObject] { + workflowRoot: String + ): Test[JsObject] = new Test[JsObject] { def checkOutputs(expectedOutputs: List[(String, JsValue)])(actualOutputs: Map[String, JsValue]): IO[Unit] = { val expected = expectedOutputs.toSet @@ -535,11 +560,13 @@ object Operations extends StrictLogging { lazy val inExpectedButNotInActual = expected.diff(actual) if (!workflow.allowOtherOutputs && inActualButNotInExpected.nonEmpty) { - val message = s"In actual outputs but not in expected and other outputs not allowed: ${inActualButNotInExpected.mkString(", ")}" + val message = + s"In actual outputs but not in expected and other outputs not allowed: ${inActualButNotInExpected.mkString(", ")}" IO.raiseError(CentaurTestException(message, workflow, submittedWorkflow)) } else if (inExpectedButNotInActual.nonEmpty) { - val message = s"In actual outputs but not in expected: ${inExpectedButNotInActual.mkString(", ")}" + System.lineSeparator + - s"In expected outputs but not in actual: ${inExpectedButNotInActual.mkString(", ")}" + val message = + s"In actual outputs but not in expected: ${inExpectedButNotInActual.mkString(", ")}" + System.lineSeparator + + s"In 
expected outputs but not in actual: ${inExpectedButNotInActual.mkString(", ")}" IO.raiseError(CentaurTestException(message, workflow, submittedWorkflow)) } else { IO.unit @@ -549,7 +576,8 @@ object Operations extends StrictLogging { override def run: IO[JsObject] = { import centaur.test.metadata.WorkflowFlatOutputs._ - val expectedOutputs: List[(String, JsValue)] = selectMetadataExpectationSubsetByPrefix(workflow, "outputs.", submittedWorkflow.id, workflowRoot) + val expectedOutputs: List[(String, JsValue)] = + selectMetadataExpectationSubsetByPrefix(workflow, "outputs.", submittedWorkflow.id, workflowRoot) for { outputs <- CentaurCromwellClient.outputs(submittedWorkflow) @@ -562,7 +590,8 @@ object Operations extends StrictLogging { def fetchAndValidateLabels(submittedWorkflow: SubmittedWorkflow, workflow: Workflow, - workflowRoot: String): Test[Unit] = new Test[Unit] { + workflowRoot: String + ): Test[Unit] = new Test[Unit] { override def run: IO[Unit] = { import centaur.test.metadata.WorkflowFlatLabels._ @@ -570,17 +599,18 @@ object Operations extends StrictLogging { val expectedLabels: List[(String, JsValue)] = workflowIdLabel :: selectMetadataExpectationSubsetByPrefix(workflow, "labels.", submittedWorkflow.id, workflowRoot) - def validateLabels(actualLabels: Map[String, JsValue]) = { val diff = expectedLabels.toSet.diff(actualLabels.toSet) if (diff.isEmpty) { IO.unit } else { - IO.raiseError(CentaurTestException( - s"In expected labels but not in actual: ${diff.mkString(", ")}", - workflow, - submittedWorkflow - )) + IO.raiseError( + CentaurTestException( + s"In expected labels but not in actual: ${diff.mkString(", ")}", + workflow, + submittedWorkflow + ) + ) } } @@ -593,75 +623,77 @@ object Operations extends StrictLogging { } /** Compares logs filtered from the raw `metadata` endpoint with the `logs` endpoint. */ - def validateLogs(metadata: WorkflowMetadata, - submittedWorkflow: SubmittedWorkflow, - workflow: Workflow): Test[Unit] = new Test[Unit] { - val suffixes = Set("stdout", "shardIndex", "stderr", "attempt", "backendLogs.log") - - def removeSubworkflowKeys(flattened: Map[String, JsValue]): Map[String, JsValue] = { - val subWorkflowIdPrefixes = flattened.keys.filter(_.endsWith(".subWorkflowId")).map(s => s.substring(0, s.lastIndexOf('.'))) - flattened filter { case (k, _) => !subWorkflowIdPrefixes.exists(k.startsWith) } - } - - // Filter to only include the fields in the flattened metadata that should appear in the logs endpoint. - def filterForLogsFields(flattened: Map[String, JsValue]): Map[String, JsValue] = removeSubworkflowKeys(flattened).filter { - case (k, _) => k == "id" || suffixes.exists(s => k.endsWith("." 
+ s) && !k.contains(".outputs.") && !k.startsWith("outputs.")) - } + def validateLogs(metadata: WorkflowMetadata, submittedWorkflow: SubmittedWorkflow, workflow: Workflow): Test[Unit] = + new Test[Unit] { + val suffixes = Set("stdout", "shardIndex", "stderr", "attempt", "backendLogs.log") - override def run: IO[Unit] = { + def removeSubworkflowKeys(flattened: Map[String, JsValue]): Map[String, JsValue] = { + val subWorkflowIdPrefixes = + flattened.keys.filter(_.endsWith(".subWorkflowId")).map(s => s.substring(0, s.lastIndexOf('.'))) + flattened filter { case (k, _) => !subWorkflowIdPrefixes.exists(k.startsWith) } + } - def validateLogsMetadata(flatLogs: Map[String, JsValue], flatFilteredMetadata: Map[String, JsValue]): IO[Unit] = - if (flatLogs.equals(flatFilteredMetadata)) { - IO.unit - } else { - val message = (List("actual logs endpoint output did not equal filtered metadata", "flat logs: ") ++ - flatLogs.toList ++ List("flat filtered metadata: ") ++ flatFilteredMetadata.toList).mkString("\n") - IO.raiseError(CentaurTestException(message, workflow, submittedWorkflow)) + // Filter to only include the fields in the flattened metadata that should appear in the logs endpoint. + def filterForLogsFields(flattened: Map[String, JsValue]): Map[String, JsValue] = + removeSubworkflowKeys(flattened).filter { case (k, _) => + k == "id" || suffixes.exists(s => + k.endsWith("." + s) && !k.contains(".outputs.") && !k.startsWith("outputs.") + ) } - for { - logs <- CentaurCromwellClient.logs(submittedWorkflow) - flatLogs = logs.asFlat.value - flatFilteredMetadata = metadata.asFlat.value |> filterForLogsFields - _ <- validateLogsMetadata(flatLogs, flatFilteredMetadata) - } yield () + override def run: IO[Unit] = { + + def validateLogsMetadata(flatLogs: Map[String, JsValue], flatFilteredMetadata: Map[String, JsValue]): IO[Unit] = + if (flatLogs.equals(flatFilteredMetadata)) { + IO.unit + } else { + val message = (List("actual logs endpoint output did not equal filtered metadata", "flat logs: ") ++ + flatLogs.toList ++ List("flat filtered metadata: ") ++ flatFilteredMetadata.toList).mkString("\n") + IO.raiseError(CentaurTestException(message, workflow, submittedWorkflow)) + } + + for { + logs <- CentaurCromwellClient.logs(submittedWorkflow) + flatLogs = logs.asFlat.value + flatFilteredMetadata = metadata.asFlat.value |> filterForLogsFields + _ <- validateLogsMetadata(flatLogs, flatFilteredMetadata) + } yield () + } } - } - def fetchMetadata(submittedWorkflow: SubmittedWorkflow, - expandSubworkflows: Boolean): IO[WorkflowMetadata] = { + def fetchMetadata(submittedWorkflow: SubmittedWorkflow, expandSubworkflows: Boolean): IO[WorkflowMetadata] = CentaurCromwellClient.metadata(submittedWorkflow, expandSubworkflows = expandSubworkflows) - } def fetchAndValidateNonSubworkflowMetadata(submittedWorkflow: SubmittedWorkflow, workflowSpec: Workflow, - cacheHitUUID: Option[UUID] = None): Test[WorkflowMetadata] = { + cacheHitUUID: Option[UUID] = None + ): Test[WorkflowMetadata] = new Test[WorkflowMetadata] { def fetchOnce(): IO[WorkflowMetadata] = fetchMetadata(submittedWorkflow, expandSubworkflows = false) def eventuallyMetadata(workflow: SubmittedWorkflow, - expectedMetadata: WorkflowFlatMetadata): IO[WorkflowMetadata] = { - validateMetadata(workflow, expectedMetadata).handleErrorWith({ _ => + expectedMetadata: WorkflowFlatMetadata + ): IO[WorkflowMetadata] = + validateMetadata(workflow, expectedMetadata).handleErrorWith { _ => for { _ <- IO.sleep(2.seconds) recurse <- eventuallyMetadata(workflow, expectedMetadata) } 
yield recurse - }) - } + } def validateMetadata(workflow: SubmittedWorkflow, - expectedMetadata: WorkflowFlatMetadata): IO[WorkflowMetadata] = { - def checkDiff(diffs: Iterable[String], actualMetadata: WorkflowMetadata): IO[Unit] = { + expectedMetadata: WorkflowFlatMetadata + ): IO[WorkflowMetadata] = { + def checkDiff(diffs: Iterable[String], actualMetadata: WorkflowMetadata): IO[Unit] = if (diffs.nonEmpty) { val message = s"Invalid metadata response:\n -${diffs.mkString("\n -")}\n" IO.raiseError(CentaurTestException(message, workflowSpec, workflow, actualMetadata)) } else { IO.unit } - } - def validateUnwantedMetadata(actualMetadata: WorkflowMetadata): IO[Unit] = { + def validateUnwantedMetadata(actualMetadata: WorkflowMetadata): IO[Unit] = if (workflowSpec.notInMetadata.nonEmpty) { // Check that none of the "notInMetadata" keys are in the actual metadata val absentMdIntersect = workflowSpec.notInMetadata.toSet.intersect(actualMetadata.asFlat.value.keySet) @@ -674,23 +706,23 @@ object Operations extends StrictLogging { } else { IO.unit } - } - def validateAllowOtherOutputs(actualMetadata: WorkflowMetadata): IO[Unit] = { + def validateAllowOtherOutputs(actualMetadata: WorkflowMetadata): IO[Unit] = if (workflowSpec.allowOtherOutputs) IO.unit else { val flat = actualMetadata.asFlat.value val actualOutputs: Iterable[String] = flat.keys.filter(_.startsWith("outputs.")) - val expectedOutputs: Iterable[String] = workflowSpec.metadata.map(w => w.value.keys.filter(_.startsWith("outputs."))).getOrElse(List.empty) + val expectedOutputs: Iterable[String] = + workflowSpec.metadata.map(w => w.value.keys.filter(_.startsWith("outputs."))).getOrElse(List.empty) val diff = actualOutputs.toSet.diff(expectedOutputs.toSet) if (diff.nonEmpty) { - val message = s"Found unwanted keys in metadata with `allow-other-outputs` = false: ${diff.mkString(", ")}" + val message = + s"Found unwanted keys in metadata with `allow-other-outputs` = false: ${diff.mkString(", ")}" IO.raiseError(CentaurTestException(message, workflowSpec, workflow, actualMetadata)) } else { IO.unit } } - } for { actualMetadata <- fetchOnce() @@ -709,14 +741,14 @@ object Operations extends StrictLogging { case None => fetchOnce() } } - } def validateMetadataJson(testType: String, expected: JsObject, actual: JsObject, submittedWorkflow: SubmittedWorkflow, workflow: Workflow, - allowableAddedOneWordFields: List[String]): IO[Unit] = { + allowableAddedOneWordFields: List[String] + ): IO[Unit] = if (actual.equals(expected)) { IO.unit } else { @@ -748,46 +780,66 @@ object Operations extends StrictLogging { } else { val writer: JsonWriter[Vector[Operation[JsValue]]] = new JsonWriter[Vector[Operation[JsValue]]] { def processOperation(op: Operation[JsValue]): JsValue = op match { - case Add(path, value) => JsObject(Map[String, JsValue]( - "description" -> JsString("Unexpected value found"), - "path" -> JsString(path.toString), - "value" -> value)) - case Copy(from, path) => JsObject(Map[String, JsValue]( - "description" -> JsString("Value(s) unexpectedly copied"), - "expected_at" -> JsString(from.toString), - "also_at" -> JsString(path.toString))) - case Move(from, path) => JsObject(Map[String, JsValue]( - "description" -> JsString("Value(s) unexpectedly moved"), - "expected_location" -> JsString(from.toString), - "actual_location" -> JsString(path.toString))) - case Remove(path, old) => JsObject(Map[String, JsValue]( - "description" -> JsString("Value missing"), - "expected_location" -> JsString(path.toString)) ++ - old.map(o => "expected_value" -> o)) - 
case Replace(path, value, old) => JsObject(Map[String, JsValue]( - "description" -> JsString("Incorrect value found"), - "path" -> JsString(path.toString), - "found_value" -> value) ++ old.map(o => "expected_value" -> o)) - case diffson.jsonpatch.Test(path, value) => JsObject(Map[String, JsValue]( - "op" -> JsString("test"), - "path" -> JsString(path.toString), - "value" -> value)) + case Add(path, value) => + JsObject( + Map[String, JsValue]("description" -> JsString("Unexpected value found"), + "path" -> JsString(path.toString), + "value" -> value + ) + ) + case Copy(from, path) => + JsObject( + Map[String, JsValue]("description" -> JsString("Value(s) unexpectedly copied"), + "expected_at" -> JsString(from.toString), + "also_at" -> JsString(path.toString) + ) + ) + case Move(from, path) => + JsObject( + Map[String, JsValue]("description" -> JsString("Value(s) unexpectedly moved"), + "expected_location" -> JsString(from.toString), + "actual_location" -> JsString(path.toString) + ) + ) + case Remove(path, old) => + JsObject( + Map[String, JsValue]("description" -> JsString("Value missing"), + "expected_location" -> JsString(path.toString) + ) ++ + old.map(o => "expected_value" -> o) + ) + case Replace(path, value, old) => + JsObject( + Map[String, JsValue]("description" -> JsString("Incorrect value found"), + "path" -> JsString(path.toString), + "found_value" -> value + ) ++ old.map(o => "expected_value" -> o) + ) + case diffson.jsonpatch.Test(path, value) => + JsObject( + Map[String, JsValue]("op" -> JsString("test"), "path" -> JsString(path.toString), "value" -> value) + ) } - override def write(vector: Vector[Operation[JsValue]]): JsValue = { + override def write(vector: Vector[Operation[JsValue]]): JsValue = JsArray(vector.map(processOperation)) - } } val jsonDiff = filteredDifferences.toJson(writer).prettyPrint - IO.raiseError(CentaurTestException(s"Error during $testType metadata comparison. Diff: $jsonDiff Expected: $expected Actual: $actual", workflow, submittedWorkflow)) + IO.raiseError( + CentaurTestException( + s"Error during $testType metadata comparison. Diff: $jsonDiff Expected: $expected Actual: $actual", + workflow, + submittedWorkflow + ) + ) } } - } def fetchAndValidateJobManagerStyleMetadata(submittedWorkflow: SubmittedWorkflow, workflow: Workflow, - prefetchedOriginalNonSubWorkflowMetadata: Option[String]): Test[WorkflowMetadata] = new Test[WorkflowMetadata] { + prefetchedOriginalNonSubWorkflowMetadata: Option[String] + ): Test[WorkflowMetadata] = new Test[WorkflowMetadata] { // If the non-subworkflow metadata was already fetched, there's no need to fetch it again. 
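    // A hedged reconstruction for orientation only: jmArgs (defined elsewhere in this file) narrows
    // the metadata request to the one-word Job Manager keys without expanding sub-workflows,
    // roughly like:
    //
    //   val jmStyleArgs: Map[String, List[String]] =
    //     Map("includeKey" -> oneWordJmIncludeKeys, "expandSubWorkflows" -> List("false"))
    //
    // originalMetadataStringIO below reuses a prefetched copy when one is available, so the
    // comparison does not trigger a second full metadata fetch.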
def originalMetadataStringIO: IO[String] = prefetchedOriginalNonSubWorkflowMetadata match { @@ -799,19 +851,43 @@ object Operations extends StrictLogging { originalMetadata <- originalMetadataStringIO jmMetadata <- CentaurCromwellClient.metadata( workflow = submittedWorkflow, - Option(CentaurCromwellClient.defaultMetadataArgs.getOrElse(Map.empty) ++ jmArgs)) + Option(CentaurCromwellClient.defaultMetadataArgs.getOrElse(Map.empty) ++ jmArgs) + ) jmMetadataObject <- IO.fromTry(Try(jmMetadata.value.parseJson.asJsObject)) expectation <- IO.fromTry(Try(extractJmStyleMetadataFields(originalMetadata.parseJson.asJsObject))) - _ <- validateMetadataJson(testType = s"fetchAndValidateJobManagerStyleMetadata", expectation, jmMetadataObject, submittedWorkflow, workflow, allowableOneWordAdditionsInJmMetadata) + _ <- validateMetadataJson(testType = s"fetchAndValidateJobManagerStyleMetadata", + expectation, + jmMetadataObject, + submittedWorkflow, + workflow, + allowableOneWordAdditionsInJmMetadata + ) } yield jmMetadata } val oneWordJmIncludeKeys = List( - "attempt", "callRoot", "end", - "executionStatus", "failures", "inputs", "jobId", - "calls", "outputs", "shardIndex", "start", "stderr", "stdout", - "description", "executionEvents", "labels", "parentWorkflowId", - "returnCode", "status", "submission", "subWorkflowId", "workflowName" + "attempt", + "callRoot", + "end", + "executionStatus", + "failures", + "inputs", + "jobId", + "calls", + "outputs", + "shardIndex", + "start", + "stderr", + "stdout", + "description", + "executionEvents", + "labels", + "parentWorkflowId", + "returnCode", + "status", + "submission", + "subWorkflowId", + "workflowName" ) // Our Job Manager metadata validation works by comparing the first pull of metadata which satisfies all test requirements @@ -839,12 +915,13 @@ object Operations extends StrictLogging { // NB: this filter to remove "calls" is because - although it is a single word in the JM request, // it gets treated specially by the API (so has to be treated specially here too) - def processOneWordIncludes(json: JsObject) = (oneWordJmIncludeKeys.filterNot(_ == "calls") :+ "id").foldRight(JsObject.empty) { (toInclude, current) => - json.fields.get(toInclude) match { - case Some(jsonToInclude) => JsObject(current.fields + (toInclude -> jsonToInclude)) - case None => current + def processOneWordIncludes(json: JsObject) = + (oneWordJmIncludeKeys.filterNot(_ == "calls") :+ "id").foldRight(JsObject.empty) { (toInclude, current) => + json.fields.get(toInclude) match { + case Some(jsonToInclude) => JsObject(current.fields + (toInclude -> jsonToInclude)) + case None => current + } } - } def processCallCacheField(callJson: JsObject) = for { originalCallCachingField <- callJson.fields.get("callCaching") @@ -864,7 +941,9 @@ object Operations extends StrictLogging { } val workflowLevelWithOneWordIncludes = processOneWordIncludes(originalWorkflowMetadataJson) - val callsField = originalCallMetadataJson map { calls => Map("calls" -> processCallsSection(calls)) } getOrElse Map.empty + val callsField = originalCallMetadataJson map { calls => + Map("calls" -> processCallsSection(calls)) + } getOrElse Map.empty JsObject(workflowLevelWithOneWordIncludes.fields ++ callsField) } @@ -875,7 +954,8 @@ object Operations extends StrictLogging { def validateCacheResultField(workflowDefinition: Workflow, submittedWorkflow: SubmittedWorkflow, metadata: WorkflowMetadata, - blacklistedValue: String): Test[Unit] = { + blacklistedValue: String + ): Test[Unit] = new Test[Unit] { override def run: IO[Unit] 
= { val badCacheResults = metadata.asFlat.value collect { @@ -891,24 +971,24 @@ object Operations extends StrictLogging { } } } - } def validateDirectoryContentsCounts(workflowDefinition: Workflow, submittedWorkflow: SubmittedWorkflow, - metadata: WorkflowMetadata): Test[Unit] = new Test[Unit] { + metadata: WorkflowMetadata + ): Test[Unit] = new Test[Unit] { private val workflowId = submittedWorkflow.id.id.toString override def run: IO[Unit] = workflowDefinition.directoryContentCounts match { case None => IO.unit case Some(directoryContentCountCheck) => - val counts = directoryContentCountCheck.expectedDirectoryContentsCounts map { - case (directory, count) => - val substitutedDir = directory.replaceAll("<>", workflowId) - (substitutedDir, count, directoryContentCountCheck.checkFiles.countObjectsAtPath(substitutedDir)) + val counts = directoryContentCountCheck.expectedDirectoryContentsCounts map { case (directory, count) => + val substitutedDir = directory.replaceAll("<>", workflowId) + (substitutedDir, count, directoryContentCountCheck.checkFiles.countObjectsAtPath(substitutedDir)) } val badCounts = counts collect { - case (directory, expectedCount, actualCount) if expectedCount != actualCount => s"Expected to find $expectedCount item(s) at $directory but got $actualCount" + case (directory, expectedCount, actualCount) if expectedCount != actualCount => + s"Expected to find $expectedCount item(s) at $directory but got $actualCount" } if (badCounts.isEmpty) { IO.unit @@ -921,21 +1001,22 @@ object Operations extends StrictLogging { def validateNoCacheHits(submittedWorkflow: SubmittedWorkflow, metadata: WorkflowMetadata, - workflowDefinition: Workflow): Test[Unit] = { + workflowDefinition: Workflow + ): Test[Unit] = validateCacheResultField(workflowDefinition, submittedWorkflow, metadata, "Cache Hit") - } def validateNoCacheMisses(submittedWorkflow: SubmittedWorkflow, metadata: WorkflowMetadata, - workflowDefinition: Workflow): Test[Unit] = { + workflowDefinition: Workflow + ): Test[Unit] = validateCacheResultField(workflowDefinition, submittedWorkflow, metadata, "Cache Miss") - } def validateSubmitFailure(workflow: Workflow, expectedSubmitResponse: SubmitHttpResponse, - actualSubmitResponse: SubmitHttpResponse): Test[Unit] = { + actualSubmitResponse: SubmitHttpResponse + ): Test[Unit] = new Test[Unit] { - override def run: IO[Unit] = { + override def run: IO[Unit] = if (expectedSubmitResponse == actualSubmitResponse) { IO.unit } else { @@ -949,7 +1030,5 @@ object Operations extends StrictLogging { |""".stripMargin IO.raiseError(CentaurTestException(message, workflow)) } - } } - } } diff --git a/centaur/src/main/scala/centaur/test/TestOptions.scala b/centaur/src/main/scala/centaur/test/TestOptions.scala index 10354c756bb..ceba238a4da 100644 --- a/centaur/src/main/scala/centaur/test/TestOptions.scala +++ b/centaur/src/main/scala/centaur/test/TestOptions.scala @@ -18,15 +18,13 @@ object TestOptions { Apply[ErrorOr].map2(tags, ignore)((t, i) => TestOptions(t, i)) } - def tagsFromConfig(conf: Config): ErrorOr[List[String]] = { + def tagsFromConfig(conf: Config): ErrorOr[List[String]] = conf.get[List[String]]("tags") match { case Success(tagStrings) => Valid(tagStrings.map(_.toLowerCase).distinct) case Failure(_) => Valid(List.empty[String]) } - } - - def ignoreFromConfig(conf: Config): ErrorOr[Boolean] = { + def ignoreFromConfig(conf: Config): ErrorOr[Boolean] = if (conf.hasPath("ignore")) { conf.get[Boolean]("ignore") match { case Success(ignore) => Valid(ignore) @@ -35,6 +33,4 @@ object 
TestOptions { } else { Valid(false) } - } } - diff --git a/centaur/src/main/scala/centaur/test/formulas/TestFormulas.scala b/centaur/src/main/scala/centaur/test/formulas/TestFormulas.scala index 774f852237d..6c006283894 100644 --- a/centaur/src/main/scala/centaur/test/formulas/TestFormulas.scala +++ b/centaur/src/main/scala/centaur/test/formulas/TestFormulas.scala @@ -12,11 +12,21 @@ import centaur.test.workflow.Workflow import centaur.test.{Operations, Test} import centaur.{CentaurConfig, CromwellManager, CromwellTracker, ManagedCromwellServer} import com.typesafe.scalalogging.StrictLogging -import cromwell.api.model.{Aborted, Aborting, Failed, Running, SubmittedWorkflow, Succeeded, TerminalStatus, WorkflowMetadata} +import cromwell.api.model.{ + Aborted, + Aborting, + Failed, + Running, + SubmittedWorkflow, + Succeeded, + TerminalStatus, + WorkflowMetadata +} import scala.concurrent.duration._ import centaur.test.metadata.WorkflowFlatMetadata._ import spray.json.JsString + /** * A collection of test formulas which can be used, building upon operations by chaining them together via a * for comprehension. These assembled formulas can then be run by a client @@ -33,7 +43,7 @@ object TestFormulas extends StrictLogging { |""".stripMargin.trim ) - private def runWorkflowUntilTerminalStatus(workflow: Workflow, status: TerminalStatus): Test[SubmittedWorkflow] = { + private def runWorkflowUntilTerminalStatus(workflow: Workflow, status: TerminalStatus): Test[SubmittedWorkflow] = for { _ <- checkVersion() s <- submitWorkflow(workflow) @@ -41,14 +51,15 @@ object TestFormulas extends StrictLogging { workflow = s, testDefinition = workflow, expectedStatuses = Set(Running, status), - timeout = CentaurConfig.workflowProgressTimeout, + timeout = CentaurConfig.workflowProgressTimeout ) _ <- pollUntilStatus(s, workflow, status) } yield s - } - private def runSuccessfulWorkflow(workflow: Workflow): Test[SubmittedWorkflow] = runWorkflowUntilTerminalStatus(workflow, Succeeded) - private def runFailingWorkflow(workflow: Workflow): Test[SubmittedWorkflow] = runWorkflowUntilTerminalStatus(workflow, Failed) + private def runSuccessfulWorkflow(workflow: Workflow): Test[SubmittedWorkflow] = + runWorkflowUntilTerminalStatus(workflow, Succeeded) + private def runFailingWorkflow(workflow: Workflow): Test[SubmittedWorkflow] = + runWorkflowUntilTerminalStatus(workflow, Failed) def runSuccessfulWorkflowAndVerifyTimeAndOutputs(workflowDefinition: Workflow): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, validityExpectation = Option(true)) @@ -56,18 +67,28 @@ object TestFormulas extends StrictLogging { beforeTimestamp = OffsetDateTime.now().toInstant.getEpochSecond submittedWorkflow <- runSuccessfulWorkflow(workflowDefinition) afterTimestamp = OffsetDateTime.now().toInstant.getEpochSecond - _ <- fetchAndValidateOutputs(submittedWorkflow, workflowDefinition, "ROOT NOT SUPPORTED IN TIMING/OUTPUT ONLY TESTS") + _ <- fetchAndValidateOutputs(submittedWorkflow, + workflowDefinition, + "ROOT NOT SUPPORTED IN TIMING/OUTPUT ONLY TESTS" + ) _ <- checkFastEnough(beforeTimestamp, afterTimestamp, timeAllowance) } yield SubmitResponse(submittedWorkflow) - def runSuccessfulWorkflowAndVerifyMetadata(workflowDefinition: Workflow)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { + def runSuccessfulWorkflowAndVerifyMetadata( + workflowDefinition: Workflow + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, 
validityExpectation = Option(true)) _ <- timingVerificationNotSupported(workflowDefinition.maximumAllowedTime) submittedWorkflow <- runSuccessfulWorkflow(workflowDefinition) metadata <- fetchAndValidateNonSubworkflowMetadata(submittedWorkflow, workflowDefinition) - _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = Option(metadata.value)) + _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = Option(metadata.value) + ) notArchivedFlatMetadata = metadata.asFlat - workflowRoot = notArchivedFlatMetadata.value.get("workflowRoot").collectFirst { case JsString(r) => r } getOrElse "No Workflow Root" + workflowRoot = notArchivedFlatMetadata.value.get("workflowRoot").collectFirst { case JsString(r) => + r + } getOrElse "No Workflow Root" _ <- fetchAndValidateOutputs(submittedWorkflow, workflowDefinition, workflowRoot) _ <- fetchAndValidateLabels(submittedWorkflow, workflowDefinition, workflowRoot) _ <- validateLogs(metadata, submittedWorkflow, workflowDefinition) @@ -75,17 +96,24 @@ object TestFormulas extends StrictLogging { _ <- validateDirectoryContentsCounts(workflowDefinition, submittedWorkflow, metadata) } yield SubmitResponse(submittedWorkflow) - def runFailingWorkflowAndVerifyMetadata(workflowDefinition: Workflow)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { + def runFailingWorkflowAndVerifyMetadata( + workflowDefinition: Workflow + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, validityExpectation = None) _ <- timingVerificationNotSupported(workflowDefinition.maximumAllowedTime) submittedWorkflow <- runFailingWorkflow(workflowDefinition) metadata <- fetchAndValidateNonSubworkflowMetadata(submittedWorkflow, workflowDefinition) - _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = Option(metadata.value)) + _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = Option(metadata.value) + ) _ = cromwellTracker.track(metadata) _ <- validateDirectoryContentsCounts(workflowDefinition, submittedWorkflow, metadata) } yield SubmitResponse(submittedWorkflow) - def runWorkflowTwiceExpectingCaching(workflowDefinition: Workflow)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + def runWorkflowTwiceExpectingCaching( + workflowDefinition: Workflow + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, validityExpectation = Option(true)) _ <- timingVerificationNotSupported(workflowDefinition.maximumAllowedTime) @@ -93,14 +121,18 @@ object TestFormulas extends StrictLogging { secondWf <- runSuccessfulWorkflow(workflowDefinition.secondRun) _ <- printHashDifferential(firstWF, secondWf) metadata <- fetchAndValidateNonSubworkflowMetadata(secondWf, workflowDefinition, Option(firstWF.id.id)) - _ <- fetchAndValidateJobManagerStyleMetadata(secondWf, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = None) + _ <- fetchAndValidateJobManagerStyleMetadata(secondWf, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = None + ) _ = cromwellTracker.track(metadata) _ <- validateNoCacheMisses(secondWf, metadata, workflowDefinition) _ <- validateDirectoryContentsCounts(workflowDefinition, 
secondWf, metadata) } yield SubmitResponse(secondWf) - } - def runWorkflowThriceExpectingCaching(workflowDefinition: Workflow)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + def runWorkflowThriceExpectingCaching( + workflowDefinition: Workflow + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, validityExpectation = Option(true)) _ <- timingVerificationNotSupported(workflowDefinition.maximumAllowedTime) @@ -115,41 +147,48 @@ object TestFormulas extends StrictLogging { _ <- validateNoCacheMisses(thirdWf, metadataThree, workflowDefinition) _ <- validateDirectoryContentsCounts(workflowDefinition, thirdWf, metadataThree) } yield SubmitResponse(thirdWf) - } - def runWorkflowTwiceExpectingNoCaching(workflowDefinition: Workflow)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + def runWorkflowTwiceExpectingNoCaching( + workflowDefinition: Workflow + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, validityExpectation = Option(true)) _ <- timingVerificationNotSupported(workflowDefinition.maximumAllowedTime) _ <- runSuccessfulWorkflow(workflowDefinition) // Build caches testWf <- runSuccessfulWorkflow(workflowDefinition.secondRun) metadata <- fetchAndValidateNonSubworkflowMetadata(testWf, workflowDefinition) - _ <- fetchAndValidateJobManagerStyleMetadata(testWf, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = None) + _ <- fetchAndValidateJobManagerStyleMetadata(testWf, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = None + ) _ = cromwellTracker.track(metadata) _ <- validateNoCacheHits(testWf, metadata, workflowDefinition) _ <- validateDirectoryContentsCounts(workflowDefinition, testWf, metadata) } yield SubmitResponse(testWf) - } - def runFailingWorkflowTwiceExpectingNoCaching(workflowDefinition: Workflow)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + def runFailingWorkflowTwiceExpectingNoCaching( + workflowDefinition: Workflow + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, validityExpectation = None) _ <- timingVerificationNotSupported(workflowDefinition.maximumAllowedTime) _ <- runFailingWorkflow(workflowDefinition) // Build caches testWf <- runFailingWorkflow(workflowDefinition) metadata <- fetchAndValidateNonSubworkflowMetadata(testWf, workflowDefinition) - _ <- fetchAndValidateJobManagerStyleMetadata(testWf, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = None) + _ <- fetchAndValidateJobManagerStyleMetadata(testWf, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = None + ) _ = cromwellTracker.track(metadata) _ <- validateNoCacheHits(testWf, metadata, workflowDefinition) _ <- validateDirectoryContentsCounts(workflowDefinition, testWf, metadata) } yield SubmitResponse(testWf) - } private def cromwellRestart(workflowDefinition: Workflow, callMarker: CallMarker, testRecover: Boolean, - finalStatus: TerminalStatus)( - implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + finalStatus: TerminalStatus + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = CentaurConfig.runMode match { case ManagedCromwellServer(_, postRestart, withRestart) if withRestart => for { @@ -163,27 +202,31 @@ object TestFormulas extends StrictLogging { workflow = submittedWorkflow, 
testDefinition = workflowDefinition, expectedStatuses = Set(Running, finalStatus), - timeout = CentaurConfig.workflowProgressTimeout, + timeout = CentaurConfig.workflowProgressTimeout ) _ <- pollUntilStatus(submittedWorkflow, workflowDefinition, finalStatus) metadata <- fetchAndValidateNonSubworkflowMetadata(submittedWorkflow, workflowDefinition) - _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = None) + _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = None + ) _ = cromwellTracker.track(metadata) - _ <- if (testRecover) { - validateRecovered(workflowDefinition, submittedWorkflow, metadata, callMarker.callKey, jobId) - } - else { - Test.successful(()) - } + _ <- + if (testRecover) { + validateRecovered(workflowDefinition, submittedWorkflow, metadata, callMarker.callKey, jobId) + } else { + Test.successful(()) + } _ <- validateDirectoryContentsCounts(workflowDefinition, submittedWorkflow, metadata) } yield SubmitResponse(submittedWorkflow) case _ if finalStatus == Succeeded => runSuccessfulWorkflowAndVerifyMetadata(workflowDefinition) case _ if finalStatus == Failed => runFailingWorkflowAndVerifyMetadata(workflowDefinition) case _ => Test.invalidTestDefinition("This test can only run successful or failed workflow", workflowDefinition) } - } - def instantAbort(workflowDefinition: Workflow)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { + def instantAbort( + workflowDefinition: Workflow + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = for { _ <- checkDescription(workflowDefinition, validityExpectation = Option(true)) _ <- timingVerificationNotSupported(workflowDefinition.maximumAllowedTime) submittedWorkflow <- submitWorkflow(workflowDefinition) @@ -192,16 +235,21 @@ object TestFormulas extends StrictLogging { workflow = submittedWorkflow, testDefinition = workflowDefinition, expectedStatuses = Set(Running, Aborting, Aborted), - timeout = CentaurConfig.workflowProgressTimeout, + timeout = CentaurConfig.workflowProgressTimeout ) _ <- pollUntilStatus(submittedWorkflow, workflowDefinition, Aborted) metadata <- fetchAndValidateNonSubworkflowMetadata(submittedWorkflow, workflowDefinition) - _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = None) + _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = None + ) _ = cromwellTracker.track(metadata) _ <- validateDirectoryContentsCounts(workflowDefinition, submittedWorkflow, metadata) } yield SubmitResponse(submittedWorkflow) - def scheduledAbort(workflowDefinition: Workflow, callMarker: CallMarker, restart: Boolean)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + def scheduledAbort(workflowDefinition: Workflow, callMarker: CallMarker, restart: Boolean)(implicit + cromwellTracker: Option[CromwellTracker] + ): Test[SubmitResponse] = { def withRestart(): Unit = CentaurConfig.runMode match { case ManagedCromwellServer(_, postRestart, withRestart) if withRestart => CromwellManager.stopCromwell(s"Scheduled restart from ${workflowDefinition.testName}") @@ -217,42 +265,45 @@ object TestFormulas extends StrictLogging { // The Cromwell call status could be running but the backend job might not have started yet, give it some time _ <- waitFor(30.seconds) _ <- 
abortWorkflow(submittedWorkflow) - _ = if(restart) withRestart() + _ = if (restart) withRestart() _ <- expectSomeProgress( workflow = submittedWorkflow, testDefinition = workflowDefinition, expectedStatuses = Set(Running, Aborting, Aborted), - timeout = CentaurConfig.workflowProgressTimeout, + timeout = CentaurConfig.workflowProgressTimeout ) _ <- pollUntilStatus(submittedWorkflow, workflowDefinition, Aborted) _ <- validatePAPIAborted(workflowDefinition, submittedWorkflow, jobId) // Wait a little to make sure that if the abort didn't work and calls start running we see them in the metadata _ <- waitFor(30.seconds) metadata <- fetchAndValidateNonSubworkflowMetadata(submittedWorkflow, workflowDefinition) - _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = None) + _ <- fetchAndValidateJobManagerStyleMetadata(submittedWorkflow, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = None + ) _ = cromwellTracker.track(metadata) _ <- validateDirectoryContentsCounts(workflowDefinition, submittedWorkflow, metadata) } yield SubmitResponse(submittedWorkflow) } def workflowRestart(workflowDefinition: Workflow, - callMarker: CallMarker, - recover: Boolean, - finalStatus: TerminalStatus)( - implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + callMarker: CallMarker, + recover: Boolean, + finalStatus: TerminalStatus + )(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = cromwellRestart(workflowDefinition, callMarker, testRecover = recover, finalStatus = finalStatus) - } - def submitInvalidWorkflow(workflow: Workflow, expectedSubmitResponse: SubmitHttpResponse): Test[SubmitResponse] = { + def submitInvalidWorkflow(workflow: Workflow, expectedSubmitResponse: SubmitHttpResponse): Test[SubmitResponse] = for { _ <- checkDescription(workflow, validityExpectation = None) _ <- timingVerificationNotSupported(workflow.maximumAllowedTime) actualSubmitResponse <- Operations.submitInvalidWorkflow(workflow) _ <- validateSubmitFailure(workflow, expectedSubmitResponse, actualSubmitResponse) } yield actualSubmitResponse - } - def papiUpgrade(workflowDefinition: Workflow, callMarker: CallMarker)(implicit cromwellTracker: Option[CromwellTracker]): Test[SubmitResponse] = { + def papiUpgrade(workflowDefinition: Workflow, callMarker: CallMarker)(implicit + cromwellTracker: Option[CromwellTracker] + ): Test[SubmitResponse] = CentaurConfig.runMode match { case ManagedCromwellServer(_, postRestart, withRestart) if withRestart => for { @@ -266,21 +317,25 @@ object TestFormulas extends StrictLogging { workflow = first, testDefinition = workflowDefinition, expectedStatuses = Set(Running, Succeeded), - timeout = CentaurConfig.workflowProgressTimeout, + timeout = CentaurConfig.workflowProgressTimeout ) _ <- pollUntilStatus(first, workflowDefinition, Succeeded) _ <- checkDescription(workflowDefinition.secondRun, validityExpectation = Option(true)) - second <- runSuccessfulWorkflow(workflowDefinition.secondRun) // Same WDL and config but a "backend" runtime option targeting PAPI v2. + second <- runSuccessfulWorkflow( + workflowDefinition.secondRun + ) // Same WDL and config but a "backend" runtime option targeting PAPI v2. 
_ <- printHashDifferential(first, second) metadata <- fetchAndValidateNonSubworkflowMetadata(second, workflowDefinition, Option(first.id.id)) - _ <- fetchAndValidateJobManagerStyleMetadata(second, workflowDefinition, prefetchedOriginalNonSubWorkflowMetadata = None) + _ <- fetchAndValidateJobManagerStyleMetadata(second, + workflowDefinition, + prefetchedOriginalNonSubWorkflowMetadata = None + ) _ = cromwellTracker.track(metadata) _ <- validateNoCacheMisses(second, metadata, workflowDefinition) _ <- validateDirectoryContentsCounts(workflowDefinition, second, metadata) } yield SubmitResponse(second) case _ => Test.invalidTestDefinition("Configuration not supported by PapiUpgradeTest", workflowDefinition) } - } implicit class EnhancedCromwellTracker(val tracker: Option[CromwellTracker]) extends AnyVal { def track(metadata: WorkflowMetadata): Unit = tracker foreach { _.track(metadata) } diff --git a/centaur/src/main/scala/centaur/test/markers/CallMarker.scala b/centaur/src/main/scala/centaur/test/markers/CallMarker.scala index 08eafc09213..56274a07ed4 100644 --- a/centaur/src/main/scala/centaur/test/markers/CallMarker.scala +++ b/centaur/src/main/scala/centaur/test/markers/CallMarker.scala @@ -7,12 +7,11 @@ import configs.Result.{Failure, Success} import configs.syntax._ object CallMarker { - def fromConfig(config: Config): ErrorOr[Option[CallMarker]] = { + def fromConfig(config: Config): ErrorOr[Option[CallMarker]] = config.get[Option[String]]("callMark") match { case Success(marker) => (marker map CallMarker.apply).validNel case Failure(f) => s"Invalid restart marker $f".invalidNel } - } } /** diff --git a/centaur/src/main/scala/centaur/test/metadata/CallAttemptFailure.scala b/centaur/src/main/scala/centaur/test/metadata/CallAttemptFailure.scala index 098328d0d41..f79aaca6826 100644 --- a/centaur/src/main/scala/centaur/test/metadata/CallAttemptFailure.scala +++ b/centaur/src/main/scala/centaur/test/metadata/CallAttemptFailure.scala @@ -12,8 +12,7 @@ import io.circe.parser._ * * https://github.com/DataBiosphere/job-manager/blob/f83e4284e2419389b7e515720c9d960d2eb81a29/servers/cromwell/jobs/controllers/jobs_controller.py#L155-L162 */ -case class CallAttemptFailure -( +case class CallAttemptFailure( workflowId: String, callFullyQualifiedName: String, jobIndex: Int, @@ -27,29 +26,25 @@ case class CallAttemptFailure ) object CallAttemptFailure { - def buildFailures(jsonOption: Option[String]): IO[Vector[CallAttemptFailure]] = { + def buildFailures(jsonOption: Option[String]): IO[Vector[CallAttemptFailure]] = jsonOption.map(buildFailures).getOrElse(IO.pure(Vector.empty)) - } - def buildFailures(json: String): IO[Vector[CallAttemptFailure]] = { + def buildFailures(json: String): IO[Vector[CallAttemptFailure]] = IO.fromEither(decode[Vector[CallAttemptFailure]](json)) - } - private implicit val decodeFailures: Decoder[Vector[CallAttemptFailure]] = { + implicit private val decodeFailures: Decoder[Vector[CallAttemptFailure]] = Decoder.instance { c => for { workflowId <- c.get[String]("id") calls <- c.get[Map[String, Json]]("calls").map(_.toVector) - callAttemptFailures <- calls.flatTraverse[Decoder.Result, CallAttemptFailure] { - case (callName, callJson) => - val decoderCallAttempt = decodeFromCallAttempt(workflowId, callName) - callJson.as[Vector[Option[CallAttemptFailure]]](Decoder.decodeVector(decoderCallAttempt)).map(_.flatten) + callAttemptFailures <- calls.flatTraverse[Decoder.Result, CallAttemptFailure] { case (callName, callJson) => + val decoderCallAttempt = decodeFromCallAttempt(workflowId, 
callName) + callJson.as[Vector[Option[CallAttemptFailure]]](Decoder.decodeVector(decoderCallAttempt)).map(_.flatten) } } yield callAttemptFailures } or Decoder.const(Vector.empty) - } - private def decodeFromCallAttempt(workflowId: String, callName: String): Decoder[Option[CallAttemptFailure]] = { + private def decodeFromCallAttempt(workflowId: String, callName: String): Decoder[Option[CallAttemptFailure]] = Decoder.instance { c => for { shardIndexOption <- c.get[Option[Int]]("shardIndex") @@ -77,5 +72,4 @@ object CallAttemptFailure { } } yield callAttemptFailureOption } or Decoder.const(None) - } } diff --git a/centaur/src/main/scala/centaur/test/metadata/WorkflowFlatMetadata.scala b/centaur/src/main/scala/centaur/test/metadata/WorkflowFlatMetadata.scala index 182d1fad58f..5054105c154 100644 --- a/centaur/src/main/scala/centaur/test/metadata/WorkflowFlatMetadata.scala +++ b/centaur/src/main/scala/centaur/test/metadata/WorkflowFlatMetadata.scala @@ -17,7 +17,6 @@ import spray.json._ import scala.language.postfixOps import scala.util.{Failure, Success, Try} - /** * Workflow metadata that has been flattened for Centaur test purposes. The keys are similar to the simpleton-syntax * stored in the Cromwell database, and values are primitive types, not nested JSON objects or arrays. @@ -27,15 +26,23 @@ case class WorkflowFlatMetadata(value: Map[String, JsValue]) extends AnyVal { def diff(actual: WorkflowFlatMetadata, workflowID: UUID, cacheHitUUID: Option[UUID] = None): Iterable[String] = { // If the test fails in initialization there wouldn't be workflow root metadata, and if that's the expectation // then that's ok. - val workflowRoot = actual.value.get("workflowRoot").collectFirst { case JsString(r) => r } getOrElse "No Workflow Root" + val workflowRoot = + actual.value.get("workflowRoot").collectFirst { case JsString(r) => r } getOrElse "No Workflow Root" val missingErrors = value.keySet.diff(actual.value.keySet) map { k => s"Missing key: $k" } - val mismatchErrors = value.keySet.intersect(actual.value.keySet) flatMap { k => diffValues(k, value(k), actual.value(k), - workflowID, workflowRoot, cacheHitUUID)} + val mismatchErrors = value.keySet.intersect(actual.value.keySet) flatMap { k => + diffValues(k, value(k), actual.value(k), workflowID, workflowRoot, cacheHitUUID) + } mismatchErrors ++ missingErrors } - private def diffValues(key: String, expected: JsValue, actual: JsValue, workflowID: UUID, workflowRoot: String, cacheHitUUID: Option[UUID]): Option[String] = { + private def diffValues(key: String, + expected: JsValue, + actual: JsValue, + workflowID: UUID, + workflowRoot: String, + cacheHitUUID: Option[UUID] + ): Option[String] = { /* FIXME/TODO: @@ -59,8 +66,10 @@ case class WorkflowFlatMetadata(value: Map[String, JsValue]) extends AnyVal { val stripped = stripQuotes(cacheSubstitutions).stripPrefix("~~") (!stripQuotes(o.toString).contains(stripped)).option(s"Actual value ${o.toString()} does not contain $stripped") case o: JsString => (cacheSubstitutions != o.toString).option(s"expected: $cacheSubstitutions but got: $actual") - case o: JsNumber => (expected != JsString(o.value.toString)).option(s"expected: $cacheSubstitutions but got: $actual") - case o: JsBoolean => (expected != JsString(o.value.toString)).option(s"expected: $cacheSubstitutions but got: $actual") + case o: JsNumber => + (expected != JsString(o.value.toString)).option(s"expected: $cacheSubstitutions but got: $actual") + case o: JsBoolean => + (expected != JsString(o.value.toString)).option(s"expected: 
$cacheSubstitutions but got: $actual") case o: JsArray if stripQuotes(cacheSubstitutions).startsWith("~>") => val stripped = stripQuotes(cacheSubstitutions).stripPrefix("~>") val replaced = stripped.replaceAll("\\\\\"", "\"") @@ -76,12 +85,11 @@ case class WorkflowFlatMetadata(value: Map[String, JsValue]) extends AnyVal { object WorkflowFlatMetadata { - def fromConfig(config: Config): ErrorOr[WorkflowFlatMetadata] = { + def fromConfig(config: Config): ErrorOr[WorkflowFlatMetadata] = config.extract[Map[String, Option[String]]] match { case Result.Success(m) => Valid(WorkflowFlatMetadata(m safeMapValues { _.map(JsString.apply).getOrElse(JsNull) })) case Result.Failure(_) => invalidNel(s"Metadata block can not be converted to a Map: $config") } - } def fromWorkflowMetadata(workflowMetadata: WorkflowMetadata): ErrorOr[WorkflowFlatMetadata] = { val jsValue: ErrorOr[JsValue] = Try(workflowMetadata.value.parseJson) match { @@ -96,9 +104,8 @@ object WorkflowFlatMetadata { } implicit class EnhancedWorkflowMetadata(val workflowMetadata: WorkflowMetadata) { - def asFlat: WorkflowFlatMetadata = { + def asFlat: WorkflowFlatMetadata = WorkflowFlatMetadata.fromWorkflowMetadata(workflowMetadata).unsafe - } } implicit class EnhancedWorkflowFlatMetadata(val workflowFlatMetadata: WorkflowFlatMetadata) { @@ -113,25 +120,22 @@ object WorkflowFlatMetadata { } implicit class EnhancedExpectation(val expectation: String) extends AnyVal { - def replaceExpectationVariables(workflowId: WorkflowId, workflowRoot: String): String = { + def replaceExpectationVariables(workflowId: WorkflowId, workflowRoot: String): String = expectation.replaceAll("<>", workflowId.toString).replaceAll("<>", workflowRoot) - } } } object WorkflowFlatOutputs { implicit class EnhancedWorkflowOutputs(val workflowOutputs: WorkflowOutputs) extends AnyVal { - def asFlat: WorkflowFlatMetadata = { + def asFlat: WorkflowFlatMetadata = workflowOutputs.outputs.asMap map WorkflowFlatMetadata.apply unsafe - } } } object WorkflowFlatLabels { implicit class EnhancedWorkflowLabels(val workflowLabels: WorkflowLabels) extends AnyVal { - def asFlat: WorkflowFlatMetadata = { + def asFlat: WorkflowFlatMetadata = workflowLabels.labels.asMap map WorkflowFlatMetadata.apply unsafe - } } } @@ -140,11 +144,10 @@ object JsValueEnhancer { import DefaultJsonProtocol._ import centaur.json.JsonUtils._ - def asMap: ErrorOr[Map[String, JsValue]] = { + def asMap: ErrorOr[Map[String, JsValue]] = Try(jsValue.asJsObject.flatten().convertTo[Map[String, JsValue]]) match { case Success(m) => Valid(m) case Failure(e) => invalidNel(s"Unable to convert JsValue to JsObject: ${e.getMessage}") } - } } } diff --git a/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala b/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala index 258c211c900..f4ed8a57589 100644 --- a/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala +++ b/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala @@ -19,8 +19,8 @@ case class CentaurTestCase(workflow: Workflow, testFormat: CentaurTestFormat, testOptions: TestOptions, submittedWorkflowTracker: SubmittedWorkflowTracker, - submitResponseOption: Option[SubmitHttpResponse])( - implicit cromwellTracker: Option[CromwellTracker]) { + submitResponseOption: Option[SubmitHttpResponse] +)(implicit cromwellTracker: Option[CromwellTracker]) { def testFunction: Test[SubmitResponse] = this.testFormat match { case WorkflowSuccessTest => TestFormulas.runSuccessfulWorkflowAndVerifyMetadata(workflow) @@ -32,10 +32,14 @@ case 
class CentaurTestCase(workflow: Workflow, case RunFailingTwiceExpectingNoCallCachingTest => TestFormulas.runFailingWorkflowTwiceExpectingNoCaching(workflow) case SubmitFailureTest => TestFormulas.submitInvalidWorkflow(workflow, submitResponseOption.get) case InstantAbort => TestFormulas.instantAbort(workflow) - case CromwellRestartWithRecover(callMarker)=> TestFormulas.workflowRestart(workflow, callMarker, recover = true, finalStatus = Succeeded) - case WorkflowFailureRestartWithRecover(callMarker)=> TestFormulas.workflowRestart(workflow, callMarker, recover = true, finalStatus = Failed) - case WorkflowFailureRestartWithoutRecover(callMarker)=> TestFormulas.workflowRestart(workflow, callMarker, recover = false, finalStatus = Failed) - case CromwellRestartWithoutRecover(callMarker) => TestFormulas.workflowRestart(workflow, callMarker, recover = false, finalStatus = Succeeded) + case CromwellRestartWithRecover(callMarker) => + TestFormulas.workflowRestart(workflow, callMarker, recover = true, finalStatus = Succeeded) + case WorkflowFailureRestartWithRecover(callMarker) => + TestFormulas.workflowRestart(workflow, callMarker, recover = true, finalStatus = Failed) + case WorkflowFailureRestartWithoutRecover(callMarker) => + TestFormulas.workflowRestart(workflow, callMarker, recover = false, finalStatus = Failed) + case CromwellRestartWithoutRecover(callMarker) => + TestFormulas.workflowRestart(workflow, callMarker, recover = false, finalStatus = Succeeded) case ScheduledAbort(callMarker) => TestFormulas.scheduledAbort(workflow, callMarker, restart = false) case ScheduledAbortWithRestart(callMarker) => TestFormulas.scheduledAbort(workflow, callMarker, restart = true) case PapiUpgradeTest(callMarker) => TestFormulas.papiUpgrade(workflow, callMarker) @@ -58,13 +62,14 @@ case class CentaurTestCase(workflow: Workflow, } object CentaurTestCase { - def fromFile(cromwellTracker: Option[CromwellTracker])(file: File): ErrorOr[CentaurTestCase] = { + def fromFile(cromwellTracker: Option[CromwellTracker])(file: File): ErrorOr[CentaurTestCase] = Try(ConfigFactory.parseFile(file.toJava).resolve()) match { case Success(c) => - CentaurTestCase.fromConfig(c, file.parent, cromwellTracker) flatMap validateTestCase leftMap { s"Error in test file '$file'." :: _ } + CentaurTestCase.fromConfig(c, file.parent, cromwellTracker) flatMap validateTestCase leftMap { + s"Error in test file '$file'." 
:: _ + } case Failure(f) => invalidNel(s"Invalid test config: $file (${f.getMessage})") } - } def fromConfig(conf: Config, configFile: File, cromwellTracker: Option[CromwellTracker]): ErrorOr[CentaurTestCase] = { val submittedWorkflowTracker = new SubmittedWorkflowTracker() @@ -77,18 +82,18 @@ object CentaurTestCase { } } - private def validateTestCase(testCase: CentaurTestCase): ErrorOr[CentaurTestCase] = { + private def validateTestCase(testCase: CentaurTestCase): ErrorOr[CentaurTestCase] = testCase.testFormat match { - case SubmitFailureTest => validateSubmitFailure(testCase.workflow, testCase.submitResponseOption).map(_ => testCase) + case SubmitFailureTest => + validateSubmitFailure(testCase.workflow, testCase.submitResponseOption).map(_ => testCase) case _ => Valid(testCase) } - } private def validateSubmitFailure(workflow: Workflow, - submitResponseOption: Option[SubmitHttpResponse]): ErrorOr[SubmitResponse] = { + submitResponseOption: Option[SubmitHttpResponse] + ): ErrorOr[SubmitResponse] = submitResponseOption match { case None => invalidNel("No submit stanza included in test config") case Some(response) => Valid(response) } - } } diff --git a/centaur/src/main/scala/centaur/test/standard/CentaurTestFormat.scala b/centaur/src/main/scala/centaur/test/standard/CentaurTestFormat.scala index 1cb2345f25c..eae961d6e5f 100644 --- a/centaur/src/main/scala/centaur/test/standard/CentaurTestFormat.scala +++ b/centaur/src/main/scala/centaur/test/standard/CentaurTestFormat.scala @@ -9,7 +9,7 @@ import configs.syntax._ sealed abstract class CentaurTestFormat(val name: String) { val lowerCaseName = name.toLowerCase - + def testSpecString: String = this match { case WorkflowSuccessTest => "successfully run" case WorkflowSuccessAndTimedOutputsTest => "successfully run" @@ -20,12 +20,14 @@ sealed abstract class CentaurTestFormat(val name: String) { case RunFailingTwiceExpectingNoCallCachingTest => "Fail the first run and NOT call cache the second run of" case SubmitFailureTest => "fail to submit" case InstantAbort => "abort a workflow immediately after submission" - case _: PapiUpgradeTest => "make sure a PAPI upgrade preserves call caching when the `name-for-call-caching-purposes` attribute is used" + case _: PapiUpgradeTest => + "make sure a PAPI upgrade preserves call caching when the `name-for-call-caching-purposes` attribute is used" case _: CromwellRestartWithRecover => "survive a Cromwell restart and recover jobs" case _: CromwellRestartWithoutRecover => "survive a Cromwell restart" case _: ScheduledAbort => "abort a workflow mid run" case _: ScheduledAbortWithRestart => "abort a workflow mid run and restart immediately" - case _: WorkflowFailureRestartWithRecover => "survive a Cromwell restart when a workflow was failing and recover jobs" + case _: WorkflowFailureRestartWithRecover => + "survive a Cromwell restart when a workflow was failing and recover jobs" case _: WorkflowFailureRestartWithoutRecover => "survive a Cromwell restart when a workflow was failing" case other => s"unrecognized format $other" } @@ -39,71 +41,89 @@ object CentaurTestFormat { sealed trait SequentialTestFormat extends CentaurTestFormat { override def isParallel: Boolean = false } - + sealed trait RestartFormat extends SequentialTestFormat - sealed trait WithCallMarker { this: CentaurTestFormat => val build: CallMarker => CentaurTestFormat } - + sealed trait WithCallMarker { this: CentaurTestFormat => + val build: CallMarker => CentaurTestFormat + } + case object WorkflowSuccessTest extends 
CentaurTestFormat("WorkflowSuccess") case object WorkflowSuccessAndTimedOutputsTest extends CentaurTestFormat("WorkflowSuccessAndTimedOutputs") case object WorkflowFailureTest extends CentaurTestFormat("WorkflowFailure") case object RunTwiceExpectingCallCachingTest extends CentaurTestFormat("RunTwiceExpectingCallCaching") case object RunThriceExpectingCallCachingTest extends CentaurTestFormat(name = "RunThriceExpectingCallCaching") case object RunTwiceExpectingNoCallCachingTest extends CentaurTestFormat("RunTwiceExpectingNoCallCaching") - case object RunFailingTwiceExpectingNoCallCachingTest extends CentaurTestFormat("RunFailingTwiceExpectingNoCallCaching") + case object RunFailingTwiceExpectingNoCallCachingTest + extends CentaurTestFormat("RunFailingTwiceExpectingNoCallCaching") case object SubmitFailureTest extends CentaurTestFormat("SubmitFailure") case object InstantAbort extends CentaurTestFormat("InstantAbort") with SequentialTestFormat object CromwellRestartWithRecover extends CentaurTestFormat("CromwellRestartWithRecover") with WithCallMarker { val build = CromwellRestartWithRecover.apply _ } - case class CromwellRestartWithRecover(callMarker: CallMarker) extends CentaurTestFormat(CromwellRestartWithRecover.name) with RestartFormat - + case class CromwellRestartWithRecover(callMarker: CallMarker) + extends CentaurTestFormat(CromwellRestartWithRecover.name) + with RestartFormat + object CromwellRestartWithoutRecover extends CentaurTestFormat("CromwellRestartWithoutRecover") with WithCallMarker { val build = CromwellRestartWithoutRecover.apply _ } - case class CromwellRestartWithoutRecover(callMarker: CallMarker) extends CentaurTestFormat(CromwellRestartWithoutRecover.name) with RestartFormat + case class CromwellRestartWithoutRecover(callMarker: CallMarker) + extends CentaurTestFormat(CromwellRestartWithoutRecover.name) + with RestartFormat object ScheduledAbort extends CentaurTestFormat("ScheduledAbort") with WithCallMarker { val build = ScheduledAbort.apply _ } - case class ScheduledAbort(callMarker: CallMarker) extends CentaurTestFormat(ScheduledAbort.name) with SequentialTestFormat + case class ScheduledAbort(callMarker: CallMarker) + extends CentaurTestFormat(ScheduledAbort.name) + with SequentialTestFormat object ScheduledAbortWithRestart extends CentaurTestFormat("ScheduledAbortWithRestart") with WithCallMarker { val build = ScheduledAbortWithRestart.apply _ } - case class ScheduledAbortWithRestart(callMarker: CallMarker) extends CentaurTestFormat(ScheduledAbortWithRestart.name) with RestartFormat + case class ScheduledAbortWithRestart(callMarker: CallMarker) + extends CentaurTestFormat(ScheduledAbortWithRestart.name) + with RestartFormat - object WorkflowFailureRestartWithRecover extends CentaurTestFormat("WorkflowFailureRestartWithRecover") with WithCallMarker { + object WorkflowFailureRestartWithRecover + extends CentaurTestFormat("WorkflowFailureRestartWithRecover") + with WithCallMarker { val build = WorkflowFailureRestartWithRecover.apply _ } - case class WorkflowFailureRestartWithRecover(callMarker: CallMarker) extends CentaurTestFormat(WorkflowFailureRestartWithRecover.name) with RestartFormat + case class WorkflowFailureRestartWithRecover(callMarker: CallMarker) + extends CentaurTestFormat(WorkflowFailureRestartWithRecover.name) + with RestartFormat - object WorkflowFailureRestartWithoutRecover extends CentaurTestFormat("WorkflowFailureRestartWithoutRecover") with WithCallMarker { + object WorkflowFailureRestartWithoutRecover + extends 
CentaurTestFormat("WorkflowFailureRestartWithoutRecover") + with WithCallMarker { val build = WorkflowFailureRestartWithoutRecover.apply _ } - case class WorkflowFailureRestartWithoutRecover(callMarker: CallMarker) extends CentaurTestFormat(WorkflowFailureRestartWithoutRecover.name) with RestartFormat + case class WorkflowFailureRestartWithoutRecover(callMarker: CallMarker) + extends CentaurTestFormat(WorkflowFailureRestartWithoutRecover.name) + with RestartFormat object PapiUpgradeTest extends CentaurTestFormat("PapiUpgrade") with WithCallMarker { val build = PapiUpgradeTest.apply _ } case class PapiUpgradeTest(callMarker: CallMarker) extends CentaurTestFormat(PapiUpgradeTest.name) with RestartFormat - def fromConfig(conf: Config): Checked[CentaurTestFormat] = { - + def fromConfig(conf: Config): Checked[CentaurTestFormat] = CallMarker.fromConfig(conf).toEither flatMap { callMarker => conf.get[String]("testFormat") match { case Success(f) => CentaurTestFormat.fromString(f, callMarker) case Failure(_) => "No testFormat string provided".invalidNelCheck[CentaurTestFormat] } } - } private def fromString(testFormat: String, callMarker: Option[CallMarker]): Checked[CentaurTestFormat] = { def withCallMarker(name: String, constructor: CallMarker => CentaurTestFormat) = callMarker match { case Some(marker) => constructor(marker).validNelCheck - case None => s"$name needs a callMarker to know on which call to trigger the restart".invalidNelCheck[CentaurTestFormat] + case None => + s"$name needs a callMarker to know on which call to trigger the restart".invalidNelCheck[CentaurTestFormat] } - + List( WorkflowSuccessTest, WorkflowSuccessAndTimedOutputsTest, @@ -121,9 +141,10 @@ object CentaurTestFormat { WorkflowFailureRestartWithRecover, WorkflowFailureRestartWithoutRecover, PapiUpgradeTest - ).collectFirst({ - case format: WithCallMarker if format.name.equalsIgnoreCase(testFormat) => withCallMarker(format.name, format.build) + ).collectFirst { + case format: WithCallMarker if format.name.equalsIgnoreCase(testFormat) => + withCallMarker(format.name, format.build) case format if format.name.equalsIgnoreCase(testFormat) => format.validNelCheck - }).getOrElse(s"No such test format: $testFormat".invalidNelCheck[CentaurTestFormat]) + }.getOrElse(s"No such test format: $testFormat".invalidNelCheck[CentaurTestFormat]) } } diff --git a/centaur/src/main/scala/centaur/test/submit/SubmitResponse.scala b/centaur/src/main/scala/centaur/test/submit/SubmitResponse.scala index ed160482bef..faed88cc8cd 100644 --- a/centaur/src/main/scala/centaur/test/submit/SubmitResponse.scala +++ b/centaur/src/main/scala/centaur/test/submit/SubmitResponse.scala @@ -14,13 +14,11 @@ import cromwell.api.model.SubmittedWorkflow sealed trait SubmitResponse object SubmitResponse { - def apply(submittedWorkflow: SubmittedWorkflow): SubmitResponse = { + def apply(submittedWorkflow: SubmittedWorkflow): SubmitResponse = SubmitWorkflowResponse(submittedWorkflow) - } - def apply(statusCode: Int, message: String): SubmitResponse = { + def apply(statusCode: Int, message: String): SubmitResponse = SubmitHttpResponse(statusCode, message) - } } case class SubmitWorkflowResponse(submittedWorkflow: SubmittedWorkflow) extends SubmitResponse @@ -29,7 +27,7 @@ case class SubmitHttpResponse(statusCode: Int, message: String) extends SubmitRe object SubmitHttpResponse { - def fromConfig(conf: Config): ErrorOr[Option[SubmitHttpResponse]] = { + def fromConfig(conf: Config): ErrorOr[Option[SubmitHttpResponse]] = conf.get[Config]("submit") match { case 
Result.Failure(_) => Valid(None) case Result.Success(submitConf) => @@ -41,17 +39,13 @@ object SubmitHttpResponse { Option(_) } } - } - private def toErrorOr[A](result: Result[A]): ErrorOr[A] = { + private def toErrorOr[A](result: Result[A]): ErrorOr[A] = result match { case Result.Success(value) => Valid(value) case Result.Failure(error) => - error.messages - .toList - .toNel + error.messages.toList.toNel .getOrElse(throw new RuntimeException("Paranoia... error.messages is a Nel exposed as a Seq.")) .invalid } - } } diff --git a/centaur/src/main/scala/centaur/test/workflow/DirectoryContentCountCheck.scala b/centaur/src/main/scala/centaur/test/workflow/DirectoryContentCountCheck.scala index 2bf90619dab..4d064455f8c 100644 --- a/centaur/src/main/scala/centaur/test/workflow/DirectoryContentCountCheck.scala +++ b/centaur/src/main/scala/centaur/test/workflow/DirectoryContentCountCheck.scala @@ -2,7 +2,7 @@ package centaur.test.workflow import cats.data.Validated._ import cats.syntax.all._ -import centaur.test.{AWSFilesChecker, FilesChecker, LocalFilesChecker, PipelinesFilesChecker, BlobFilesChecker} +import centaur.test.{AWSFilesChecker, BlobFilesChecker, FilesChecker, LocalFilesChecker, PipelinesFilesChecker} import com.typesafe.config.Config import common.validation.ErrorOr.ErrorOr import configs.Result @@ -16,18 +16,23 @@ object DirectoryContentCountCheck { if (!keepGoing) { valid(None) } else { - val directoryContentCountsValidation: ErrorOr[Map[String, Int]] = conf.get[Map[String, Int]]("outputExpectations") match { - case Result.Success(a) => valid(a) - case Result.Failure(_) => invalidNel(s"Test '$name': Unable to read outputExpectations as a Map[String, Int]") - } + val directoryContentCountsValidation: ErrorOr[Map[String, Int]] = + conf.get[Map[String, Int]]("outputExpectations") match { + case Result.Success(a) => valid(a) + case Result.Failure(_) => invalidNel(s"Test '$name': Unable to read outputExpectations as a Map[String, Int]") + } val fileSystemChecker: ErrorOr[FilesChecker] = conf.get[String]("fileSystemCheck") match { case Result.Success("gcs") => valid(PipelinesFilesChecker) case Result.Success("local") => valid(LocalFilesChecker) case Result.Success("aws") => valid(AWSFilesChecker) case Result.Success("blob") => valid(BlobFilesChecker) - case Result.Success(_) => invalidNel(s"Test '$name': Invalid 'fileSystemCheck' value (must be either 'local', 'gcs', 'blob', or 'aws'") - case Result.Failure(_) => invalidNel(s"Test '$name': Must specify a 'fileSystemCheck' value (must be either 'local', 'gcs', 'blob', or 'aws'") + case Result.Success(_) => + invalidNel(s"Test '$name': Invalid 'fileSystemCheck' value (must be either 'local', 'gcs', 'blob', or 'aws'") + case Result.Failure(_) => + invalidNel( + s"Test '$name': Must specify a 'fileSystemCheck' value (must be either 'local', 'gcs', 'blob', or 'aws'" + ) } (directoryContentCountsValidation, fileSystemChecker) mapN { (d, f) => Option(DirectoryContentCountCheck(d, f)) } diff --git a/centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala b/centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala index dc3f0355f80..718db740dab 100644 --- a/centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala +++ b/centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala @@ -25,7 +25,6 @@ class SubmittedWorkflowTracker { * object require a retry. Prevents unwanted cache hits from partially successful attempts when retrying a call * caching test case. 
*/ - def add(submittedWorkflow: SubmittedWorkflow): Unit = { + def add(submittedWorkflow: SubmittedWorkflow): Unit = submittedWorkflowIds = submittedWorkflow.id :: submittedWorkflowIds - } } diff --git a/centaur/src/main/scala/centaur/test/workflow/Workflow.scala b/centaur/src/main/scala/centaur/test/workflow/Workflow.scala index 743924bb9c6..212064acfe1 100644 --- a/centaur/src/main/scala/centaur/test/workflow/Workflow.scala +++ b/centaur/src/main/scala/centaur/test/workflow/Workflow.scala @@ -15,17 +15,18 @@ import cromwell.api.model.{WorkflowDescribeRequest, WorkflowSingleSubmission} import java.nio.file.Path import scala.concurrent.duration.FiniteDuration -final case class Workflow private(testName: String, - data: WorkflowData, - metadata: Option[WorkflowFlatMetadata], - notInMetadata: List[String], - directoryContentCounts: Option[DirectoryContentCountCheck], - backends: BackendsRequirement, - retryTestFailures: Boolean, - allowOtherOutputs: Boolean, - skipDescribeEndpointValidation: Boolean, - submittedWorkflowTracker: SubmittedWorkflowTracker, - maximumAllowedTime: Option[FiniteDuration]) { +final case class Workflow private (testName: String, + data: WorkflowData, + metadata: Option[WorkflowFlatMetadata], + notInMetadata: List[String], + directoryContentCounts: Option[DirectoryContentCountCheck], + backends: BackendsRequirement, + retryTestFailures: Boolean, + allowOtherOutputs: Boolean, + skipDescribeEndpointValidation: Boolean, + submittedWorkflowTracker: SubmittedWorkflowTracker, + maximumAllowedTime: Option[FiniteDuration] +) { def toWorkflowSubmission: WorkflowSingleSubmission = WorkflowSingleSubmission( workflowSource = data.workflowContent, @@ -36,7 +37,8 @@ final case class Workflow private(testName: String, inputsJson = data.inputs.map(_.unsafeRunSync()), options = data.options.map(_.unsafeRunSync()), labels = Option(data.labels), - zippedImports = data.zippedImports) + zippedImports = data.zippedImports + ) def toWorkflowDescribeRequest: WorkflowDescribeRequest = WorkflowDescribeRequest( workflowSource = data.workflowContent, @@ -46,22 +48,26 @@ final case class Workflow private(testName: String, inputsJson = data.inputs.map(_.unsafeRunSync()) ) - def secondRun: Workflow = { + def secondRun: Workflow = copy(data = data.copy(options = data.secondOptions)) - } - def thirdRun: Workflow = { + def thirdRun: Workflow = copy(data = data.copy(options = data.thirdOptions)) - } } object Workflow { - def fromConfig(conf: Config, configFile: File, submittedWorkflowTracker: SubmittedWorkflowTracker): ErrorOr[Workflow] = { + def fromConfig(conf: Config, + configFile: File, + submittedWorkflowTracker: SubmittedWorkflowTracker + ): ErrorOr[Workflow] = conf.get[String]("name") match { case Result.Success(n) => // If backend is provided, Centaur will only run this test if that backend is available on Cromwell - val backendsRequirement = BackendsRequirement.fromConfig(conf.get[String]("backendsMode").map(_.toLowerCase).valueOrElse("all"), conf.get[List[String]]("backends").valueOrElse(List.empty[String]).map(_.toLowerCase)) + val backendsRequirement = BackendsRequirement.fromConfig( + conf.get[String]("backendsMode").map(_.toLowerCase).valueOrElse("all"), + conf.get[List[String]]("backends").valueOrElse(List.empty[String]).map(_.toLowerCase) + ) // If basePath is provided it'll be used as basis for finding other files, otherwise use the dir the config was in val basePath = conf.get[Option[Path]]("basePath") valueOrElse None map (File(_)) getOrElse configFile val metadata: 
ErrorOr[Option[WorkflowFlatMetadata]] = conf.get[Config]("metadata") match { @@ -73,7 +79,8 @@ object Workflow { case Result.Failure(_) => List.empty } - val directoryContentCheckValidation: ErrorOr[Option[DirectoryContentCountCheck]] = DirectoryContentCountCheck.forConfig(n, conf) + val directoryContentCheckValidation: ErrorOr[Option[DirectoryContentCountCheck]] = + DirectoryContentCountCheck.forConfig(n, conf) val files = conf.get[Config]("files") match { case Result.Success(f) => WorkflowData.fromConfig(filesConfig = f, fullConfig = conf, basePath = basePath) case Result.Failure(_) => invalidNel(s"No 'files' block in $configFile") @@ -89,10 +96,21 @@ object Workflow { val maximumTime: Option[FiniteDuration] = conf.get[Option[FiniteDuration]]("maximumTime").value (files, directoryContentCheckValidation, metadata, retryTestFailuresErrorOr) mapN { - (f, d, m, retryTestFailures) => Workflow(n, f, m, absentMetadata, d, backendsRequirement, retryTestFailures, allowOtherOutputs, validateDescription, submittedWorkflowTracker, maximumTime) + (f, d, m, retryTestFailures) => + Workflow(n, + f, + m, + absentMetadata, + d, + backendsRequirement, + retryTestFailures, + allowOtherOutputs, + validateDescription, + submittedWorkflowTracker, + maximumTime + ) } case Result.Failure(_) => invalidNel(s"No test 'name' for: $configFile") } - } } diff --git a/centaur/src/main/scala/centaur/test/workflow/WorkflowData.scala b/centaur/src/main/scala/centaur/test/workflow/WorkflowData.scala index 72e66b31e18..2096a5f88d7 100644 --- a/centaur/src/main/scala/centaur/test/workflow/WorkflowData.scala +++ b/centaur/src/main/scala/centaur/test/workflow/WorkflowData.scala @@ -34,7 +34,8 @@ case class WorkflowData(workflowContent: Option[String], labels: List[Label], zippedImports: Option[File], secondOptions: Option[IO[String]] = None, - thirdOptions: Option[IO[String]] = None) + thirdOptions: Option[IO[String]] = None +) object WorkflowData { val blockingEC = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(5)) @@ -47,55 +48,64 @@ object WorkflowData { val workflowSourcePath = filesConfig.as[Option[String]]("workflow") (workflowSourcePath, workflowUrl) match { - case (Some(workflowPath), None) => Valid(WorkflowData( - workflowPath = Option(workflowPath), - workflowUrl = None, - filesConfig = filesConfig, - fullConfig = fullConfig, - basePath = basePath)) - case (None, Some(_)) => Valid(WorkflowData( - workflowPath = None, - workflowUrl = workflowUrl, - filesConfig = filesConfig, - fullConfig = fullConfig, - basePath = basePath)) + case (Some(workflowPath), None) => + Valid( + WorkflowData(workflowPath = Option(workflowPath), + workflowUrl = None, + filesConfig = filesConfig, + fullConfig = fullConfig, + basePath = basePath + ) + ) + case (None, Some(_)) => + Valid( + WorkflowData(workflowPath = None, + workflowUrl = workflowUrl, + filesConfig = filesConfig, + fullConfig = fullConfig, + basePath = basePath + ) + ) case (Some(_), Some(_)) => invalidNel(s"Both 'workflow' path or 'workflowUrl' can't be provided.") case (None, None) => invalidNel(s"No 'workflow' path or 'workflowUrl' provided.") } } - def apply(workflowPath: Option[String], workflowUrl: Option[String], filesConfig: Config, fullConfig: Config, basePath: File): WorkflowData = { + def apply(workflowPath: Option[String], + workflowUrl: Option[String], + filesConfig: Config, + fullConfig: Config, + basePath: File + ): WorkflowData = { def slurp(file: String): IO[String] = file match { - case http if http.startsWith("http://") || 
http.startsWith("https://") => + case http if http.startsWith("http://") || http.startsWith("https://") => httpClient.expect[String](http) case gcs if gcs.startsWith("gs://") => val noScheme = gcs.stripPrefix("gs://") val firstSlashPosition = noScheme.indexOf("/") val blob = BlobId.of(noScheme.substring(0, firstSlashPosition), noScheme.substring(firstSlashPosition + 1)) - IO { gcsStorage.readAllBytes(blob).map(_.toChar).mkString } + IO(gcsStorage.readAllBytes(blob).map(_.toChar).mkString) case local => - IO { basePath./(local).contentAsString } + IO(basePath./(local).contentAsString) } - - def getOptionalFileContent(name: String): Option[IO[String]] = { + + def getOptionalFileContent(name: String): Option[IO[String]] = filesConfig.getAs[String](name).map(slurp) - } def getImports = filesConfig.get[List[String]]("imports") match { case Success(paths) => zipImports(paths map basePath./) case Failure(_) => None } - def getImportsDirName(workflowPath: Option[File], workflowUrl: Option[String]): String = { + def getImportsDirName(workflowPath: Option[File], workflowUrl: Option[String]): String = workflowPath match { case Some(file) => file.name.replaceAll("\\.[^.]*$", "") case None => // workflow url is defined val fileName = workflowUrl.get.split("/").last fileName.replaceAll("\\.[^.]*$", "") } - } - def zipImports(imports: List[File]): Option[File] = { + def zipImports(imports: List[File]): Option[File] = imports match { case Nil => None case _ => @@ -109,7 +119,6 @@ object WorkflowData { Option(importsDir.zip()) } - } def getLabels: List[Label] = { import cromwell.api.model.LabelsJsonFormatter._ diff --git a/centaur/src/test/scala/centaur/api/DaemonizedDefaultThreadFactorySpec.scala b/centaur/src/test/scala/centaur/api/DaemonizedDefaultThreadFactorySpec.scala index afd3918d738..7069cee9c40 100644 --- a/centaur/src/test/scala/centaur/api/DaemonizedDefaultThreadFactorySpec.scala +++ b/centaur/src/test/scala/centaur/api/DaemonizedDefaultThreadFactorySpec.scala @@ -9,7 +9,7 @@ class DaemonizedDefaultThreadFactorySpec extends AnyFlatSpec with CromwellTimeou behavior of "DaemonizedDefaultThreadFactory" it should "create a non-blocking execution context" in { - val thread = DaemonizedDefaultThreadFactory.newThread(() => {}) + val thread = DaemonizedDefaultThreadFactory.newThread { () => } thread.getName should startWith("daemonpool-thread-") thread.isDaemon should be(true) } diff --git a/centaur/src/test/scala/centaur/json/JsonUtilsSpec.scala b/centaur/src/test/scala/centaur/json/JsonUtilsSpec.scala index 1f89477d388..91fe7123ed8 100644 --- a/centaur/src/test/scala/centaur/json/JsonUtilsSpec.scala +++ b/centaur/src/test/scala/centaur/json/JsonUtilsSpec.scala @@ -111,7 +111,7 @@ class JsonUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "calls.wf_hello.hello.outputs.salutation" -> "Hello Mr. 
Bean!", "calls.wf_hello.hello.runtimeAttributes.bootDiskSizeGb" -> "10", "calls.wf_hello.hello.runtimeAttributes.continueOnReturnCode" -> "0", - "calls.wf_hello.hello.runtimeAttributes.maxRetries" -> "0", + "calls.wf_hello.hello.runtimeAttributes.maxRetries" -> "0" ).map(x => (x._1, JsString(x._2))) val actualFlattenedMetadata: Map[String, JsValue] = metadata.parseJson.asJsObject.flatten().fields @@ -169,7 +169,7 @@ class JsonUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "calls.wf_hello.task1.executionEvents.0.description" -> "task 1 step 1", "calls.wf_hello.task2.executionEvents.0.description" -> "task 2 step 1", "calls.wf_hello.task1.runtimeAttributes.bootDiskSizeGb" -> "10", - "calls.wf_hello.task2.runtimeAttributes.bootDiskSizeGb" -> "10", + "calls.wf_hello.task2.runtimeAttributes.bootDiskSizeGb" -> "10" ).map(x => (x._1, JsString(x._2))) val actualFlattenedMetadata: Map[String, JsValue] = metadata.parseJson.asJsObject.flatten().fields @@ -249,7 +249,7 @@ class JsonUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "wf_hello.hello.0.2.shardIndex" -> 0, "wf_hello.hello.1.shardIndex" -> 1, "wf_hello.hello.1.1.shardIndex" -> 1, - "wf_hello.hello.1.2.shardIndex" -> 1, + "wf_hello.hello.1.2.shardIndex" -> 1 ).map(x => (x._1, JsNumber(x._2))) ++ Map( "id" -> "5abfaa90-570f-48d4-a35b-81d5ad4ea0f7", "status" -> "Succeeded", @@ -265,7 +265,7 @@ class JsonUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "wf_hello.hello.0.2.runtimeAttributes.memory" -> "1.1 GB", "wf_hello.hello.1.runtimeAttributes.memory" -> "1.1 GB", "wf_hello.hello.1.1.runtimeAttributes.memory" -> "1 GB", - "wf_hello.hello.1.2.runtimeAttributes.memory" -> "1.1 GB", + "wf_hello.hello.1.2.runtimeAttributes.memory" -> "1.1 GB" ).map(x => (x._1, JsString(x._2))) val actualFlattenedMetadata: Map[String, JsValue] = metadata.parseJson.asJsObject.flatten().fields diff --git a/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala b/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala index 9d09827027a..28a74274192 100644 --- a/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala +++ b/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala @@ -15,23 +15,29 @@ import scala.concurrent.duration._ class CentaurOperationsSpec extends AnyFlatSpec with Matchers { behavior of "validateMetadataJson" - val placeholderSubmittedWorkflow: SubmittedWorkflow = SubmittedWorkflow(id = WorkflowId(UUID.randomUUID()), null, null) - val placeholderWorkflow: Workflow = Workflow(testName = "", null, null, null, null, null, false, false, false, null, null) + val placeholderSubmittedWorkflow: SubmittedWorkflow = + SubmittedWorkflow(id = WorkflowId(UUID.randomUUID()), null, null) + val placeholderWorkflow: Workflow = + Workflow(testName = "", null, null, null, null, null, false, false, false, null, null) val allowableOneWordAdditions = List("farmer") def runTest(json1: String, json2: String, expectMatching: Boolean): Unit = { - val validation = Operations.validateMetadataJson("", - json1.parseJson.asJsObject, - json2.parseJson.asJsObject, - placeholderSubmittedWorkflow, - placeholderWorkflow, - allowableAddedOneWordFields = allowableOneWordAdditions).unsafeToFuture() + val validation = Operations + .validateMetadataJson( + "", + json1.parseJson.asJsObject, + json2.parseJson.asJsObject, + placeholderSubmittedWorkflow, + placeholderWorkflow, + allowableAddedOneWordFields = allowableOneWordAdditions + ) + .unsafeToFuture() Await.ready(validation, atMost = 
10.seconds) validation.value.get match { case Success(()) if expectMatching => // great case Success(_) if !expectMatching => fail("Metadata unexpectedly matches") - case Failure(e) if expectMatching => fail("Metadata unexpectedly mismatches", e) + case Failure(e) if expectMatching => fail("Metadata unexpectedly mismatches", e) case Failure(_) if !expectMatching => // great case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } diff --git a/centaur/src/test/scala/centaur/test/metadata/CallAttemptFailureSpec.scala b/centaur/src/test/scala/centaur/test/metadata/CallAttemptFailureSpec.scala index 2476bb4b231..0be16d9a57a 100644 --- a/centaur/src/test/scala/centaur/test/metadata/CallAttemptFailureSpec.scala +++ b/centaur/src/test/scala/centaur/test/metadata/CallAttemptFailureSpec.scala @@ -6,7 +6,6 @@ import io.circe.ParsingFailure import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class CallAttemptFailureSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "CallAttemptFailure" diff --git a/centaur/src/test/scala/centaur/test/metadata/ExtractJobManagerStyleMetadataFieldsSpec.scala b/centaur/src/test/scala/centaur/test/metadata/ExtractJobManagerStyleMetadataFieldsSpec.scala index 2b9cb409352..f4ce7300385 100644 --- a/centaur/src/test/scala/centaur/test/metadata/ExtractJobManagerStyleMetadataFieldsSpec.scala +++ b/centaur/src/test/scala/centaur/test/metadata/ExtractJobManagerStyleMetadataFieldsSpec.scala @@ -6,7 +6,6 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import spray.json._ - class ExtractJobManagerStyleMetadataFieldsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "extracting Job Manager style metadata fields" @@ -45,6 +44,8 @@ class ExtractJobManagerStyleMetadataFieldsSpec extends AnyFlatSpec with Cromwell | } |}""".stripMargin - Operations.extractJmStyleMetadataFields(originalMetadata.parseJson.asJsObject) should be(expectedExpectedMetadata.parseJson.asJsObject) + Operations.extractJmStyleMetadataFields(originalMetadata.parseJson.asJsObject) should be( + expectedExpectedMetadata.parseJson.asJsObject + ) } } diff --git a/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala b/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala index f389192aa7f..0f9984d882e 100644 --- a/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala +++ b/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala @@ -11,7 +11,6 @@ import software.amazon.awssdk.services.s3.S3Client import software.amazon.awssdk.services.s3.model.{ListObjectsRequest, ListObjectsResponse, S3Object} import org.scalatest.flatspec.AnyFlatSpec - class FileCheckerSpec extends AnyFlatSpec with CromwellTimeoutSpec with MockSugar { import centaur.test.ObjectCounterInstances._ @@ -26,24 +25,29 @@ class FileCheckerSpec extends AnyFlatSpec with CromwellTimeoutSpec with MockSuga private val wrongBucketPrefix = "s3Bucket://my-not-so-cool-bucket/somelogs/empty" private val EmptyTestPath = "" private val testGsPath = "gs://my-cool-bucket/path/to/file" - private val objResponse = ListObjectsResponse.builder() - .contents(util.Arrays.asList(S3Object.builder() - .build())) + private val objResponse = ListObjectsResponse + .builder() + .contents( + util.Arrays.asList( + S3Object + .builder() + .build() + ) + ) .build() private val objRequest = ListObjectsRequest.builder().bucket(bucketName).prefix(dirName).build() private val awsS3Path = 
awsS3ObjectCounter.parsePath(s3PrefixRegex)(testPath) private val gsPath = gcsObjectCounter.parsePath(gsPrefixRegex)(testGsPath) - "parsePath" should "return a bucket and directories" in { assert(awsS3Path.bucket == bucketName) assert(awsS3Path.directory == dirName) } "parsePath" should "throw Exception for wrong path" in { - assertThrows[centaur.test.IllegalPathException] {awsS3ObjectCounter.parsePath(s3PrefixRegex)(wrongBucketPrefix)} - assertThrows[centaur.test.IllegalPathException] {awsS3ObjectCounter.parsePath(s3PrefixRegex)(testGsPath)} - assertThrows[centaur.test.IllegalPathException] {awsS3ObjectCounter.parsePath(s3PrefixRegex)(EmptyTestPath)} + assertThrows[centaur.test.IllegalPathException](awsS3ObjectCounter.parsePath(s3PrefixRegex)(wrongBucketPrefix)) + assertThrows[centaur.test.IllegalPathException](awsS3ObjectCounter.parsePath(s3PrefixRegex)(testGsPath)) + assertThrows[centaur.test.IllegalPathException](awsS3ObjectCounter.parsePath(s3PrefixRegex)(EmptyTestPath)) } "countObjectAtPath" should "should return 1 if the file exist" in { diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileProvider.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileProvider.scala index 93f4cf77d34..2d3cba53bfc 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileProvider.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileProvider.scala @@ -11,11 +11,10 @@ import common.exception._ import org.apache.commons.lang3.exception.ExceptionUtils import org.apache.http.HttpStatus +class DrsCloudNioFileProvider(drsPathResolver: EngineDrsPathResolver, drsReadInterpreter: DrsReadInterpreter) + extends CloudNioFileProvider { -class DrsCloudNioFileProvider(drsPathResolver: EngineDrsPathResolver, - drsReadInterpreter: DrsReadInterpreter) extends CloudNioFileProvider { - - private def checkIfPathExistsThroughDrsResolver(drsPath: String): IO[Boolean] = { + private def checkIfPathExistsThroughDrsResolver(drsPath: String): IO[Boolean] = /* * Unlike other cloud providers where directories are identified with a trailing slash at the end like `gs://bucket/dir/`, * DRS has a concept of bundles for directories (not supported yet). Hence for method `checkDirectoryExists` which appends a trailing '/' @@ -23,12 +22,14 @@ class DrsCloudNioFileProvider(drsPathResolver: EngineDrsPathResolver, */ if (drsPath.endsWith("/")) IO(false) else { - drsPathResolver.rawDrsResolverResponse(drsPath, NonEmptyList.one(DrsResolverField.GsUri)).use { drsResolverResponse => - val errorMsg = s"Status line was null for DRS Resolver response $drsResolverResponse." - toIO(Option(drsResolverResponse.getStatusLine), errorMsg) - }.map(_.getStatusCode == HttpStatus.SC_OK) + drsPathResolver + .rawDrsResolverResponse(drsPath, NonEmptyList.one(DrsResolverField.GsUri)) + .use { drsResolverResponse => + val errorMsg = s"Status line was null for DRS Resolver response $drsResolverResponse." 
+ toIO(Option(drsResolverResponse.getStatusLine), errorMsg) + } + .map(_.getStatusCode == HttpStatus.SC_OK) } - } override def existsPath(drsPath: String, unused: String): Boolean = checkIfPathExistsThroughDrsResolver(drsPath).unsafeRunSync() @@ -36,34 +37,46 @@ class DrsCloudNioFileProvider(drsPathResolver: EngineDrsPathResolver, override def existsPaths(cloudHost: String, cloudPathPrefix: String): Boolean = existsPath(cloudHost, cloudPathPrefix) - override def listObjects(drsPath: String, unused: String, markerOption: Option[String]): CloudNioFileList = { + override def listObjects(drsPath: String, unused: String, markerOption: Option[String]): CloudNioFileList = throw new UnsupportedOperationException("DRS currently doesn't support list.") - } - override def copy(sourceCloudHost: String, sourceCloudPath: String, targetCloudHost: String, targetCloudPath: String): Unit = + override def copy(sourceCloudHost: String, + sourceCloudPath: String, + targetCloudHost: String, + targetCloudPath: String + ): Unit = throw new UnsupportedOperationException("DRS currently doesn't support copy.") override def deleteIfExists(cloudHost: String, cloudPath: String): Boolean = throw new UnsupportedOperationException("DRS currently doesn't support delete.") override def read(drsPath: String, unused: String, offset: Long): ReadableByteChannel = { - val fields = NonEmptyList.of(DrsResolverField.GsUri, DrsResolverField.GoogleServiceAccount, DrsResolverField.AccessUrl) + val fields = + NonEmptyList.of(DrsResolverField.GsUri, DrsResolverField.GoogleServiceAccount, DrsResolverField.AccessUrl) val byteChannelIO = for { drsResolverResponse <- drsPathResolver.resolveDrs(drsPath, fields) byteChannel <- drsReadInterpreter(drsPathResolver, drsResolverResponse) } yield byteChannel - byteChannelIO.handleErrorWith { - e => IO.raiseError(new RuntimeException(s"Error while reading from DRS path: $drsPath. Error: ${ExceptionUtils.getMessage(e)}")) - }.unsafeRunSync() + byteChannelIO + .handleErrorWith { e => + IO.raiseError( + new RuntimeException(s"Error while reading from DRS path: $drsPath. 
Error: ${ExceptionUtils.getMessage(e)}") + ) + } + .unsafeRunSync() } override def write(cloudHost: String, cloudPath: String): WritableByteChannel = throw new UnsupportedOperationException("DRS currently doesn't support write.") override def fileAttributes(drsPath: String, unused: String): Option[CloudNioRegularFileAttributes] = { - val fields = NonEmptyList.of(DrsResolverField.Size, DrsResolverField.TimeCreated, DrsResolverField.TimeUpdated, DrsResolverField.Hashes) + val fields = NonEmptyList.of(DrsResolverField.Size, + DrsResolverField.TimeCreated, + DrsResolverField.TimeUpdated, + DrsResolverField.Hashes + ) val fileAttributesIO = for { drsResolverResponse <- drsPathResolver.resolveDrs(drsPath, fields) diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProvider.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProvider.scala index 884072e4a31..ff614646b13 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProvider.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProvider.scala @@ -8,10 +8,11 @@ import com.typesafe.config.Config class DrsCloudNioFileSystemProvider(rootConfig: Config, val drsCredentials: DrsCredentials, - drsReadInterpreter: DrsReadInterpreter, - ) extends CloudNioFileSystemProvider { + drsReadInterpreter: DrsReadInterpreter +) extends CloudNioFileSystemProvider { - lazy val drsResolverConfig = if (rootConfig.hasPath("resolver")) rootConfig.getConfig("resolver") else rootConfig.getConfig("martha") + lazy val drsResolverConfig = + if (rootConfig.hasPath("resolver")) rootConfig.getConfig("resolver") else rootConfig.getConfig("martha") lazy val drsConfig: DrsConfig = DrsConfig.fromConfig(drsResolverConfig) lazy val drsPathResolver: EngineDrsPathResolver = diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioRegularFileAttributes.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioRegularFileAttributes.scala index 778d9b8380e..b09606294c6 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioRegularFileAttributes.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCloudNioRegularFileAttributes.scala @@ -11,8 +11,8 @@ class DrsCloudNioRegularFileAttributes(drsPath: String, sizeOption: Option[Long], hashOption: Option[FileHash], timeCreatedOption: Option[FileTime], - timeUpdatedOption: Option[FileTime], - ) extends CloudNioRegularFileAttributes{ + timeUpdatedOption: Option[FileTime] +) extends CloudNioRegularFileAttributes { override def fileKey(): String = drsPath @@ -33,50 +33,47 @@ object DrsCloudNioRegularFileAttributes { ("etag", HashType.S3Etag) ) - def getPreferredHash(hashesOption: Option[Map[String, String]]): Option[FileHash] = { + def getPreferredHash(hashesOption: Option[Map[String, String]]): Option[FileHash] = hashesOption match { case Some(hashes: Map[String, String]) if hashes.nonEmpty => priorityHashList collectFirst { case (key, hashType) if hashes.contains(key) => FileHash(hashType, hashes(key)) } - // if no preferred hash was found, go ahead and return none because we don't support anything that the DRS object is offering + // if no preferred hash was found, go ahead and return none because we don't support anything that the DRS object is offering case _ => None } - } - private def convertToOffsetDateTime(timeInString: String): IO[OffsetDateTime] = { + 
private def convertToOffsetDateTime(timeInString: String): IO[OffsetDateTime] = // Here timeInString is assumed to be a ISO-8601 DateTime with timezone IO(OffsetDateTime.parse(timeInString)) - .handleErrorWith( - offsetDateTimeException => - // As a fallback timeInString is assumed to be a ISO-8601 DateTime without timezone - IO(LocalDateTime.parse(timeInString).atOffset(ZoneOffset.UTC)) - .handleErrorWith(_ => IO.raiseError(offsetDateTimeException)) + .handleErrorWith(offsetDateTimeException => + // As a fallback timeInString is assumed to be a ISO-8601 DateTime without timezone + IO(LocalDateTime.parse(timeInString).atOffset(ZoneOffset.UTC)) + .handleErrorWith(_ => IO.raiseError(offsetDateTimeException)) ) - } - private def convertToFileTime(timeInString: String): IO[FileTime] = { + private def convertToFileTime(timeInString: String): IO[FileTime] = convertToOffsetDateTime(timeInString) .map(_.toInstant) .map(FileTime.from) - } - def convertToFileTime(drsPath: String, key: DrsResolverField.Value, timeInStringOption: Option[String]): IO[Option[FileTime]] = { + def convertToFileTime(drsPath: String, + key: DrsResolverField.Value, + timeInStringOption: Option[String] + ): IO[Option[FileTime]] = timeInStringOption match { case None => IO.pure(None) case Some(timeInString) => convertToFileTime(timeInString) .map(Option(_)) - .handleErrorWith( - throwable => - IO.raiseError( - new RuntimeException( - s"Error while parsing '$key' value from DRS Resolver to FileTime for DRS path $drsPath. " + - s"Reason: ${ExceptionUtils.getMessage(throwable)}.", - throwable, - ) + .handleErrorWith(throwable => + IO.raiseError( + new RuntimeException( + s"Error while parsing '$key' value from DRS Resolver to FileTime for DRS path $drsPath. " + + s"Reason: ${ExceptionUtils.getMessage(throwable)}.", + throwable ) + ) ) } - } } diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsConfig.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsConfig.scala index a2b0a385680..28b1ee57b7a 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsConfig.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsConfig.scala @@ -11,8 +11,8 @@ final case class DrsConfig(drsResolverUrl: String, waitInitial: FiniteDuration, waitMaximum: FiniteDuration, waitMultiplier: Double, - waitRandomizationFactor: Double, - ) + waitRandomizationFactor: Double +) object DrsConfig { // If you update these values also update Filesystems.md! 
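[Editorial aside, not part of the patch: the hunk below reformats DrsConfig's fromConfig/fromEnv/toEnv helpers. A minimal sketch of the environment-variable round trip those helpers implement, assuming the DrsConfig case class and DRS_RESOLVER_* variable names shown in this file's diff; the endpoint URL and numeric values are hypothetical.]

import scala.concurrent.duration._

// Hypothetical config; DrsConfig is the case class defined in this file.
val original = DrsConfig(
  drsResolverUrl = "https://drshub.example.org/api/v4/drs/resolve", // hypothetical endpoint
  numRetries = 3,
  waitInitial = 10.seconds,
  waitMaximum = 30.seconds,
  waitMultiplier = 1.5d,
  waitRandomizationFactor = 0.1d
)

// toEnv renders every field as a String keyed by the DRS_RESOLVER_* names;
// fromEnv parses them back, falling back to the Default* constants for any
// variable that is absent.
val env: Map[String, String] = DrsConfig.toEnv(original)
assert(DrsConfig.fromEnv(env) == original)

[Note that toEnv writes waitInitial and waitMaximum via .toSeconds and fromEnv reads them back with .toLong.seconds, so sub-second precision would be lost; whole-second durations like the ones above round-trip exactly.]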
@@ -29,20 +29,17 @@ object DrsConfig { private val EnvDrsResolverWaitMultiplier = "DRS_RESOLVER_WAIT_MULTIPLIER" private val EnvDrsResolverWaitRandomizationFactor = "DRS_RESOLVER_WAIT_RANDOMIZATION_FACTOR" - - def fromConfig(drsResolverConfig: Config): DrsConfig = { + def fromConfig(drsResolverConfig: Config): DrsConfig = DrsConfig( drsResolverUrl = drsResolverConfig.getString("url"), numRetries = drsResolverConfig.getOrElse("num-retries", DefaultNumRetries), waitInitial = drsResolverConfig.getOrElse("wait-initial", DefaultWaitInitial), waitMaximum = drsResolverConfig.getOrElse("wait-maximum", DefaultWaitMaximum), waitMultiplier = drsResolverConfig.getOrElse("wait-multiplier", DefaultWaitMultiplier), - waitRandomizationFactor = - drsResolverConfig.getOrElse("wait-randomization-factor", DefaultWaitRandomizationFactor), + waitRandomizationFactor = drsResolverConfig.getOrElse("wait-randomization-factor", DefaultWaitRandomizationFactor) ) - } - def fromEnv(env: Map[String, String]): DrsConfig = { + def fromEnv(env: Map[String, String]): DrsConfig = DrsConfig( drsResolverUrl = env(EnvDrsResolverUrl), numRetries = env.get(EnvDrsResolverNumRetries).map(_.toInt).getOrElse(DefaultNumRetries), @@ -50,18 +47,16 @@ object DrsConfig { waitMaximum = env.get(EnvDrsResolverWaitMaximumSeconds).map(_.toLong.seconds).getOrElse(DefaultWaitMaximum), waitMultiplier = env.get(EnvDrsResolverWaitMultiplier).map(_.toDouble).getOrElse(DefaultWaitMultiplier), waitRandomizationFactor = - env.get(EnvDrsResolverWaitRandomizationFactor).map(_.toDouble).getOrElse(DefaultWaitRandomizationFactor), + env.get(EnvDrsResolverWaitRandomizationFactor).map(_.toDouble).getOrElse(DefaultWaitRandomizationFactor) ) - } - def toEnv(drsConfig: DrsConfig): Map[String, String] = { + def toEnv(drsConfig: DrsConfig): Map[String, String] = Map( EnvDrsResolverUrl -> drsConfig.drsResolverUrl, EnvDrsResolverNumRetries -> s"${drsConfig.numRetries}", EnvDrsResolverWaitInitialSeconds -> s"${drsConfig.waitInitial.toSeconds}", EnvDrsResolverWaitMaximumSeconds -> s"${drsConfig.waitMaximum.toSeconds}", EnvDrsResolverWaitMultiplier -> s"${drsConfig.waitMultiplier}", - EnvDrsResolverWaitRandomizationFactor -> s"${drsConfig.waitRandomizationFactor}", + EnvDrsResolverWaitRandomizationFactor -> s"${drsConfig.waitRandomizationFactor}" ) - } } diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCredentials.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCredentials.scala index 2d3e972508a..78b2e9f3503 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCredentials.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsCredentials.scala @@ -27,11 +27,10 @@ trait DrsCredentials { * is designed for use within the Cromwell engine. 
*/ case class GoogleOauthDrsCredentials(credentials: OAuth2Credentials, acceptableTTL: Duration) extends DrsCredentials { - //Based on method from GoogleRegistry + // Based on method from GoogleRegistry def getAccessToken: ErrorOr[String] = { - def accessTokenTTLIsAcceptable(accessToken: AccessToken): Boolean = { + def accessTokenTTLIsAcceptable(accessToken: AccessToken): Boolean = (accessToken.getExpirationTime.getTime - System.currentTimeMillis()).millis.gteq(acceptableTTL) - } Option(credentials.getAccessToken) match { case Some(accessToken) if accessTokenTTLIsAcceptable(accessToken) => @@ -51,26 +50,25 @@ object GoogleOauthDrsCredentials { GoogleOauthDrsCredentials(credentials, config.as[FiniteDuration]("access-token-acceptable-ttl")) } - /** * Strategy for obtaining an access token from Google Application Default credentials that are assumed to already exist * in the environment. This class is designed for use by standalone executables running in environments * that have direct access to a Google identity (ex. CromwellDrsLocalizer). */ case object GoogleAppDefaultTokenStrategy extends DrsCredentials { - private final val UserInfoEmailScope = "https://www.googleapis.com/auth/userinfo.email" - private final val UserInfoProfileScope = "https://www.googleapis.com/auth/userinfo.profile" + final private val UserInfoEmailScope = "https://www.googleapis.com/auth/userinfo.email" + final private val UserInfoProfileScope = "https://www.googleapis.com/auth/userinfo.profile" - def getAccessToken: ErrorOr[String] = { + def getAccessToken: ErrorOr[String] = Try { - val scopedCredentials = GoogleCredentials.getApplicationDefault().createScoped(UserInfoEmailScope, UserInfoProfileScope) + val scopedCredentials = + GoogleCredentials.getApplicationDefault().createScoped(UserInfoEmailScope, UserInfoProfileScope) scopedCredentials.refreshAccessToken().getTokenValue } match { case Success(null) => "null token value attempting to refresh access token".invalidNel case Success(value) => value.validNel case Failure(e) => s"Failed to refresh access token: ${e.getMessage}".invalidNel } - } } /** @@ -96,9 +94,11 @@ case class AzureDrsCredentials(identityClientId: Option[String]) extends DrsCred .authorityHost(azureProfile.getEnvironment.getActiveDirectoryEndpoint) def getAccessToken: ErrorOr[String] = { - val credentials = identityClientId.foldLeft(defaultCredentialBuilder) { - (builder, clientId) => builder.managedIdentityClientId(clientId) - }.build() + val credentials = identityClientId + .foldLeft(defaultCredentialBuilder) { (builder, clientId) => + builder.managedIdentityClientId(clientId) + } + .build() Try( credentials diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsPathResolver.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsPathResolver.scala index 22d86c31726..f8d898a0bcf 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsPathResolver.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsPathResolver.scala @@ -6,7 +6,7 @@ import cats.effect.{IO, Resource} import cats.implicits._ import cloud.nio.impl.drs.DrsPathResolver.{FatalRetryDisposition, RegularRetryDisposition} import cloud.nio.impl.drs.DrsResolverResponseSupport._ -import common.exception.{AggregatedMessageException, toIO} +import common.exception.{toIO, AggregatedMessageException} import common.validation.ErrorOr.ErrorOr import io.circe._ import io.circe.generic.semiauto._ @@ -40,22 +40,27 @@ abstract class DrsPathResolver(drsConfig: 
DrsConfig) { def getAccessToken: ErrorOr[String] - private def makeHttpRequestToDrsResolver(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): Resource[IO, HttpPost] = { + private def makeHttpRequestToDrsResolver(drsPath: String, + fields: NonEmptyList[DrsResolverField.Value] + ): Resource[IO, HttpPost] = { val io = getAccessToken match { - case Valid(token) => IO { - val postRequest = new HttpPost(drsConfig.drsResolverUrl) - val requestJson = DrsResolverRequest(drsPath, fields).asJson.noSpaces - postRequest.setEntity(new StringEntity(requestJson, ContentType.APPLICATION_JSON)) - postRequest.setHeader("Authorization", s"Bearer $token") - postRequest - } + case Valid(token) => + IO { + val postRequest = new HttpPost(drsConfig.drsResolverUrl) + val requestJson = DrsResolverRequest(drsPath, fields).asJson.noSpaces + postRequest.setEntity(new StringEntity(requestJson, ContentType.APPLICATION_JSON)) + postRequest.setHeader("Authorization", s"Bearer $token") + postRequest + } case Invalid(errors) => IO.raiseError(AggregatedMessageException("Error getting access token", errors.toList)) } Resource.eval(io) } - private def httpResponseToDrsResolverResponse(drsPathForDebugging: String)(httpResponse: HttpResponse): IO[DrsResolverResponse] = { + private def httpResponseToDrsResolverResponse( + drsPathForDebugging: String + )(httpResponse: HttpResponse): IO[DrsResolverResponse] = { val responseStatusLine = httpResponse.getStatusLine val status = responseStatusLine.getStatusCode @@ -73,45 +78,53 @@ abstract class DrsPathResolver(drsConfig: DrsConfig) { IO.raiseError(new RuntimeException(retryMessage) with RegularRetryDisposition) case _ => val drsResolverResponseEntityOption = Option(httpResponse.getEntity).map(EntityUtils.toString) - val exceptionMsg = errorMessageFromResponse(drsPathForDebugging, drsResolverResponseEntityOption, responseStatusLine, drsConfig.drsResolverUrl) - val responseEntityOption = (responseStatusLine.getStatusCode == HttpStatus.SC_OK).valueOrZero(drsResolverResponseEntityOption) + val exceptionMsg = errorMessageFromResponse(drsPathForDebugging, + drsResolverResponseEntityOption, + responseStatusLine, + drsConfig.drsResolverUrl + ) + val responseEntityOption = + (responseStatusLine.getStatusCode == HttpStatus.SC_OK).valueOrZero(drsResolverResponseEntityOption) val responseContentIO = toIO(responseEntityOption, exceptionMsg) - responseContentIO.flatMap { responseContent => - IO.fromEither(decode[DrsResolverResponse](responseContent)) - }.handleErrorWith { - e => IO.raiseError(new RuntimeException(s"Unexpected response during DRS resolution: ${ExceptionUtils.getMessage(e)}")) - } + responseContentIO + .flatMap { responseContent => + IO.fromEither(decode[DrsResolverResponse](responseContent)) + } + .handleErrorWith { e => + IO.raiseError( + new RuntimeException(s"Unexpected response during DRS resolution: ${ExceptionUtils.getMessage(e)}") + ) + } } } - private def executeDrsResolverRequest(httpPost: HttpPost): Resource[IO, HttpResponse]= { + private def executeDrsResolverRequest(httpPost: HttpPost): Resource[IO, HttpResponse] = for { httpClient <- Resource.fromAutoCloseable(IO(httpClientBuilder.build())) httpResponse <- Resource.fromAutoCloseable(IO(httpClient.execute(httpPost))) } yield httpResponse - } - def rawDrsResolverResponse(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): Resource[IO, HttpResponse] = { + def rawDrsResolverResponse(drsPath: String, + fields: NonEmptyList[DrsResolverField.Value] + ): Resource[IO, HttpResponse] = for { httpPost <- 
makeHttpRequestToDrsResolver(drsPath, fields) response <- executeDrsResolverRequest(httpPost) } yield response - } /** * * Resolves the DRS path through DRS Resolver url provided in the config. * Please note, this method returns an IO that would make a synchronous HTTP request to DRS Resolver when run. */ - def resolveDrs(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): IO[DrsResolverResponse] = { + def resolveDrs(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): IO[DrsResolverResponse] = rawDrsResolverResponse(drsPath, fields).use(httpResponseToDrsResolverResponse(drsPathForDebugging = drsPath)) - } - def openChannel(accessUrl: AccessUrl): IO[ReadableByteChannel] = { + def openChannel(accessUrl: AccessUrl): IO[ReadableByteChannel] = IO { val httpGet = new HttpGet(accessUrl.url) - accessUrl.headers.getOrElse(Map.empty).toList foreach { - case (name, value) => httpGet.addHeader(name, value) + accessUrl.headers.getOrElse(Map.empty).toList foreach { case (name, value) => + httpGet.addHeader(name, value) } val client = httpClientBuilder.build() val response = client.execute(httpGet) @@ -130,7 +143,7 @@ abstract class DrsPathResolver(drsConfig: DrsConfig) { override def isOpen: Boolean = inner.isOpen - //noinspection ScalaUnusedExpression + // noinspection ScalaUnusedExpression override def close(): Unit = { val innerTry = Try(inner.close()) val responseTry = Try(response.close()) @@ -141,7 +154,6 @@ abstract class DrsPathResolver(drsConfig: DrsConfig) { } } } - } } object DrsPathResolver { @@ -198,14 +210,15 @@ final case class DrsResolverResponse(size: Option[Long] = None, hashes: Option[Map[String, String]] = None, accessUrl: Option[AccessUrl] = None, localizationPath: Option[String] = None - ) +) final case class DrsResolverFailureResponse(response: DrsResolverFailureResponsePayload) final case class DrsResolverFailureResponsePayload(text: String) object DrsResolverResponseSupport { - implicit lazy val drsResolverFieldEncoder: Encoder[DrsResolverField.Value] = Encoder.encodeEnumeration(DrsResolverField) + implicit lazy val drsResolverFieldEncoder: Encoder[DrsResolverField.Value] = + Encoder.encodeEnumeration(DrsResolverField) implicit lazy val drsResolverRequestEncoder: Encoder[DrsResolverRequest] = deriveEncoder implicit lazy val saDataObjectDecoder: Decoder[SADataObject] = deriveDecoder @@ -219,12 +232,17 @@ object DrsResolverResponseSupport { private val GcsScheme = "gs://" def getGcsBucketAndName(gcsUrl: String): (String, String) = { - val array = gcsUrl.substring(GcsScheme.length).split("/", 2) - (array(0), array(1)) + val array = gcsUrl.substring(GcsScheme.length).split("/", 2) + (array(0), array(1)) } - def errorMessageFromResponse(drsPathForDebugging: String, drsResolverResponseEntityOption: Option[String], responseStatusLine: StatusLine, drsResolverUri: String): String = { - val baseMessage = s"Could not access object \'$drsPathForDebugging\'. Status: ${responseStatusLine.getStatusCode}, reason: \'${responseStatusLine.getReasonPhrase}\', DRS Resolver location: \'$drsResolverUri\', message: " + def errorMessageFromResponse(drsPathForDebugging: String, + drsResolverResponseEntityOption: Option[String], + responseStatusLine: StatusLine, + drsResolverUri: String + ): String = { + val baseMessage = + s"Could not access object \'$drsPathForDebugging\'. 
Status: ${responseStatusLine.getStatusCode}, reason: \'${responseStatusLine.getReasonPhrase}\', DRS Resolver location: \'$drsResolverUri\', message: " drsResolverResponseEntityOption match { case Some(entity) => diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategy.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategy.scala index 39020f5a684..e39e7b49c81 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategy.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategy.scala @@ -8,7 +8,8 @@ import org.apache.http.client.{HttpRequestRetryHandler, ServiceUnavailableRetryS import org.apache.http.protocol.HttpContext class DrsResolverHttpRequestRetryStrategy(drsConfig: DrsConfig) - extends ServiceUnavailableRetryStrategy with HttpRequestRetryHandler { + extends ServiceUnavailableRetryStrategy + with HttpRequestRetryHandler { // We can execute a total of one time, plus the number of retries private val executionMax: Int = drsConfig.numRetries + 1 @@ -17,23 +18,21 @@ class DrsResolverHttpRequestRetryStrategy(drsConfig: DrsConfig) initialInterval = drsConfig.waitInitial, maxInterval = drsConfig.waitMaximum, multiplier = drsConfig.waitMultiplier, - randomizationFactor = drsConfig.waitRandomizationFactor, + randomizationFactor = drsConfig.waitRandomizationFactor ) private var transientFailures: Int = 0 /** Returns true if an IOException should be immediately retried. */ - override def retryRequest(exception: IOException, executionCount: Int, context: HttpContext): Boolean = { + override def retryRequest(exception: IOException, executionCount: Int, context: HttpContext): Boolean = retryRequest(executionCount) - } /** Returns true if HttpResponse should be retried after getRetryInterval. */ - override def retryRequest(response: HttpResponse, executionCount: Int, context: HttpContext): Boolean = { + override def retryRequest(response: HttpResponse, executionCount: Int, context: HttpContext): Boolean = response.getStatusLine.getStatusCode match { case code if code == 408 || code == 429 => retryRequestTransient(executionCount) case code if 500 <= code && code <= 599 => retryRequest(executionCount) case _ => false } - } /** Returns the number of milliseconds to wait before retrying an HttpResponse. 
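 * As a worked illustration (using the configuration values exercised by the retry
 * strategy spec later in this patch, with waitRandomizationFactor = 0): an
 * exponential backoff configured with waitInitial = 10 seconds and
 * waitMultiplier = 2.0 would produce successive intervals of 10s, 20s and 40s,
 * after which every interval is capped at waitMaximum = 1 minute.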
*/ override def getRetryInterval: Long = { @@ -47,8 +46,7 @@ class DrsResolverHttpRequestRetryStrategy(drsConfig: DrsConfig) retryRequest(executionCount) } - private def retryRequest(executionCount: Int): Boolean = { + private def retryRequest(executionCount: Int): Boolean = // The first execution is executionCount == 1 executionCount - transientFailures <= executionMax - } } diff --git a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/EngineDrsPathResolver.scala b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/EngineDrsPathResolver.scala index 01f7a488eb3..b53ba2cc7d8 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/EngineDrsPathResolver.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/main/scala/cloud/nio/impl/drs/EngineDrsPathResolver.scala @@ -2,10 +2,8 @@ package cloud.nio.impl.drs import common.validation.ErrorOr.ErrorOr -case class EngineDrsPathResolver(drsConfig: DrsConfig, - drsCredentials: DrsCredentials, - ) - extends DrsPathResolver(drsConfig) { +case class EngineDrsPathResolver(drsConfig: DrsConfig, drsCredentials: DrsCredentials) + extends DrsPathResolver(drsConfig) { override def getAccessToken: ErrorOr[String] = drsCredentials.getAccessToken } diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala index f61c608bd71..5edb8fd6a8b 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala @@ -52,11 +52,10 @@ class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSp "drs://dg.4DFC:0027045b-9ed6-45af-a68e-f55037b5184c", "drs://dg.4503:dg.4503/fc046e84-6cf9-43a3-99cc-ffa2964b88cb", "drs://dg.ANV0:dg.ANV0/0db6577e-57bd-48a1-93c6-327c292bcb6b", - "drs://dg.F82A1A:ed6be7ab-068e-46c8-824a-f39cfbb885cc", + "drs://dg.F82A1A:ed6be7ab-068e-46c8-824a-f39cfbb885cc" ) - for (exampleUri <- exampleUris) { + for (exampleUri <- exampleUris) fileSystemProvider.getHost(exampleUri) should be(exampleUri) - } } it should "check existing drs objects" in { @@ -79,52 +78,44 @@ class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSp it should "return a file provider that can read bytes from gcs" in { val drsPathResolver = new MockEngineDrsPathResolver() { - override def resolveDrs(drsPath: String, - fields: NonEmptyList[DrsResolverField.Value], - ): IO[DrsResolverResponse] = { + override def resolveDrs(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): IO[DrsResolverResponse] = IO(DrsResolverResponse(gsUri = Option("gs://bucket/object/path"))) - } } val readChannel = mock[ReadableByteChannel] - val drsReadInterpreter: DrsReadInterpreter = (_, drsResolverResponse) => { + val drsReadInterpreter: DrsReadInterpreter = (_, drsResolverResponse) => IO( (drsResolverResponse.gsUri, drsResolverResponse.googleServiceAccount) match { case (Some("gs://bucket/object/path"), None) => readChannel case _ => fail(s"Unexpected parameters passed: $drsResolverResponse") } ) - } val fileSystemProvider = new MockDrsCloudNioFileSystemProvider( mockResolver = Option(drsPathResolver), - drsReadInterpreter = drsReadInterpreter, + drsReadInterpreter = drsReadInterpreter ) fileSystemProvider.fileProvider.read("dg.123", "abc", 0) should be(readChannel) } it should "return a file provider that can read bytes from an 
access url" in { val drsPathResolver = new MockEngineDrsPathResolver() { - override def resolveDrs(drsPath: String, - fields: NonEmptyList[DrsResolverField.Value], - ): IO[DrsResolverResponse] = { + override def resolveDrs(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): IO[DrsResolverResponse] = IO(DrsResolverResponse(accessUrl = Option(AccessUrl("https://host/object/path", None)))) - } } val readChannel = mock[ReadableByteChannel] - val drsReadInterpreter: DrsReadInterpreter = (_, drsResolverResponse) => { + val drsReadInterpreter: DrsReadInterpreter = (_, drsResolverResponse) => IO( drsResolverResponse.accessUrl match { case Some(AccessUrl("https://host/object/path", None)) => readChannel case _ => fail(s"Unexpected parameters passed: $drsResolverResponse") } ) - } val fileSystemProvider = new MockDrsCloudNioFileSystemProvider( mockResolver = Option(drsPathResolver), - drsReadInterpreter = drsReadInterpreter, + drsReadInterpreter = drsReadInterpreter ) fileSystemProvider.fileProvider.read("dg.123", "abc", 0) should be(readChannel) } @@ -132,8 +123,8 @@ class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSp it should "return a file provider that can return file attributes" in { val drsPathResolver = new MockEngineDrsPathResolver() { override def resolveDrs(drsPath: String, - fields: NonEmptyList[DrsResolverField.Value], - ): IO[DrsResolverResponse] = { + fields: NonEmptyList[DrsResolverField.Value] + ): IO[DrsResolverResponse] = { val instantCreated = Instant.ofEpochMilli(123L) val instantUpdated = Instant.ofEpochMilli(456L) IO( @@ -141,7 +132,7 @@ class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSp size = Option(789L), timeCreated = Option(OffsetDateTime.ofInstant(instantCreated, ZoneOffset.UTC).toString), timeUpdated = Option(OffsetDateTime.ofInstant(instantUpdated, ZoneOffset.UTC).toString), - hashes = Option(Map("md5" -> "gg0217869")), + hashes = Option(Map("md5" -> "gg0217869")) ) ) } diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProviderSpec.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProviderSpec.scala index 88b95dc76e8..13df061d7a6 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProviderSpec.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileSystemProviderSpec.scala @@ -17,6 +17,6 @@ class DrsCloudNioFileSystemProviderSpec extends org.scalatest.flatspec.AnyFlatSp val path = fileSystemProvider.getCloudNioPath("drs://foo/bar/") the[UnsupportedOperationException] thrownBy { fileSystemProvider.deleteIfExists(path) - } should have message("DRS currently doesn't support delete.") + } should have message "DRS currently doesn't support delete." 
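// Illustrative sketch, not part of this change: the two read tests earlier in this
// patch assume a DrsReadInterpreter that dispatches on whichever field the resolver
// populated. `openGcsChannel` and `openHttpChannel` are hypothetical helpers:
//
//   val interpreter: DrsReadInterpreter = (_, resp) =>
//     (resp.gsUri, resp.accessUrl) match {
//       case (Some(gs), _)        => IO(openGcsChannel(gs))
//       case (None, Some(access)) => IO(openHttpChannel(access))
//       case _                    => IO.raiseError(new IOException("DRS resolver returned nothing to read from"))
//     }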
} } diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsPathResolverSpec.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsPathResolverSpec.scala index b4e5968e048..9c6cecd73f1 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsPathResolverSpec.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsPathResolverSpec.scala @@ -12,7 +12,7 @@ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers { - private val mockGSA = SADataObject(data = Json.fromJsonObject(JsonObject("key"-> Json.fromString("value")))) + private val mockGSA = SADataObject(data = Json.fromJsonObject(JsonObject("key" -> Json.fromString("value")))) private val crcHashValue = "8a366443" private val md5HashValue = "336ea55913bc261b72875bd259753046" private val shaHashValue = "f76877f8e86ec3932fd2ae04239fbabb8c90199dab0019ae55fa42b31c314c44" @@ -40,54 +40,70 @@ class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with .copy(timeUpdated = fullDrsResolverResponse.timeUpdated.map(_.stripSuffix("Z") + "BADTZ")) private val etagHashValue = "something" - private val completeHashesMap = Option(Map( - "betty" -> "abc123", - "charles" -> "456", - "alfred" -> "xrd", - "sha256" -> shaHashValue, - "crc32c" -> crcHashValue, - "md5" -> md5HashValue, - "etag" -> etagHashValue, - )) - - private val missingCRCHashesMap = Option(Map( - "alfred" -> "xrd", - "sha256" -> shaHashValue, - "betty" -> "abc123", - "md5" -> md5HashValue, - "charles" -> "456", - )) - - private val onlySHAHashesMap = Option(Map( - "betty" -> "abc123", - "charles" -> "456", - "alfred" -> "xrd", - "sha256" -> shaHashValue, - )) - - private val onlyEtagHashesMap = Option(Map( - "alfred" -> "xrd", - "betty" -> "abc123", - "charles" -> "456", - "etag" -> etagHashValue, - )) + private val completeHashesMap = Option( + Map( + "betty" -> "abc123", + "charles" -> "456", + "alfred" -> "xrd", + "sha256" -> shaHashValue, + "crc32c" -> crcHashValue, + "md5" -> md5HashValue, + "etag" -> etagHashValue + ) + ) + + private val missingCRCHashesMap = Option( + Map( + "alfred" -> "xrd", + "sha256" -> shaHashValue, + "betty" -> "abc123", + "md5" -> md5HashValue, + "charles" -> "456" + ) + ) + + private val onlySHAHashesMap = Option( + Map( + "betty" -> "abc123", + "charles" -> "456", + "alfred" -> "xrd", + "sha256" -> shaHashValue + ) + ) + + private val onlyEtagHashesMap = Option( + Map( + "alfred" -> "xrd", + "betty" -> "abc123", + "charles" -> "456", + "etag" -> etagHashValue + ) + ) behavior of "fileHash()" it should "return crc32c hash from `hashes` in DRS Resolver response when there is a crc32c" in { - DrsCloudNioRegularFileAttributes.getPreferredHash(completeHashesMap) shouldBe Option(FileHash(HashType.Crc32c, crcHashValue)) + DrsCloudNioRegularFileAttributes.getPreferredHash(completeHashesMap) shouldBe Option( + FileHash(HashType.Crc32c, crcHashValue) + ) } it should "return md5 hash from `hashes` in DRS Resolver response when there is no crc32c" in { - DrsCloudNioRegularFileAttributes.getPreferredHash(missingCRCHashesMap) shouldBe Option(FileHash(HashType.Md5, md5HashValue)) + DrsCloudNioRegularFileAttributes.getPreferredHash(missingCRCHashesMap) shouldBe Option( + FileHash(HashType.Md5, md5HashValue) + ) } it should "return sha256 hash from `hashes` in DRS Resolver response when there is only a sha256" in { - 
DrsCloudNioRegularFileAttributes.getPreferredHash(onlySHAHashesMap) shouldBe Option(FileHash(HashType.Sha256, shaHashValue)) + DrsCloudNioRegularFileAttributes.getPreferredHash(onlySHAHashesMap) shouldBe Option( + FileHash(HashType.Sha256, shaHashValue) + ) } it should "return etag hash from `hashes` in DRS Resolver response when there is only an etag" in { - DrsCloudNioRegularFileAttributes.getPreferredHash(onlyEtagHashesMap) shouldBe Option(FileHash(HashType.S3Etag, etagHashValue)) + DrsCloudNioRegularFileAttributes.getPreferredHash(onlyEtagHashesMap) shouldBe Option( + FileHash(HashType.S3Etag, etagHashValue) + ) } it should "return None when no hashes object is returned" in { @@ -138,19 +154,27 @@ class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with import org.apache.http.message.BasicStatusLine val drsPathForDebugging = "drs://my_awesome_drs" - val responseStatusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 2) , 345, "test-reason") + val responseStatusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 2), 345, "test-reason") val testDrsResolverUri = "www.drshub_v4.com" it should "construct an error message from a populated, well-formed failure response" in { val failureResponse = Option(failureResponseJson) - DrsResolverResponseSupport.errorMessageFromResponse(drsPathForDebugging, failureResponse, responseStatusLine, testDrsResolverUri) shouldBe { + DrsResolverResponseSupport.errorMessageFromResponse(drsPathForDebugging, + failureResponse, + responseStatusLine, + testDrsResolverUri + ) shouldBe { "Could not access object 'drs://my_awesome_drs'. Status: 345, reason: 'test-reason', DRS Resolver location: 'www.drshub_v4.com', message: '{\"msg\":\"User 'null' does not have required action: read_data\",\"status_code\":500}'" } } it should "construct an error message from an empty failure response" in { - DrsResolverResponseSupport.errorMessageFromResponse(drsPathForDebugging, None, responseStatusLine, testDrsResolverUri) shouldBe { + DrsResolverResponseSupport.errorMessageFromResponse(drsPathForDebugging, + None, + responseStatusLine, + testDrsResolverUri + ) shouldBe { "Could not access object 'drs://my_awesome_drs'. Status: 345, reason: 'test-reason', DRS Resolver location: 'www.drshub_v4.com', message: (empty response)" } } @@ -160,7 +184,11 @@ class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with it should "construct an error message from a malformed failure response" in { val unparsableFailureResponse = Option("something went horribly wrong") - DrsResolverResponseSupport.errorMessageFromResponse(drsPathForDebugging, unparsableFailureResponse, responseStatusLine, testDrsResolverUri) shouldBe { + DrsResolverResponseSupport.errorMessageFromResponse(drsPathForDebugging, + unparsableFailureResponse, + responseStatusLine, + testDrsResolverUri + ) shouldBe { "Could not access object 'drs://my_awesome_drs'. 
Status: 345, reason: 'test-reason', DRS Resolver location: 'www.drshub_v4.com', message: 'something went horribly wrong'" } } @@ -169,7 +197,7 @@ class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with val lastModifiedTimeIO = convertToFileTime( "drs://my_awesome_drs", DrsResolverField.TimeUpdated, - fullDrsResolverResponse.timeUpdated, + fullDrsResolverResponse.timeUpdated ) lastModifiedTimeIO.unsafeRunSync() should be(Option(FileTime.from(OffsetDateTime.parse("2020-04-27T15:56:09.696Z").toInstant))) @@ -179,7 +207,7 @@ class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with val lastModifiedTimeIO = convertToFileTime( "drs://my_awesome_drs", DrsResolverField.TimeUpdated, - fullDrsResolverResponseNoTz.timeUpdated, + fullDrsResolverResponseNoTz.timeUpdated ) lastModifiedTimeIO.unsafeRunSync() should be(Option(FileTime.from(OffsetDateTime.parse("2020-04-27T15:56:09.696Z").toInstant))) @@ -189,7 +217,7 @@ class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with val lastModifiedTimeIO = convertToFileTime( "drs://my_awesome_drs", DrsResolverField.TimeUpdated, - fullDrsResolverResponseNoTime.timeUpdated, + fullDrsResolverResponseNoTime.timeUpdated ) lastModifiedTimeIO.unsafeRunSync() should be(None) } @@ -198,10 +226,10 @@ class DrsPathResolverSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with val lastModifiedTimeIO = convertToFileTime( "drs://my_awesome_drs", DrsResolverField.TimeUpdated, - fullDrsResolverResponseBadTz.timeUpdated, + fullDrsResolverResponseBadTz.timeUpdated ) the[RuntimeException] thrownBy lastModifiedTimeIO.unsafeRunSync() should have message "Error while parsing 'timeUpdated' value from DRS Resolver to FileTime for DRS path drs://my_awesome_drs. " + - "Reason: DateTimeParseException: Text '2020-04-27T15:56:09.696BADTZ' could not be parsed at index 23." + "Reason: DateTimeParseException: Text '2020-04-27T15:56:09.696BADTZ' could not be parsed at index 23." 
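// Illustrative sketch, inferred from the three timestamp tests above rather than
// taken from the implementation: convertToFileTime behaves as if it attempts a full
// offset parse first and falls back to treating a zone-less timestamp as UTC.
//
//   def parseToFileTime(s: String): Try[FileTime] =
//     Try(OffsetDateTime.parse(s))
//       .orElse(Try(LocalDateTime.parse(s).atOffset(ZoneOffset.UTC)))
//       .map(odt => FileTime.from(odt.toInstant))
//
// A value such as "2020-04-27T15:56:09.696BADTZ" fails both parses, surfacing the
// DateTimeParseException checked in the assertion above.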
} } diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategySpec.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategySpec.scala index b221038bedb..5be2b155391 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategySpec.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsResolverHttpRequestRetryStrategySpec.scala @@ -93,7 +93,7 @@ class DrsResolverHttpRequestRetryStrategySpec extends AnyFlatSpec with Matchers waitInitial = 10.seconds, waitMultiplier = 2.0d, waitMaximum = 1.minute, - waitRandomizationFactor = 0d, + waitRandomizationFactor = 0d ) val retryStrategy = new DrsResolverHttpRequestRetryStrategy(drsConfig) diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockDrsCloudNioFileSystemProvider.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockDrsCloudNioFileSystemProvider.scala index 07a02e096dc..2d0afd33336 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockDrsCloudNioFileSystemProvider.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockDrsCloudNioFileSystemProvider.scala @@ -15,18 +15,19 @@ class MockDrsCloudNioFileSystemProvider(config: Config = mockConfig, IO.raiseError( new UnsupportedOperationException("mock did not specify a read interpreter") ), - mockResolver: Option[EngineDrsPathResolver] = None, - ) - extends DrsCloudNioFileSystemProvider(config, GoogleOauthDrsCredentials(NoCredentials.getInstance, config), drsReadInterpreter) { + mockResolver: Option[EngineDrsPathResolver] = None +) extends DrsCloudNioFileSystemProvider(config, + GoogleOauthDrsCredentials(NoCredentials.getInstance, config), + drsReadInterpreter + ) { - override lazy val drsPathResolver: EngineDrsPathResolver = { + override lazy val drsPathResolver: EngineDrsPathResolver = mockResolver getOrElse new MockEngineDrsPathResolver( drsConfig = drsConfig, httpClientBuilderOverride = httpClientBuilder, - accessTokenAcceptableTTL = Duration.Inf, + accessTokenAcceptableTTL = Duration.Inf ) - } } object MockDrsCloudNioFileSystemProvider { diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala index 9e22544c38b..1aff2f13470 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala @@ -12,9 +12,10 @@ import scala.concurrent.duration.Duration class MockEngineDrsPathResolver(drsConfig: DrsConfig = MockDrsPaths.mockDrsConfig, httpClientBuilderOverride: Option[HttpClientBuilder] = None, - accessTokenAcceptableTTL: Duration = Duration.Inf, - ) - extends EngineDrsPathResolver(drsConfig, GoogleOauthDrsCredentials(NoCredentials.getInstance, accessTokenAcceptableTTL)) { + accessTokenAcceptableTTL: Duration = Duration.Inf +) extends EngineDrsPathResolver(drsConfig, + GoogleOauthDrsCredentials(NoCredentials.getInstance, accessTokenAcceptableTTL) + ) { override protected lazy val httpClientBuilder: HttpClientBuilder = httpClientBuilderOverride getOrElse MockSugar.mock[HttpClientBuilder] @@ -38,13 +39,15 @@ class MockEngineDrsPathResolver(drsConfig: DrsConfig = MockDrsPaths.mockDrsConfi private val drsResolverObjWithFileName = 
drsResolverObjWithGcsPath.copy(fileName = Option("file.txt")) - private val drsResolverObjWithLocalizationPath = drsResolverObjWithGcsPath.copy(localizationPath = Option("/dir/subdir/file.txt")) + private val drsResolverObjWithLocalizationPath = + drsResolverObjWithGcsPath.copy(localizationPath = Option("/dir/subdir/file.txt")) - private val drsResolverObjWithAllThePaths = drsResolverObjWithLocalizationPath.copy(fileName = drsResolverObjWithFileName.fileName) + private val drsResolverObjWithAllThePaths = + drsResolverObjWithLocalizationPath.copy(fileName = drsResolverObjWithFileName.fileName) private val drsResolverObjWithNoGcsPath = drsResolverObjWithGcsPath.copy(gsUri = None) - override def resolveDrs(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): IO[DrsResolverResponse] = { + override def resolveDrs(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): IO[DrsResolverResponse] = drsPath match { case MockDrsPaths.drsPathResolvingGcsPath => IO(drsResolverObjWithGcsPath) case MockDrsPaths.drsPathWithNonPathChars => IO(drsResolverObjWithGcsPath) @@ -60,7 +63,6 @@ class MockEngineDrsPathResolver(drsConfig: DrsConfig = MockDrsPaths.mockDrsConfi ) ) } - } override lazy val getAccessToken: ErrorOr[String] = MockDrsPaths.mockToken.validNel } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpClientPool.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpClientPool.scala index d93b0aa6f82..b13eb844a24 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpClientPool.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpClientPool.scala @@ -7,25 +7,26 @@ import org.apache.commons.net.ftp.FTPClient import scala.concurrent.duration._ object FtpClientPool extends StrictLogging { - def dispose(ftpClient: FTPClient) = try { + def dispose(ftpClient: FTPClient) = try if (ftpClient.isConnected) { ftpClient.logout() ftpClient.disconnect() } - } catch { + catch { case e: Exception => logger.debug("Failed to disconnect ftp client", e) } } -class FtpClientPool(capacity: Int, maxIdleTime: FiniteDuration, factory: () => FTPClient) extends ExpiringPool[FTPClient]( - capacity = capacity, - maxIdleTime = maxIdleTime, - referenceType = ReferenceType.Strong, - _factory = factory, - // Reset is called every time a client is added or released back to the pool. We don't want to actually reset the connection here - // otherwise we'd need to login again and reconfigure the connection every time - _reset = Function.const(()), - _dispose = FtpClientPool.dispose, - // Could not find a good health check at the moment (isAvailable and isConnected on the socket seem to both return false sometimes even if the client is fine) - _healthCheck = Function.const(true) -) +class FtpClientPool(capacity: Int, maxIdleTime: FiniteDuration, factory: () => FTPClient) + extends ExpiringPool[FTPClient]( + capacity = capacity, + maxIdleTime = maxIdleTime, + referenceType = ReferenceType.Strong, + _factory = factory, + // Reset is called every time a client is added or released back to the pool. 
We don't want to actually reset the connection here + // otherwise we'd need to login again and reconfigure the connection every time + _reset = Function.const(()), + _dispose = FtpClientPool.dispose, + // Could not find a good health check at the moment (isAvailable and isConnected on the socket seem to both return false sometimes even if the client is fine) + _healthCheck = Function.const(true) + ) diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala index 96e05702034..37ace6c250b 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala @@ -37,17 +37,25 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends * Returns a listing of keys within a bucket starting with prefix. The returned keys should include the prefix. The * paths must be absolute, but the key should not begin with a slash. */ - override def listObjects(cloudHost: String, cloudPathPrefix: String, markerOption: Option[String]): CloudNioFileList = withAutoRelease(cloudHost) { client => - FtpListFiles(cloudHost, cloudPathPrefix, "list objects") - .run(client) - .map({ files => - val cleanFiles = files.map(_.getName).map(cloudPathPrefix.stripPrefix("/").ensureSlashed + _) - CloudNioFileList(cleanFiles, markerOption) - }) - }.unsafeRunSync() - - override def copy(sourceCloudHost: String, sourceCloudPath: String, targetCloudHost: String, targetCloudPath: String): Unit = { - if (sourceCloudHost != targetCloudHost) throw new UnsupportedOperationException(s"Cannot copy files across different ftp servers: Source host: $sourceCloudHost, Target host: $targetCloudHost") + override def listObjects(cloudHost: String, cloudPathPrefix: String, markerOption: Option[String]): CloudNioFileList = + withAutoRelease(cloudHost) { client => + FtpListFiles(cloudHost, cloudPathPrefix, "list objects") + .run(client) + .map { files => + val cleanFiles = files.map(_.getName).map(cloudPathPrefix.stripPrefix("/").ensureSlashed + _) + CloudNioFileList(cleanFiles, markerOption) + } + }.unsafeRunSync() + + override def copy(sourceCloudHost: String, + sourceCloudPath: String, + targetCloudHost: String, + targetCloudPath: String + ): Unit = { + if (sourceCloudHost != targetCloudHost) + throw new UnsupportedOperationException( + s"Cannot copy files across different ftp servers: Source host: $sourceCloudHost, Target host: $targetCloudHost" + ) val fileSystem = findFileSystem(sourceCloudHost) @@ -63,7 +71,11 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends Util.copyStream(ios.inputStream, ios.outputStream) } match { case Success(_) => - case Failure(failure) => throw new IOException(s"Failed to copy ftp://$sourceCloudHost/$sourceCloudPath to ftp://$targetCloudHost/$targetCloudPath", failure) + case Failure(failure) => + throw new IOException( + s"Failed to copy ftp://$sourceCloudHost/$sourceCloudPath to ftp://$targetCloudHost/$targetCloudPath", + failure + ) } } @@ -71,12 +83,15 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends FtpDeleteFile(cloudHost, cloudPath, "delete").run(client) }.unsafeRunSync() - private def inputStream(cloudHost: String, cloudPath: String, offset: Long, lease: Lease[FTPClient]): IO[LeasedInputStream] = { + private def inputStream(cloudHost: String, + 
cloudPath: String, + offset: Long, + lease: Lease[FTPClient] + ): IO[LeasedInputStream] = FtpInputStream(cloudHost, cloudPath, offset) .run(lease.get()) // Wrap the input stream in a LeasedInputStream so that the lease can be released when the stream is closed .map(new LeasedInputStream(cloudHost, cloudPath, _, lease)) - } override def read(cloudHost: String, cloudPath: String, offset: Long): ReadableByteChannel = { for { @@ -85,11 +100,10 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends } yield Channels.newChannel(is) }.unsafeRunSync() - private def outputStream(cloudHost: String, cloudPath: String, lease: Lease[FTPClient]): IO[LeasedOutputStream] = { + private def outputStream(cloudHost: String, cloudPath: String, lease: Lease[FTPClient]): IO[LeasedOutputStream] = FtpOutputStream(cloudHost, cloudPath) .run(lease.get()) .map(new LeasedOutputStream(cloudHost, cloudPath, _, lease)) - } override def write(cloudHost: String, cloudPath: String): WritableByteChannel = { for { @@ -98,15 +112,16 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends } yield Channels.newChannel(os) }.unsafeRunSync() - override def fileAttributes(cloudHost: String, cloudPath: String): Option[CloudNioRegularFileAttributes] = withAutoRelease(cloudHost) { client => - FtpListFiles(cloudHost, cloudPath, "get file attributes") - .run(client) - .map( - _.headOption map { file => - new FtpCloudNioRegularFileAttributes(file, cloudHost + cloudPath) - } - ) - }.unsafeRunSync() + override def fileAttributes(cloudHost: String, cloudPath: String): Option[CloudNioRegularFileAttributes] = + withAutoRelease(cloudHost) { client => + FtpListFiles(cloudHost, cloudPath, "get file attributes") + .run(client) + .map( + _.headOption map { file => + new FtpCloudNioRegularFileAttributes(file, cloudHost + cloudPath) + } + ) + }.unsafeRunSync() override def createDirectory(cloudHost: String, cloudPath: String) = withAutoRelease(cloudHost) { client => val operation = FtpCreateDirectory(cloudHost, cloudPath) @@ -129,7 +144,8 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends private def findFileSystem(host: String): FtpCloudNioFileSystem = fsProvider.newCloudNioFileSystemFromHost(host) - private def acquireLease[A](host: String): IO[Lease[FTPClient]] = IO { findFileSystem(host).leaseClient } + private def acquireLease[A](host: String): IO[Lease[FTPClient]] = IO(findFileSystem(host).leaseClient) - private def withAutoRelease[A](cloudHost: String): (FTPClient => IO[A]) => IO[A] = autoRelease[A](acquireLease(cloudHost)) + private def withAutoRelease[A](cloudHost: String): (FTPClient => IO[A]) => IO[A] = + autoRelease[A](acquireLease(cloudHost)) } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystem.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystem.scala index 91a26bc8f8e..dd0bb79cfa7 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystem.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystem.scala @@ -12,7 +12,9 @@ object FtpCloudNioFileSystem { val logger = LoggerFactory.getLogger("FtpFileSystem") } -class FtpCloudNioFileSystem(provider: FtpCloudNioFileSystemProvider, host: String) extends CloudNioFileSystem(provider, host) with StrictLogging { +class FtpCloudNioFileSystem(provider: FtpCloudNioFileSystemProvider, host: String) + extends CloudNioFileSystem(provider, host) + with 
StrictLogging { private val credentials = provider.credentials private val ftpConfig = provider.ftpConfig private val connectionModeFunction: FTPClient => Unit = ftpConfig.connectionMode match { @@ -20,7 +22,7 @@ class FtpCloudNioFileSystem(provider: FtpCloudNioFileSystemProvider, host: Strin case Active => client: FTPClient => client.enterLocalActiveMode() } - private [ftp] lazy val clientFactory = () => { + private[ftp] lazy val clientFactory = () => { val client = new FTPClient() client.setDefaultPort(ftpConfig.connectionPort) client.connect(host) @@ -33,7 +35,10 @@ class FtpCloudNioFileSystem(provider: FtpCloudNioFileSystemProvider, host: Strin private val clientPool = new FtpClientPool(ftpConfig.capacity, ftpConfig.idleConnectionTimeout, clientFactory) def leaseClient = ftpConfig.leaseTimeout match { - case Some(timeout) => clientPool.tryAcquire(timeout).getOrElse(throw new TimeoutException("Timed out waiting for an available connection, try again later.")) + case Some(timeout) => + clientPool + .tryAcquire(timeout) + .getOrElse(throw new TimeoutException("Timed out waiting for an available connection, try again later.")) case _ => clientPool.acquire() } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProvider.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProvider.scala index e20edfc76ff..bb2faaa5ccd 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProvider.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProvider.scala @@ -11,7 +11,11 @@ import com.typesafe.scalalogging.StrictLogging import scala.util.{Failure, Success, Try} -class FtpCloudNioFileSystemProvider(override val config: Config, val credentials: FtpCredentials, ftpFileSystems: FtpFileSystems) extends CloudNioFileSystemProvider with StrictLogging { +class FtpCloudNioFileSystemProvider(override val config: Config, + val credentials: FtpCredentials, + ftpFileSystems: FtpFileSystems +) extends CloudNioFileSystemProvider + with StrictLogging { val ftpConfig = ftpFileSystems.config override def fileProvider = new FtpCloudNioFileProvider(this) @@ -29,18 +33,19 @@ class FtpCloudNioFileSystemProvider(override val config: Config, val credentials * will try to get it using the fileProvider which will require a new client lease and can result in a deadlock of the client pool, since * the read channel holds on to its lease until its closed. 
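 * Concretely: with a pool of capacity 1, asking for the size from inside the read
 * channel would request a second lease while the channel still holds the first, and
 * that request could never be satisfied. Resolving the size up front means the two
 * leases are taken one after the other instead of being held simultaneously.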
*/ - val preComputedFileSize = retry.from(() => fileProvider.fileAttributes(cloudNioPath.cloudHost, cloudNioPath.cloudPath).map(_.size())) + val preComputedFileSize = + retry.from(() => fileProvider.fileAttributes(cloudNioPath.cloudHost, cloudNioPath.cloudPath).map(_.size())) new CloudNioReadChannel(fileProvider, retry, cloudNioPath) { override def fileSize = preComputedFileSize } } - override def createDirectory(dir: Path, attrs: FileAttribute[_]*): Unit = { + override def createDirectory(dir: Path, attrs: FileAttribute[_]*): Unit = Try { - retry.from(() => { + retry.from { () => val cloudNioPath = CloudNioPath.checkPath(dir) fileProvider.createDirectory(cloudNioPath.cloudHost, cloudNioPath.cloudPath) - }) + } } match { case Success(_) => case Failure(f: FileAlreadyExistsException) => throw f @@ -51,15 +56,12 @@ class FtpCloudNioFileSystemProvider(override val config: Config, val credentials throw f case Failure(f) => throw f } - } override def usePseudoDirectories = false - override def newCloudNioFileSystem(uriAsString: String, config: Config) = { + override def newCloudNioFileSystem(uriAsString: String, config: Config) = newCloudNioFileSystemFromHost(getHost(uriAsString)) - } - - def newCloudNioFileSystemFromHost(host: String) = { + + def newCloudNioFileSystemFromHost(host: String) = ftpFileSystems.getFileSystem(host, this) - } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCredentials.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCredentials.scala index 2e553a5f66b..c63efad1de7 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCredentials.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCredentials.scala @@ -11,7 +11,8 @@ sealed trait FtpCredentials { } // Yes, FTP uses plain text username / password -case class FtpAuthenticatedCredentials(username: String, password: String, account: Option[String]) extends FtpCredentials { +case class FtpAuthenticatedCredentials(username: String, password: String, account: Option[String]) + extends FtpCredentials { override def login(ftpClient: FTPClient) = { lazy val replyString = Option(ftpClient.getReplyString).getOrElse("N/A") diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystems.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystems.scala index 006e19982ff..63f0c10f655 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystems.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystems.scala @@ -9,7 +9,7 @@ import scala.concurrent.duration._ object FtpFileSystems { val DefaultConfig = FtpFileSystemsConfiguration(1.day, Option(1.hour), 5, 1.hour, 21, Passive) val Default = new FtpFileSystems(DefaultConfig) - private [ftp] case class FtpCacheKey(host: String, ftpProvider: FtpCloudNioFileSystemProvider) + private[ftp] case class FtpCacheKey(host: String, ftpProvider: FtpCloudNioFileSystemProvider) } /** @@ -21,18 +21,18 @@ class FtpFileSystems(val config: FtpFileSystemsConfiguration) { private val fileSystemTTL = config.cacheTTL - private val fileSystemsCache: LoadingCache[FtpCacheKey, FtpCloudNioFileSystem] = CacheBuilder.newBuilder() + private val fileSystemsCache: LoadingCache[FtpCacheKey, FtpCloudNioFileSystem] = CacheBuilder + .newBuilder() .expireAfterAccess(fileSystemTTL.length, fileSystemTTL.unit) - .removalListener((notification: RemovalNotification[FtpCacheKey, FtpCloudNioFileSystem]) => { + 
.removalListener { (notification: RemovalNotification[FtpCacheKey, FtpCloudNioFileSystem]) => notification.getValue.close() - }) + } .build[FtpCacheKey, FtpCloudNioFileSystem](new CacheLoader[FtpCacheKey, FtpCloudNioFileSystem] { override def load(key: FtpCacheKey) = createFileSystem(key) }) - - private [ftp] def createFileSystem(key: FtpCacheKey) = new FtpCloudNioFileSystem(key.ftpProvider, key.host) - def getFileSystem(host: String, ftpCloudNioFileProvider: FtpCloudNioFileSystemProvider) = { + private[ftp] def createFileSystem(key: FtpCacheKey) = new FtpCloudNioFileSystem(key.ftpProvider, key.host) + + def getFileSystem(host: String, ftpCloudNioFileProvider: FtpCloudNioFileSystemProvider) = fileSystemsCache.get(FtpCacheKey(host, ftpCloudNioFileProvider)) - } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystemsConfiguration.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystemsConfiguration.scala index 2c305f82edf..8e5071f604a 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystemsConfiguration.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpFileSystemsConfiguration.scala @@ -9,7 +9,8 @@ case class FtpFileSystemsConfiguration(cacheTTL: FiniteDuration, capacity: Int, idleConnectionTimeout: FiniteDuration, connectionPort: Int, - connectionMode: ConnectionMode) + connectionMode: ConnectionMode +) object FtpFileSystemsConfiguration { sealed trait ConnectionMode diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpUtil.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpUtil.scala index 9138401f803..049c54d9d89 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpUtil.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpUtil.scala @@ -12,16 +12,16 @@ object FtpUtil { def ensureSlashed = if (cloudPath.endsWith("/")) cloudPath else s"$cloudPath/" } - case class FtpIoException(message: String, code: Int, replyString: String, cause: Option[Throwable] = None) extends IOException(s"$message: $replyString", cause.orNull) { + case class FtpIoException(message: String, code: Int, replyString: String, cause: Option[Throwable] = None) + extends IOException(s"$message: $replyString", cause.orNull) { def isTransient = FTPReply.isNegativeTransient(code) def isFatal = FTPReply.isNegativePermanent(code) } - def autoRelease[A](acquire: IO[Lease[FTPClient]])(action: FTPClient => IO[A]): IO[A] = { - acquire.bracketCase(lease => action(lease.get()))({ - // If there's a cause, the call to the FTP client threw an exception, assume the connection is compromised and invalidate the lease - case (lease, ExitCase.Error(FtpIoException(_, _, _, Some(_)))) => IO { lease.invalidate() } - case (lease, _) => IO { lease.release() } - }) - } + def autoRelease[A](acquire: IO[Lease[FTPClient]])(action: FTPClient => IO[A]): IO[A] = + acquire.bracketCase(lease => action(lease.get())) { + // If there's a cause, the call to the FTP client threw an exception, assume the connection is compromised and invalidate the lease + case (lease, ExitCase.Error(FtpIoException(_, _, _, Some(_)))) => IO(lease.invalidate()) + case (lease, _) => IO(lease.release()) + } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/InputOutputStreams.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/InputOutputStreams.scala index 2455fa2b86a..a1a8b158ec7 100644 --- 
a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/InputOutputStreams.scala
+++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/InputOutputStreams.scala
@@ -3,11 +3,9 @@ package cloud.nio.impl.ftp
 import java.io.{InputStream, OutputStream}
 
 class InputOutputStreams(val inputStream: InputStream, val outputStream: OutputStream) extends AutoCloseable {
-  override def close() = {
-    try {
+  override def close() =
+    try
       inputStream.close()
-    } finally {
+    finally
       outputStream.close()
-    }
-  }
 }
diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala
index 29d0b7c6bf7..2e794a96791 100644
--- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala
+++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala
@@ -8,7 +8,8 @@ import cloud.nio.impl.ftp.operations.FtpCompletePendingCommand
 import io.github.andrebeat.pool.Lease
 import org.apache.commons.net.ftp.FTPClient
 
-class LeasedInputStream(cloudHost: String, cloudPath: String, inputStream: InputStream, lease: Lease[FTPClient]) extends InputStream {
+class LeasedInputStream(cloudHost: String, cloudPath: String, inputStream: InputStream, lease: Lease[FTPClient])
+    extends InputStream {
   override def read() = inputStream.read()
   override def read(b: Array[Byte]) = inputStream.read(b)
   override def read(b: Array[Byte], off: Int, len: Int): Int = inputStream.read(b, off, len)
@@ -16,7 +17,8 @@ class LeasedInputStream(cloudHost: String, cloudPath: String, inputStream: Input
   override def available = inputStream.available()
   override def close() = {
     inputStream.close()
-    autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close input steam").run).void.unsafeRunSync()
+    autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close input stream").run).void
+      .unsafeRunSync()
   }
   override def mark(readlimit: Int) = inputStream.mark(readlimit)
   override def reset() = inputStream.reset()
diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala
index 5a4f43acabb..7fe09f3fbf1 100644
--- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala
+++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala
@@ -8,13 +8,15 @@ import cloud.nio.impl.ftp.operations.FtpCompletePendingCommand
 import io.github.andrebeat.pool.Lease
 import org.apache.commons.net.ftp.FTPClient
 
-class LeasedOutputStream(cloudHost: String, cloudPath: String, outputStream: OutputStream, lease: Lease[FTPClient]) extends OutputStream {
+class LeasedOutputStream(cloudHost: String, cloudPath: String, outputStream: OutputStream, lease: Lease[FTPClient])
+    extends OutputStream {
   override def write(b: Int) = outputStream.write(b)
   override def write(b: Array[Byte]) = outputStream.write(b)
   override def write(b: Array[Byte], off: Int, len: Int): Unit = outputStream.write(b, off, len)
   override def flush() = outputStream.flush()
   override def close() = {
     outputStream.close()
-    autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close input steam").run).void.unsafeRunSync()
+    autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close output stream").run).void
+      .unsafeRunSync()
   }
 }
diff --git
a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/operations/FtpOperation.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/operations/FtpOperation.scala index 400b0b26740..dee465cff4c 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/operations/FtpOperation.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/operations/FtpOperation.scala @@ -14,93 +14,101 @@ sealed trait FtpOperation[A] { def description: String def action: FTPClient => A def run(client: FTPClient): IO[A] - + def fullPath = s"ftp://$cloudHost/$cloudPath" protected def errorMessage = s"Failed to $description at $fullPath" - protected def fail(client: FTPClient, cause: Option[Throwable] = None) = { + protected def fail(client: FTPClient, cause: Option[Throwable] = None) = IO.raiseError[A](generateException(client, cause)) - } - private [operations] def generateException(client: FTPClient, cause: Option[Throwable]) = cause match { - case None if client.getReplyCode == FTPReply.FILE_UNAVAILABLE && client.getReplyString.toLowerCase.contains("exists") => + private[operations] def generateException(client: FTPClient, cause: Option[Throwable]) = cause match { + case None + if client.getReplyCode == FTPReply.FILE_UNAVAILABLE && client.getReplyString.toLowerCase.contains("exists") => new FileAlreadyExistsException(fullPath) - case None if client.getReplyCode == FTPReply.FILE_UNAVAILABLE && client.getReplyString.toLowerCase.contains("no such file") => + case None + if client.getReplyCode == FTPReply.FILE_UNAVAILABLE && client.getReplyString.toLowerCase.contains( + "no such file" + ) => new NoSuchFileException(fullPath) case None => FtpIoException(errorMessage, client.getReplyCode, Option(client.getReplyString).getOrElse("N/A")) case Some(c) => FtpIoException(errorMessage, client.getReplyCode, Option(client.getReplyString).getOrElse("N/A"), Option(c)) } - + protected def handleError(client: FTPClient)(failure: Throwable) = fail(client, Option(failure)) - protected def commonRun(client: FTPClient, bind: A => IO[A]): IO[A] = { - IO { action(client) } redeemWith (handleError(client), bind) - } - + protected def commonRun(client: FTPClient, bind: A => IO[A]): IO[A] = + IO(action(client)) redeemWith (handleError(client), bind) + override def toString = s"$description at $fullPath" } sealed trait FtpBooleanOperation extends FtpOperation[Boolean] { protected def failOnFalse: Boolean = true - - def run(client: FTPClient): IO[Boolean] = { + + def run(client: FTPClient): IO[Boolean] = commonRun(client, - { - // Operation didn't throw but the result is false which means it failed - case false if failOnFalse => fail(client) - case result => IO.pure(result) - } + { + // Operation didn't throw but the result is false which means it failed + case false if failOnFalse => fail(client) + case result => IO.pure(result) + } ) - } } sealed trait FtpValueOperation[A <: AnyRef] extends FtpOperation[A] { - def run(client: FTPClient): IO[A] = { - commonRun(client, - { - // Operation didn't throw but the result is null which means it failed - case null => fail(client) - case result => IO.pure(result) - } + def run(client: FTPClient): IO[A] = + commonRun(client, + { + // Operation didn't throw but the result is null which means it failed + case null => fail(client) + case result => IO.pure(result) + } ) - } } -case class FtpListFiles(cloudHost: String, cloudPath: String, description: String = "List files") extends FtpValueOperation[Array[FTPFile]] { +case class 
FtpListFiles(cloudHost: String, cloudPath: String, description: String = "List files") + extends FtpValueOperation[Array[FTPFile]] { override val action = _.listFiles(cloudPath.ensureSlashedPrefix) } -case class FtpListDirectories(cloudHost: String, cloudPath: String, description: String = "List files") extends FtpValueOperation[Array[FTPFile]] { +case class FtpListDirectories(cloudHost: String, cloudPath: String, description: String = "List files") + extends FtpValueOperation[Array[FTPFile]] { // We need to list the directories in the parent and see if any matches the name, hence the string manipulations - lazy val parts = cloudPath.ensureSlashedPrefix.stripSuffix(CloudNioFileSystem.Separator).split(CloudNioFileSystem.Separator) + lazy val parts = + cloudPath.ensureSlashedPrefix.stripSuffix(CloudNioFileSystem.Separator).split(CloudNioFileSystem.Separator) lazy val parent = parts.init.mkString(CloudNioFileSystem.Separator) lazy val directoryName = parts.last override val action = _.listDirectories(parent) } -case class FtpDeleteFile(cloudHost: String, cloudPath: String, description: String = "delete file") extends FtpBooleanOperation { +case class FtpDeleteFile(cloudHost: String, cloudPath: String, description: String = "delete file") + extends FtpBooleanOperation { override val action = _.deleteFile(cloudPath.ensureSlashedPrefix) override val failOnFalse = false } -case class FtpInputStream(cloudHost: String, cloudPath: String, offset: Long, description: String = "read") extends FtpValueOperation[InputStream] { +case class FtpInputStream(cloudHost: String, cloudPath: String, offset: Long, description: String = "read") + extends FtpValueOperation[InputStream] { override val action = { client => client.setRestartOffset(offset) client.retrieveFileStream(cloudPath.ensureSlashedPrefix) } } -case class FtpOutputStream(cloudHost: String, cloudPath: String, description: String = "write") extends FtpValueOperation[OutputStream] { +case class FtpOutputStream(cloudHost: String, cloudPath: String, description: String = "write") + extends FtpValueOperation[OutputStream] { override val action = _.storeFileStream(cloudPath.ensureSlashedPrefix) } -case class FtpCreateDirectory(cloudHost: String, cloudPath: String, description: String = "create directory") extends FtpBooleanOperation { +case class FtpCreateDirectory(cloudHost: String, cloudPath: String, description: String = "create directory") + extends FtpBooleanOperation { override val action = _.makeDirectory(cloudPath.ensureSlashedPrefix) } -case class FtpCompletePendingCommand(cloudHost: String, cloudPath: String, description: String = "close stream") extends FtpBooleanOperation { +case class FtpCompletePendingCommand(cloudHost: String, cloudPath: String, description: String = "close stream") + extends FtpBooleanOperation { override val action = _.completePendingCommand() } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala index 91b69bf7e94..1837ee27139 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala @@ -17,15 +17,13 @@ class FtpClientPoolSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche var loggedOut: Boolean = false var disconnected: Boolean = false client.isConnected.returns(true) - client.logout().responds(_ => { + client.logout().responds { _ => 
loggedOut = true true - }) - client.disconnect().responds(_ => { - disconnected = true - }) + } + client.disconnect().responds(_ => disconnected = true) - val clientPool = new FtpClientPool(1, 10.minutes, () => { client }) + val clientPool = new FtpClientPool(1, 10.minutes, () => client) clientPool.acquire().invalidate() loggedOut shouldBe true diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileProviderSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileProviderSpec.scala index f68f67cbf67..f1ca24c847a 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileProviderSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileProviderSpec.scala @@ -46,7 +46,8 @@ class FtpCloudNioFileProviderSpec extends AnyFlatSpec with CromwellTimeoutSpec w fakeUnixFileSystem.add(new DirectoryEntry(directory)) fileProvider.listObjects("localhost", root, None).paths should contain theSameElementsAs List( - file.stripPrefix("/"), directory.stripPrefix("/") + file.stripPrefix("/"), + directory.stripPrefix("/") ) } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala index e23e8ba450e..952e171f9ec 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala @@ -13,8 +13,12 @@ import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -class FtpCloudNioFileSystemProviderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar - with MockFtpFileSystem { +class FtpCloudNioFileSystemProviderSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with MockSugar + with MockFtpFileSystem { behavior of "FtpCloudNioFileSystemProviderSpec" @@ -45,7 +49,9 @@ class FtpCloudNioFileSystemProviderSpec extends AnyFlatSpec with CromwellTimeout it should "pre compute the size before opening a read channel to avoid deadlocks" in { val mockSizeFunction = mock[() => Long] val provider: FtpCloudNioFileSystemProvider = new FtpCloudNioFileSystemProvider( - ConfigFactory.empty, FtpAnonymousCredentials, ftpFileSystems + ConfigFactory.empty, + FtpAnonymousCredentials, + ftpFileSystems ) { override def fileProvider: FtpCloudNioFileProvider = new FtpCloudNioFileProvider(this) { @@ -59,9 +65,8 @@ class FtpCloudNioFileSystemProviderSpec extends AnyFlatSpec with CromwellTimeout } ) - override def read(cloudHost: String, cloudPath: String, offset: Long): ReadableByteChannel = { + override def read(cloudHost: String, cloudPath: String, offset: Long): ReadableByteChannel = mock[ReadableByteChannel] - } } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemSpec.scala index d12db7ec612..f2b86fbd621 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemSpec.scala @@ -11,7 +11,6 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.TimeoutException import scala.concurrent.duration._ 
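// Minimal usage sketch mirroring what the test below exercises (hypothetical host;
// a pool of capacity 3 hands out at most three clients, and leaseTimeout turns a
// fourth request into a TimeoutException instead of blocking forever):
//
//   val fileSystems = new FtpFileSystems(
//     FtpFileSystems.DefaultConfig.copy(leaseTimeout = Option(1.second), capacity = 3))
//   val provider = new FtpCloudNioFileSystemProvider(ConfigFactory.empty, FtpAnonymousCredentials, fileSystems)
//   val fs = provider.newCloudNioFileSystemFromHost("ftp.example.com")
//   val leases = List.fill(3)(fs.leaseClient) // exactly exhausts the pool
//   // fs.leaseClient                         // would throw TimeoutException after 1 second
//   leases.foreach(_.release())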
- class FtpCloudNioFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Eventually { behavior of "FtpCloudNioFileSystemSpec" @@ -20,11 +19,12 @@ class FtpCloudNioFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec wit implicit val patience = patienceConfig it should "lease the number of clients configured, not more, not less" in { - val fileSystems = new FtpFileSystems(FtpFileSystems.DefaultConfig.copy(leaseTimeout = Option(1.second), capacity = 3)) + val fileSystems = + new FtpFileSystems(FtpFileSystems.DefaultConfig.copy(leaseTimeout = Option(1.second), capacity = 3)) val provider = new FtpCloudNioFileSystemProvider(ConfigFactory.empty, FtpAnonymousCredentials, fileSystems) val fileSystem = new FtpCloudNioFileSystem(provider, "ftp.example.com") { // Override so we don't try to connect to anything - override private[ftp] lazy val clientFactory = () => { new FTPClient() } + override private[ftp] lazy val clientFactory = () => new FTPClient() } val client1 = fileSystem.leaseClient @@ -46,7 +46,7 @@ class FtpCloudNioFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val provider = new FtpCloudNioFileSystemProvider(ConfigFactory.empty, FtpAnonymousCredentials, fileSystems) val fileSystem = new FtpCloudNioFileSystem(provider, "ftp.example.com") { // Override so we don't try to connect to anything - override private[ftp] lazy val clientFactory = () => { new FTPClient() } + override private[ftp] lazy val clientFactory = () => new FTPClient() override def leaseClient = { val lease = super.leaseClient diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala index 0ba19060ab2..b4262452e35 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala @@ -17,21 +17,21 @@ class FtpCredentialsSpec extends AnyFlatSpec with Matchers with MockSugar with C var loggedInWithAccount: Boolean = false var loggedInWithoutAccount: Boolean = false val client = mock[FTPClient] - client.login(anyString, anyString).responds(_ => { + client.login(anyString, anyString).responds { _ => loggedInWithoutAccount = true true - }) - client.login(anyString, anyString, anyString).responds(_ => { + } + client.login(anyString, anyString, anyString).responds { _ => loggedInWithAccount = true true - }) + } FtpAuthenticatedCredentials("user", "password", None).login(client) loggedInWithoutAccount shouldBe true loggedInWithAccount shouldBe false // reset - loggedInWithoutAccount= false + loggedInWithoutAccount = false FtpAuthenticatedCredentials("user", "password", Option("account")).login(client) loggedInWithAccount shouldBe true diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala index 86f86914103..6a186a0d5f2 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala @@ -35,8 +35,8 @@ class FtpFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match verify(mockCreateFunction).apply(ftpCacheKey) } - class MockFtpFileSystems(conf: FtpFileSystemsConfiguration, - mockCreateFunction: FtpCacheKey => FtpCloudNioFileSystem) extends 
FtpFileSystems(conf) { + class MockFtpFileSystems(conf: FtpFileSystemsConfiguration, mockCreateFunction: FtpCacheKey => FtpCloudNioFileSystem) + extends FtpFileSystems(conf) { override private[ftp] def createFileSystem(key: FtpCacheKey) = mockCreateFunction(key) } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala index 0e86194ecbd..1796cef16b9 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala @@ -16,7 +16,7 @@ class FtpUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "autoRelease" it should "release the lease when the client fails the operation without throwing" in { - val clientPool = new FtpClientPool(1, 10.minutes, () => { new FTPClient }) + val clientPool = new FtpClientPool(1, 10.minutes, () => new FTPClient) val lease = clientPool.acquire() val action = autoRelease(IO.pure(lease)) { _ => @@ -28,7 +28,7 @@ class FtpUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "invalidate the lease when the client fails the operation by throwing" in { - val clientPool = new FtpClientPool(1, 10.minutes, () => { new FTPClient }) + val clientPool = new FtpClientPool(1, 10.minutes, () => new FTPClient) val lease = clientPool.acquire() val action = autoRelease(IO.pure(lease)) { _ => @@ -40,7 +40,7 @@ class FtpUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "release the lease when the operation succeeds" in { - val clientPool = new FtpClientPool(1, 10.minutes, () => { new FTPClient }) + val clientPool = new FtpClientPool(1, 10.minutes, () => new FTPClient) val lease = clientPool.acquire() val action = autoRelease(IO.pure(lease)) { _ => diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala index 6b70679a239..7042571b993 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala @@ -22,11 +22,11 @@ class LeaseInputStreamSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat } val mockClient = mock[FTPClient] var completed: Boolean = false - mockClient.completePendingCommand().returns({ + mockClient.completePendingCommand().returns { completed = true true - }) - val clientPool = new FtpClientPool(1, 10.minutes, () => { mockClient }) + } + val clientPool = new FtpClientPool(1, 10.minutes, () => mockClient) val lease = clientPool.acquire() val leasedInputStream = new LeasedInputStream("host", "path", is, lease) diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala index ee82de6ffea..5c5bef9b9f4 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala @@ -17,11 +17,11 @@ class LeaseOutputStreamSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma val os = new TestOutputStream val mockClient = mock[FTPClient] var completed: Boolean = false - mockClient.completePendingCommand().returns({ + 
mockClient.completePendingCommand().returns { completed = true true - }) - val clientPool = new FtpClientPool(1, 10.minutes, () => { mockClient }) + } + val clientPool = new FtpClientPool(1, 10.minutes, () => mockClient) val lease = clientPool.acquire() val leasedOutputStream = new LeasedOutputStream("host", "path", os, lease) diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala index e3c954c8d24..42716da92cd 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala @@ -22,15 +22,18 @@ trait MockFtpFileSystem extends BeforeAndAfterAll { this: Suite => connectionPort = Option(fakeFtpServer.getServerControlPort) } - override def afterAll() = { + override def afterAll() = fakeFtpServer.stop() - } - lazy val ftpFileSystemsConfiguration = FtpFileSystems.DefaultConfig.copy(connectionPort = connectionPort.getOrElse(throw new RuntimeException("Fake FTP server has not been started"))) + lazy val ftpFileSystemsConfiguration = FtpFileSystems.DefaultConfig.copy(connectionPort = + connectionPort.getOrElse(throw new RuntimeException("Fake FTP server has not been started")) + ) lazy val ftpFileSystems = new FtpFileSystems(ftpFileSystemsConfiguration) // Do not call this before starting the server - lazy val mockProvider = { - new FtpCloudNioFileSystemProvider(ConfigFactory.empty, FtpAuthenticatedCredentials("test_user", "test_password", None), ftpFileSystems) - } + lazy val mockProvider = + new FtpCloudNioFileSystemProvider(ConfigFactory.empty, + FtpAuthenticatedCredentials("test_user", "test_password", None), + ftpFileSystems + ) } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioBackoff.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioBackoff.scala index e55031f5caa..eb0ae65df29 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioBackoff.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioBackoff.scala @@ -6,8 +6,10 @@ import com.google.api.client.util.ExponentialBackOff import scala.concurrent.duration.{Duration, FiniteDuration} trait CloudNioBackoff { + /** Next interval in millis */ def backoffMillis: Long + /** Get the next instance of backoff. 
This should be called after every call to backoffMillis */ def next: CloudNioBackoff } @@ -16,8 +18,8 @@ object CloudNioBackoff { private[spi] def newExponentialBackOff(initialInterval: FiniteDuration, maxInterval: FiniteDuration, multiplier: Double, - randomizationFactor: Double, - ): ExponentialBackOff = { + randomizationFactor: Double + ): ExponentialBackOff = new ExponentialBackOff.Builder() .setInitialIntervalMillis(initialInterval.toMillis.toInt) .setMaxIntervalMillis(maxInterval.toMillis.toInt) @@ -25,7 +27,6 @@ object CloudNioBackoff { .setRandomizationFactor(randomizationFactor) .setMaxElapsedTimeMillis(Int.MaxValue) .build() - } } object CloudNioInitialGapBackoff { @@ -33,24 +34,25 @@ object CloudNioInitialGapBackoff { initialInterval: FiniteDuration, maxInterval: FiniteDuration, multiplier: Double, - randomizationFactor: Double = ExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR, - ): CloudNioInitialGapBackoff = { + randomizationFactor: Double = ExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR + ): CloudNioInitialGapBackoff = new CloudNioInitialGapBackoff( initialGap, newExponentialBackOff( initialInterval = initialInterval, maxInterval = maxInterval, multiplier = multiplier, - randomizationFactor = randomizationFactor, + randomizationFactor = randomizationFactor ) ) - } } -case class CloudNioInitialGapBackoff(initialGapMillis: FiniteDuration, googleBackoff: ExponentialBackOff) extends CloudNioBackoff { +case class CloudNioInitialGapBackoff(initialGapMillis: FiniteDuration, googleBackoff: ExponentialBackOff) + extends CloudNioBackoff { assert(initialGapMillis.compareTo(Duration.Zero) != 0, "Initial gap cannot be null, use SimpleBackoff instead.") override val backoffMillis: Long = initialGapMillis.toMillis + /** Switch to a SimpleExponentialBackoff after the initial gap has been used */ override def next = new CloudNioSimpleExponentialBackoff(googleBackoff) } @@ -59,21 +61,21 @@ object CloudNioSimpleExponentialBackoff { def apply(initialInterval: FiniteDuration, maxInterval: FiniteDuration, multiplier: Double, - randomizationFactor: Double = ExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR, - ): CloudNioSimpleExponentialBackoff = { + randomizationFactor: Double = ExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR + ): CloudNioSimpleExponentialBackoff = new CloudNioSimpleExponentialBackoff( newExponentialBackOff( initialInterval = initialInterval, maxInterval = maxInterval, multiplier = multiplier, - randomizationFactor = randomizationFactor, + randomizationFactor = randomizationFactor ) ) - } } case class CloudNioSimpleExponentialBackoff(googleBackoff: ExponentialBackOff) extends CloudNioBackoff { override def backoffMillis: Long = googleBackoff.nextBackOffMillis() + /** google ExponentialBackOff is mutable so we can keep returning the same instance */ override def next: CloudNioBackoff = this } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala index 909c1fc54db..25d0f34e7dc 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala @@ -13,24 +13,19 @@ class CloudNioDirectoryStream( override def iterator(): java.util.Iterator[Path] = pathStream().filterNot(_ == prefix).iterator.asJava - private[this] def pathStream(markerOption: Option[String] = None): LazyList[Path] = { + private[this] def pathStream(markerOption: Option[String] = 
None): LazyList[Path] = listNext(markerOption) match { case CloudNioFileList(keys, Some(marker)) => keys.to(LazyList).map(toPath) ++ pathStream(Option(marker)) case CloudNioFileList(keys, None) => keys.to(LazyList).map(toPath) } - } - private[this] def toPath(key: String): Path = { + private[this] def toPath(key: String): Path = prefix.getFileSystem.getPath("/" + key) - } - private[this] def listNext(markerOption: Option[String]): CloudNioFileList = { - retry.from( - () => fileProvider.listObjects(prefix.cloudHost, prefix.cloudPath, markerOption) - ) - } + private[this] def listNext(markerOption: Option[String]): CloudNioFileList = + retry.from(() => fileProvider.listObjects(prefix.cloudHost, prefix.cloudPath, markerOption)) override def close(): Unit = {} diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileAttributeView.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileAttributeView.scala index 7cc34400676..852951784ac 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileAttributeView.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileAttributeView.scala @@ -11,21 +11,17 @@ final case class CloudNioFileAttributeView( ) extends BasicFileAttributeView { override def name(): String = CloudNioFileAttributeView.Name - override def readAttributes(): CloudNioFileAttributes = { + override def readAttributes(): CloudNioFileAttributes = if (isDirectory) { CloudNioDirectoryAttributes(cloudNioPath) } else { retry - .from( - () => fileProvider.fileAttributes(cloudNioPath.cloudHost, cloudNioPath.cloudPath) - ) + .from(() => fileProvider.fileAttributes(cloudNioPath.cloudHost, cloudNioPath.cloudPath)) .getOrElse(throw new FileNotFoundException(cloudNioPath.uriAsString)) } - } - override def setTimes(lastModifiedTime: FileTime, lastAccessTime: FileTime, createTime: FileTime): Unit = { + override def setTimes(lastModifiedTime: FileTime, lastAccessTime: FileTime, createTime: FileTime): Unit = throw new UnsupportedOperationException - } } object CloudNioFileAttributeView { diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala index 8b93419f350..43384330388 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala @@ -20,49 +20,40 @@ class CloudNioFileSystem(override val provider: CloudNioFileSystemProvider, val // do nothing currently. 
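// For reference, a minimal sketch of the retry idiom used by the attribute
// view above: a thunk is retried under CloudNioRetry with exponential
// backoff, and an empty Option becomes an exception. Assumptions: an empty
// Config leaves CloudNioRetry on its defaults, and the apply()/from()
// signatures are exactly the ones shown in the CloudNioBackoff and
// CloudNioRetry hunks of this patch.
import java.io.FileNotFoundException
import cloud.nio.spi.{CloudNioRetry, CloudNioSimpleExponentialBackoff}
import com.typesafe.config.ConfigFactory
import scala.concurrent.duration._

object RetryIdiomSketch extends App {
  val retry = new CloudNioRetry(ConfigFactory.empty)
  val backoff = CloudNioSimpleExponentialBackoff(
    initialInterval = 10.millis,
    maxInterval = 100.millis,
    multiplier = 2.0
  )

  var attempts = 0
  // Fails twice, then succeeds; from() re-invokes the thunk, backing off
  // between attempts, until it succeeds or maxRetries is exhausted.
  def flakyAttributes(): Option[String] = {
    attempts += 1
    if (attempts < 3) throw new RuntimeException("transient failure") else Option("attributes")
  }

  val result = retry
    .from(() => flakyAttributes(), maxRetries = Option(5), backoff = backoff)
    .getOrElse(throw new FileNotFoundException("no such object"))

  println(s"$result after $attempts attempts")
}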
} - override def isOpen: Boolean = { + override def isOpen: Boolean = true - } - override def isReadOnly: Boolean = { + override def isReadOnly: Boolean = false - } - override def getSeparator: String = { + override def getSeparator: String = CloudNioFileSystem.Separator - } - override def getRootDirectories: java.lang.Iterable[Path] = { + override def getRootDirectories: java.lang.Iterable[Path] = Set[Path](getPath(UnixPath.Root)).asJava - } - override def getFileStores: java.lang.Iterable[FileStore] = { + override def getFileStores: java.lang.Iterable[FileStore] = Set.empty[FileStore].asJava - } - override def getPathMatcher(syntaxAndPattern: String): PathMatcher = { + override def getPathMatcher(syntaxAndPattern: String): PathMatcher = FileSystems.getDefault.getPathMatcher(syntaxAndPattern) - } - override def getUserPrincipalLookupService: UserPrincipalLookupService = { + override def getUserPrincipalLookupService: UserPrincipalLookupService = throw new UnsupportedOperationException - } - override def newWatchService(): WatchService = { + override def newWatchService(): WatchService = throw new UnsupportedOperationException - } - override def supportedFileAttributeViews(): java.util.Set[String] = { + override def supportedFileAttributeViews(): java.util.Set[String] = Set("basic", CloudNioFileAttributeView.Name).asJava - } def canEqual(other: Any): Boolean = other.isInstanceOf[CloudNioFileSystem] override def equals(other: Any): Boolean = other match { case that: CloudNioFileSystem => (that canEqual this) && - provider == that.provider && - host == that.host + provider == that.provider && + host == that.host case _ => false } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala index 9b79e308afd..c78d30d2dce 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala @@ -3,7 +3,7 @@ package cloud.nio.spi import java.net.URI import java.nio.channels.SeekableByteChannel import java.nio.file._ -import java.nio.file.attribute.{BasicFileAttributeView, BasicFileAttributes, FileAttribute, FileAttributeView} +import java.nio.file.attribute.{BasicFileAttributes, BasicFileAttributeView, FileAttribute, FileAttributeView} import java.nio.file.spi.FileSystemProvider import com.typesafe.config.{Config, ConfigFactory} @@ -64,9 +64,8 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { newCloudNioFileSystem(uri.toString, config) } - override def getPath(uri: URI): CloudNioPath = { + override def getPath(uri: URI): CloudNioPath = getFileSystem(uri).getPath(uri.getPath) - } override def newByteChannel( path: Path, @@ -75,7 +74,7 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { ): SeekableByteChannel = { val cloudNioPath = CloudNioPath.checkPath(path) - for (opt <- options.asScala) { + for (opt <- options.asScala) opt match { case StandardOpenOption.READ | StandardOpenOption.WRITE | StandardOpenOption.SPARSE | StandardOpenOption.TRUNCATE_EXISTING | StandardOpenOption.CREATE | StandardOpenOption.CREATE_NEW => @@ -84,7 +83,6 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { StandardOpenOption.SYNC => throw new UnsupportedOperationException(opt.toString) } - } if (options.contains(StandardOpenOption.READ) && options.contains(StandardOpenOption.WRITE)) { throw new 
UnsupportedOperationException("Cannot open a READ+WRITE channel") @@ -95,70 +93,64 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { } } - protected def cloudNioReadChannel(retry: CloudNioRetry, cloudNioPath: CloudNioPath): CloudNioReadChannel = new CloudNioReadChannel(fileProvider, retry, cloudNioPath) - protected def cloudNioWriteChannel(retry: CloudNioRetry, cloudNioPath: CloudNioPath): CloudNioWriteChannel = new CloudNioWriteChannel(fileProvider, retry, cloudNioPath) + protected def cloudNioReadChannel(retry: CloudNioRetry, cloudNioPath: CloudNioPath): CloudNioReadChannel = + new CloudNioReadChannel(fileProvider, retry, cloudNioPath) + protected def cloudNioWriteChannel(retry: CloudNioRetry, cloudNioPath: CloudNioPath): CloudNioWriteChannel = + new CloudNioWriteChannel(fileProvider, retry, cloudNioPath) - override def createDirectory(dir: Path, attrs: FileAttribute[_]*): Unit = retry.from(() => { + override def createDirectory(dir: Path, attrs: FileAttribute[_]*): Unit = retry.from { () => val cloudNioPath = CloudNioPath.checkPath(dir) fileProvider.createDirectory(cloudNioPath.cloudHost, cloudNioPath.cloudPath) - }) + } override def deleteIfExists(path: Path): Boolean = { val cloudNioPath = CloudNioPath.checkPath(path) if (checkDirectoryExists(cloudNioPath)) { - val hasObjects = retry.from( - () => fileProvider.existsPaths(cloudNioPath.cloudHost, cloudNioPath.cloudPath) - ) + val hasObjects = retry.from(() => fileProvider.existsPaths(cloudNioPath.cloudHost, cloudNioPath.cloudPath)) if (hasObjects) { throw new UnsupportedOperationException("Can not delete a non-empty directory") } else { true } } else { - retry.from( - () => fileProvider.deleteIfExists(cloudNioPath.cloudHost, cloudNioPath.cloudPath) - ) + retry.from(() => fileProvider.deleteIfExists(cloudNioPath.cloudHost, cloudNioPath.cloudPath)) } } - override def delete(path: Path): Unit = { + override def delete(path: Path): Unit = if (!deleteIfExists(path)) { val cloudNioPath = CloudNioPath.checkPath(path) throw new NoSuchFileException(cloudNioPath.uriAsString) } - } override def copy(source: Path, target: Path, options: CopyOption*): Unit = { val sourceCloudNioPath = CloudNioPath.checkPath(source) val targetCloudNioPath = CloudNioPath.checkPath(target) if (sourceCloudNioPath != targetCloudNioPath) { - retry.from( - () => - fileProvider.copy( - sourceCloudNioPath.cloudHost, - sourceCloudNioPath.cloudPath, - targetCloudNioPath.cloudHost, - targetCloudNioPath.cloudPath - ) + retry.from(() => + fileProvider.copy( + sourceCloudNioPath.cloudHost, + sourceCloudNioPath.cloudPath, + targetCloudNioPath.cloudHost, + targetCloudNioPath.cloudPath + ) ) } } override def move(source: Path, target: Path, options: CopyOption*): Unit = { - for (option <- options) { + for (option <- options) if (option == StandardCopyOption.ATOMIC_MOVE) throw new AtomicMoveNotSupportedException(null, null, "Atomic move unsupported") - } copy(source, target, options: _*) delete(source) () } - override def isSameFile(path: Path, path2: Path): Boolean = { + override def isSameFile(path: Path, path2: Path): Boolean = CloudNioPath.checkPath(path).equals(CloudNioPath.checkPath(path2)) - } override def isHidden(path: Path): Boolean = { CloudNioPath.checkPath(path) @@ -174,8 +166,8 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { val cloudNioPath = CloudNioPath.checkPath(path) - val exists = checkDirectoryExists(cloudNioPath) || retry.from( - () => fileProvider.existsPath(cloudNioPath.cloudHost, cloudNioPath.cloudPath) + val 
exists = checkDirectoryExists(cloudNioPath) || retry.from(() => + fileProvider.existsPath(cloudNioPath.cloudHost, cloudNioPath.cloudPath) ) if (!exists) { @@ -183,12 +175,11 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { } } - def checkDirectoryExists(cloudNioPath: CloudNioPath): Boolean = { + def checkDirectoryExists(cloudNioPath: CloudNioPath): Boolean = // Anything that "seems" like a directory exists. Otherwise see if the path with a "/" contains files on the cloud. - (usePseudoDirectories && cloudNioPath.seemsLikeDirectory) || retry.from( - () => fileProvider.existsPaths(cloudNioPath.cloudHost, cloudNioPath.cloudPath + "/") + (usePseudoDirectories && cloudNioPath.seemsLikeDirectory) || retry.from(() => + fileProvider.existsPaths(cloudNioPath.cloudHost, cloudNioPath.cloudPath + "/") ) - } override def getFileAttributeView[V <: FileAttributeView]( path: Path, @@ -205,9 +196,8 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { CloudNioFileAttributeView(fileProvider, retry, cloudNioPath, isDirectory).asInstanceOf[V] } - override def readAttributes(path: Path, attributes: String, options: LinkOption*): java.util.Map[String, AnyRef] = { + override def readAttributes(path: Path, attributes: String, options: LinkOption*): java.util.Map[String, AnyRef] = throw new UnsupportedOperationException - } override def readAttributes[A <: BasicFileAttributes]( path: Path, @@ -224,9 +214,7 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { CloudNioDirectoryAttributes(cloudNioPath).asInstanceOf[A] } else { retry - .from( - () => fileProvider.fileAttributes(cloudNioPath.cloudHost, cloudNioPath.cloudPath) - ) + .from(() => fileProvider.fileAttributes(cloudNioPath.cloudHost, cloudNioPath.cloudPath)) .map(_.asInstanceOf[A]) .getOrElse(throw new NoSuchFileException(cloudNioPath.uriAsString)) } @@ -237,16 +225,15 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { new CloudNioDirectoryStream(fileProvider, retry, cloudNioPath, filter) } - override def setAttribute(path: Path, attribute: String, value: scala.Any, options: LinkOption*): Unit = { + override def setAttribute(path: Path, attribute: String, value: scala.Any, options: LinkOption*): Unit = throw new UnsupportedOperationException - } def canEqual(other: Any): Boolean = other.isInstanceOf[CloudNioFileSystemProvider] override def equals(other: Any): Boolean = other match { case that: CloudNioFileSystemProvider => (that canEqual this) && - config == that.config + config == that.config case _ => false } @@ -258,7 +245,6 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { object CloudNioFileSystemProvider { - def defaultConfig(scheme: String): Config = { + def defaultConfig(scheme: String): Config = ConfigFactory.load.getOrElse(s"cloud.nio.default.$scheme", ConfigFactory.empty) - } } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala index ec0c701a9b9..b6803670d39 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala @@ -9,12 +9,11 @@ import scala.jdk.CollectionConverters._ object CloudNioPath { - def checkPath(path: Path): CloudNioPath = { + def checkPath(path: Path): CloudNioPath = path match { case cloudNioPath: CloudNioPath => cloudNioPath - case _ => throw new ProviderMismatchException(s"Not a CloudNioPath: $path") + case _ 
=> throw new ProviderMismatchException(s"Not a CloudNioPath: $path") } - } } class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: UnixPath) extends Path { @@ -65,13 +64,12 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un /** * If is relative, returns just the normalized path. If is absolute, return the host + the absolute path. */ - def relativeDependentPath: String = { + def relativeDependentPath: String = if (unixPath.isAbsolute) { cloudHost + "/" + unixPath.toString.stripPrefix("/") } else { unixPath.normalize().toString } - } /** * Returns true if the path probably represents a directory, but won't be known until contacting the host. @@ -84,30 +82,25 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un override def isAbsolute: Boolean = unixPath.isAbsolute - override def getRoot: CloudNioPath = { + override def getRoot: CloudNioPath = unixPath.getRoot.map(newPath).orNull - } - override def getFileName: CloudNioPath = { + override def getFileName: CloudNioPath = unixPath.getFileName.map(newPath).orNull - } - override def getParent: CloudNioPath = { + override def getParent: CloudNioPath = unixPath.getParent.map(newPath).orNull - } override def getNameCount: Int = unixPath.getNameCount - override def getName(index: Int): CloudNioPath = { + override def getName(index: Int): CloudNioPath = unixPath.getName(index).map(newPath).getOrElse(throw new IllegalArgumentException(s"Bad index $index")) - } - override def subpath(beginIndex: Int, endIndex: Int): CloudNioPath = { + override def subpath(beginIndex: Int, endIndex: Int): CloudNioPath = unixPath .subPath(beginIndex, endIndex) .map(newPath) .getOrElse(throw new IllegalArgumentException(s"Bad range $beginIndex-$endIndex")) - } override def startsWith(other: Path): Boolean = { if (!other.isInstanceOf[CloudNioPath]) { @@ -122,9 +115,8 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un unixPath.startsWith(that.unixPath) } - override def startsWith(other: String): Boolean = { + override def startsWith(other: String): Boolean = unixPath.startsWith(UnixPath.getPath(other)) - } override def endsWith(other: Path): Boolean = { if (!other.isInstanceOf[CloudNioPath]) { @@ -138,9 +130,8 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un unixPath.endsWith(that.unixPath) } - override def endsWith(other: String): Boolean = { + override def endsWith(other: String): Boolean = unixPath.endsWith(UnixPath.getPath(other)) - } override def normalize(): CloudNioPath = newPath(unixPath.normalize()) @@ -150,9 +141,8 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un newPath(unixPath.resolve(that.unixPath)) } - override def resolve(other: String): CloudNioPath = { + override def resolve(other: String): CloudNioPath = newPath(unixPath.resolve(UnixPath.getPath(other))) - } override def resolveSibling(other: Path): CloudNioPath = { val that = CloudNioPath.checkPath(other) @@ -160,9 +150,8 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un newPath(unixPath.resolveSibling(that.unixPath)) } - override def resolveSibling(other: String): CloudNioPath = { + override def resolveSibling(other: String): CloudNioPath = newPath(unixPath.resolveSibling(UnixPath.getPath(other))) - } override def relativize(other: Path): CloudNioPath = { val that = CloudNioPath.checkPath(other) @@ -170,9 +159,8 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val 
unixPath: Un newPath(unixPath.relativize(that.unixPath)) } - override def toAbsolutePath: CloudNioPath = { + override def toAbsolutePath: CloudNioPath = newPath(unixPath.toAbsolutePath) - } override def toRealPath(options: LinkOption*): CloudNioPath = toAbsolutePath @@ -187,13 +175,12 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un modifiers: WatchEvent.Modifier* ): WatchKey = throw new UnsupportedOperationException - override def iterator(): java.util.Iterator[Path] = { + override def iterator(): java.util.Iterator[Path] = if (unixPath.isEmpty || unixPath.isRoot) { java.util.Collections.emptyIterator() } else { unixPath.split().to(LazyList).map(part => newPath(UnixPath.getPath(part)).asInstanceOf[Path]).iterator.asJava } - } override def compareTo(other: Path): Int = { if (other.isInstanceOf[CloudNioPath]) { @@ -209,22 +196,19 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un unixPath.compareTo(that.unixPath) } - override def equals(obj: scala.Any): Boolean = { + override def equals(obj: scala.Any): Boolean = (this eq obj.asInstanceOf[AnyRef]) || - obj.isInstanceOf[CloudNioPath] && - obj.asInstanceOf[CloudNioPath].cloudHost.equals(cloudHost) && - obj.asInstanceOf[CloudNioPath].unixPath.equals(unixPath) - } + obj.isInstanceOf[CloudNioPath] && + obj.asInstanceOf[CloudNioPath].cloudHost.equals(cloudHost) && + obj.asInstanceOf[CloudNioPath].unixPath.equals(unixPath) - override def hashCode(): Int = { + override def hashCode(): Int = Objects.hash(cloudHost, unixPath) - } - protected def newPath(unixPath: UnixPath): CloudNioPath = { + protected def newPath(unixPath: UnixPath): CloudNioPath = if (this.unixPath == unixPath) { this } else { new CloudNioPath(filesystem, unixPath) } - } } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioReadChannel.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioReadChannel.scala index 1e6020307fa..b8260b9a599 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioReadChannel.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioReadChannel.scala @@ -2,35 +2,28 @@ package cloud.nio.spi import java.io.FileNotFoundException import java.nio.ByteBuffer -import java.nio.channels.{ - ClosedChannelException, - NonWritableChannelException, - ReadableByteChannel, - SeekableByteChannel -} +import java.nio.channels.{ClosedChannelException, NonWritableChannelException, ReadableByteChannel, SeekableByteChannel} class CloudNioReadChannel(fileProvider: CloudNioFileProvider, retry: CloudNioRetry, cloudNioPath: CloudNioPath) - extends SeekableByteChannel { + extends SeekableByteChannel { private var internalPosition: Long = 0 private var channel: ReadableByteChannel = channelPosition(0) override def read(dst: ByteBuffer): Int = { var resetConnection = false - val count = retry.from( - () => { - try { - if (resetConnection) { - if (channel.isOpen) channel.close() - channel = fileProvider.read(cloudNioPath.cloudHost, cloudNioPath.cloudPath, internalPosition) - } - channel.read(dst) - } catch { - case exception: Exception => - resetConnection = true - throw exception + val count = retry.from { () => + try { + if (resetConnection) { + if (channel.isOpen) channel.close() + channel = fileProvider.read(cloudNioPath.cloudHost, cloudNioPath.cloudPath, internalPosition) } + channel.read(dst) + } catch { + case exception: Exception => + resetConnection = true + throw exception } - ) + } if (count > 0) internalPosition += count count @@ -50,25 
+43,19 @@ class CloudNioReadChannel(fileProvider: CloudNioFileProvider, retry: CloudNioRet this } - private def channelPosition(newPosition: Long): ReadableByteChannel = { - retry.from( - () => fileProvider.read(cloudNioPath.cloudHost, cloudNioPath.cloudPath, newPosition) - ) - } + private def channelPosition(newPosition: Long): ReadableByteChannel = + retry.from(() => fileProvider.read(cloudNioPath.cloudHost, cloudNioPath.cloudPath, newPosition)) - override def size(): Long = { + override def size(): Long = retry - .from( - () => fileSize - ) + .from(() => fileSize) .getOrElse(throw new FileNotFoundException(cloudNioPath.uriAsString)) - } override def truncate(size: Long): SeekableByteChannel = throw new NonWritableChannelException override def isOpen: Boolean = channel.isOpen override def close(): Unit = channel.close() - + protected def fileSize = fileProvider.fileAttributes(cloudNioPath.cloudHost, cloudNioPath.cloudPath).map(_.size()) } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioRetry.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioRetry.scala index 9a4028ed859..40c8cba7377 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioRetry.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioRetry.scala @@ -24,7 +24,7 @@ class CloudNioRetry(config: Config) { val delay = backoff.backoffMillis f() match { - case Success(ret) => ret + case Success(ret) => ret case Failure(exception: Exception) if isFatal(exception) => throw exception case Failure(exception: Exception) if !isFatal(exception) => val retriesLeft = if (isTransient(exception)) maxRetries else maxRetries map { _ - 1 } @@ -38,11 +38,13 @@ class CloudNioRetry(config: Config) { } } - def from[A](f: () => A, maxRetries: Option[Int] = Option(defaultMaxRetries), backoff: CloudNioBackoff = defaultBackOff): A = { + def from[A](f: () => A, + maxRetries: Option[Int] = Option(defaultMaxRetries), + backoff: CloudNioBackoff = defaultBackOff + ): A = fromTry[A]( () => Try(f()), maxRetries, backoff ) - } } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioWriteChannel.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioWriteChannel.scala index 15a9d97213b..232cc25297b 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioWriteChannel.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioWriteChannel.scala @@ -6,11 +6,8 @@ import java.nio.channels._ class CloudNioWriteChannel(fileProvider: CloudNioFileProvider, retry: CloudNioRetry, cloudNioPath: CloudNioPath) extends SeekableByteChannel { private var internalPosition: Long = 0 - private val channel: WritableByteChannel = { - retry.from( - () => fileProvider.write(cloudNioPath.cloudHost, cloudNioPath.cloudPath) - ) - } + private val channel: WritableByteChannel = + retry.from(() => fileProvider.write(cloudNioPath.cloudHost, cloudNioPath.cloudPath)) override def read(dst: ByteBuffer): Int = throw new NonReadableChannelException diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala index fb3a2bf8f50..b7d9ed02035 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala @@ -24,7 +24,7 @@ private[spi] object UnixPath { private def hasTrailingSeparator(path: String): Boolean = !path.isEmpty && path.charAt(path.length - 1) == Separator - def 
getPath(path: String): UnixPath = { + def getPath(path: String): UnixPath = if (path.isEmpty) { EmptyPath } else if (isRoot(path)) { @@ -32,7 +32,6 @@ private[spi] object UnixPath { } else { UnixPath(path) } - } def getPath(first: String, more: String*): UnixPath = { if (more.isEmpty) { @@ -40,7 +39,7 @@ private[spi] object UnixPath { } val builder = new StringBuilder(first) - for ((part, index) <- more.view.zipWithIndex) { + for ((part, index) <- more.view.zipWithIndex) if (part.isEmpty) { // do nothing } else if (isAbsolute(part)) { @@ -55,7 +54,6 @@ private[spi] object UnixPath { builder.append(Separator) builder.append(part) } - } UnixPath(builder.toString) } @@ -73,14 +71,13 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { def hasTrailingSeparator: Boolean = UnixPath.hasTrailingSeparator(path) - def seemsLikeDirectory(): Boolean = { + def seemsLikeDirectory(): Boolean = path.isEmpty || - hasTrailingSeparator || - path.endsWith(".") && (length == 1 || path.charAt(length - 2) == UnixPath.Separator) || - path.endsWith("..") && (length == 2 || path.charAt(length - 3) == UnixPath.Separator) - } + hasTrailingSeparator || + path.endsWith(".") && (length == 1 || path.charAt(length - 2) == UnixPath.Separator) || + path.endsWith("..") && (length == 2 || path.charAt(length - 3) == UnixPath.Separator) - def getFileName: Option[UnixPath] = { + def getFileName: Option[UnixPath] = if (path.isEmpty || isRoot) { None } else { @@ -90,7 +87,6 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { Some(UnixPath(parts.last)) } } - } def getParent: Option[UnixPath] = { if (path.isEmpty || isRoot) { @@ -103,7 +99,7 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { else path.lastIndexOf(UnixPath.Separator.toInt) index match { - case -1 => if (isAbsolute) Some(UnixPath.RootPath) else None + case -1 => if (isAbsolute) Some(UnixPath.RootPath) else None case pos => Some(UnixPath(path.substring(0, pos + 1))) } } @@ -122,7 +118,7 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { Try(UnixPath(parts.slice(beginIndex, endIndex).mkString(UnixPath.Separator.toString))) } - def getNameCount: Int = { + def getNameCount: Int = if (path.isEmpty) { 1 } else if (isRoot) { @@ -130,7 +126,6 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { } else { parts.length } - } def getName(index: Int): Try[UnixPath] = { if (path.isEmpty) { @@ -144,7 +139,7 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { Success(UnixPath(parts(2))) } - def resolve(other: UnixPath): UnixPath = { + def resolve(other: UnixPath): UnixPath = if (other.path.isEmpty) { this } else if (other.isAbsolute) { @@ -154,15 +149,13 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { } else { new UnixPath(path + UnixPath.Separator.toString + other.path) } - } - def resolveSibling(other: UnixPath): UnixPath = { + def resolveSibling(other: UnixPath): UnixPath = getParent match { case Some(parent: UnixPath) => parent.resolve(other) case None => other } - } def relativize(other: UnixPath): UnixPath = { if (path.isEmpty) { @@ -247,21 +240,18 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { def splitReverse(): Iterator[String] = parts.reverseIterator - def removeBeginningSeparator(): UnixPath = { + def removeBeginningSeparator(): UnixPath = if (isAbsolute) new UnixPath(path.substring(1)) else this - } - def addTrailingSeparator(): UnixPath = { + def 
addTrailingSeparator(): UnixPath = if (hasTrailingSeparator) this else new UnixPath(path + UnixPath.Separator) - } - def removeTrailingSeparator(): UnixPath = { + def removeTrailingSeparator(): UnixPath = if (!isRoot && hasTrailingSeparator) { new UnixPath(path.substring(0, length - 1)) } else { this } - } def startsWith(other: UnixPath): Boolean = { val me = removeTrailingSeparator() @@ -279,11 +269,10 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { } def startsWith(left: Iterator[String], right: Iterator[String]): Boolean = { - while (right.hasNext) { + while (right.hasNext) if (!left.hasNext || right.next() != left.next()) { return false } - } true } @@ -310,9 +299,8 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { if (isAbsolute) Success(this) else Success(currentWorkingDirectory.resolve(this)) } - def toAbsolutePath: UnixPath = { + def toAbsolutePath: UnixPath = if (isAbsolute) this else UnixPath.RootPath.resolve(this) - } def compareTo(other: UnixPath): Int = { val me = parts.toList @@ -327,29 +315,24 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { } } - override def equals(obj: scala.Any): Boolean = { + override def equals(obj: scala.Any): Boolean = (this eq obj.asInstanceOf[AnyRef]) || { obj.isInstanceOf[UnixPath] && obj.asInstanceOf[UnixPath].path.equals(path) } - } - override def length(): Int = { + override def length(): Int = path.length - } - override def charAt(index: Int): Char = { + override def charAt(index: Int): Char = path.charAt(index) - } - override def subSequence(start: Int, end: Int): CharSequence = { + override def subSequence(start: Int, end: Int): CharSequence = path.subSequence(start, end) - } - override def toString: String = { + override def toString: String = path - } - def initParts(): Array[String] = { + def initParts(): Array[String] = if (path.isEmpty) { Array.empty[String] } else { @@ -359,5 +342,4 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { path.split(UnixPath.Separator) } } - } } diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/ChannelUtil.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/ChannelUtil.scala index 04c9d4cdd3f..bbd1cbaf4db 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/ChannelUtil.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/ChannelUtil.scala @@ -26,13 +26,12 @@ object ChannelUtil { def pipedStreamWriter(threadName: String)(consumer: InputStream => Unit): WritableByteChannel = { val pipe = Pipe.open() var threadResult: Option[Try[Unit]] = None - val runnable: Runnable = () => { + val runnable: Runnable = () => threadResult = Option( Try( consumer(Channels.newInputStream(pipe.source)) ) ) - } val thread = new Thread(runnable, threadName) thread.setDaemon(true) thread.start() diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala index 162d47d08db..e15cb574031 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala @@ -12,13 +12,12 @@ object CloudNioFiles { /** * Lists all files under a path. 
*/ - def listRegularFiles(path: Path): Iterator[Path] = { + def listRegularFiles(path: Path): Iterator[Path] = Files .walk(path, Int.MaxValue) .iterator .asScala .filter(Files.isRegularFile(_)) - } /** * Returns an iterator of all regular files under sourcePath mapped relatively to targetPath. diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioPaths.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioPaths.scala index 7448b272426..66ab48100c6 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioPaths.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioPaths.scala @@ -21,15 +21,14 @@ object CloudNioPaths { * @see [[cloud.nio.util.CloudNioPaths#showAbsolute(java.nio.file.Path)]] * @see [[cloud.nio.spi.CloudNioPath#uriAsString()]] */ - def get(filePath: String): Path = { - try { + def get(filePath: String): Path = + try // TODO: softer parsing using Guava UrlEscapers. May also be better to list the providers ourselves if possible. Paths.get(new URI(filePath)) - } catch { - case _: URISyntaxException => Paths.get(filePath) + catch { + case _: URISyntaxException => Paths.get(filePath) case iae: IllegalArgumentException if iae.getMessage == "Missing scheme" => Paths.get(filePath) } - } /** * Return a path in a way reciprocal with [[cloud.nio.util.CloudNioPaths#get]]. @@ -38,12 +37,11 @@ object CloudNioPaths { * @see [[cloud.nio.util.CloudNioPaths#showRelative(java.nio.file.Path)]] * @see [[cloud.nio.spi.CloudNioPath#uriAsString()]] */ - def showAbsolute(path: Path): String = { + def showAbsolute(path: Path): String = path match { case cloudNioPath: CloudNioPath => cloudNioPath.uriAsString - case _ => path.toAbsolutePath.toString + case _ => path.toAbsolutePath.toString } - } /** * When the path is relative returns a relative path in a way reciprocal with resolve. 
@@ -53,11 +51,10 @@ object CloudNioPaths { * @see [[java.nio.file.Path#resolve(java.nio.file.Path)]] * @see [[cloud.nio.spi.CloudNioPath#uriAsString()]] */ - def showRelative(path: Path): String = { + def showRelative(path: Path): String = path match { case cloudNioPath: CloudNioPath => cloudNioPath.relativeDependentPath - case _ if !path.isAbsolute => path.normalize().toString - case _ => path.getRoot.relativize(path).normalize().toString + case _ if !path.isAbsolute => path.normalize().toString + case _ => path.getRoot.relativize(path).normalize().toString } - } } diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/IoUtil.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/IoUtil.scala index 0ff44ce2dd0..f8c7921d0d8 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/IoUtil.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/IoUtil.scala @@ -11,24 +11,21 @@ object IoUtil { type ACC = IO[Either[NonEmptyList[Exception], A]] - def attemptHead(headIo: IO[A]): ACC = { + def attemptHead(headIo: IO[A]): ACC = attemptIo(NonEmptyList.one)(headIo) - } - def attemptAcc(accIo: ACC, nextIo: IO[A]): ACC = { + def attemptAcc(accIo: ACC, nextIo: IO[A]): ACC = accIo flatMap { - case Right(previousSuccess) => IO.pure(Right(previousSuccess)) + case Right(previousSuccess) => IO.pure(Right(previousSuccess)) case Left(previousExceptions) => attemptIo(_ :: previousExceptions)(nextIo) } - } - def attemptIo(f: Exception => NonEmptyList[Exception])(io: IO[A]): ACC = { + def attemptIo(f: Exception => NonEmptyList[Exception])(io: IO[A]): ACC = io.attempt flatMap { - case Right(success) => IO.pure(Right(success)) + case Right(success) => IO.pure(Right(success)) case Left(exception: Exception) => IO.pure(Left(f(exception))) - case Left(throwable) => throw throwable + case Left(throwable) => throw throwable } - } val res: ACC = tries.tail.foldLeft(attemptHead(tries.head))(attemptAcc) diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/TryWithResource.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/TryWithResource.scala index 326a1821d84..f43ad24cb1f 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/TryWithResource.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/TryWithResource.scala @@ -20,17 +20,17 @@ object TryWithResource { case x: Throwable => t = Option(x) throw x - } finally { + } finally resource foreach { r => - try { + try r.close() - } catch { - case y: Throwable => t match { - case Some(_t) => _t.addSuppressed(y) - case None => throw y - } + catch { + case y: Throwable => + t match { + case Some(_t) => _t.addSuppressed(y) + case None => throw y + } } } - } } } diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala index 019dbe81caf..c11651879fa 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala @@ -35,19 +35,17 @@ object VersionUtil { * @param default What to return when the version cannot be found. The parameter passed is the `projectName`. 
* @return The version from the conf or the default */ - def getVersion(projectName: String, default: String => String = defaultMessage): String = { + def getVersion(projectName: String, default: String => String = defaultMessage): String = ConfigFactory .load(versionConf(projectName)) .as[Option[String]](versionProperty(projectName)) .getOrElse(default(projectName)) - } /** * Instead of returning a version, states that the version conf will be generated by sbt. */ - def defaultMessage(projectName: String): String = { + def defaultMessage(projectName: String): String = s"${versionConf(projectName)}-to-be-generated-by-sbt" - } /** * A regex compatible with the dependency constants in project/Dependencies.scala. @@ -62,7 +60,7 @@ object VersionUtil { * @return The dependency version from project/Dependencies.scala * @throws RuntimeException If the dependency cannot be found */ - def sbtDependencyVersion(dependencyName: String)(projectName: String): String = { + def sbtDependencyVersion(dependencyName: String)(projectName: String): String = try { val dependencies = Paths.get("project/Dependencies.scala").toAbsolutePath val lines = Files.readAllLines(dependencies).asScala @@ -79,6 +77,5 @@ object VersionUtil { e ) } - } } diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/AwsConfiguration.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/AwsConfiguration.scala index 1aca98c9efd..df2cef13bde 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/AwsConfiguration.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/AwsConfiguration.scala @@ -46,16 +46,16 @@ import software.amazon.awssdk.regions.Region final case class AwsConfiguration private (applicationName: String, authsByName: Map[String, AwsAuthMode], - strRegion: Option[String]) { + strRegion: Option[String] +) { - def auth(name: String): ErrorOr[AwsAuthMode] = { + def auth(name: String): ErrorOr[AwsAuthMode] = authsByName.get(name) match { case None => val knownAuthNames = authsByName.keys.mkString(", ") s"`aws` configuration stanza does not contain an auth named '$name'. Known auth names: $knownAuthNames".invalidNel case Some(a) => a.validNel } - } def region: Option[Region] = strRegion.map(Region.of) } @@ -77,7 +77,7 @@ object AwsConfiguration { val awsConfig = config.getConfig("aws") - val appName = validate { awsConfig.as[String]("application-name") } + val appName = validate(awsConfig.as[String]("application-name")) val region: Option[String] = awsConfig.getAs[String]("region") @@ -88,13 +88,16 @@ object AwsConfiguration { (authConfig.getAs[String]("access-key"), authConfig.getAs[String]("secret-key")) match { case (Some(accessKey), Some(secretKey)) => CustomKeyMode(name, accessKey, secretKey, region) - case _ => throw new ConfigException.Generic(s"""Access key and/or secret """ + - s"""key missing for service account "$name". See reference.conf under the aws.auth, """ + - s"""custom key section for details of required configuration.""") + case _ => + throw new ConfigException.Generic( + s"""Access key and/or secret """ + + s"""key missing for service account "$name". 
See reference.conf under the aws.auth, """ + + s"""custom key section for details of required configuration.""" + ) } } - def defaultAuth(authConfig: Config, name: String, region: Option[String]): ErrorOr[AwsAuthMode] = validate { + def defaultAuth(authConfig: Config, name: String, region: Option[String]): ErrorOr[AwsAuthMode] = validate { DefaultMode(name, region) } diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/auth/AwsAuthMode.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/auth/AwsAuthMode.scala index b8c351c4185..7335441ea80 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/auth/AwsAuthMode.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/auth/AwsAuthMode.scala @@ -52,9 +52,8 @@ sealed trait AwsAuthMode { /** * Validate the auth mode against provided options */ - def validate(options: OptionLookup): Unit = { + def validate(options: OptionLookup): Unit = () - } /** * The name of the auth mode @@ -72,27 +71,30 @@ sealed trait AwsAuthMode { * All traits in this file are sealed, all classes final, meaning things * like Mockito or other java/scala overrides cannot work. */ - private[auth] var credentialValidation: (AwsCredentialsProvider, Option[String]) => Unit = - (provider: AwsCredentialsProvider, region: Option[String]) => { - val builder = StsClient.builder - - //If the region argument exists in config, set it in the builder. - //Otherwise it is left unset and the AwsCredentialsProvider will be responsible for sourcing a region - region.map(Region.of).foreach(builder.region) - - // make an essentially no-op call just to assure ourselves the credentials from our provider are valid - builder.credentialsProvider(provider) - .build - .getCallerIdentity(GetCallerIdentityRequest.builder.build) - () - } - - protected def validateCredential(provider: AwsCredentialsProvider, region: Option[String]) = { + private[auth] var credentialValidation: (AwsCredentialsProvider, Option[String]) => Unit = + (provider: AwsCredentialsProvider, region: Option[String]) => { + val builder = StsClient.builder + + // If the region argument exists in config, set it in the builder. 
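// For reference, a minimal standalone sketch of the fail-fast credential
// check in the hunk above: an essentially no-op STS GetCallerIdentity call
// that throws when a provider's credentials are unusable. Only AWS SDK v2
// calls visible in this hunk are used; DefaultCredentialsProvider and the
// sample region are assumptions, not Cromwell configuration.
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider
import software.amazon.awssdk.regions.Region
import software.amazon.awssdk.services.sts.StsClient
import software.amazon.awssdk.services.sts.model.GetCallerIdentityRequest

object StsValidationSketch extends App {
  // Throws on bad credentials, so problems surface here rather than later,
  // mid-workflow; success means the provider chain produced usable keys.
  StsClient.builder
    .region(Region.US_EAST_1)
    .credentialsProvider(DefaultCredentialsProvider.create())
    .build
    .getCallerIdentity(GetCallerIdentityRequest.builder.build)
  println("credentials OK")
}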
+ // Otherwise it is left unset and the AwsCredentialsProvider will be responsible for sourcing a region + region.map(Region.of).foreach(builder.region) + + // make an essentially no-op call just to assure ourselves the credentials from our provider are valid + builder + .credentialsProvider(provider) + .build + .getCallerIdentity(GetCallerIdentityRequest.builder.build) + () + } + + protected def validateCredential(provider: AwsCredentialsProvider, region: Option[String]) = Try(credentialValidation(provider, region)) match { - case Failure(ex) => throw new RuntimeException(s"Credentials produced by the AWS provider ${name} are invalid: ${ex.getMessage}", ex) + case Failure(ex) => + throw new RuntimeException(s"Credentials produced by the AWS provider ${name} are invalid: ${ex.getMessage}", + ex + ) case Success(_) => provider } - } } /** @@ -114,11 +116,8 @@ object CustomKeyMode * @param secretKey static AWS secret key * @param region an optional AWS region */ -final case class CustomKeyMode(override val name: String, - accessKey: String, - secretKey: String, - region: Option[String] - ) extends AwsAuthMode { +final case class CustomKeyMode(override val name: String, accessKey: String, secretKey: String, region: Option[String]) + extends AwsAuthMode { private lazy val _provider: AwsCredentialsProvider = { // make a provider locked to the given access and secret val p = StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKey, secretKey)) @@ -159,17 +158,17 @@ final case class DefaultMode(override val name: String, region: Option[String]) * @param region an optional AWS region */ final case class AssumeRoleMode(override val name: String, - baseAuthName: String, - roleArn: String, - externalId: String, - region: Option[String] - ) extends AwsAuthMode { + baseAuthName: String, + roleArn: String, + externalId: String, + region: Option[String] +) extends AwsAuthMode { private lazy val _provider: AwsCredentialsProvider = { // we need to perform operations on STS using the credentials provided from the baseAuthName val stsBuilder = StsClient.builder region.foreach(str => stsBuilder.region(Region.of(str))) - baseAuthObj match{ + baseAuthObj match { case Some(auth) => stsBuilder.credentialsProvider(auth.provider()) case _ => throw new RuntimeException(s"Base auth configuration required for assume role") } @@ -179,7 +178,7 @@ final case class AssumeRoleMode(override val name: String, .roleArn(roleArn) .durationSeconds(3600) .roleSessionName("cromwell") - if (! 
externalId.isEmpty) assumeRoleBuilder.externalId(externalId) + if (!externalId.isEmpty) assumeRoleBuilder.externalId(externalId) // this provider is one that will handle refreshing the assume-role creds when needed val p = StsAssumeRoleCredentialsProvider.builder @@ -195,23 +194,21 @@ final case class AssumeRoleMode(override val name: String, // start a background thread to perform the refresh override def provider(): AwsCredentialsProvider = _provider - private var baseAuthObj : Option[AwsAuthMode] = None + private var baseAuthObj: Option[AwsAuthMode] = None - def assign(baseAuth: AwsAuthMode) : Unit = { + def assign(baseAuth: AwsAuthMode): Unit = baseAuthObj match { case None => baseAuthObj = Some(baseAuth) case _ => throw new RuntimeException(s"Base auth object has already been assigned") } - } // We want to allow our tests access to the value // of the baseAuthObj - def baseAuthentication() : AwsAuthMode = { + def baseAuthentication(): AwsAuthMode = baseAuthObj match { case Some(o) => o case _ => throw new RuntimeException(s"Base auth object has not been set") } - } } class OptionLookupException(val key: String, cause: Throwable) extends RuntimeException(key, cause) diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala index 8ab07f37064..8caf8aa3fb1 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala @@ -60,13 +60,13 @@ object S3Storage { builder.build } - def s3Client(provider: AwsCredentialsProvider, region: Option[Region]): S3Client = { + def s3Client(provider: AwsCredentialsProvider, region: Option[Region]): S3Client = s3Client(s3Configuration(), provider, region) - } def s3Configuration(accelerateModeEnabled: Boolean = false, dualstackEnabled: Boolean = false, - pathStyleAccessEnabled: Boolean = false): S3Configuration = { + pathStyleAccessEnabled: Boolean = false + ): S3Configuration = { @nowarn("msg=method dualstackEnabled in trait Builder is deprecated") val builder = S3Configuration.builder diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureCredentials.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureCredentials.scala index 200b162c614..d3d66e1bafc 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureCredentials.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureCredentials.scala @@ -34,9 +34,11 @@ case object AzureCredentials { .authorityHost(azureProfile.getEnvironment.getActiveDirectoryEndpoint) def getAccessToken(identityClientId: Option[String] = None): ErrorOr[String] = { - val credentials = identityClientId.foldLeft(defaultCredentialBuilder) { - (builder, clientId) => builder.managedIdentityClientId(clientId) - }.build() + val credentials = identityClientId + .foldLeft(defaultCredentialBuilder) { (builder, clientId) => + builder.managedIdentityClientId(clientId) + } + .build() Try( credentials diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureUtils.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureUtils.scala index 09cf5f3869d..dd379ed3564 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureUtils.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/azure/AzureUtils.scala @@ -14,6 +14,7 @@ import scala.jdk.CollectionConverters.IterableHasAsScala import scala.util.{Failure, Success, Try} object 
AzureUtils { + /** * Generates a BlobContainerClient that can interact with the specified container. Authenticates using the local azure client running on the same machine. * @param blobContainer Name of the blob container. Looks something like "my-blob-container". @@ -21,10 +22,16 @@ object AzureUtils { * @param subscription Azure subscription. A globally unique identifier. If not provided, a default subscription will be used. * @return A blob container client capable of interacting with the specified container. */ - def buildContainerClientFromLocalEnvironment(blobContainer: String, azureEndpoint: String, subscription : Option[String]): Try[BlobContainerClient] = { + def buildContainerClientFromLocalEnvironment(blobContainer: String, + azureEndpoint: String, + subscription: Option[String] + ): Try[BlobContainerClient] = { def parseURI(string: String): Try[URI] = Try(URI.create(UrlEscapers.urlFragmentEscaper().escape(string))) - def parseStorageAccount(uri: URI): Try[String] = uri.getHost.split("\\.").find(_.nonEmpty) - .map(Success(_)).getOrElse(Failure(new Exception("Could not parse storage account"))) + def parseStorageAccount(uri: URI): Try[String] = uri.getHost + .split("\\.") + .find(_.nonEmpty) + .map(Success(_)) + .getOrElse(Failure(new Exception("Could not parse storage account"))) val azureProfile = new AzureProfile(AzureEnvironment.AZURE) @@ -32,29 +39,37 @@ object AzureUtils { .authorityHost(azureProfile.getEnvironment.getActiveDirectoryEndpoint) .build - def authenticateWithSubscription(sub: String) = AzureResourceManager.authenticate(azureCredentialBuilder, azureProfile).withSubscription(sub) + def authenticateWithSubscription(sub: String) = + AzureResourceManager.authenticate(azureCredentialBuilder, azureProfile).withSubscription(sub) - def authenticateWithDefaultSubscription = AzureResourceManager.authenticate(azureCredentialBuilder, azureProfile).withDefaultSubscription() + def authenticateWithDefaultSubscription = + AzureResourceManager.authenticate(azureCredentialBuilder, azureProfile).withDefaultSubscription() def azure = subscription.map(authenticateWithSubscription(_)).getOrElse(authenticateWithDefaultSubscription) - def findAzureStorageAccount(storageAccountName: String) = azure.storageAccounts.list.asScala.find(_.name.equals(storageAccountName)) - .map(Success(_)).getOrElse(Failure(new Exception("Azure Storage Account not found."))) + def findAzureStorageAccount(storageAccountName: String) = azure.storageAccounts.list.asScala + .find(_.name.equals(storageAccountName)) + .map(Success(_)) + .getOrElse(Failure(new Exception("Azure Storage Account not found."))) - def buildBlobContainerClient(credential: StorageSharedKeyCredential, endpointURL: String, blobContainerName: String): BlobContainerClient = { + def buildBlobContainerClient(credential: StorageSharedKeyCredential, + endpointURL: String, + blobContainerName: String + ): BlobContainerClient = new BlobContainerClientBuilder() .credential(credential) .endpoint(endpointURL) .containerName(blobContainerName) .buildClient() - } def generateBlobContainerClient: Try[BlobContainerClient] = for { uri <- parseURI(azureEndpoint) configuredAccount <- parseStorageAccount(uri) azureAccount <- findAzureStorageAccount(configuredAccount) keys = azureAccount.getKeys.asScala - key <- keys.headOption.fold[Try[StorageAccountKey]](Failure(new Exception("Storage account has no keys")))(Success(_)) + key <- keys.headOption.fold[Try[StorageAccountKey]](Failure(new Exception("Storage account has no keys")))( + Success(_) + ) first = 
key.value sskc = new StorageSharedKeyCredential(configuredAccount, first) bcc = buildBlobContainerClient(sskc, azureEndpoint, blobContainer) diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/GoogleConfiguration.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/GoogleConfiguration.scala index 2b4a183c121..999a9c2867a 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/GoogleConfiguration.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/GoogleConfiguration.scala @@ -18,14 +18,13 @@ import org.slf4j.LoggerFactory final case class GoogleConfiguration private (applicationName: String, authsByName: Map[String, GoogleAuthMode]) { - def auth(name: String): ErrorOr[GoogleAuthMode] = { + def auth(name: String): ErrorOr[GoogleAuthMode] = authsByName.get(name) match { case None => val knownAuthNames = authsByName.keys.mkString(", ") s"`google` configuration stanza does not contain an auth named '$name'. Known auth names: $knownAuthNames".invalidNel case Some(a) => a.validNel } - } } object GoogleConfiguration { @@ -37,7 +36,8 @@ object GoogleConfiguration { def withCustomTimeouts(httpRequestInitializer: HttpRequestInitializer, connectionTimeout: FiniteDuration = DefaultConnectionTimeout, - readTimeout: FiniteDuration = DefaultReadTimeout): HttpRequestInitializer = { + readTimeout: FiniteDuration = DefaultReadTimeout + ): HttpRequestInitializer = new HttpRequestInitializer() { @throws[IOException] override def initialize(httpRequest: HttpRequest): Unit = { @@ -47,7 +47,6 @@ object GoogleConfiguration { () } } - } private val log = LoggerFactory.getLogger("GoogleConfiguration") @@ -59,20 +58,27 @@ object GoogleConfiguration { val googleConfig = config.getConfig("google") - val appName = validate { googleConfig.as[String]("application-name") } + val appName = validate(googleConfig.as[String]("application-name")) def buildAuth(authConfig: Config): ErrorOr[GoogleAuthMode] = { def serviceAccountAuth(authConfig: Config, name: String): ErrorOr[GoogleAuthMode] = validate { (authConfig.getAs[String]("pem-file"), authConfig.getAs[String]("json-file")) match { - case (Some(pem), None) => ServiceAccountMode(name, PemFileFormat(authConfig.as[String]("service-account-id"), pem)) + case (Some(pem), None) => + ServiceAccountMode(name, PemFileFormat(authConfig.as[String]("service-account-id"), pem)) case (None, Some(json)) => ServiceAccountMode(name, JsonFileFormat(json)) - case (None, None) => throw new ConfigException.Generic(s"""No credential configuration was found for service account "$name". See reference.conf under the google.auth, service-account section for supported credential formats.""") - case (Some(_), Some(_)) => throw new ConfigException.Generic(s"""Both a pem file and a json file were supplied for service account "$name" in the configuration file. Only one credential file can be supplied for the same service account. Please choose between the two.""") + case (None, None) => + throw new ConfigException.Generic( + s"""No credential configuration was found for service account "$name". See reference.conf under the google.auth, service-account section for supported credential formats.""" + ) + case (Some(_), Some(_)) => + throw new ConfigException.Generic( + s"""Both a pem file and a json file were supplied for service account "$name" in the configuration file. Only one credential file can be supplied for the same service account. 
Please choose between the two.""" + ) } } - def userAccountAuth(authConfig: Config, name: String): ErrorOr[GoogleAuthMode] = validate { + def userAccountAuth(authConfig: Config, name: String): ErrorOr[GoogleAuthMode] = validate { UserMode(name, authConfig.as[String]("secrets-file")) } diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala index e850b53807a..52118303262 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala @@ -34,9 +34,8 @@ object GoogleAuthMode { type CredentialsValidation = Credentials => Unit private[auth] val NoCredentialsValidation = mouse.ignore _ - private def noOptionLookup(string: String): Nothing = { + private def noOptionLookup(string: String): Nothing = throw new UnsupportedOperationException(s"cannot lookup $string") - } lazy val jsonFactory: GsonFactory = GsonFactory.getDefaultInstance lazy val httpTransport: HttpTransport = GoogleNetHttpTransport.newTrustedTransport @@ -46,33 +45,29 @@ object GoogleAuthMode { val DockerCredentialsEncryptionKeyNameKey = "docker_credentials_key_name" val DockerCredentialsTokenKey = "docker_credentials_token" - def checkReadable(file: File): Unit = { + def checkReadable(file: File): Unit = if (!file.isReadable) throw new FileNotFoundException(s"File $file does not exist or is not readable") - } - def isFatal(ex: Throwable): Boolean = { + def isFatal(ex: Throwable): Boolean = ex match { case http: HttpResponseException => // Using HttpURLConnection fields as com.google.api.client.http.HttpStatusCodes doesn't have Bad Request (400) http.getStatusCode == HTTP_UNAUTHORIZED || - http.getStatusCode == HTTP_FORBIDDEN || - http.getStatusCode == HTTP_BAD_REQUEST + http.getStatusCode == HTTP_FORBIDDEN || + http.getStatusCode == HTTP_BAD_REQUEST case _: OptionLookupException => true case _ => false } - } - def extract(options: OptionLookup, key: String): String = { + def extract(options: OptionLookup, key: String): String = Try(options(key)) match { case Success(result) => result case Failure(throwable) => throw new OptionLookupException(key, throwable) } - } /** Used for both checking that the credential is valid and creating a fresh credential. */ - private def refreshCredentials(credentials: Credentials): Unit = { + private def refreshCredentials(credentials: Credentials): Unit = credentials.refresh() - } } sealed trait GoogleAuthMode extends LazyLogging { @@ -87,25 +82,22 @@ sealed trait GoogleAuthMode extends LazyLogging { * Alias for credentials(GoogleAuthMode.NoOptionLookup, scopes). * Only valid for credentials that are NOT externally provided, such as ApplicationDefault. */ - def credentials(scopes: Iterable[String]): OAuth2Credentials = { + def credentials(scopes: Iterable[String]): OAuth2Credentials = credentials(GoogleAuthMode.NoOptionLookup, scopes) - } /** * Alias for credentials(GoogleAuthMode.NoOptionLookup, Nil). * Only valid for credentials that are NOT externally provided and do not need scopes, such as ApplicationDefault. */ - private[auth] def credentials(): OAuth2Credentials = { + private[auth] def credentials(): OAuth2Credentials = credentials(GoogleAuthMode.NoOptionLookup, Nil) - } /** * Alias for credentials(options, Nil). * Only valid for credentials that are NOT externally provided and do not need scopes, such as ApplicationDefault. 
*/ - private[auth] def credentials(options: OptionLookup): OAuth2Credentials = { + private[auth] def credentials(options: OptionLookup): OAuth2Credentials = credentials(options, Nil) - } /** * Enables swapping out credential validation for various testing purposes ONLY. @@ -116,7 +108,8 @@ sealed trait GoogleAuthMode extends LazyLogging { private[auth] var credentialsValidation: CredentialsValidation = refreshCredentials protected def validateCredentials[A <: GoogleCredentials](credential: A, - scopes: Iterable[String]): GoogleCredentials = { + scopes: Iterable[String] + ): GoogleCredentials = { val scopedCredentials = credential.createScoped(scopes.asJavaCollection) Try(credentialsValidation(scopedCredentials)) match { case Failure(ex) => throw new RuntimeException(s"Google credentials are invalid: ${ex.getMessage}", ex) @@ -126,9 +119,8 @@ sealed trait GoogleAuthMode extends LazyLogging { } case class MockAuthMode(override val name: String) extends GoogleAuthMode { - override def credentials(unusedOptions: OptionLookup, unusedScopes: Iterable[String]): NoCredentials = { + override def credentials(unusedOptions: OptionLookup, unusedScopes: Iterable[String]): NoCredentials = NoCredentials.getInstance - } } object ServiceAccountMode { @@ -143,35 +135,29 @@ object ServiceAccountMode { } -final case class ServiceAccountMode(override val name: String, - fileFormat: CredentialFileFormat) - extends GoogleAuthMode { +final case class ServiceAccountMode(override val name: String, fileFormat: CredentialFileFormat) + extends GoogleAuthMode { private val credentialsFile = File(fileFormat.file) checkReadable(credentialsFile) - private lazy val serviceAccountCredentials: ServiceAccountCredentials = { + private lazy val serviceAccountCredentials: ServiceAccountCredentials = fileFormat match { case PemFileFormat(accountId, _) => logger.warn("The PEM file format will be deprecated in the upcoming Cromwell version. 
Please use JSON instead.") ServiceAccountCredentials.fromPkcs8(accountId, accountId, credentialsFile.contentAsString, null, null) case _: JsonFileFormat => ServiceAccountCredentials.fromStream(credentialsFile.newInputStream) } - } - override def credentials(unusedOptions: OptionLookup, - scopes: Iterable[String]): GoogleCredentials = { + override def credentials(unusedOptions: OptionLookup, scopes: Iterable[String]): GoogleCredentials = validateCredentials(serviceAccountCredentials, scopes) - } } final case class UserServiceAccountMode(override val name: String) extends GoogleAuthMode { - private def extractServiceAccount(options: OptionLookup): String = { + private def extractServiceAccount(options: OptionLookup): String = extract(options, UserServiceAccountKey) - } - private def credentialStream(options: OptionLookup): InputStream = { + private def credentialStream(options: OptionLookup): InputStream = new ByteArrayInputStream(extractServiceAccount(options).getBytes(StandardCharsets.UTF_8)) - } override def credentials(options: OptionLookup, scopes: Iterable[String]): GoogleCredentials = { val newCredentials = ServiceAccountCredentials.fromStream(credentialStream(options)) @@ -179,7 +165,6 @@ final case class UserServiceAccountMode(override val name: String) extends Googl } } - final case class UserMode(override val name: String, secretsPath: String) extends GoogleAuthMode { private lazy val secretsStream = { @@ -190,9 +175,8 @@ final case class UserMode(override val name: String, secretsPath: String) extend private lazy val userCredentials: UserCredentials = UserCredentials.fromStream(secretsStream) - override def credentials(unusedOptions: OptionLookup, scopes: Iterable[String]): GoogleCredentials = { + override def credentials(unusedOptions: OptionLookup, scopes: Iterable[String]): GoogleCredentials = validateCredentials(userCredentials, scopes) - } } object ApplicationDefaultMode { @@ -200,10 +184,8 @@ object ApplicationDefaultMode { } final case class ApplicationDefaultMode(name: String) extends GoogleAuthMode { - override def credentials(unusedOptions: OptionLookup, - scopes: Iterable[String]): GoogleCredentials = { + override def credentials(unusedOptions: OptionLookup, scopes: Iterable[String]): GoogleCredentials = validateCredentials(applicationDefaultCredentials, scopes) - } } sealed trait ClientSecrets { diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/gcs/GcsStorage.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/gcs/GcsStorage.scala index 29ed842377d..6ae6e88542c 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/gcs/GcsStorage.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/gcs/GcsStorage.scala @@ -18,9 +18,11 @@ object GcsStorage { val HttpTransport = GoogleNetHttpTransport.newTrustedTransport val DefaultCloudStorageConfiguration = { - val UploadBufferBytes = ConfigFactory.load().as[Option[Int]]("google.upload-buffer-bytes").getOrElse(MediaHttpUploader.MINIMUM_CHUNK_SIZE) + val UploadBufferBytes = + ConfigFactory.load().as[Option[Int]]("google.upload-buffer-bytes").getOrElse(MediaHttpUploader.MINIMUM_CHUNK_SIZE) - CloudStorageConfiguration.builder() + CloudStorageConfiguration + .builder() .blockSize(UploadBufferBytes) .permitEmptyPathComponents(true) .stripPrefixSlash(true) @@ -28,23 +30,24 @@ object GcsStorage { .build() } - def gcsStorage(applicationName: String, - storageOptions: StorageOptions): Storage = { - new Storage.Builder(HttpTransport, + def gcsStorage(applicationName: String, storageOptions: 
StorageOptions): Storage = + new Storage.Builder( + HttpTransport, JsonFactory, - GoogleConfiguration.withCustomTimeouts(TransportOptions.getHttpRequestInitializer(storageOptions))) + GoogleConfiguration.withCustomTimeouts(TransportOptions.getHttpRequestInitializer(storageOptions)) + ) .setApplicationName(applicationName) .build() - } - def gcsStorage(applicationName: String, credentials: Credentials, retrySettings: RetrySettings): Storage = { + def gcsStorage(applicationName: String, credentials: Credentials, retrySettings: RetrySettings): Storage = gcsStorage(applicationName, gcsStorageOptions(credentials, retrySettings)) - } def gcsStorageOptions(credentials: Credentials, retrySettings: RetrySettings, - project: Option[String] = None): StorageOptions = { - val storageOptionsBuilder = StorageOptions.newBuilder() + project: Option[String] = None + ): StorageOptions = { + val storageOptionsBuilder = StorageOptions + .newBuilder() .setTransportOptions(TransportOptions) .setCredentials(credentials) diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/http/GoogleHttpTransportOptions.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/http/GoogleHttpTransportOptions.scala index 13faef01e0c..f328756cf67 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/http/GoogleHttpTransportOptions.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/http/GoogleHttpTransportOptions.scala @@ -4,7 +4,8 @@ import com.google.cloud.http.HttpTransportOptions import scala.concurrent.duration._ object GoogleHttpTransportOptions { - val TransportOptions = HttpTransportOptions.newBuilder() + val TransportOptions = HttpTransportOptions + .newBuilder() .setReadTimeout(3.minutes.toMillis.toInt) .build() } diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala index 6ead310c925..27c66305466 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala @@ -39,7 +39,6 @@ import cromwell.cloudsupport.aws.auth.{AssumeRoleMode, CustomKeyMode, DefaultMod import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class AwsConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "AwsConfiguration" @@ -47,38 +46,38 @@ class AwsConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "parse all manner of well-formed auths" in { val righteousAwsConfig = s""" - |aws { - | application-name = "cromwell" - | - | auths = [ - | { - | name = "default" - | scheme = "default" - | }, - | { - | name = "custom-keys" - | scheme = "custom_keys" - | access-key = "access_key_id" - | secret-key = "secret_key" - | }, - | { - | name = "assume-role-based-on-another-with-external" - | scheme = "assume_role" - | base-auth = "default" - | role-arn = "my-role-arn" - | external-id = "my-external-id" - | }, - | { - | name = "assume-role-based-on-another" - | scheme = "assume_role" - | base-auth = "default" - | role-arn = "my-role-arn" - | } - | ] - | - | region = "region" - |} - | + |aws { + | application-name = "cromwell" + | + | auths = [ + | { + | name = "default" + | scheme = "default" + | }, + | { + | name = "custom-keys" + | scheme = "custom_keys" + | access-key = "access_key_id" + | secret-key = "secret_key" + | }, + | { + | name = 
"assume-role-based-on-another-with-external" + | scheme = "assume_role" + | base-auth = "default" + | role-arn = "my-role-arn" + | external-id = "my-external-id" + | }, + | { + | name = "assume-role-based-on-another" + | scheme = "assume_role" + | base-auth = "default" + | role-arn = "my-role-arn" + | } + | ] + | + | region = "region" + |} + | """.stripMargin val conf = AwsConfiguration(ConfigFactory.parseString(righteousAwsConfig)) @@ -146,8 +145,8 @@ class AwsConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val conf = AwsConfiguration(ConfigFactory.parseString(config)) conf.auth("name-botched") should be( - "`aws` configuration stanza does not contain an auth named 'name-botched'. Known auth names: name-default" - .invalidNel) + "`aws` configuration stanza does not contain an auth named 'name-botched'. Known auth names: name-default".invalidNel + ) } it should "not parse a configuration stanza without applicationName" in { diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/s3/S3StorageSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/s3/S3StorageSpec.scala index 5311714e89c..e50b24d928f 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/s3/S3StorageSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/s3/S3StorageSpec.scala @@ -36,7 +36,6 @@ import org.scalatest.matchers.should.Matchers import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider import software.amazon.awssdk.regions.Region - class S3StorageSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "S3Storage" diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/GoogleConfigurationSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/GoogleConfigurationSpec.scala index 95a2380034a..7ed9162a6d1 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/GoogleConfigurationSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/GoogleConfigurationSpec.scala @@ -14,7 +14,6 @@ import cromwell.cloudsupport.gcp.auth._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class GoogleConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "GoogleConfiguration" @@ -25,39 +24,39 @@ class GoogleConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with val righteousGoogleConfig = s""" - |google { - | application-name = "cromwell" - | - | auths = [ - | { - | name = "name-default" - | scheme = "application_default" - | }, - | { - | name = "name-user" - | scheme = "user_account" - | user = "me" - | secrets-file = "${pemMockFile.pathAsString}" - | data-store-dir = "/where/the/data/at" - | }, - | { - | name = "name-pem-service" - | scheme = "service_account" - | service-account-id = "my-google-account" - | pem-file = "${pemMockFile.pathAsString}" - | }, - | { - | name = "name-json-service" - | scheme = "service_account" - | json-file = "${jsonMockFile.pathAsString}" - | }, - | { - | name = "name-user-service-account" - | scheme = "user_service_account" - | } - | ] - |} - | + |google { + | application-name = "cromwell" + | + | auths = [ + | { + | name = "name-default" + | scheme = "application_default" + | }, + | { + | name = "name-user" + | scheme = "user_account" + | user = "me" + | secrets-file = "${pemMockFile.pathAsString}" + | data-store-dir = "/where/the/data/at" + | }, + | { + | name = "name-pem-service" + | scheme = "service_account" + | service-account-id = "my-google-account" + 
| pem-file = "${pemMockFile.pathAsString}" + | }, + | { + | name = "name-json-service" + | scheme = "service_account" + | json-file = "${jsonMockFile.pathAsString}" + | }, + | { + | name = "name-user-service-account" + | scheme = "user_service_account" + | } + | ] + |} + | """.stripMargin val gconf = GoogleConfiguration(ConfigFactory.parseString(righteousGoogleConfig)) @@ -125,16 +124,16 @@ class GoogleConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with val googleConfiguration = GoogleConfiguration(ConfigFactory.parseString(config)) googleConfiguration.auth("name-botched") should be( - "`google` configuration stanza does not contain an auth named 'name-botched'. Known auth names: name-default" - .invalidNel) + "`google` configuration stanza does not contain an auth named 'name-botched'. Known auth names: name-default".invalidNel + ) } it should "create an initializer with custom timeouts" in { val transport = new MockHttpTransport() - val initializer = GoogleConfiguration.withCustomTimeouts(request => { + val initializer = GoogleConfiguration.withCustomTimeouts { request => request.getHeaders.set("custom_init", "ok") () - }) + } val factory = transport.createRequestFactory(initializer) val request = factory.buildGetRequest(new GenericUrl(new URL("http://example.com"))) request.getConnectTimeout should be(180000) diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ApplicationDefaultModeSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ApplicationDefaultModeSpec.scala index bb661ce9742..dde1f82a3a5 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ApplicationDefaultModeSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ApplicationDefaultModeSpec.scala @@ -4,7 +4,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class ApplicationDefaultModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "ApplicationDefaultMode" diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthModeSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthModeSpec.scala index 368ce5d2472..bf77f65850d 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthModeSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthModeSpec.scala @@ -10,14 +10,12 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.util.{Failure, Try} - class GoogleAuthModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { behavior of "GoogleAuthMode" - private def mockHttpResponseException(statusCode: Int): HttpResponseException = { + private def mockHttpResponseException(statusCode: Int): HttpResponseException = new HttpResponseException.Builder(statusCode, "mock message", new HttpHeaders).build() - } private val testedExceptions = Table( ("description", "exception", "isFatal"), @@ -55,14 +53,13 @@ object GoogleAuthModeSpec extends ServiceAccountTestSupport { () } - lazy val userCredentialsContents: String = { + lazy val userCredentialsContents: String = toJson( "type" -> "authorized_user", "client_id" -> "the_id", "client_secret" -> "the_secret", "refresh_token" -> "the_token" ) - } lazy val refreshTokenOptions: OptionLookup = Map("refresh_token" -> "the_refresh_token") lazy val userServiceAccountOptions: OptionLookup = Map("user_service_account_json" -> 
serviceAccountJsonContents) diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/MockAuthModeSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/MockAuthModeSpec.scala index 591da3354a7..b0c89e368c5 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/MockAuthModeSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/MockAuthModeSpec.scala @@ -4,7 +4,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class MockAuthModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "MockAuthMode" diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountModeSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountModeSpec.scala index 94831b1410b..b10a823a6ca 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountModeSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountModeSpec.scala @@ -30,7 +30,7 @@ class ServiceAccountModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with M .write(serviceAccountPemContents) val serviceAccountMode = ServiceAccountMode( "service-account", - ServiceAccountMode.PemFileFormat("the_account_id", pemMockFile.pathAsString), + ServiceAccountMode.PemFileFormat("the_account_id", pemMockFile.pathAsString) ) val exception = intercept[RuntimeException](serviceAccountMode.credentials()) exception.getMessage should startWith("Google credentials are invalid: ") @@ -53,7 +53,7 @@ class ServiceAccountModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with M val exception = intercept[FileNotFoundException] { ServiceAccountMode( "service-account", - ServiceAccountMode.PemFileFormat("the_account_id", pemMockFile.pathAsString), + ServiceAccountMode.PemFileFormat("the_account_id", pemMockFile.pathAsString) ) } exception.getMessage should fullyMatch regex "File .*/service-account..*.pem does not exist or is not readable" @@ -79,7 +79,7 @@ class ServiceAccountModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with M .write(serviceAccountPemContents) val serviceAccountMode = ServiceAccountMode( "service-account", - ServiceAccountMode.PemFileFormat("the_account_id", pemMockFile.pathAsString), + ServiceAccountMode.PemFileFormat("the_account_id", pemMockFile.pathAsString) ) serviceAccountMode.credentialsValidation = GoogleAuthMode.NoCredentialsValidation val credentials = serviceAccountMode.credentials() diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountTestSupport.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountTestSupport.scala index 1624674b875..a87df58e21d 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountTestSupport.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/ServiceAccountTestSupport.scala @@ -23,7 +23,7 @@ trait ServiceAccountTestSupport { // Hide me from git secrets false positives private val theStringThatShallNotBeNamed = List("private", "key").mkString("_") - def serviceAccountJsonContents: String = { + def serviceAccountJsonContents: String = toJson( "type" -> "service_account", "client_id" -> "the_account_id", @@ -31,7 +31,6 @@ trait ServiceAccountTestSupport { theStringThatShallNotBeNamed -> serviceAccountPemContents, s"${theStringThatShallNotBeNamed}_id" -> "the_key_id" ) - } def toJson(contents: (String, String)*): String = { // 
Generator doesn't matter as long as it generates JSON. Using `jsonFactory` to get an extra line hit of coverage. @@ -40,10 +39,9 @@ trait ServiceAccountTestSupport { val generator = factory.createJsonGenerator(writer) generator.enablePrettyPrint() generator.writeStartObject() - contents foreach { - case (key, value) => - generator.writeFieldName(key) - generator.writeString(value) + contents foreach { case (key, value) => + generator.writeFieldName(key) + generator.writeString(value) } generator.writeEndObject() generator.close() diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserModeSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserModeSpec.scala index cfc22b0ca9b..95a64ee1e6c 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserModeSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserModeSpec.scala @@ -7,7 +7,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class UserModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "UserMode" diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserServiceAccountModeSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserServiceAccountModeSpec.scala index 70a1f470d32..092072f7511 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserServiceAccountModeSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/auth/UserServiceAccountModeSpec.scala @@ -4,7 +4,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class UserServiceAccountModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "UserServiceAccountMode" diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/gcs/GcsStorageSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/gcs/GcsStorageSpec.scala index 558c9b74c4a..1f673acec67 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/gcs/GcsStorageSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/gcp/gcs/GcsStorageSpec.scala @@ -6,7 +6,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class GcsStorageSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "GcsStorage" @@ -19,7 +18,8 @@ class GcsStorageSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } it should "build gcs storage" in { - val configuration = GcsStorage.gcsStorage("gcs-storage-spec", NoCredentials.getInstance(), RetrySettings.newBuilder().build()) + val configuration = + GcsStorage.gcsStorage("gcs-storage-spec", NoCredentials.getInstance(), RetrySettings.newBuilder().build()) configuration.getApplicationName should be("gcs-storage-spec") } diff --git a/codegen_java/project/Artifactory.scala b/codegen_java/project/Artifactory.scala index a13c9cc8c21..6d385ffbbc7 100644 --- a/codegen_java/project/Artifactory.scala +++ b/codegen_java/project/Artifactory.scala @@ -1,4 +1,4 @@ object Artifactory { val artifactoryHost = "broadinstitute.jfrog.io" val artifactory = s"https://$artifactoryHost/broadinstitute/" -} \ No newline at end of file +} diff --git a/codegen_java/project/Publishing.scala b/codegen_java/project/Publishing.scala index 6c3de9881fc..880cf40f1ff 100644 --- a/codegen_java/project/Publishing.scala 
+++ b/codegen_java/project/Publishing.scala @@ -2,10 +2,10 @@ import sbt.Keys._ import sbt._ import Artifactory._ - object Publishing { +object Publishing { private val buildTimestamp = System.currentTimeMillis() / 1000 - private def artifactoryResolver(isSnapshot: Boolean): Resolver = { + private def artifactoryResolver(isSnapshot: Boolean): Resolver = { val repoType = if (isSnapshot) "snapshot" else "release" val repoUrl = s"${artifactory}libs-$repoType-local;build.timestamp=$buildTimestamp" @@ -13,15 +13,15 @@ import Artifactory._ repoName at repoUrl } - private val artifactoryCredentials: Credentials = { + private val artifactoryCredentials: Credentials = { val username = sys.env.getOrElse("ARTIFACTORY_USERNAME", "") val password = sys.env.getOrElse("ARTIFACTORY_PASSWORD", "") Credentials("Artifactory Realm", artifactoryHost, username, password) } - val publishSettings: Seq[Setting[_]] = - //we only publish to libs-release-local because of a bug in sbt that makes snapshots take - //priority over the local package cache. see here: https://github.com/sbt/sbt/issues/2687#issuecomment-236586241 + val publishSettings: Seq[Setting[_]] = + // we only publish to libs-release-local because of a bug in sbt that makes snapshots take + // priority over the local package cache. see here: https://github.com/sbt/sbt/issues/2687#issuecomment-236586241 Seq( publishTo := Option(artifactoryResolver(false)), Compile / publishArtifact := true, @@ -29,9 +29,9 @@ import Artifactory._ credentials += artifactoryCredentials ) - val noPublishSettings: Seq[Setting[_]] = + val noPublishSettings: Seq[Setting[_]] = Seq( publish := {}, publishLocal := {} ) -} \ No newline at end of file +} diff --git a/codegen_java/project/Version.scala b/codegen_java/project/Version.scala index 875a80e2e10..be7c57bc527 100644 --- a/codegen_java/project/Version.scala +++ b/codegen_java/project/Version.scala @@ -1,20 +1,20 @@ import scala.sys.process._ - object Version { +object Version { - def createVersion(baseVersion: String) = { - def getLastCommitFromGit = { s"""git rev-parse --short HEAD""" !! } + def createVersion(baseVersion: String) = { + def getLastCommitFromGit = s"""git rev-parse --short HEAD""" !! - // either specify git hash as an env var or derive it + // either specify git hash as an env var or derive it // if building from the broadinstitute/scala-baseimage docker image use env var // (scala-baseimage doesn't have git in it) - val lastCommit = sys.env.getOrElse("GIT_HASH", getLastCommitFromGit ).trim() + val lastCommit = sys.env.getOrElse("GIT_HASH", getLastCommitFromGit).trim() val version = baseVersion + "-" + lastCommit - // The project isSnapshot string passed in via command line settings, if desired. + // The project isSnapshot string passed in via command line settings, if desired. 
val isSnapshot = sys.props.getOrElse("project.isSnapshot", "true").toBoolean - // For now, obfuscate SNAPSHOTs from sbt's developers: https://github.com/sbt/sbt/issues/2687#issuecomment-236586241 + // For now, obfuscate SNAPSHOTs from sbt's developers: https://github.com/sbt/sbt/issues/2687#issuecomment-236586241 if (isSnapshot) s"$version-SNAP" else version } -} \ No newline at end of file +} diff --git a/common/src/main/scala/common/collections/EnhancedCollections.scala b/common/src/main/scala/common/collections/EnhancedCollections.scala index beede1f9510..e1eb983029c 100644 --- a/common/src/main/scala/common/collections/EnhancedCollections.scala +++ b/common/src/main/scala/common/collections/EnhancedCollections.scala @@ -15,7 +15,10 @@ object EnhancedCollections { * After trying and failing to do this myself, I got this to work by copying the answer from here: * https://stackoverflow.com/questions/29886246/scala-filter-by-type */ - implicit class EnhancedIterableOps[T2, Repr[x] <: IterableOps[x, Repr, Repr[x]]](val iterableOps: IterableOps[T2, Repr, Repr[T2]]) extends AnyVal { + implicit class EnhancedIterableOps[T2, Repr[x] <: IterableOps[x, Repr, Repr[x]]]( + val iterableOps: IterableOps[T2, Repr, Repr[T2]] + ) extends AnyVal { + /** * Lets you filter a collection by type. * @@ -58,27 +61,29 @@ object EnhancedCollections { def takeWhileWeighted[W](maxWeight: W, weightFunction: A => W, maxHeadLength: Option[Int], - strict: Boolean = false) - (implicit n: Numeric[W], c: Ordering[W]): DeQueued[A] = { + strict: Boolean = false + )(implicit n: Numeric[W], c: Ordering[W]): DeQueued[A] = { import n._ @tailrec - def takeWhileWeightedRec(tail: Queue[A], head: Vector[A], weight: W): (Vector[A], Queue[A]) = { + def takeWhileWeightedRec(tail: Queue[A], head: Vector[A], weight: W): (Vector[A], Queue[A]) = // Stay under maxHeadLength if it's specified if (maxHeadLength.exists(head.size >= _)) head -> tail - else tail.dequeueOption - .map({ - // Compute the dequeued element's weight - case (element, dequeued) => (element, weightFunction(element), dequeued) - }) match { - // If the element's weight is > maxWeight and strict is true, drop the element - case Some((_, elementWeight, dequeued)) if c.gteq(elementWeight, maxWeight) && strict => takeWhileWeightedRec(dequeued, head, weight) - // If we're under the max weight, add the element to the head and recurse - case Some((element, elementWeight, dequeued)) if c.lteq(elementWeight + weight, maxWeight) => takeWhileWeightedRec(dequeued, head :+ element, weight + elementWeight) - // Otherwise stop here (make sure to return the original queue so we don't lose the last dequeued element) - case _ => head -> tail - } - } + else + tail.dequeueOption + .map { + // Compute the dequeued element's weight + case (element, dequeued) => (element, weightFunction(element), dequeued) + } match { + // If the element's weight is > maxWeight and strict is true, drop the element + case Some((_, elementWeight, dequeued)) if c.gteq(elementWeight, maxWeight) && strict => + takeWhileWeightedRec(dequeued, head, weight) + // If we're under the max weight, add the element to the head and recurse + case Some((element, elementWeight, dequeued)) if c.lteq(elementWeight + weight, maxWeight) => + takeWhileWeightedRec(dequeued, head :+ element, weight + elementWeight) + // Otherwise stop here (make sure to return the original queue so we don't lose the last dequeued element) + case _ => head -> tail + } if (queue.isEmpty || maxHeadLength.contains(0)) DeQueued(Vector.empty, queue) // If 
strict is enabled, we should never return a head with a weight > maxWeight. So start from the original queue and drop elements over maxWeight if necessary @@ -88,13 +93,17 @@ object EnhancedCollections { } // Otherwise to ensure we don't deadlock, start the recursion with the head of the queue, this way even if it's over maxWeight it'll return a single element head else { - val (head, tail) = takeWhileWeightedRec(queue.tail, queue.headOption.toVector, queue.headOption.map(weightFunction).getOrElse(n.zero)) + val (head, tail) = takeWhileWeightedRec(queue.tail, + queue.headOption.toVector, + queue.headOption.map(weightFunction).getOrElse(n.zero) + ) DeQueued(head, tail) } } } implicit class EnhancedMapLike[A, +B, +This <: Map[A, B]](val mapLike: Map[A, B]) { + /** * 'safe' in that unlike the implementation hiding behind `MapLike#mapValues` this is strict. i.e. this will only * evaluate the supplied function once on each value and at the time this method is called. @@ -104,17 +113,15 @@ object EnhancedCollections { /** * Based on scalaz's intersectWith, applies `f` to values of keys found in this `mapLike` and map */ - def intersectWith[C, D](map: Map[A, C])(f: (B, C) => D): Map[A, D] = { + def intersectWith[C, D](map: Map[A, C])(f: (B, C) => D): Map[A, D] = mapLike collect { case (mapLikeKey, mapLikeValue) if map.contains(mapLikeKey) => mapLikeKey -> f(mapLikeValue, map(mapLikeKey)) } - } } implicit class EnhancedNonEmptyList[A](val nel: NonEmptyList[A]) extends AnyVal { - def foreach(f: A => Unit): Unit = { + def foreach(f: A => Unit): Unit = nel.toList foreach f - } } } diff --git a/common/src/main/scala/common/collections/Table.scala b/common/src/main/scala/common/collections/Table.scala index 57c1b3f4a57..3f8b262bfb1 100644 --- a/common/src/main/scala/common/collections/Table.scala +++ b/common/src/main/scala/common/collections/Table.scala @@ -3,6 +3,7 @@ package common.collections import scala.collection.immutable object Table { + /** * Instantiates an empty table */ @@ -22,7 +23,7 @@ object Table { * @tparam V type of the value */ case class Table[R, C, V](table: Map[R, Map[C, V]]) { - + /** * Returns true if the table contains a value at row / column */ @@ -51,18 +52,16 @@ case class Table[R, C, V](table: Map[R, Map[C, V]]) { /** * Add a value at row / column */ - def add(row: R, column: C, value: V): Table[R, C, V] = { + def add(row: R, column: C, value: V): Table[R, C, V] = this.copy( table = table.updated(row, table.getOrElse(row, Map.empty).updated(column, value)) ) - } /** * Add all values */ - def addAll(values: Iterable[(R, C, V)]): Table[R, C, V] = { + def addAll(values: Iterable[(R, C, V)]): Table[R, C, V] = values.foldLeft(this)(_.addTriplet(_)) - } /** * Add a value as a triplet diff --git a/common/src/main/scala/common/collections/WeightedQueue.scala b/common/src/main/scala/common/collections/WeightedQueue.scala index 1ba5d869a64..fecb857db9b 100644 --- a/common/src/main/scala/common/collections/WeightedQueue.scala +++ b/common/src/main/scala/common/collections/WeightedQueue.scala @@ -4,9 +4,8 @@ import common.collections.EnhancedCollections._ import scala.collection.immutable.Queue object WeightedQueue { - def empty[T, W](weightFunction: T => W)(implicit n: Numeric[W]) = { + def empty[T, W](weightFunction: T => W)(implicit n: Numeric[W]) = WeightedQueue(Queue.empty[T], weightFunction, n.zero) - } } /** @@ -14,27 +13,24 @@ object WeightedQueue { * In addition to the queue, a weight function is provided that provides the weight W of an element T. 
* The total weight of the queue is accessible, as well as a method to take the head of the queue based on a max weight value. */ -final case class WeightedQueue[T, W](innerQueue: Queue[T], - private val weightFunction: T => W, - weight: W)(implicit n: Numeric[W]) { +final case class WeightedQueue[T, W](innerQueue: Queue[T], private val weightFunction: T => W, weight: W)(implicit + n: Numeric[W] +) { import n._ - def enqueue(element: T): WeightedQueue[T, W] = { + def enqueue(element: T): WeightedQueue[T, W] = this.copy(innerQueue = innerQueue.enqueue(element), weight = weight + weightFunction(element)) - } def dequeue: (T, WeightedQueue[T, W]) = { val (element, tail) = innerQueue.dequeue element -> this.copy(innerQueue = tail, weight = weight - weightFunction(element)) } - def dequeueOption: Option[(T, WeightedQueue[T, W])] = { - innerQueue.dequeueOption map { - case (element, tail) => - element -> this.copy(innerQueue = tail, weight = weight - weightFunction(element)) + def dequeueOption: Option[(T, WeightedQueue[T, W])] = + innerQueue.dequeueOption map { case (element, tail) => + element -> this.copy(innerQueue = tail, weight = weight - weightFunction(element)) } - } - + def behead(maxWeight: W, maxLength: Option[Int] = None, strict: Boolean = false): (Vector[T], WeightedQueue[T, W]) = { val DeQueued(head, tail) = innerQueue.takeWhileWeighted(maxWeight, weightFunction, maxLength, strict) head -> this.copy(innerQueue = tail, weight = weight - head.map(weightFunction).sum) diff --git a/common/src/main/scala/common/exception/ExceptionAggregation.scala b/common/src/main/scala/common/exception/ExceptionAggregation.scala index bd7f030331c..94059e94f50 100644 --- a/common/src/main/scala/common/exception/ExceptionAggregation.scala +++ b/common/src/main/scala/common/exception/ExceptionAggregation.scala @@ -8,12 +8,11 @@ import common.exception.Aggregation._ import scala.annotation.tailrec object Aggregation { - def formatMessageWithList(message: String, list: Iterable[String]) = { + def formatMessageWithList(message: String, list: Iterable[String]) = if (list.nonEmpty) { val messages = s"\n${list.mkString("\n")}" s"$message:$messages" } else message - } def flattenThrowable(throwable: Throwable) = { @tailrec @@ -57,12 +56,12 @@ trait ThrowableAggregation extends MessageAggregation { override def errorMessages = throwables map buildMessage private def buildMessage(t: Throwable): String = t match { - // The message for file not found exception only contains the file name, so add the actual reason + // The message for file not found exception only contains the file name, so add the actual reason case _: FileNotFoundException | _: NoSuchFileException => s"File not found ${t.getMessage}" case aggregation: ThrowableAggregation => formatMessageWithList(aggregation.exceptionContext, aggregation.throwables.map(buildMessage).map("\t" + _)) case other => - val cause = Option(other.getCause) map { c => s"\n\t${buildMessage(c)}" } getOrElse "" + val cause = Option(other.getCause) map { c => s"\n\t${buildMessage(c)}" } getOrElse "" s"${other.getMessage}$cause" } } @@ -70,6 +69,14 @@ trait ThrowableAggregation extends MessageAggregation { /** * Generic convenience case class for aggregated exceptions. 
*/ -case class AggregatedException(exceptionContext: String, throwables: Iterable[Throwable]) extends Exception with ThrowableAggregation -case class AggregatedMessageException(exceptionContext: String, errorMessages: Iterable[String]) extends Exception with MessageAggregation -case class CompositeException(exceptionContext: String, throwables: Iterable[Throwable], override val errorMessages: Iterable[String]) extends Exception with ThrowableAggregation +case class AggregatedException(exceptionContext: String, throwables: Iterable[Throwable]) + extends Exception + with ThrowableAggregation +case class AggregatedMessageException(exceptionContext: String, errorMessages: Iterable[String]) + extends Exception + with MessageAggregation +case class CompositeException(exceptionContext: String, + throwables: Iterable[Throwable], + override val errorMessages: Iterable[String] +) extends Exception + with ThrowableAggregation diff --git a/common/src/main/scala/common/exception/package.scala b/common/src/main/scala/common/exception/package.scala index dccd49f2643..48d3dd43fb3 100644 --- a/common/src/main/scala/common/exception/package.scala +++ b/common/src/main/scala/common/exception/package.scala @@ -4,7 +4,6 @@ import cats.effect.IO package object exception { - def toIO[A](option: Option[A], errorMsg: String): IO[A] = { + def toIO[A](option: Option[A], errorMsg: String): IO[A] = IO.fromEither(option.toRight(new RuntimeException(errorMsg))) - } } diff --git a/common/src/main/scala/common/numeric/IntegerUtil.scala b/common/src/main/scala/common/numeric/IntegerUtil.scala index 4c33c5ace64..e24e478d400 100644 --- a/common/src/main/scala/common/numeric/IntegerUtil.scala +++ b/common/src/main/scala/common/numeric/IntegerUtil.scala @@ -2,14 +2,13 @@ package common.numeric object IntegerUtil { - private def ordinal(int: Int): String = { + private def ordinal(int: Int): String = int match { case 1 => "st" case 2 => "nd" case 3 => "rd" case _ => "th" } - } implicit class IntEnhanced(val value: Int) extends AnyVal { def toOrdinal: String = value match { @@ -19,9 +18,8 @@ object IntegerUtil { s"$v$suffix" } - def isBetweenInclusive(min: Int, max: Int): Boolean = { + def isBetweenInclusive(min: Int, max: Int): Boolean = min <= value && value <= max - } } } diff --git a/common/src/main/scala/common/transforms/package.scala b/common/src/main/scala/common/transforms/package.scala index 028f426d54a..112fb22d682 100644 --- a/common/src/main/scala/common/transforms/package.scala +++ b/common/src/main/scala/common/transforms/package.scala @@ -12,30 +12,42 @@ package object transforms { object CheckedAtoB { def apply[A, B](implicit runner: CheckedAtoB[A, B]): CheckedAtoB[A, B] = runner def fromCheck[A, B](run: A => Checked[B]): CheckedAtoB[A, B] = Kleisli(run) - def fromCheck[A, B](context: String)(run: A => Checked[B]): CheckedAtoB[A, B] = Kleisli(runCheckWithContext(run, _ => context)) - def fromCheck[A, B](context: A => String)(run: A => Checked[B]): CheckedAtoB[A, B] = Kleisli(runCheckWithContext(run, context)) + def fromCheck[A, B](context: String)(run: A => Checked[B]): CheckedAtoB[A, B] = Kleisli( + runCheckWithContext(run, _ => context) + ) + def fromCheck[A, B](context: A => String)(run: A => Checked[B]): CheckedAtoB[A, B] = Kleisli( + runCheckWithContext(run, context) + ) def fromErrorOr[A, B](run: A => ErrorOr[B]): CheckedAtoB[A, B] = Kleisli(runThenCheck(run)) - def fromErrorOr[A, B](context: String)(run: A => ErrorOr[B]): CheckedAtoB[A, B] = Kleisli(runErrorOrWithContext(run, _ => context)) - def 
fromErrorOr[A, B](context: A => String)(run: A => ErrorOr[B]): CheckedAtoB[A, B] = Kleisli(runErrorOrWithContext(run, context)) - private def runThenCheck[A, B](run: A => ErrorOr[B]): A => Checked[B] = (a: A) => { run(a).toEither } - private def runErrorOrWithContext[A, B](run: A => ErrorOr[B], context: A => String): A => Checked[B] = (a: A) => { run(a).toEither.contextualizeErrors(context(a)) } - private def runCheckWithContext[A, B](run: A => Checked[B], context: A => String): A => Checked[B] = (a: A) => { run(a).contextualizeErrors(context(a)) } + def fromErrorOr[A, B](context: String)(run: A => ErrorOr[B]): CheckedAtoB[A, B] = Kleisli( + runErrorOrWithContext(run, _ => context) + ) + def fromErrorOr[A, B](context: A => String)(run: A => ErrorOr[B]): CheckedAtoB[A, B] = Kleisli( + runErrorOrWithContext(run, context) + ) + private def runThenCheck[A, B](run: A => ErrorOr[B]): A => Checked[B] = (a: A) => run(a).toEither + private def runErrorOrWithContext[A, B](run: A => ErrorOr[B], context: A => String): A => Checked[B] = (a: A) => + run(a).toEither.contextualizeErrors(context(a)) + private def runCheckWithContext[A, B](run: A => Checked[B], context: A => String): A => Checked[B] = (a: A) => + run(a).contextualizeErrors(context(a)) - def firstSuccess[A, B](options: List[CheckedAtoB[A, B]], operationName: String): CheckedAtoB[A, B] = Kleisli[Checked, A, B] { a => - if (options.isEmpty) { - s"Unable to $operationName: No import resolvers provided".invalidNelCheck - } else { - val firstAttempt = options.head.run(a) - options.tail.foldLeft[Checked[B]](firstAttempt) { (currentResult, nextOption) => - currentResult match { - case v: Right[_, _] => v - case Left(currentErrors) => nextOption.run(a) match { + def firstSuccess[A, B](options: List[CheckedAtoB[A, B]], operationName: String): CheckedAtoB[A, B] = + Kleisli[Checked, A, B] { a => + if (options.isEmpty) { + s"Unable to $operationName: No import resolvers provided".invalidNelCheck + } else { + val firstAttempt = options.head.run(a) + options.tail.foldLeft[Checked[B]](firstAttempt) { (currentResult, nextOption) => + currentResult match { case v: Right[_, _] => v - case Left(newErrors) => Left(currentErrors ++ newErrors.toList) + case Left(currentErrors) => + nextOption.run(a) match { + case v: Right[_, _] => v + case Left(newErrors) => Left(currentErrors ++ newErrors.toList) + } } } } } - } } } diff --git a/common/src/main/scala/common/util/Backoff.scala b/common/src/main/scala/common/util/Backoff.scala index f748785eb10..747508083aa 100644 --- a/common/src/main/scala/common/util/Backoff.scala +++ b/common/src/main/scala/common/util/Backoff.scala @@ -3,8 +3,10 @@ package common.util import scala.concurrent.duration.FiniteDuration trait Backoff { + /** Next interval in millis */ def backoffMillis: Long + /** Get the next instance of backoff. 
This should be called after every call to backoffMillis */ def next: Backoff } diff --git a/common/src/main/scala/common/util/IORetry.scala b/common/src/main/scala/common/util/IORetry.scala index a8a7a63a209..db983fa7711 100644 --- a/common/src/main/scala/common/util/IORetry.scala +++ b/common/src/main/scala/common/util/IORetry.scala @@ -7,7 +7,7 @@ import scala.util.control.NonFatal object IORetry { def noOpOnRetry[S]: (Throwable, S) => S = (_, s) => s - + object StatefulIoError { def noop[S] = new StatefulIoError[S] { override def toThrowable(state: S, throwable: Throwable) = throwable @@ -35,8 +35,8 @@ object IORetry { backoff: Backoff, isRetryable: Throwable => Boolean = throwableToTrue, isInfinitelyRetryable: Throwable => Boolean = throwableToFalse, - onRetry: (Throwable, S) => S = noOpOnRetry[S]) - (implicit timer: Timer[IO], statefulIoException: StatefulIoError[S]): IO[A] = { + onRetry: (Throwable, S) => S = noOpOnRetry[S] + )(implicit timer: Timer[IO], statefulIoException: StatefulIoError[S]): IO[A] = { lazy val delay = backoff.backoffMillis.millis def fail(throwable: Throwable) = IO.raiseError(statefulIoException.toThrowable(state, throwable)) @@ -49,10 +49,16 @@ object IORetry { if (retriesLeft.forall(_ > 0)) { for { _ <- IO.sleep(delay) - retried <- withRetry(io, onRetry(throwable, state), retriesLeft, backoff.next, isRetryable, isInfinitelyRetryable, onRetry) + retried <- withRetry(io, + onRetry(throwable, state), + retriesLeft, + backoff.next, + isRetryable, + isInfinitelyRetryable, + onRetry + ) } yield retried - } - else fail(throwable) + } else fail(throwable) case fatal => throw fatal } diff --git a/common/src/main/scala/common/util/IntrospectableLazy.scala b/common/src/main/scala/common/util/IntrospectableLazy.scala index e578f49c1a1..7fe05dd1f4c 100644 --- a/common/src/main/scala/common/util/IntrospectableLazy.scala +++ b/common/src/main/scala/common/util/IntrospectableLazy.scala @@ -21,20 +21,21 @@ object IntrospectableLazy { } -class IntrospectableLazy[A] private(f: => A) { +class IntrospectableLazy[A] private (f: => A) { private var option: Option[A] = None - def apply(): A = { + def apply(): A = option match { case Some(a) => a case None => - synchronized { option match { - case Some(a) => a - case None => val a = f; option = Some(a); a - }} + synchronized { + option match { + case Some(a) => a + case None => val a = f; option = Some(a); a + } + } } - } def exists: Boolean = option.isDefined diff --git a/common/src/main/scala/common/util/StringUtil.scala b/common/src/main/scala/common/util/StringUtil.scala index 044ef890dea..2ddce884112 100644 --- a/common/src/main/scala/common/util/StringUtil.scala +++ b/common/src/main/scala/common/util/StringUtil.scala @@ -64,11 +64,10 @@ object StringUtil { */ def relativeDirectory: String = string.ensureNoLeadingSlash.ensureSlashed - def elided(limit: Int): String = { + def elided(limit: Int): String = if (string.length > limit) { s"(elided) ${string.take(limit)}..." } else string - } /** * Removes userInfo and sensitive query parts from strings that are RFC 2396 URIs. 
@@ -84,10 +83,9 @@ object StringUtil { * - the StringUtilSpec for current expectations * - https://stackoverflow.com/questions/4571346/how-to-encode-url-to-avoid-special-characters-in-java#answer-4571518 */ - def maskSensitiveUri: String = { + def maskSensitiveUri: String = Try(new URI(string)) .map(_.maskSensitive.toASCIIString) .getOrElse(string) - } } } diff --git a/common/src/main/scala/common/util/TerminalUtil.scala b/common/src/main/scala/common/util/TerminalUtil.scala index cb7c980aecb..33e30f612df 100644 --- a/common/src/main/scala/common/util/TerminalUtil.scala +++ b/common/src/main/scala/common/util/TerminalUtil.scala @@ -1,15 +1,15 @@ package common.util object TerminalUtil { - def highlight(colorCode:Int, string:String) = s"\u001B[38;5;${colorCode}m$string\u001B[0m" + def highlight(colorCode: Int, string: String) = s"\u001B[38;5;${colorCode}m$string\u001B[0m" def mdTable(rows: Seq[Seq[String]], header: Seq[String]): String = { - def maxWidth(lengths: Seq[Seq[Int]], column: Int) = lengths.map { length => length(column) }.max - val widths = (rows :+ header).map { row => row.map { s => s.length } } - val maxWidths = widths.head.indices.map { column => maxWidth(widths, column) } - val tableHeader = header.indices.map { i => header(i).padTo(maxWidths(i), ' ').mkString("") }.mkString("|") - val tableDivider = header.indices.map { i => "-" * maxWidths(i) }.mkString("|") + def maxWidth(lengths: Seq[Seq[Int]], column: Int) = lengths.map(length => length(column)).max + val widths = (rows :+ header).map(row => row.map(s => s.length)) + val maxWidths = widths.head.indices.map(column => maxWidth(widths, column)) + val tableHeader = header.indices.map(i => header(i).padTo(maxWidths(i), ' ').mkString("")).mkString("|") + val tableDivider = header.indices.map(i => "-" * maxWidths(i)).mkString("|") val tableRows = rows.map { row => - val mdRow = row.indices.map { i => row(i).padTo(maxWidths(i), ' ').mkString("") }.mkString("|") + val mdRow = row.indices.map(i => row(i).padTo(maxWidths(i), ' ').mkString("")).mkString("|") s"|$mdRow|" } s"|$tableHeader|\n|$tableDivider|\n${tableRows.mkString("\n")}\n" diff --git a/common/src/main/scala/common/util/TimeUtil.scala b/common/src/main/scala/common/util/TimeUtil.scala index dbd94c99b80..8c64dd341ce 100644 --- a/common/src/main/scala/common/util/TimeUtil.scala +++ b/common/src/main/scala/common/util/TimeUtil.scala @@ -4,6 +4,7 @@ import java.time.format.DateTimeFormatter import java.time.{OffsetDateTime, ZoneOffset} object TimeUtil { + /** * Instead of "one of" the valid ISO-8601 formats, standardize on this one: * https://github.com/openjdk/jdk/blob/jdk8-b120/jdk/src/share/classes/java/time/OffsetDateTime.java#L1886 @@ -11,12 +12,15 @@ object TimeUtil { private val Iso8601MillisecondsFormat = DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm:ss.SSSXXXXX") implicit class EnhancedOffsetDateTime(val offsetDateTime: OffsetDateTime) extends AnyVal { + /** * Discards the original timezone and shifts the time to UTC, then returns the ISO-8601 formatted string with * exactly three digits of milliseconds. 
*/ - def toUtcMilliString: String = Option(offsetDateTime).map( - _.atZoneSameInstant(ZoneOffset.UTC).format(Iso8601MillisecondsFormat) - ).orNull + def toUtcMilliString: String = Option(offsetDateTime) + .map( + _.atZoneSameInstant(ZoneOffset.UTC).format(Iso8601MillisecondsFormat) + ) + .orNull } } diff --git a/common/src/main/scala/common/util/TryUtil.scala b/common/src/main/scala/common/util/TryUtil.scala index e8edf01f8e5..02a90a5e983 100644 --- a/common/src/main/scala/common/util/TryUtil.scala +++ b/common/src/main/scala/common/util/TryUtil.scala @@ -22,14 +22,13 @@ object TryUtil { def stringifyFailures[T](possibleFailures: Iterable[Try[T]]): Iterable[String] = possibleFailures.collect { case failure: Failure[T] => stringifyFailure(failure) } - private def sequenceIterable[T](tries: Iterable[Try[_]], unbox: () => T, prefixErrorMessage: String): Try[T] = { + private def sequenceIterable[T](tries: Iterable[Try[_]], unbox: () => T, prefixErrorMessage: String): Try[T] = tries collect { case f: Failure[_] => f } match { case failures if failures.nonEmpty => val exceptions = failures.toSeq.map(_.exception) Failure(AggregatedException(prefixErrorMessage, exceptions.toList)) case _ => Success(unbox()) } - } def sequence[T](tries: Seq[Try[T]], prefixErrorMessage: String = ""): Try[Seq[T]] = { def unbox = tries map { _.get } diff --git a/common/src/main/scala/common/util/UriUtil.scala b/common/src/main/scala/common/util/UriUtil.scala index eb8c125768c..24625be1edb 100644 --- a/common/src/main/scala/common/util/UriUtil.scala +++ b/common/src/main/scala/common/util/UriUtil.scala @@ -15,20 +15,19 @@ object UriUtil { * - the StringUtilSpec for current expectations * - https://stackoverflow.com/questions/4571346/how-to-encode-url-to-avoid-special-characters-in-java#answer-4571518 */ - def maskSensitive: URI = { + def maskSensitive: URI = Try { - new URI( - uri.getScheme, - null, // Remove all userInfo - uri.getHost, - uri.getPort, - uri.getPath, - Option(uri.getQuery).map(maskSensitiveQuery).orNull, - uri.getFragment, - ) + new URI( + uri.getScheme, + null, // Remove all userInfo + uri.getHost, + uri.getPort, + uri.getPath, + Option(uri.getQuery).map(maskSensitiveQuery).orNull, + uri.getFragment + ) } - .getOrElse(uri) - } + .getOrElse(uri) } private def maskSensitiveQuery(query: String): String = { @@ -85,7 +84,7 @@ object UriUtil { private val SensitiveKeyParts = List( "credential", - "signature", + "signature" ) private def isSensitiveKey(name: String): Boolean = { diff --git a/common/src/main/scala/common/util/VersionUtil.scala b/common/src/main/scala/common/util/VersionUtil.scala index 3ddea0750d5..fcbffca77a4 100644 --- a/common/src/main/scala/common/util/VersionUtil.scala +++ b/common/src/main/scala/common/util/VersionUtil.scala @@ -35,19 +35,17 @@ object VersionUtil { * @param default What to return when the version cannot be found. The parameter passed is the `projectName`. * @return The version from the conf or the default */ - def getVersion(projectName: String, default: String => String = defaultMessage): String = { + def getVersion(projectName: String, default: String => String = defaultMessage): String = ConfigFactory .load(versionConf(projectName)) .as[Option[String]](versionProperty(projectName)) .getOrElse(default(projectName)) - } /** * Instead of returning a version, states that the version conf will be generated by sbt. 
*/ - def defaultMessage(projectName: String): String = { + def defaultMessage(projectName: String): String = s"${versionConf(projectName)}-to-be-generated-by-sbt" - } /** * A regex compatible with the dependency constants in project/Dependencies.scala. @@ -62,7 +60,7 @@ object VersionUtil { * @return The dependency version from project/Dependencies.scala * @throws RuntimeException If the dependency cannot be found */ - def sbtDependencyVersion(dependencyName: String)(projectName: String): String = { + def sbtDependencyVersion(dependencyName: String)(projectName: String): String = try { val dependencies = Paths.get("project/Dependencies.scala").toAbsolutePath val lines = Files.readAllLines(dependencies).asScala @@ -79,6 +77,5 @@ object VersionUtil { e ) } - } } diff --git a/common/src/main/scala/common/validation/ErrorOr.scala b/common/src/main/scala/common/validation/ErrorOr.scala index 4be2aa633c9..3afdfcc5fb7 100644 --- a/common/src/main/scala/common/validation/ErrorOr.scala +++ b/common/src/main/scala/common/validation/ErrorOr.scala @@ -12,9 +12,8 @@ import scala.util.Try object ErrorOr { type ErrorOr[+A] = Validated[NonEmptyList[String], A] - def apply[A](f: => A): ErrorOr[A] = { + def apply[A](f: => A): ErrorOr[A] = Try(f).toErrorOr - } implicit class EnhancedErrorOr[A](val eoa: ErrorOr[A]) extends AnyVal { def contextualizeErrors(s: => String): ErrorOr[A] = eoa.leftMap { errors => @@ -31,17 +30,17 @@ object ErrorOr { } implicit class ShortCircuitingFlatMap[A](val fa: ErrorOr[A]) extends AnyVal { + /** * Not consistent with `Applicative#ap` but useful in for comprehensions. * * @see http://typelevel.org/cats/tut/validated.html#of-flatmaps-and-xors */ - def flatMap[B](f: A => ErrorOr[B]): ErrorOr[B] = { + def flatMap[B](f: A => ErrorOr[B]): ErrorOr[B] = fa match { case Valid(v) => ErrorOr(f(v)).flatten case i @ Invalid(_) => i } - } } implicit class MapErrorOrRhs[A, B](val m: Map[A, ErrorOr[B]]) extends AnyVal { @@ -49,8 +48,8 @@ object ErrorOr { } implicit class MapTraversal[A, B](val m: Map[A, B]) extends AnyVal { - def traverse[C,D](f: ((A,B)) => ErrorOr[(C,D)]): ErrorOr[Map[C,D]] = m.toList.traverse(f).map(_.toMap) - def traverseValues[C](f: B => ErrorOr[C]): ErrorOr[Map[A,C]] = m.traverse { case (a, b) => f(b).map(c => (a,c)) } + def traverse[C, D](f: ((A, B)) => ErrorOr[(C, D)]): ErrorOr[Map[C, D]] = m.toList.traverse(f).map(_.toMap) + def traverseValues[C](f: B => ErrorOr[C]): ErrorOr[Map[A, C]] = m.traverse { case (a, b) => f(b).map(c => (a, c)) } } // Note! See the bottom of this file for a generator function for 2 through 22 of these near-identical ShortCircuitingFlatMapTupleNs... 
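The short-circuiting flatMapN syntax over ErrorOr tuples, whose 2-through-22 arity variants are reformatted below, is typically used like this. A minimal sketch, assuming cats' validated syntax and the implicits defined in this file; parsePort is a hypothetical helper invented only for illustration:

    import cats.syntax.validated._
    import common.validation.ErrorOr._

    // Hypothetical validation helper, not part of this change.
    def parsePort(s: String): ErrorOr[Int] =
      try s.toInt.validNel
      catch { case _: NumberFormatException => s"not a port: $s".invalidNel }

    // .tupled accumulates Invalid messages across the tuple; the function
    // runs only when every element is Valid (hence "short-circuiting flatMap").
    val endpoint: ErrorOr[String] =
      (parsePort("8080"), "localhost".validNel[String]) flatMapN { (port, host) =>
        s"$host:$port".validNel
      }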
@@ -67,83 +66,346 @@ object ErrorOr { def flatMapN[T_OUT](f3: (A, B, C) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t3.tupled flatMap f3.tupled } - implicit class ShortCircuitingFlatMapTuple4[A, B, C, D](val t4: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D])) extends AnyVal { + implicit class ShortCircuitingFlatMapTuple4[A, B, C, D](val t4: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D])) + extends AnyVal { def flatMapN[T_OUT](f4: (A, B, C, D) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t4.tupled flatMap f4.tupled } - implicit class ShortCircuitingFlatMapTuple5[A, B, C, D, E](val t5: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E])) extends AnyVal { + implicit class ShortCircuitingFlatMapTuple5[A, B, C, D, E]( + val t5: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E]) + ) extends AnyVal { def flatMapN[T_OUT](f5: (A, B, C, D, E) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t5.tupled flatMap f5.tupled } - implicit class ShortCircuitingFlatMapTuple6[A, B, C, D, E, F](val t6: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F])) extends AnyVal { + implicit class ShortCircuitingFlatMapTuple6[A, B, C, D, E, F]( + val t6: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F]) + ) extends AnyVal { def flatMapN[T_OUT](f6: (A, B, C, D, E, F) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t6.tupled flatMap f6.tupled } - implicit class ShortCircuitingFlatMapTuple7[A, B, C, D, E, F, G](val t7: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G])) extends AnyVal { + implicit class ShortCircuitingFlatMapTuple7[A, B, C, D, E, F, G]( + val t7: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G]) + ) extends AnyVal { def flatMapN[T_OUT](f7: (A, B, C, D, E, F, G) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t7.tupled flatMap f7.tupled } - implicit class ShortCircuitingFlatMapTuple8[A, B, C, D, E, F, G, H](val t8: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H])) extends AnyVal { + implicit class ShortCircuitingFlatMapTuple8[A, B, C, D, E, F, G, H]( + val t8: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H]) + ) extends AnyVal { def flatMapN[T_OUT](f8: (A, B, C, D, E, F, G, H) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t8.tupled flatMap f8.tupled } - implicit class ShortCircuitingFlatMapTuple9[A, B, C, D, E, F, G, H, I](val t9: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I])) extends AnyVal { + implicit class ShortCircuitingFlatMapTuple9[A, B, C, D, E, F, G, H, I]( + val t9: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I]) + ) extends AnyVal { def flatMapN[T_OUT](f9: (A, B, C, D, E, F, G, H, I) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t9.tupled flatMap f9.tupled } - implicit class ShortCircuitingFlatMapTuple10[A, B, C, D, E, F, G, H, I, J](val t10: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J])) extends AnyVal { - def flatMapN[T_OUT](f10: (A, B, C, D, E, F, G, H, I, J) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t10.tupled flatMap f10.tupled + implicit class ShortCircuitingFlatMapTuple10[A, B, C, D, E, F, G, H, I, J]( + val t10: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f10: (A, B, C, D, E, F, G, H, I, J) => 
ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t10.tupled flatMap f10.tupled } - implicit class ShortCircuitingFlatMapTuple11[A, B, C, D, E, F, G, H, I, J, K](val t11: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K])) extends AnyVal { - def flatMapN[T_OUT](f11: (A, B, C, D, E, F, G, H, I, J, K) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t11.tupled flatMap f11.tupled + implicit class ShortCircuitingFlatMapTuple11[A, B, C, D, E, F, G, H, I, J, K]( + val t11: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f11: (A, B, C, D, E, F, G, H, I, J, K) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t11.tupled flatMap f11.tupled } - implicit class ShortCircuitingFlatMapTuple12[A, B, C, D, E, F, G, H, I, J, K, L](val t12: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L])) extends AnyVal { - def flatMapN[T_OUT](f12: (A, B, C, D, E, F, G, H, I, J, K, L) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t12.tupled flatMap f12.tupled + implicit class ShortCircuitingFlatMapTuple12[A, B, C, D, E, F, G, H, I, J, K, L]( + val t12: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f12: (A, B, C, D, E, F, G, H, I, J, K, L) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t12.tupled flatMap f12.tupled } - implicit class ShortCircuitingFlatMapTuple13[A, B, C, D, E, F, G, H, I, J, K, L, M](val t13: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M])) extends AnyVal { - def flatMapN[T_OUT](f13: (A, B, C, D, E, F, G, H, I, J, K, L, M) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t13.tupled flatMap f13.tupled + implicit class ShortCircuitingFlatMapTuple13[A, B, C, D, E, F, G, H, I, J, K, L, M]( + val t13: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f13: (A, B, C, D, E, F, G, H, I, J, K, L, M) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t13.tupled flatMap f13.tupled } - implicit class ShortCircuitingFlatMapTuple14[A, B, C, D, E, F, G, H, I, J, K, L, M, N](val t14: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N])) extends AnyVal { - def flatMapN[T_OUT](f14: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t14.tupled flatMap f14.tupled + implicit class ShortCircuitingFlatMapTuple14[A, B, C, D, E, F, G, H, I, J, K, L, M, N]( + val t14: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f14: (A, B, C, D, E, F, G, H, I, J, K, L, M, N) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t14.tupled flatMap f14.tupled } - implicit class ShortCircuitingFlatMapTuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](val t15: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], 
ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O])) extends AnyVal { - def flatMapN[T_OUT](f15: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t15.tupled flatMap f15.tupled + implicit class ShortCircuitingFlatMapTuple15[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O]( + val t15: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f15: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t15.tupled flatMap f15.tupled } - implicit class ShortCircuitingFlatMapTuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](val t16: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O], ErrorOr[P])) extends AnyVal { - def flatMapN[T_OUT](f16: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t16.tupled flatMap f16.tupled + implicit class ShortCircuitingFlatMapTuple16[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P]( + val t16: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O], + ErrorOr[P] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f16: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t16.tupled flatMap f16.tupled } - implicit class ShortCircuitingFlatMapTuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](val t17: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O], ErrorOr[P], ErrorOr[Q])) extends AnyVal { - def flatMapN[T_OUT](f17: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t17.tupled flatMap f17.tupled + implicit class ShortCircuitingFlatMapTuple17[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q]( + val t17: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O], + ErrorOr[P], + ErrorOr[Q] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f17: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t17.tupled flatMap f17.tupled } - implicit class ShortCircuitingFlatMapTuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](val t18: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O], ErrorOr[P], ErrorOr[Q], ErrorOr[R])) extends AnyVal { - def flatMapN[T_OUT](f18: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t18.tupled flatMap f18.tupled + implicit class ShortCircuitingFlatMapTuple18[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R]( + val t18: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O], + ErrorOr[P], + ErrorOr[Q], + 
ErrorOr[R] + ) + ) extends AnyVal { + def flatMapN[T_OUT](f18: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = + t18.tupled flatMap f18.tupled } - implicit class ShortCircuitingFlatMapTuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](val t19: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O], ErrorOr[P], ErrorOr[Q], ErrorOr[R], ErrorOr[S])) extends AnyVal { - def flatMapN[T_OUT](f19: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t19.tupled flatMap f19.tupled + implicit class ShortCircuitingFlatMapTuple19[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S]( + val t19: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O], + ErrorOr[P], + ErrorOr[Q], + ErrorOr[R], + ErrorOr[S] + ) + ) extends AnyVal { + def flatMapN[T_OUT]( + f19: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S) => ErrorOr[T_OUT] + ): ErrorOr[T_OUT] = t19.tupled flatMap f19.tupled } - implicit class ShortCircuitingFlatMapTuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](val t20: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O], ErrorOr[P], ErrorOr[Q], ErrorOr[R], ErrorOr[S], ErrorOr[T])) extends AnyVal { - def flatMapN[T_OUT](f20: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t20.tupled flatMap f20.tupled + implicit class ShortCircuitingFlatMapTuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( + val t20: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O], + ErrorOr[P], + ErrorOr[Q], + ErrorOr[R], + ErrorOr[S], + ErrorOr[T] + ) + ) extends AnyVal { + def flatMapN[T_OUT]( + f20: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => ErrorOr[T_OUT] + ): ErrorOr[T_OUT] = t20.tupled flatMap f20.tupled } - implicit class ShortCircuitingFlatMapTuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](val t21: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O], ErrorOr[P], ErrorOr[Q], ErrorOr[R], ErrorOr[S], ErrorOr[T], ErrorOr[U])) extends AnyVal { - def flatMapN[T_OUT](f21: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t21.tupled flatMap f21.tupled + implicit class ShortCircuitingFlatMapTuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]( + val t21: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O], + ErrorOr[P], + ErrorOr[Q], + ErrorOr[R], + ErrorOr[S], + ErrorOr[T], + ErrorOr[U] + ) + ) extends AnyVal { + def flatMapN[T_OUT]( + f21: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => ErrorOr[T_OUT] + ): ErrorOr[T_OUT] = t21.tupled flatMap f21.tupled } - implicit class 
ShortCircuitingFlatMapTuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](val t22: (ErrorOr[A], ErrorOr[B], ErrorOr[C], ErrorOr[D], ErrorOr[E], ErrorOr[F], ErrorOr[G], ErrorOr[H], ErrorOr[I], ErrorOr[J], ErrorOr[K], ErrorOr[L], ErrorOr[M], ErrorOr[N], ErrorOr[O], ErrorOr[P], ErrorOr[Q], ErrorOr[R], ErrorOr[S], ErrorOr[T], ErrorOr[U], ErrorOr[V])) extends AnyVal { - def flatMapN[T_OUT](f22: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t22.tupled flatMap f22.tupled + implicit class ShortCircuitingFlatMapTuple22[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V]( + val t22: (ErrorOr[A], + ErrorOr[B], + ErrorOr[C], + ErrorOr[D], + ErrorOr[E], + ErrorOr[F], + ErrorOr[G], + ErrorOr[H], + ErrorOr[I], + ErrorOr[J], + ErrorOr[K], + ErrorOr[L], + ErrorOr[M], + ErrorOr[N], + ErrorOr[O], + ErrorOr[P], + ErrorOr[Q], + ErrorOr[R], + ErrorOr[S], + ErrorOr[T], + ErrorOr[U], + ErrorOr[V] + ) + ) extends AnyVal { + def flatMapN[T_OUT]( + f22: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) => ErrorOr[T_OUT] + ): ErrorOr[T_OUT] = t22.tupled flatMap f22.tupled } object ErrorOrGen { + /** * Because maintaining 22 near-identical functions is a bore... * This function can regenerate them if we need to make changes. @@ -168,8 +430,7 @@ object ErrorOr { | ${line2(n)} |} | - |""".stripMargin + |""".stripMargin } } } - diff --git a/common/src/main/scala/common/validation/IOChecked.scala b/common/src/main/scala/common/validation/IOChecked.scala index f838e5ceb32..62e61f87eb4 100644 --- a/common/src/main/scala/common/validation/IOChecked.scala +++ b/common/src/main/scala/common/validation/IOChecked.scala @@ -1,6 +1,5 @@ package common.validation - import cats.arrow.FunctionK import cats.data.EitherT.fromEither import cats.data.{EitherT, NonEmptyList, ValidatedNel} @@ -22,6 +21,7 @@ object IOChecked { * The monad transformer allows to flatMap over the value while keeping the IO effect as well as the list of potential errors */ type IOChecked[A] = EitherT[IO, NonEmptyList[String], A] + /** * Fixes the left type of Either to Throwable * This is useful when calling on IO[A].attempt, transforming it to IO[ Either[Throwable, A] ] == IO[ Attempt[A] ] @@ -40,22 +40,22 @@ object IOChecked { */ implicit val eitherThrowableApplicative = new Applicative[Attempt] { override def pure[A](x: A) = Right(x) - override def ap[A, B](ff: Attempt[A => B])(fa: Attempt[A]): Attempt[B] = { + override def ap[A, B](ff: Attempt[A => B])(fa: Attempt[A]): Attempt[B] = (fa, ff) match { - // Both have a list or error messages, combine them in a single one - case (Left(t1: MessageAggregation), Left(t2: MessageAggregation)) => Left(AggregatedMessageException("", t1.errorMessages ++ t2.errorMessages)) - // Only one of them is a MessageAggregation, combined the errors and the other exception in a CompositeException + // Both have a list or error messages, combine them in a single one + case (Left(t1: MessageAggregation), Left(t2: MessageAggregation)) => + Left(AggregatedMessageException("", t1.errorMessages ++ t2.errorMessages)) + // Only one of them is a MessageAggregation, combined the errors and the other exception in a CompositeException case (Left(t1: MessageAggregation), Left(t2)) => Left(CompositeException("", List(t2), t1.errorMessages)) case (Left(t2), Left(t1: MessageAggregation)) => Left(CompositeException("", List(t2), t1.errorMessages)) - // None of them is a MessageAggregation, combine the 2 throwables in an 
AggregatedException + // None of them is a MessageAggregation, combine the 2 throwables in an AggregatedException case (Left(t1), Left(t2)) => Left(AggregatedException("", List(t1, t2))) - // Success case, apply f on v + // Success case, apply f on v case (Right(v), Right(f)) => Right(f(v)) - // Default failure case, just keep the failure + // Default failure case, just keep the failure case (Left(t1), _) => Left(t1) case (_, Left(t1)) => Left(t1) } - } } /** @@ -100,7 +100,7 @@ object IOChecked { * We now have an IO[ Either[NonEmptyList[String], A] ] which we can wrap into an IOChecked with EitherT.apply */ override def sequential: FunctionK[IOCheckedPar, IOChecked] = - new FunctionK[IOCheckedPar, IOChecked] { + new FunctionK[IOCheckedPar, IOChecked] { def apply[A](fa: IOCheckedPar[A]): IOChecked[A] = EitherT { IO.Par.unwrap(fa: IO.Par[Attempt[A]]) flatMap { case Left(t: MessageAggregation) => IO.pure(Left(NonEmptyList.fromListUnsafe(t.errorMessages.toList))) @@ -138,7 +138,7 @@ object IOChecked { def error[A](error: String, tail: String*): IOChecked[A] = EitherT.leftT { NonEmptyList.of(error, tail: _*) } - + def pure[A](value: A): IOChecked[A] = EitherT.pure(value) def goIOChecked[A](f: => A): IOChecked[A] = Try(f).toIOChecked @@ -150,12 +150,11 @@ object IOChecked { implicit class EnhancedIOChecked[A](val p: IOChecked[A]) extends AnyVal { import cats.syntax.either._ - def toChecked: Checked[A] = { + def toChecked: Checked[A] = Try(p.value.unsafeRunSync()) match { case Success(r) => r case Failure(f) => NonEmptyList.one(f.getMessage).asLeft } - } def toErrorOr: ErrorOr[A] = toChecked.toValidated @@ -163,12 +162,11 @@ object IOChecked { def unsafe(context: String) = unsafeToEither().unsafe(context) - def contextualizeErrors(context: String): IOChecked[A] = { - p.leftMap({ errors => + def contextualizeErrors(context: String): IOChecked[A] = + p.leftMap { errors => val total = errors.size errors.zipWithIndex map { case (e, i) => s"Failed to $context (reason ${i + 1} of $total): $e" } - }) - } + } } implicit class TryIOChecked[A](val t: Try[A]) extends AnyVal { @@ -176,9 +174,8 @@ object IOChecked { } implicit class FutureIOChecked[A](val future: Future[A]) extends AnyVal { - def toIOChecked(implicit cs: ContextShift[IO]): IOChecked[A] = { + def toIOChecked(implicit cs: ContextShift[IO]): IOChecked[A] = IO.fromFuture(IO(future)).to[IOChecked] - } } implicit class ErrorOrIOChecked[A](val e: ErrorOr[A]) extends AnyVal { @@ -198,9 +195,8 @@ object IOChecked { } implicit class OptionIOChecked[A](val o: Option[A]) extends AnyVal { - def toIOChecked(errorMessage: String): IOChecked[A] = { + def toIOChecked(errorMessage: String): IOChecked[A] = EitherT.fromOption(o, NonEmptyList.of(errorMessage)) - } } type IOCheckedValidated[A] = IO[ValidatedNel[String, A]] diff --git a/common/src/main/scala/common/validation/Validation.scala b/common/src/main/scala/common/validation/Validation.scala index 4099eff8ac6..af568909b5b 100644 --- a/common/src/main/scala/common/validation/Validation.scala +++ b/common/src/main/scala/common/validation/Validation.scala @@ -46,12 +46,12 @@ object Validation { case Failure(f) => defaultThrowableToString(f).invalidNel } - implicit class ValidationOps[B,A](val v: ValidatedNel[B, A]) { - //Convert this into a future by folding over the state and returning the corresponding Future terminal state. + implicit class ValidationOps[B, A](val v: ValidatedNel[B, A]) { + // Convert this into a future by folding over the state and returning the corresponding Future terminal state. 
def toFuture(f: NonEmptyList[B] => Throwable) = - v fold( - //Use f to turn the failure list into a Throwable, then fail a future with it. - //Function composition lets us ignore the actual argument of the error list + v fold ( + // Use f to turn the failure list into a Throwable, then fail a future with it. + // Function composition lets us ignore the actual argument of the error list Future.failed _ compose f, Future.successful ) @@ -59,31 +59,30 @@ object Validation { implicit class TryValidation[A](val t: Try[A]) extends AnyVal { def toErrorOr: ErrorOr[A] = toErrorOr(defaultThrowableToString) - def toErrorOr(throwableToStringFunction: ThrowableToStringFunction): ErrorOr[A] = { + def toErrorOr(throwableToStringFunction: ThrowableToStringFunction): ErrorOr[A] = Validated.fromTry(t).leftMap(throwableToStringFunction).toValidatedNel[String, A] - } def toErrorOrWithContext(context: String): ErrorOr[A] = toErrorOrWithContext(context, defaultThrowableToString) - def toErrorOrWithContext(context: String, - throwableToStringFunction: ThrowableToStringFunction): ErrorOr[A] = toChecked(throwableToStringFunction) - .contextualizeErrors(context) - .leftMap({contextualizedErrors => - if (t.failed.isFailure) { - val errors = new StringWriter - t.failed.get.printStackTrace(new PrintWriter(errors)) - contextualizedErrors.::(s"Stacktrace: ${errors.toString}") - } else contextualizedErrors - }) - .toValidated + def toErrorOrWithContext(context: String, throwableToStringFunction: ThrowableToStringFunction): ErrorOr[A] = + toChecked(throwableToStringFunction) + .contextualizeErrors(context) + .leftMap { contextualizedErrors => + if (t.failed.isFailure) { + val errors = new StringWriter + t.failed.get.printStackTrace(new PrintWriter(errors)) + contextualizedErrors.::(s"Stacktrace: ${errors.toString}") + } else contextualizedErrors + } + .toValidated def toChecked: Checked[A] = toChecked(defaultThrowableToString) - def toChecked(throwableToStringFunction: ThrowableToStringFunction): Checked[A] = { - Either.fromTry(t).leftMap { ex => NonEmptyList.one(throwableToStringFunction(ex)) } - } + def toChecked(throwableToStringFunction: ThrowableToStringFunction): Checked[A] = + Either.fromTry(t).leftMap(ex => NonEmptyList.one(throwableToStringFunction(ex))) - def toCheckedWithContext(context: String): Checked[A] = toErrorOrWithContext(context, defaultThrowableToString).toEither - def toCheckedWithContext(context: String, - throwableToStringFunction: ThrowableToStringFunction): Checked[A] = toErrorOrWithContext(context, throwableToStringFunction).toEither + def toCheckedWithContext(context: String): Checked[A] = + toErrorOrWithContext(context, defaultThrowableToString).toEither + def toCheckedWithContext(context: String, throwableToStringFunction: ThrowableToStringFunction): Checked[A] = + toErrorOrWithContext(context, throwableToStringFunction).toEither } implicit class ValidationTry[A](val e: ErrorOr[A]) extends AnyVal { @@ -109,12 +108,10 @@ object Validation { } implicit class OptionValidation[A](val o: Option[A]) extends AnyVal { - def toErrorOr(errorMessage: => String): ErrorOr[A] = { + def toErrorOr(errorMessage: => String): ErrorOr[A] = Validated.fromOption(o, NonEmptyList.of(errorMessage)) - } - def toChecked(errorMessage: => String): Checked[A] = { + def toChecked(errorMessage: => String): Checked[A] = Either.fromOption(o, NonEmptyList.of(errorMessage)) - } } } diff --git a/common/src/test/scala/common/assertion/CaseClassAssertions.scala b/common/src/test/scala/common/assertion/CaseClassAssertions.scala 
index 4cc191df2f6..f273898de1c 100644 --- a/common/src/test/scala/common/assertion/CaseClassAssertions.scala +++ b/common/src/test/scala/common/assertion/CaseClassAssertions.scala @@ -5,10 +5,9 @@ import org.scalatest.matchers.should.Matchers object CaseClassAssertions extends Matchers { implicit class ComparableCaseClass[A <: Product](actualA: A) { // Assumes that expectedA and actualA are the same type. If we don't subtype case classes, that should hold... - def shouldEqualFieldwise(expectedA: A): Unit = { + def shouldEqualFieldwise(expectedA: A): Unit = (0 until actualA.productArity) foreach { i => actualA.productElement(i) should be(expectedA.productElement(i)) } - } } } diff --git a/common/src/test/scala/common/assertion/ErrorOrAssertions.scala b/common/src/test/scala/common/assertion/ErrorOrAssertions.scala index 02a5e594bb4..00c3ab4fcfc 100644 --- a/common/src/test/scala/common/assertion/ErrorOrAssertions.scala +++ b/common/src/test/scala/common/assertion/ErrorOrAssertions.scala @@ -6,7 +6,6 @@ import common.validation.ErrorOr.ErrorOr import org.scalatest.Assertion import org.scalatest.matchers.should.Matchers - object ErrorOrAssertions { implicit class ErrorOrWithAssertions[A](errorOr: ErrorOr[A]) extends Matchers { def shouldBeValid(other: A): Assertion = errorOr match { diff --git a/common/src/test/scala/common/collections/TableSpec.scala b/common/src/test/scala/common/collections/TableSpec.scala index 7000e151df4..581fcf466e8 100644 --- a/common/src/test/scala/common/collections/TableSpec.scala +++ b/common/src/test/scala/common/collections/TableSpec.scala @@ -4,7 +4,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class TableSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "Table" @@ -21,10 +20,14 @@ class TableSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "fill a table" in { - Table.fill(List( - ("a", "b", "c"), - ("d", "e", "f") - )).table shouldBe Map( + Table + .fill( + List( + ("a", "b", "c"), + ("d", "e", "f") + ) + ) + .table shouldBe Map( "a" -> Map("b" -> "c"), "d" -> Map("e" -> "f") ) @@ -48,7 +51,7 @@ class TableSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { someTable.rowOptional("a") shouldBe Some(Map("b" -> "c")) someTable.rowOptional("b") shouldBe None } - + it should "implement row" in { someTable.row("a") shouldBe Map("b" -> "c") someTable.row("b") shouldBe empty @@ -87,7 +90,7 @@ class TableSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { it should "implement addTriplet" in { someTable.addTriplet( - ("0", "1", "2") + ("0", "1", "2") ) shouldBe Table( Map( "a" -> Map("b" -> "c"), diff --git a/common/src/test/scala/common/collections/WeightedQueueSpec.scala b/common/src/test/scala/common/collections/WeightedQueueSpec.scala index 5d29b54949e..ba9cd81d908 100644 --- a/common/src/test/scala/common/collections/WeightedQueueSpec.scala +++ b/common/src/test/scala/common/collections/WeightedQueueSpec.scala @@ -4,7 +4,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class WeightedQueueSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "WeightedQueue" @@ -39,7 +38,8 @@ class WeightedQueueSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche it should "behead the queue" in { // A queue of strings for which the weight is the number of char in the string val q = 
WeightedQueue.empty[String, Int](_.length) - val q2 = q.enqueue("hello") + val q2 = q + .enqueue("hello") .enqueue("hola") .enqueue("bonjour") val (head, q3) = q2.behead(10) diff --git a/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala b/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala index cd425b626f0..071b7836ceb 100644 --- a/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala +++ b/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala @@ -7,7 +7,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers - class ExceptionAggregationSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers { "MessageAggregation" should "aggregate messages" in { diff --git a/common/src/test/scala/common/mock/MockImplicits.scala b/common/src/test/scala/common/mock/MockImplicits.scala index 3e90e183d8e..424dbfbdfde 100644 --- a/common/src/test/scala/common/mock/MockImplicits.scala +++ b/common/src/test/scala/common/mock/MockImplicits.scala @@ -11,31 +11,27 @@ trait MockImplicits { * https://github.com/etorreborre/specs2/commit/6d56660e70980b5958e6c4ed8fd4158bf1cecf70#diff-a2627f56c432e4bc37f36bc56e13852225813aa604918471b61ec2080462d722 */ implicit class MockEnhanced[A](methodCall: A) { - def returns(result: A): OngoingStubbing[A] = { + def returns(result: A): OngoingStubbing[A] = Mockito.when(methodCall).thenReturn(result) - } - def answers(function: Any => A): OngoingStubbing[A] = { - Mockito.when(methodCall) thenAnswer { - invocationOnMock => { - val args = invocationOnMock.getArguments - // The DSL behavior of the below is directly taken with thanks from the link above. - args.size match { - case 0 => - function match { - case function0: Function0[_] => - function0.apply().asInstanceOf[A] - case _ => - function.apply(invocationOnMock.getMock) - } - case 1 => - function(args(0)) - case _ => - function(args) - } + def answers(function: Any => A): OngoingStubbing[A] = + Mockito.when(methodCall) thenAnswer { invocationOnMock => + val args = invocationOnMock.getArguments + // The DSL behavior of the below is directly taken with thanks from the link above. + args.size match { + case 0 => + function match { + case function0: Function0[_] => + function0.apply().asInstanceOf[A] + case _ => + function.apply(invocationOnMock.getMock) + } + case 1 => + function(args(0)) + case _ => + function(args) } } - } def responds(f: Any => A): OngoingStubbing[A] = answers(f) } diff --git a/common/src/test/scala/common/mock/MockSugar.scala b/common/src/test/scala/common/mock/MockSugar.scala index 27a4ae87b55..6a92bd2fd47 100644 --- a/common/src/test/scala/common/mock/MockSugar.scala +++ b/common/src/test/scala/common/mock/MockSugar.scala @@ -2,7 +2,7 @@ package common.mock import org.mockito.{ArgumentCaptor, Mockito} -import scala.reflect.{ClassTag, classTag} +import scala.reflect.{classTag, ClassTag} /** * Yet another scala wrapper around Mockito. @@ -37,12 +37,11 @@ trait MockSugar extends MockImplicits { * * Note: if you run into issues with `mock` then try [[mockWithDefaults]]. */ - def mock[A: ClassTag]: A = { + def mock[A: ClassTag]: A = Mockito.mock( classTag[A].runtimeClass.asInstanceOf[Class[A]], - Mockito.withSettings().defaultAnswer(Mockito.RETURNS_SMART_NULLS), + Mockito.withSettings().defaultAnswer(Mockito.RETURNS_SMART_NULLS) ) - } /** * Creates a mock returning default values instead of Smart Nulls. 
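The mock[A] helper and the returns DSL from MockImplicits, reformatted above, combine in specs roughly as follows. A minimal sketch under stated assumptions; Greeter and GreeterSpec are hypothetical names invented only for illustration:

    import common.mock.MockSugar
    import org.scalatest.flatspec.AnyFlatSpec
    import org.scalatest.matchers.should.Matchers

    // Hypothetical collaborator, not part of this change.
    trait Greeter { def greet(name: String): String }

    class GreeterSpec extends AnyFlatSpec with Matchers with MockSugar {
      behavior of "MockSugar"

      it should "stub a collaborator with the returns DSL" in {
        val greeter = mock[Greeter] // smart-null Mockito mock via MockSugar.mock
        // MockEnhanced.returns wraps Mockito.when(...).thenReturn(...)
        greeter.greet("world") returns "hello, world"
        greeter.greet("world") shouldBe "hello, world"
      }
    }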
@@ -56,16 +55,14 @@ trait MockSugar extends MockImplicits { * * An alternative workaround was to use `Mockito.doReturn(retVal).when(mockObj).someMethod`. */ - def mockWithDefaults[A: ClassTag]: A = { + def mockWithDefaults[A: ClassTag]: A = Mockito.mock( classTag[A].runtimeClass.asInstanceOf[Class[A]], - Mockito.withSettings().defaultAnswer(Mockito.RETURNS_DEFAULTS), + Mockito.withSettings().defaultAnswer(Mockito.RETURNS_DEFAULTS) ) - } - def capture[A: ClassTag]: ArgumentCaptor[A] = { + def capture[A: ClassTag]: ArgumentCaptor[A] = ArgumentCaptor.forClass(classTag[A].runtimeClass.asInstanceOf[Class[A]]) - } } object MockSugar extends MockSugar diff --git a/common/src/test/scala/common/numeric/IntegerUtilSpec.scala b/common/src/test/scala/common/numeric/IntegerUtilSpec.scala index b06af539626..48fc9ed3b3b 100644 --- a/common/src/test/scala/common/numeric/IntegerUtilSpec.scala +++ b/common/src/test/scala/common/numeric/IntegerUtilSpec.scala @@ -5,18 +5,33 @@ import common.numeric.IntegerUtil.IntEnhanced import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class IntegerUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { it should "return ordinal String for any Int" in { - val numbers = List(0, 1, 2, 3, 4, - 10, 11, 12, 13, 14, - 20, 21, 22, 23, 24, - 100, 101, 102, 103, 104) map { _.toOrdinal } + val numbers = List(0, 1, 2, 3, 4, 10, 11, 12, 13, 14, 20, 21, 22, 23, 24, 100, 101, 102, 103, 104) map { + _.toOrdinal + } - val expected = List("0th", "1st", "2nd", "3rd", "4th", - "10th", "11th", "12th", "13th", "14th", - "20th", "21st", "22nd", "23rd", "24th", - "100th", "101st", "102nd", "103rd", "104th") + val expected = List("0th", + "1st", + "2nd", + "3rd", + "4th", + "10th", + "11th", + "12th", + "13th", + "14th", + "20th", + "21st", + "22nd", + "23rd", + "24th", + "100th", + "101st", + "102nd", + "103rd", + "104th" + ) numbers should contain theSameElementsInOrderAs expected } diff --git a/common/src/test/scala/common/util/IntrospectableLazySpec.scala b/common/src/test/scala/common/util/IntrospectableLazySpec.scala index 337e193ab8f..91243135ab6 100644 --- a/common/src/test/scala/common/util/IntrospectableLazySpec.scala +++ b/common/src/test/scala/common/util/IntrospectableLazySpec.scala @@ -22,17 +22,19 @@ class IntrospectableLazySpec extends AnyFlatSpec with CromwellTimeoutSpec with M 4 } - val myLazy = lazily { lazyContents } + val myLazy = lazily(lazyContents) assert(lazyInstantiations == 0) assert(!myLazy.exists) // Fails without `synchronized { ... 
}` Await.result(Future.sequence( - Seq.fill(100)(Future { - myLazy() shouldBe 4 - }) - ), 1.seconds) + Seq.fill(100)(Future { + myLazy() shouldBe 4 + }) + ), + 1.seconds + ) assert(lazyInstantiations == 1) assert(myLazy.exists) diff --git a/common/src/test/scala/common/util/IoRetrySpec.scala b/common/src/test/scala/common/util/IoRetrySpec.scala index 4a5fc582eac..2ec5c56aba4 100644 --- a/common/src/test/scala/common/util/IoRetrySpec.scala +++ b/common/src/test/scala/common/util/IoRetrySpec.scala @@ -9,7 +9,6 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.ExecutionContext import scala.concurrent.duration._ - class IoRetrySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { implicit val timer = IO.timer(ExecutionContext.global) implicit val ioError = new StatefulIoError[Int] { @@ -28,7 +27,8 @@ class IoRetrySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } val incrementOnRetry: (Throwable, Int) => Int = (_, s) => s + 1 - val io = IORetry.withRetry(work, 1, Option(3), backoff = Backoff.staticBackoff(10.millis), onRetry = incrementOnRetry) + val io = + IORetry.withRetry(work, 1, Option(3), backoff = Backoff.staticBackoff(10.millis), onRetry = incrementOnRetry) val statefulException = the[Exception] thrownBy io.unsafeRunSync() statefulException.getCause shouldBe exception statefulException.getMessage shouldBe "Attempted 3 times" diff --git a/common/src/test/scala/common/util/StringUtilSpec.scala b/common/src/test/scala/common/util/StringUtilSpec.scala index 3d7b8db92d1..f71d79819cb 100644 --- a/common/src/test/scala/common/util/StringUtilSpec.scala +++ b/common/src/test/scala/common/util/StringUtilSpec.scala @@ -17,11 +17,13 @@ class StringUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers // With the elided string, we stop processing early and are able to produce a nice, short string without ever // touching the later elements: - fooOfBars.toPrettyElidedString(1000) should be("""Foo( - | bar = "long long list", - | list = List( - | "blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0", - | "blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah...""".stripMargin) + fooOfBars.toPrettyElidedString(1000) should be( + """Foo( + | bar = "long long list", + | list = List( + | 
"blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0", + | "blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah...""".stripMargin + ) } @@ -36,84 +38,84 @@ class StringUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers ( "mask user info", "https://user:pass@example.com/path/to/file", - "https://example.com/path/to/file", + "https://example.com/path/to/file" ), ( "mask the entire query if no known sensitive query params are found", s"https://example.com/path/to/file?my_new_hidden_param=$InputToBeMasked", - "https://example.com/path/to/file?masked", + "https://example.com/path/to/file?masked" ), ( "mask credential params", s"https://example.com/path/to/file?my_credential_param=$InputToBeMasked&my_other_param=ok", - s"https://example.com/path/to/file?my_credential_param=$OutputMasked&my_other_param=ok", + s"https://example.com/path/to/file?my_credential_param=$OutputMasked&my_other_param=ok" ), ( "mask signature params", s"https://example.com/path/to/file?my_signature_param=$InputToBeMasked&my_other_param=ok", - s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=ok", + s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=ok" ), ( "mask encoded signature params", s"https://example.com/path/to/file?my_sign%61ture_param=$InputToBeMasked&my_other_param=ok", - s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=ok", + s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=ok" ), ( // There is a note in the docs for common.util.StringUtil.EnhancedString.maskSensitiveUri about this behavior "mask uris with encoded parameters", s"https://example.com/path/to/file?my_signature_param=$InputToBeMasked&my_other_param=%26%2F%3F", - s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=&/?", + s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=&/?" 
), ( "mask uris with parameters without values", s"https://example.com/path/to/file?my_signature_param=$InputToBeMasked&my_other_param", - s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param", + s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param" ), ( "mask uris with parameters values containing equal signs", s"https://example.com/path/to/file?my_signature_param=$InputToBeMasked&my_other_param=with=equal", - s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=with=equal", + s"https://example.com/path/to/file?my_signature_param=$OutputMasked&my_other_param=with=equal" ), ( "not mask the fragment", s"https://example.com?my_signature_param=$InputToBeMasked#nofilter", - s"https://example.com?my_signature_param=$OutputMasked#nofilter", + s"https://example.com?my_signature_param=$OutputMasked#nofilter" ), ( "not mask the port number", s"https://example.com:1234?my_signature_param=$InputToBeMasked", - s"https://example.com:1234?my_signature_param=$OutputMasked", + s"https://example.com:1234?my_signature_param=$OutputMasked" ), ( // via: https://cr.openjdk.java.net/~dfuchs/writeups/updating-uri/ "not mask a RFC 3986 specific uri", s"urn:isbn:096139210?my_credential_param=$InputToBeMasked", - s"urn:isbn:096139210?my_credential_param=$InputToBeMasked", + s"urn:isbn:096139210?my_credential_param=$InputToBeMasked" ), ( // via: https://bvdp-saturn-dev.appspot.com/#workspaces/general-dev-billing-account/DRS%20and%20Signed%20URL%20Development%20-%20Dev/notebooks/launch/drs_signed_url_flow_kids_dev.ipynb "not mask a DRS CIB URI", "drs://dg.F82A1A:371b834f-a896-42e6-b1d1-9fad96891f33", - "drs://dg.F82A1A:371b834f-a896-42e6-b1d1-9fad96891f33", + "drs://dg.F82A1A:371b834f-a896-42e6-b1d1-9fad96891f33" ), ( // via: https://bvdp-saturn-dev.appspot.com/#workspaces/general-dev-billing-account/DRS%20and%20Signed%20URL%20Development%20-%20Dev/notebooks/launch/drs_signed_url_flow_kids_dev.ipynb "mask an AWS Signed URL", s"https://example-redacted-but-not-masked.s3.amazonaws.com/$InputRedacted.CNVs.p.value.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=$InputToBeMasked&X-Amz-Date=20210504T200819Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&user_id=122&username=$InputRedacted&X-Amz-Signature=$InputToBeMasked", - s"https://example-redacted-but-not-masked.s3.amazonaws.com/$InputRedacted.CNVs.p.value.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=$OutputMasked&X-Amz-Date=20210504T200819Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&user_id=122&username=$InputRedacted&X-Amz-Signature=$OutputMasked", + s"https://example-redacted-but-not-masked.s3.amazonaws.com/$InputRedacted.CNVs.p.value.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=$OutputMasked&X-Amz-Date=20210504T200819Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&user_id=122&username=$InputRedacted&X-Amz-Signature=$OutputMasked" ), ( // via: https://bvdp-saturn-dev.appspot.com/#workspaces/general-dev-billing-account/DRS%20and%20Signed%20URL%20Development%20-%20Dev/notebooks/launch/drs_signed_url_flow_bdcat_dev.ipynb "mask a GCS V2 Signed URL", s"https://storage.googleapis.com/$InputRedacted/testfile.txt?GoogleAccessId=$InputRedacted&Expires=1614119022&Signature=$InputToBeMasked&userProject=$InputRedacted", - s"https://storage.googleapis.com/$InputRedacted/testfile.txt?GoogleAccessId=$InputRedacted&Expires=1614119022&Signature=$OutputMasked&userProject=$InputRedacted", + 
s"https://storage.googleapis.com/$InputRedacted/testfile.txt?GoogleAccessId=$InputRedacted&Expires=1614119022&Signature=$OutputMasked&userProject=$InputRedacted" ), ( // via: gsutil signurl $HOME/.config/gcloud/legacy_credentials/cromwell@broad-dsde-cromwell-dev.iam.gserviceaccount.com/adc.json gs://cloud-cromwell-dev/some/gumby.png "mask a GCS V4 Signed URL", s"https://storage.googleapis.com/cloud-cromwell-dev/some/gumby.png?x-goog-signature=$InputToBeMasked&x-goog-algorithm=GOOG4-RSA-SHA256&x-goog-credential=$InputToBeMasked&x-goog-date=20210505T042119Z&x-goog-expires=3600&x-goog-signedheaders=host", - s"https://storage.googleapis.com/cloud-cromwell-dev/some/gumby.png?x-goog-signature=$OutputMasked&x-goog-algorithm=GOOG4-RSA-SHA256&x-goog-credential=$OutputMasked&x-goog-date=20210505T042119Z&x-goog-expires=3600&x-goog-signedheaders=host", - ), + s"https://storage.googleapis.com/cloud-cromwell-dev/some/gumby.png?x-goog-signature=$OutputMasked&x-goog-algorithm=GOOG4-RSA-SHA256&x-goog-credential=$OutputMasked&x-goog-date=20210505T042119Z&x-goog-expires=3600&x-goog-signedheaders=host" + ) ) forAll(maskSensitiveUriTests) { (description, input, expected) => @@ -128,7 +130,7 @@ object StringUtilSpec { final case class Foo(bar: String, list: List[Bar]) final class Bar(index: Int) { - private def longLine(i: Int) = "\"" + s"blah$i" * 100 + "\"" + private def longLine(i: Int) = "\"" + s"blah$i" * 100 + "\"" override def toString: String = if (index < 2) { longLine(index) } else { diff --git a/common/src/test/scala/common/util/TerminalUtilSpec.scala b/common/src/test/scala/common/util/TerminalUtilSpec.scala index c8fda6627e4..1c68b9f17fc 100644 --- a/common/src/test/scala/common/util/TerminalUtilSpec.scala +++ b/common/src/test/scala/common/util/TerminalUtilSpec.scala @@ -5,7 +5,6 @@ import common.util.TerminalUtil._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class TerminalUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "TerminalUtil" diff --git a/common/src/test/scala/common/util/TryUtilSpec.scala b/common/src/test/scala/common/util/TryUtilSpec.scala index d7b8739337b..0bb62f9ed33 100644 --- a/common/src/test/scala/common/util/TryUtilSpec.scala +++ b/common/src/test/scala/common/util/TryUtilSpec.scala @@ -9,7 +9,6 @@ import org.scalatest.matchers.should.Matchers import scala.util.{Failure, Success, Try} import org.scalatest.enablers.Emptiness._ - class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "TryUtil" @@ -80,15 +79,15 @@ class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "sequence successful keys and successful values" in { - val result: Try[Map[String, String]] = sequenceKeyValues( - Map(Success("success key") -> Success("success value")), "prefix") + val result: Try[Map[String, String]] = + sequenceKeyValues(Map(Success("success key") -> Success("success value")), "prefix") result.isSuccess should be(true) result.get.toList should contain theSameElementsAs Map("success key" -> "success value") } it should "sequence successful keys and failed values" in { - val result: Try[Map[String, String]] = sequenceKeyValues( - Map(Success("success key") -> Failure(new RuntimeException("failed value"))), "prefix") + val result: Try[Map[String, String]] = + sequenceKeyValues(Map(Success("success key") -> Failure(new RuntimeException("failed value"))), "prefix") result.isFailure should be(true) result.failed.get should be(an[AggregatedException]) 
val exception = result.failed.get.asInstanceOf[AggregatedException] @@ -98,8 +97,8 @@ class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "sequence failed keys and successful values" in { - val result: Try[Map[String, String]] = sequenceKeyValues( - Map(Failure(new RuntimeException("failed key")) -> Success("success value")), "prefix") + val result: Try[Map[String, String]] = + sequenceKeyValues(Map(Failure(new RuntimeException("failed key")) -> Success("success value")), "prefix") result.isFailure should be(true) result.failed.get should be(an[AggregatedException]) val exception = result.failed.get.asInstanceOf[AggregatedException] @@ -110,7 +109,9 @@ class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { it should "sequence failed keys and failed values" in { val result: Try[Map[String, String]] = sequenceKeyValues( - Map(Failure(new RuntimeException("failed key")) -> Failure(new RuntimeException("failed value"))), "prefix") + Map(Failure(new RuntimeException("failed key")) -> Failure(new RuntimeException("failed value"))), + "prefix" + ) result.isFailure should be(true) result.failed.get should be(an[AggregatedException]) val exception = result.failed.get.asInstanceOf[AggregatedException] @@ -127,8 +128,8 @@ class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "sequence a successful key with a failed value" in { - val result: Try[(String, String)] = sequenceTuple( - (Success("success key"), Failure(new RuntimeException("failed value"))), "prefix") + val result: Try[(String, String)] = + sequenceTuple((Success("success key"), Failure(new RuntimeException("failed value"))), "prefix") result.isFailure should be(true) result.failed.get should be(an[AggregatedException]) val exception = result.failed.get.asInstanceOf[AggregatedException] @@ -138,8 +139,8 @@ class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "sequence a failed key with a successful value" in { - val result: Try[(String, String)] = sequenceTuple( - (Failure(new RuntimeException("failed key")), Success("success value")), "prefix") + val result: Try[(String, String)] = + sequenceTuple((Failure(new RuntimeException("failed key")), Success("success value")), "prefix") result.isFailure should be(true) result.failed.get should be(an[AggregatedException]) val exception = result.failed.get.asInstanceOf[AggregatedException] @@ -149,8 +150,10 @@ class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "sequence a failed key with a failed value" in { - val result: Try[(String, String)] = sequenceTuple( - (Failure(new RuntimeException("failed key")), Failure(new RuntimeException("failed value"))), "prefix") + val result: Try[(String, String)] = + sequenceTuple((Failure(new RuntimeException("failed key")), Failure(new RuntimeException("failed value"))), + "prefix" + ) result.isFailure should be(true) result.failed.get should be(an[AggregatedException]) val exception = result.failed.get.asInstanceOf[AggregatedException] diff --git a/common/src/test/scala/common/util/VersionUtilSpec.scala b/common/src/test/scala/common/util/VersionUtilSpec.scala index 50aab84bad6..5d58d2f0852 100644 --- a/common/src/test/scala/common/util/VersionUtilSpec.scala +++ b/common/src/test/scala/common/util/VersionUtilSpec.scala @@ -4,7 +4,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class 
VersionUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "VersionUtil" @@ -22,12 +21,12 @@ class VersionUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "getVersion with the default" in { val version = VersionUtil.getVersion("made-up-artifact") - version should be ("made-up-artifact-version.conf-to-be-generated-by-sbt") + version should be("made-up-artifact-version.conf-to-be-generated-by-sbt") } it should "getVersion with a default override" in { val version = VersionUtil.getVersion("made-up-artifact", _ => "default override") - version should be ("default override") + version should be("default override") } it should "defaultMessage" in { @@ -38,7 +37,7 @@ class VersionUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val expected = intercept[RuntimeException](VersionUtil.sbtDependencyVersion("madeUp")("made-up-project")) expected.getMessage should fullyMatch regex "Did not parse a version for 'madeUpV' from .*/project/Dependencies.scala " + - "\\(This occurred after made-up-project-version.conf was not found.\\)" + "\\(This occurred after made-up-project-version.conf was not found.\\)" } it should "pass sbtDependencyVersion check for typesafeConfig" in { diff --git a/common/src/test/scala/common/validation/CheckedSpec.scala b/common/src/test/scala/common/validation/CheckedSpec.scala index c9b5a387666..23281fccc0b 100644 --- a/common/src/test/scala/common/validation/CheckedSpec.scala +++ b/common/src/test/scala/common/validation/CheckedSpec.scala @@ -6,10 +6,9 @@ import common.validation.Checked._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class CheckedSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "Checked" - + it should "provide helper methods" in { 5.validNelCheck shouldBe Right(5) "argh".invalidNelCheck[Int] shouldBe Left(NonEmptyList.one("argh")) diff --git a/common/src/test/scala/common/validation/ErrorOrSpec.scala b/common/src/test/scala/common/validation/ErrorOrSpec.scala index ac0ebfd957b..55a7d004cc3 100644 --- a/common/src/test/scala/common/validation/ErrorOrSpec.scala +++ b/common/src/test/scala/common/validation/ErrorOrSpec.scala @@ -8,7 +8,6 @@ import common.validation.ErrorOr._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class ErrorOrSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "ErrorOr" @@ -44,11 +43,33 @@ class ErrorOrSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } val DivBy0Error: String = "Divide by 0!" 
- def errorOrDiv(v1: Int, v2: Int): ErrorOr[Double] = if (v2 != 0) { Valid(v1.toDouble / v2.toDouble) } else { DivBy0Error.invalidNel } - def errorOrDiv(v1: Double, v2: Int): ErrorOr[Double] = if (v2 != 0) { Valid(v1.toDouble / v2.toDouble) } else { DivBy0Error.invalidNel } - def errorOrSelect(v1: Int, v2: Int, v3: Int, v4: Int, v5: Int, v6: Int, v7: Int, - v8: Int, v9: Int, v10: Int, v11: Int, v12: Int, v13: Int, v14: Int, - v15: Int, v16: Int, v17: Int, v18: Int, v19: Int, v20: Int, v21: Int, v22: Int): ErrorOr[Int] = Valid(v4 + v6 + v22) + def errorOrDiv(v1: Int, v2: Int): ErrorOr[Double] = if (v2 != 0) { Valid(v1.toDouble / v2.toDouble) } + else { DivBy0Error.invalidNel } + def errorOrDiv(v1: Double, v2: Int): ErrorOr[Double] = if (v2 != 0) { Valid(v1.toDouble / v2.toDouble) } + else { DivBy0Error.invalidNel } + def errorOrSelect(v1: Int, + v2: Int, + v3: Int, + v4: Int, + v5: Int, + v6: Int, + v7: Int, + v8: Int, + v9: Int, + v10: Int, + v11: Int, + v12: Int, + v13: Int, + v14: Int, + v15: Int, + v16: Int, + v17: Int, + v18: Int, + v19: Int, + v20: Int, + v21: Int, + v22: Int + ): ErrorOr[Int] = Valid(v4 + v6 + v22) val valid0 = Valid(0) val valid1 = Valid(1) @@ -63,13 +84,29 @@ class ErrorOrSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "flatMapN 22-tuples into a Valid" in { - (valid0, valid1, valid2, - valid0, valid1, valid2, - valid0, valid1, valid2, - valid0, valid1, valid2, - valid0, valid1, valid2, - valid0, valid1, valid2, - valid0, valid1, valid2, valid0) flatMapN errorOrSelect should be(Valid(0 + 2 + 0)) + (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0 + ) flatMapN errorOrSelect should be(Valid(0 + 2 + 0)) } it should "flatMapN 1-tuples into a Valid string" in { @@ -139,82 +176,211 @@ class ErrorOrSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "flatMapN 13-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0) - .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) + val result = + (valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0) + .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("0120120120120")) } it should "flatMapN 14-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1) - .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) + val result = + (valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1) + .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("01201201201201")) } it should "flatMapN 15-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2 + ) .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("012012012012012")) } it should "flatMapN 16-tuples into a Valid string" 
in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2, valid0) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0 + ) .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("0120120120120120")) } it should "flatMapN 17-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2, valid0, valid1) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1 + ) .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("01201201201201201")) } it should "flatMapN 18-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2, valid0, valid1, valid2) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2 + ) .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("012012012012012012")) } it should "flatMapN 19-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2, valid0, valid1, valid2, valid0) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0 + ) .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("0120120120120120120")) } it should "flatMapN 20-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1 + ) .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("01201201201201201201")) } it should "flatMapN 21-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2 + ) .flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("012012012012012012012")) } it should "flatMapN 22-tuples into a Valid string" in { - val result = ( - valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, valid0, valid1, valid2, - valid0, valid1, valid2, valid0, valid1, valid2, 
valid0, valid1, valid2, valid0).flatMapN( - Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) + val result = (valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0, + valid1, + valid2, + valid0 + ).flatMapN(Array(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _).mkString.valid) result should be(Valid("0120120120120120120120")) } @@ -228,7 +394,8 @@ class ErrorOrSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { | def flatMapN[T_OUT](f1: (A) => ErrorOr[T_OUT]): ErrorOr[T_OUT] = t1.tupled flatMap f1.tupled |} | - |""".stripMargin) + |""".stripMargin + ) result } diff --git a/common/src/test/scala/common/validation/ValidationSpec.scala b/common/src/test/scala/common/validation/ValidationSpec.scala index 543e33d7573..8a1c72a922e 100644 --- a/common/src/test/scala/common/validation/ValidationSpec.scala +++ b/common/src/test/scala/common/validation/ValidationSpec.scala @@ -14,7 +14,6 @@ import common.mock.MockSugar import scala.util.{Failure, Success} - class ValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "Validation" diff --git a/core/src/main/scala/cromwell/core/BackendDockerConfiguration.scala b/core/src/main/scala/cromwell/core/BackendDockerConfiguration.scala index 3000b30e8df..8c53b38c36d 100644 --- a/core/src/main/scala/cromwell/core/BackendDockerConfiguration.scala +++ b/core/src/main/scala/cromwell/core/BackendDockerConfiguration.scala @@ -14,7 +14,9 @@ object DockerCredentials { object DockerCredentialUsernameAndPassword { private val tokenStringFormat = raw"([^:]*):(.*)".r - def unapply(arg: DockerCredentials): Option[(String, String)] = Try(new String(Base64.getDecoder.decode(arg.token))).toOption match { + def unapply(arg: DockerCredentials): Option[(String, String)] = Try( + new String(Base64.getDecoder.decode(arg.token)) + ).toOption match { case Some(tokenStringFormat(username, password)) => Some((username, password)) case _ => None } diff --git a/core/src/main/scala/cromwell/core/ConfigUtil.scala b/core/src/main/scala/cromwell/core/ConfigUtil.scala index 0fd5002ffa8..5adf56ec0a4 100644 --- a/core/src/main/scala/cromwell/core/ConfigUtil.scala +++ b/core/src/main/scala/cromwell/core/ConfigUtil.scala @@ -8,7 +8,7 @@ import com.typesafe.config.{Config, ConfigException, ConfigValue} import org.slf4j.LoggerFactory import scala.jdk.CollectionConverters._ -import scala.reflect.{ClassTag, classTag} +import scala.reflect.{classTag, ClassTag} object ConfigUtil { @@ -20,12 +20,12 @@ object ConfigUtil { /** * For keys that are in the configuration but not in the reference keySet, log a warning. */ - def warnNotRecognized(keySet: Set[String], context: String) = { + def warnNotRecognized(keySet: Set[String], context: String) = keys.diff(keySet) match { - case warnings if warnings.nonEmpty => validationLogger.warn(s"Unrecognized configuration key(s) for $context: ${warnings.mkString(", ")}") + case warnings if warnings.nonEmpty => + validationLogger.warn(s"Unrecognized configuration key(s) for $context: ${warnings.mkString(", ")}") case _ => } - } /** * Validates that the value for this key is a well formed URL. 
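// ----- editor's illustrative sketch, not part of the patch -----
// The ConfigUtil hunks around here wrap Typesafe Config lookups in
// ValidatedNel so missing or mistyped keys accumulate as messages instead of
// throwing ConfigException. Illustrative usage, assuming the implicit Config
// wrapper is brought in via `import cromwell.core.ConfigUtil._`:
import cats.implicits._
import com.typesafe.config.ConfigFactory
import cromwell.core.ConfigUtil._

object ConfigUtilSketch {
  private val config = ConfigFactory.parseString("""db { url = "jdbc:h2:mem:test" }""")

  // "db.password" is absent, so this is an Invalid naming that key, while the
  // successful "db.url" lookup is still evaluated and would be kept on success:
  val validated = (config.validateString("db.url"), config.validateString("db.password"))
    .mapN((url, password) => url -> password)
}
// ----- end editor's sketch -----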
@@ -34,15 +34,15 @@ object ConfigUtil { new URL(config.getString(url)) } - def validateString(key: String): ValidatedNel[String, String] = try { + def validateString(key: String): ValidatedNel[String, String] = try config.getString(key).validNel - } catch { + catch { case _: ConfigException.Missing => s"Could not find key: $key".invalidNel } - def validateConfig(key: String): ValidatedNel[String, Config] = try { + def validateConfig(key: String): ValidatedNel[String, Config] = try config.getConfig(key).validNel - } catch { + catch { case _: ConfigException.Missing => s"Could not find key: $key".invalidNel case _: ConfigException.WrongType => s"key $key cannot be parsed to a Config".invalidNel } @@ -50,6 +50,7 @@ object ConfigUtil { } implicit class EnhancedValidation[I <: AnyRef](val value: I) extends AnyVal { + /** * Validates this value by applying validationFunction to it and returning a Validation: * Returns successNel upon success. @@ -58,9 +59,9 @@ object ConfigUtil { * @tparam O return type of validationFunction * @tparam E Restricts the subtype of Exception that should be caught during validation */ - def validateAny[O, E <: Exception: ClassTag](validationFunction: I => O): ValidatedNel[String, O] = try { + def validateAny[O, E <: Exception: ClassTag](validationFunction: I => O): ValidatedNel[String, O] = try validationFunction(value).validNel - } catch { + catch { case e if classTag[E].runtimeClass.isInstance(e) => e.getMessage.invalidNel } } diff --git a/core/src/main/scala/cromwell/core/DockerConfiguration.scala b/core/src/main/scala/cromwell/core/DockerConfiguration.scala index e765eec80fe..3f2dfa5c6f8 100644 --- a/core/src/main/scala/cromwell/core/DockerConfiguration.scala +++ b/core/src/main/scala/cromwell/core/DockerConfiguration.scala @@ -18,23 +18,30 @@ object DockerConfiguration { lazy val instance: DockerConfiguration = { if (dockerHashLookupConfig.hasPath("gcr-api-queries-per-100-seconds")) { - logger.warn("'docker.hash-lookup.gcr-api-queries-per-100-seconds' is no longer supported, use 'docker.hash-lookup.google.throttle' instead (see reference.conf)") + logger.warn( + "'docker.hash-lookup.gcr-api-queries-per-100-seconds' is no longer supported, use 'docker.hash-lookup.google.throttle' instead (see reference.conf)" + ) } - val enabled = validate { dockerHashLookupConfig.as[Boolean]("enabled") } - val cacheEntryTtl = validate { dockerHashLookupConfig.as[FiniteDuration]("cache-entry-ttl") } - val cacheSize = validate { dockerHashLookupConfig.as[Long]("cache-size") } - val method: ErrorOr[DockerHashLookupMethod] = validate { dockerHashLookupConfig.as[String]("method") } map { + val enabled = validate(dockerHashLookupConfig.as[Boolean]("enabled")) + val cacheEntryTtl = validate(dockerHashLookupConfig.as[FiniteDuration]("cache-entry-ttl")) + val cacheSize = validate(dockerHashLookupConfig.as[Long]("cache-size")) + val method: ErrorOr[DockerHashLookupMethod] = validate(dockerHashLookupConfig.as[String]("method")) map { case "local" => DockerLocalLookup case "remote" => DockerRemoteLookup case other => throw new IllegalArgumentException(s"Unrecognized docker hash lookup method: $other") } - val sizeCompressionFactor = validate { dockerHashLookupConfig.as[Double]("size-compression-factor") } - val maxTimeBetweenRetries = validate { dockerHashLookupConfig.as[FiniteDuration]("max-time-between-retries") } - val maxRetries = validate { dockerHashLookupConfig.as[Int]("max-retries") } + val sizeCompressionFactor = 
validate(dockerHashLookupConfig.as[Double]("size-compression-factor")) + val maxTimeBetweenRetries = validate(dockerHashLookupConfig.as[FiniteDuration]("max-time-between-retries")) + val maxRetries = validate(dockerHashLookupConfig.as[Int]("max-retries")) val dockerConfiguration = (enabled, - cacheEntryTtl, cacheSize, method, - sizeCompressionFactor, maxTimeBetweenRetries, maxRetries) mapN DockerConfiguration.apply + cacheEntryTtl, + cacheSize, + method, + sizeCompressionFactor, + maxTimeBetweenRetries, + maxRetries + ) mapN DockerConfiguration.apply dockerConfiguration match { case Valid(conf) => conf @@ -44,14 +51,14 @@ object DockerConfiguration { } case class DockerConfiguration( - enabled: Boolean, - cacheEntryTtl: FiniteDuration, - cacheSize: Long, - method: DockerHashLookupMethod, - sizeCompressionFactor: Double, - maxTimeBetweenRetries: FiniteDuration, - maxRetries: Int - ) + enabled: Boolean, + cacheEntryTtl: FiniteDuration, + cacheSize: Long, + method: DockerHashLookupMethod, + sizeCompressionFactor: Double, + maxTimeBetweenRetries: FiniteDuration, + maxRetries: Int +) sealed trait DockerHashLookupMethod diff --git a/core/src/main/scala/cromwell/core/Encryption.scala b/core/src/main/scala/cromwell/core/Encryption.scala index 0258d4511e3..4e05693b003 100644 --- a/core/src/main/scala/cromwell/core/Encryption.scala +++ b/core/src/main/scala/cromwell/core/Encryption.scala @@ -31,15 +31,18 @@ case object Aes256Cbc { cipher } - final def validateLength(arrayName: String, array: Array[Byte], expectedBitLength: Int): Try[Unit] = { + final def validateLength(arrayName: String, array: Array[Byte], expectedBitLength: Int): Try[Unit] = if (array.length * 8 == expectedBitLength) { Success(()) } else { - Failure(new IllegalArgumentException(s"$arrayName size (${array.length * 8} bits) did not match the required length $expectedBitLength")) + Failure( + new IllegalArgumentException( + s"$arrayName size (${array.length * 8} bits) did not match the required length $expectedBitLength" + ) + ) } - } - final def encrypt(plainText: Array[Byte], secretKey: SecretKey): Try[EncryptedBytes] = { + final def encrypt(plainText: Array[Byte], secretKey: SecretKey): Try[EncryptedBytes] = validateLength("Secret key", secretKey.key, keySize) map { _ => val iv = new Array[Byte](blockSize / 8) ranGen.nextBytes(iv) @@ -47,15 +50,15 @@ case object Aes256Cbc { val cipher = init(Cipher.ENCRYPT_MODE, secretKey.key, iv) EncryptedBytes(cipher.doFinal(plainText), iv) } - } - final def decrypt(encryptedBytes: EncryptedBytes, secretKey: SecretKey): Try[Array[Byte]] = { + final def decrypt(encryptedBytes: EncryptedBytes, secretKey: SecretKey): Try[Array[Byte]] = for { _ <- validateLength("Secret key", secretKey.key, keySize) _ <- validateLength("Initialization vector", encryptedBytes.initializationVector, blockSize) - bytes = init(Cipher.DECRYPT_MODE, secretKey.key, encryptedBytes.initializationVector).doFinal(encryptedBytes.cipherText) + bytes = init(Cipher.DECRYPT_MODE, secretKey.key, encryptedBytes.initializationVector).doFinal( + encryptedBytes.cipherText + ) } yield bytes - } } final case class EncryptedBytes(cipherText: Array[Byte], initializationVector: Array[Byte]) { @@ -71,4 +74,4 @@ object EncryptedBytes { final case class SecretKey(key: Array[Byte]) object SecretKey { def apply(base64KeyString: String): SecretKey = SecretKey(Base64.decodeBase64(base64KeyString)) -} \ No newline at end of file +} diff --git a/core/src/main/scala/cromwell/core/ExecutionIndex.scala 
b/core/src/main/scala/cromwell/core/ExecutionIndex.scala index 4f04179db0c..926fad13c11 100644 --- a/core/src/main/scala/cromwell/core/ExecutionIndex.scala +++ b/core/src/main/scala/cromwell/core/ExecutionIndex.scala @@ -23,8 +23,7 @@ object ExecutionIndex { } implicit val ExecutionIndexOrdering = new Ordering[ExecutionIndex] { - override def compare(x: ExecutionIndex, y: ExecutionIndex): Int = { + override def compare(x: ExecutionIndex, y: ExecutionIndex): Int = x.fromIndex.compareTo(y.fromIndex) - } } } diff --git a/core/src/main/scala/cromwell/core/ExecutionStatus.scala b/core/src/main/scala/cromwell/core/ExecutionStatus.scala index 3d4016d90a5..fa2ece67ab5 100644 --- a/core/src/main/scala/cromwell/core/ExecutionStatus.scala +++ b/core/src/main/scala/cromwell/core/ExecutionStatus.scala @@ -2,7 +2,8 @@ package cromwell.core object ExecutionStatus extends Enumeration { type ExecutionStatus = Value - val NotStarted, WaitingForQueueSpace, QueuedInCromwell, Starting, Running, Aborting, Failed, RetryableFailure, Done, Bypassed, Aborted, Unstartable = Value + val NotStarted, WaitingForQueueSpace, QueuedInCromwell, Starting, Running, Aborting, Failed, RetryableFailure, Done, + Bypassed, Aborted, Unstartable = Value val TerminalStatuses = Set(Failed, Done, Aborted, Bypassed, Unstartable) val TerminalOrRetryableStatuses = TerminalStatuses + RetryableFailure val NonTerminalStatuses = values.diff(TerminalOrRetryableStatuses) @@ -24,7 +25,7 @@ object ExecutionStatus extends Enumeration { case Done => 11 } } - + implicit class EnhancedExecutionStatus(val status: ExecutionStatus) extends AnyVal { def isTerminal: Boolean = TerminalStatuses contains status diff --git a/core/src/main/scala/cromwell/core/HogGroup.scala b/core/src/main/scala/cromwell/core/HogGroup.scala index 87bcad5d0c9..c49b84a9ecc 100644 --- a/core/src/main/scala/cromwell/core/HogGroup.scala +++ b/core/src/main/scala/cromwell/core/HogGroup.scala @@ -17,16 +17,16 @@ object HogGroup { if (config.hasPath("system.hog-safety.workflow-option")) { val hogGroupField = config.getString("system.hog-safety.workflow-option") - (options, workflowId) => { + (options, workflowId) => options.get(hogGroupField) match { case Success(hg) => HogGroup(hg) case Failure(_) => HogGroup(workflowId.shortString) } - } - } else { - (_, workflowId) => HogGroup(workflowId.shortString) + } else { (_, workflowId) => + HogGroup(workflowId.shortString) } } - def decide(workflowOptions: WorkflowOptions, workflowId: WorkflowId): HogGroup = HogGroupDeciderFunction.apply(workflowOptions, workflowId) + def decide(workflowOptions: WorkflowOptions, workflowId: WorkflowId): HogGroup = + HogGroupDeciderFunction.apply(workflowOptions, workflowId) } diff --git a/core/src/main/scala/cromwell/core/JobKey.scala b/core/src/main/scala/cromwell/core/JobKey.scala index e5f990aa433..99118dc2235 100644 --- a/core/src/main/scala/cromwell/core/JobKey.scala +++ b/core/src/main/scala/cromwell/core/JobKey.scala @@ -13,6 +13,6 @@ trait JobKey { import ExecutionIndex.IndexEnhancedIndex s"${getClass.getSimpleName}_${node.getClass.getSimpleName}_${node.fullyQualifiedName}:${index.fromIndex}:$attempt" } - - def isShard = index.isDefined + + def isShard = index.isDefined } diff --git a/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala b/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala index 53f0e4de6fc..8932b3f6836 100644 --- a/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala +++ b/core/src/main/scala/cromwell/core/MonitoringCompanionActor.scala @@ -9,21 
+9,21 @@ import scala.language.postfixOps object MonitoringCompanionActor { sealed trait MonitoringCompanionCommand - private [core] case object AddWork extends MonitoringCompanionCommand - private [core] case object RemoveWork extends MonitoringCompanionCommand - private [core] def props(actorToMonitor: ActorRef) = Props(new MonitoringCompanionActor(actorToMonitor)) + private[core] case object AddWork extends MonitoringCompanionCommand + private[core] case object RemoveWork extends MonitoringCompanionCommand + private[core] def props(actorToMonitor: ActorRef) = Props(new MonitoringCompanionActor(actorToMonitor)) } -private [core] class MonitoringCompanionActor(actorToMonitor: ActorRef) extends Actor with ActorLogging { +private[core] class MonitoringCompanionActor(actorToMonitor: ActorRef) extends Actor with ActorLogging { private var workCount: Int = 0 - + override def receive = { case AddWork => workCount += 1 case RemoveWork => workCount -= 1 case ShutdownCommand if workCount <= 0 => context stop actorToMonitor context stop self - case ShutdownCommand => + case ShutdownCommand => log.info(s"{} is still processing {} messages", actorToMonitor.path.name, workCount) context.system.scheduler.scheduleOnce(1 second, self, ShutdownCommand)(context.dispatcher) () @@ -33,12 +33,12 @@ private [core] class MonitoringCompanionActor(actorToMonitor: ActorRef) extends trait MonitoringCompanionHelper { this: Actor => private val monitoringActor = context.actorOf(MonitoringCompanionActor.props(self)) private var shuttingDown: Boolean = false - + def addWork() = monitoringActor ! AddWork def removeWork() = monitoringActor ! RemoveWork val monitoringReceive: Receive = { - case ShutdownCommand if !shuttingDown => + case ShutdownCommand if !shuttingDown => shuttingDown = true monitoringActor ! ShutdownCommand case ShutdownCommand => // Ignore if we're already shutting down diff --git a/core/src/main/scala/cromwell/core/WorkflowId.scala b/core/src/main/scala/cromwell/core/WorkflowId.scala index feb0ee601a9..f2444738fed 100644 --- a/core/src/main/scala/cromwell/core/WorkflowId.scala +++ b/core/src/main/scala/cromwell/core/WorkflowId.scala @@ -8,19 +8,17 @@ sealed trait WorkflowId { override def toString = id.toString def shortString = id.toString.split("-")(0) - def toRoot: RootWorkflowId = { + def toRoot: RootWorkflowId = this match { case root: RootWorkflowId => root case _ => RootWorkflowId(id) } - } - def toPossiblyNotRoot: PossiblyNotRootWorkflowId = { + def toPossiblyNotRoot: PossiblyNotRootWorkflowId = this match { case possiblyNotRoot: PossiblyNotRootWorkflowId => possiblyNotRoot case _ => PossiblyNotRootWorkflowId(id) } - } } object WorkflowId { diff --git a/core/src/main/scala/cromwell/core/WorkflowOptions.scala b/core/src/main/scala/cromwell/core/WorkflowOptions.scala index 91a7c30bbfe..cbdb1201986 100644 --- a/core/src/main/scala/cromwell/core/WorkflowOptions.scala +++ b/core/src/main/scala/cromwell/core/WorkflowOptions.scala @@ -55,7 +55,7 @@ object WorkflowOptions { case object FinalWorkflowLogDir extends WorkflowOption("final_workflow_log_dir") case object FinalCallLogsDir extends WorkflowOption("final_call_logs_dir") case object FinalWorkflowOutputsDir extends WorkflowOption("final_workflow_outputs_dir") - case object UseRelativeOutputPaths extends WorkflowOption(name="use_relative_output_paths") + case object UseRelativeOutputPaths extends WorkflowOption(name = "use_relative_output_paths") // Misc. 
case object DefaultRuntimeOptions extends WorkflowOption("default_runtime_attributes") @@ -70,23 +70,28 @@ object WorkflowOptions { private lazy val defaultRuntimeOptionKey: String = DefaultRuntimeOptions.name private lazy val validObjectKeys: Set[String] = Set(DefaultRuntimeOptions.name, "google_labels") - def encryptField(value: JsString): Try[JsObject] = { + def encryptField(value: JsString): Try[JsObject] = Aes256Cbc.encrypt(value.value.getBytes("utf-8"), SecretKey(EncryptionKey)) match { - case Success(encryptedValue) => Success(JsObject(Map( - "iv" -> JsString(encryptedValue.base64Iv), - "ciphertext" -> JsString(encryptedValue.base64CipherText) - ))) + case Success(encryptedValue) => + Success( + JsObject( + Map( + "iv" -> JsString(encryptedValue.base64Iv), + "ciphertext" -> JsString(encryptedValue.base64CipherText) + ) + ) + ) case Failure(ex) => Failure(ex) } - } - def decryptField(obj: JsObject): Try[String] = { + def decryptField(obj: JsObject): Try[String] = (obj.fields.get("iv"), obj.fields.get("ciphertext")) match { case (Some(iv: JsString), Some(ciphertext: JsString)) => - Aes256Cbc.decrypt(EncryptedBytes(ciphertext.value, iv.value), SecretKey(WorkflowOptions.EncryptionKey)).map(new String(_, "utf-8")) + Aes256Cbc + .decrypt(EncryptedBytes(ciphertext.value, iv.value), SecretKey(WorkflowOptions.EncryptionKey)) + .map(new String(_, "utf-8")) case _ => Failure(new RuntimeException(s"JsObject must have 'iv' and 'ciphertext' fields to decrypt: $obj")) } - } def isEncryptedField(jsValue: JsValue): Boolean = jsValue match { case obj: JsObject if obj.fields.keys.exists(_ == "iv") && obj.fields.keys.exists(_ == "ciphertext") => true @@ -102,7 +107,8 @@ object WorkflowOptions { case (k, v: JsNumber) => k -> Success(v) case (k, v) if isEncryptedField(v) => k -> Success(v) case (k, v: JsArray) => k -> Success(v) - case (k, v) => k -> Failure(new UnsupportedOperationException(s"Unsupported key/value pair in WorkflowOptions: $k -> $v")) + case (k, v) => + k -> Failure(new UnsupportedOperationException(s"Unsupported key/value pair in WorkflowOptions: $k -> $v")) } encrypted.values collect { case f: Failure[_] => f } match { @@ -117,7 +123,7 @@ object WorkflowOptions { case Success(x) => Failure(new UnsupportedOperationException(s"Expecting JSON object, got $x")) } - def fromMap(m: Map[String, String]) = fromJsonObject(JsObject(m map { case (k, v) => k -> JsString(v)})) + def fromMap(m: Map[String, String]) = fromJsonObject(JsObject(m map { case (k, v) => k -> JsString(v) })) private def getAsJson(key: String, jsObject: JsObject) = jsObject.fields.get(key) match { case Some(jsStr: JsString) => Success(jsStr) @@ -157,7 +163,7 @@ case class WorkflowOptions(jsObject: JsObject) { } def getVectorOfStrings(key: String): ErrorOr[Option[Vector[String]]] = jsObject.fields.get(key) match { - case Some(jsArr: JsArray) => Option(jsArr.elements collect { case e: JsString => e.value } ).validNel + case Some(jsArr: JsArray) => Option(jsArr.elements collect { case e: JsString => e.value }).validNel case Some(jsVal: JsValue) => s"Unsupported JsValue as JsArray: $jsVal".invalidNel case _ => None.validNel } @@ -167,8 +173,14 @@ case class WorkflowOptions(jsObject: JsObject) { } lazy val defaultRuntimeOptions = jsObject.fields.get(defaultRuntimeOptionKey) match { - case Some(jsObj: JsObject) => TryUtil.sequenceMap(jsObj.fields map { case (k, _) => k -> WorkflowOptions.getAsJson(k, jsObj) }) - case Some(jsVal) => Failure(new IllegalArgumentException(s"Unsupported JsValue for $defaultRuntimeOptionKey: $jsVal. 
Expected a JSON object.")) + case Some(jsObj: JsObject) => + TryUtil.sequenceMap(jsObj.fields map { case (k, _) => k -> WorkflowOptions.getAsJson(k, jsObj) }) + case Some(jsVal) => + Failure( + new IllegalArgumentException( + s"Unsupported JsValue for $defaultRuntimeOptionKey: $jsVal. Expected a JSON object." + ) + ) case None => Failure(OptionNotFoundException(s"Cannot find definition for default runtime attributes")) } diff --git a/core/src/main/scala/cromwell/core/WorkflowProcessingEvents.scala b/core/src/main/scala/cromwell/core/WorkflowProcessingEvents.scala index ab80bf78724..e2db6df75c8 100644 --- a/core/src/main/scala/cromwell/core/WorkflowProcessingEvents.scala +++ b/core/src/main/scala/cromwell/core/WorkflowProcessingEvents.scala @@ -36,4 +36,8 @@ object WorkflowProcessingEvents { val ProcessingEventsKey = "workflowProcessingEvents" } -case class WorkflowProcessingEvent(cromwellId: String, description: String, timestamp: OffsetDateTime, cromwellVersion: String) +case class WorkflowProcessingEvent(cromwellId: String, + description: String, + timestamp: OffsetDateTime, + cromwellVersion: String +) diff --git a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala index ee936b86819..aa1faf89542 100644 --- a/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala +++ b/core/src/main/scala/cromwell/core/WorkflowSourceFilesCollection.scala @@ -22,16 +22,15 @@ sealed trait WorkflowSourceFilesCollection { def importsZipFileOption: Option[Array[Byte]] = this match { case _: WorkflowSourceFilesWithoutImports => None - case w: WorkflowSourceFilesWithDependenciesZip => Option(w.importsZip) // i.e. Some(importsZip) if our wiring is correct + case w: WorkflowSourceFilesWithDependenciesZip => + Option(w.importsZip) // i.e. 
Some(importsZip) if our wiring is correct } - def setOptions(workflowOptions: WorkflowOptions) = { - + def setOptions(workflowOptions: WorkflowOptions) = this match { case w: WorkflowSourceFilesWithoutImports => w.copy(workflowOptions = workflowOptions) case w: WorkflowSourceFilesWithDependenciesZip => w.copy(workflowOptions = workflowOptions) } - } } trait HasWorkflowIdAndSources { @@ -51,7 +50,8 @@ object WorkflowSourceFilesCollection { importsFile: Option[Array[Byte]], workflowOnHold: Boolean, warnings: Seq[String], - requestedWorkflowId: Option[WorkflowId]): WorkflowSourceFilesCollection = importsFile match { + requestedWorkflowId: Option[WorkflowId] + ): WorkflowSourceFilesCollection = importsFile match { case Some(imports) => WorkflowSourceFilesWithDependenciesZip( workflowSource = workflowSource, @@ -65,7 +65,8 @@ object WorkflowSourceFilesCollection { importsZip = imports, workflowOnHold = workflowOnHold, warnings = warnings, - requestedWorkflowId = requestedWorkflowId) + requestedWorkflowId = requestedWorkflowId + ) case None => WorkflowSourceFilesWithoutImports( workflowSource = workflowSource, @@ -78,7 +79,8 @@ object WorkflowSourceFilesCollection { labelsJson = labelsJson, workflowOnHold = workflowOnHold, warnings = warnings, - requestedWorkflowId = requestedWorkflowId) + requestedWorkflowId = requestedWorkflowId + ) } } @@ -92,7 +94,8 @@ final case class WorkflowSourceFilesWithoutImports(workflowSource: Option[Workfl labelsJson: WorkflowJson, workflowOnHold: Boolean = false, warnings: Seq[String], - requestedWorkflowId: Option[WorkflowId]) extends WorkflowSourceFilesCollection + requestedWorkflowId: Option[WorkflowId] +) extends WorkflowSourceFilesCollection final case class WorkflowSourceFilesWithDependenciesZip(workflowSource: Option[WorkflowSource], workflowUrl: Option[WorkflowUrl], @@ -105,9 +108,9 @@ final case class WorkflowSourceFilesWithDependenciesZip(workflowSource: Option[W importsZip: Array[Byte], workflowOnHold: Boolean = false, warnings: Seq[String], - requestedWorkflowId: Option[WorkflowId]) extends WorkflowSourceFilesCollection { - override def toString = { + requestedWorkflowId: Option[WorkflowId] +) extends WorkflowSourceFilesCollection { + override def toString = s"WorkflowSourceFilesWithDependenciesZip($workflowSource, $workflowUrl, $workflowType, $workflowTypeVersion," + s""" $inputsJson, ${workflowOptions.asPrettyJson}, $labelsJson, <>, $warnings)""" - } } diff --git a/core/src/main/scala/cromwell/core/WorkflowState.scala b/core/src/main/scala/cromwell/core/WorkflowState.scala index db4bddfedc5..26294ff648b 100644 --- a/core/src/main/scala/cromwell/core/WorkflowState.scala +++ b/core/src/main/scala/cromwell/core/WorkflowState.scala @@ -2,7 +2,6 @@ package cromwell.core import cats.Semigroup - sealed trait WorkflowState { def isTerminal: Boolean protected def ordinal: Int @@ -10,10 +9,18 @@ sealed trait WorkflowState { } object WorkflowState { - lazy val WorkflowStateValues = Seq(WorkflowOnHold, WorkflowSubmitted, WorkflowRunning, WorkflowFailed, WorkflowSucceeded, WorkflowAborting, WorkflowAborted) + lazy val WorkflowStateValues = Seq(WorkflowOnHold, + WorkflowSubmitted, + WorkflowRunning, + WorkflowFailed, + WorkflowSucceeded, + WorkflowAborting, + WorkflowAborted + ) - def withName(str: String): WorkflowState = WorkflowStateValues.find(_.toString.equalsIgnoreCase(str)).getOrElse( - throw new NoSuchElementException(s"No such WorkflowState: $str")) + def withName(str: String): WorkflowState = WorkflowStateValues + .find(_.toString.equalsIgnoreCase(str)) + 
.getOrElse(throw new NoSuchElementException(s"No such WorkflowState: $str")) implicit val WorkflowStateSemigroup = new Semigroup[WorkflowState] { override def combine(f1: WorkflowState, f2: WorkflowState): WorkflowState = f1.combine(f2) @@ -22,7 +29,7 @@ object WorkflowState { implicit val WorkflowStateOrdering = Ordering.by { self: WorkflowState => self.ordinal } } -case object WorkflowOnHold extends WorkflowState{ +case object WorkflowOnHold extends WorkflowState { override val toString: String = "On Hold" override val isTerminal = false override val ordinal = 0 diff --git a/core/src/main/scala/cromwell/core/actor/BatchActor.scala b/core/src/main/scala/cromwell/core/actor/BatchActor.scala index 5988dd822d2..9b002f4fcf5 100644 --- a/core/src/main/scala/cromwell/core/actor/BatchActor.scala +++ b/core/src/main/scala/cromwell/core/actor/BatchActor.scala @@ -14,7 +14,6 @@ import scala.concurrent.Future import scala.concurrent.duration.{Duration, FiniteDuration} import scala.util.{Failure, Success} - /** A collection of state, data, and message types to support BatchActor. */ object BatchActor { type BatchData[C] = WeightedQueue[C, Int] @@ -45,8 +44,9 @@ object BatchActor { * It is backed by a WeightedQueue which makes it possible to decouple the number of messages received from * the effective "weight" of the queue. */ -abstract class BatchActor[C](val flushRate: FiniteDuration, - val batchSize: Int) extends FSM[BatchActorState, BatchData[C]] with Timers { +abstract class BatchActor[C](val flushRate: FiniteDuration, val batchSize: Int) + extends FSM[BatchActorState, BatchData[C]] + with Timers { private var shuttingDown: Boolean = false implicit val ec = context.dispatcher @@ -63,7 +63,8 @@ abstract class BatchActor[C](val flushRate: FiniteDuration, protected def routed: Boolean = false override def preStart(): Unit = { - if (logOnStartUp) log.info("{} configured to flush with batch size {} and process rate {}.", name, batchSize, flushRate) + if (logOnStartUp) + log.info("{} configured to flush with batch size {} and process rate {}.", name, batchSize, flushRate) if (flushRate != Duration.Zero) { timers.startPeriodicTimer(ScheduledFlushKey, ScheduledProcessAction, flushRate) } @@ -133,10 +134,9 @@ abstract class BatchActor[C](val flushRate: FiniteDuration, */ protected def process(data: NonEmptyVector[C]): Future[Int] - private def processIfBatchSizeReached(data: BatchData[C]) = { + private def processIfBatchSizeReached(data: BatchData[C]) = if (data.weight >= batchSize) processHead(data) else goto(WaitingToProcess) using data - } private def processHead(data: BatchData[C]) = if (data.innerQueue.nonEmpty) { val (head, newQueue) = data.behead(batchSize) @@ -159,8 +159,8 @@ abstract class BatchActor[C](val flushRate: FiniteDuration, goto(Processing) using newQueue } else - // This goto is important, even if we're already in WaitingToProcess we want to trigger the onTransition below - // to check if it's time to shutdown + // This goto is important, even if we're already in WaitingToProcess we want to trigger the onTransition below + // to check if it's time to shutdown goto(WaitingToProcess) using data onTransition { diff --git a/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala b/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala index d8b0cbe0716..85376bedb37 100644 --- a/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala +++ b/core/src/main/scala/cromwell/core/actor/RobustClientHelper.scala @@ -15,14 +15,14 @@ object RobustClientHelper { } trait 
RobustClientHelper { this: Actor with ActorLogging => - private [actor] implicit val robustActorHelperEc = context.dispatcher + implicit private[actor] val robustActorHelperEc = context.dispatcher private var backoff: Option[Backoff] = None // package private for testing - private [core] var timeouts = Map.empty[Any, (Cancellable, FiniteDuration)] + private[core] var timeouts = Map.empty[Any, (Cancellable, FiniteDuration)] - protected def initialBackoff(): Backoff = SimpleExponentialBackoff(5.seconds, 20.minutes, 2D) + protected def initialBackoff(): Backoff = SimpleExponentialBackoff(5.seconds, 20.minutes, 2d) def robustReceive: Receive = { case BackPressure(request) => @@ -33,39 +33,38 @@ trait RobustClientHelper { this: Actor with ActorLogging => case RequestTimeout(request, to) => onTimeout(request, to) } - private final def newTimer(msg: Any, to: ActorRef, in: FiniteDuration) = { + final private def newTimer(msg: Any, to: ActorRef, in: FiniteDuration) = context.system.scheduler.scheduleOnce(in, to, msg)(robustActorHelperEc, self) - } def robustSend(msg: Any, to: ActorRef, timeout: FiniteDuration = DefaultRequestLostTimeout): Unit = { to ! msg addTimeout(msg, to, timeout) } - private final def addTimeout(command: Any, to: ActorRef, timeout: FiniteDuration) = { + final private def addTimeout(command: Any, to: ActorRef, timeout: FiniteDuration) = { val cancellable = newTimer(RequestTimeout(command, to), self, timeout) timeouts = timeouts + (command -> (cancellable -> timeout)) } - protected final def hasTimeout(command: Any) = timeouts.get(command).isDefined + final protected def hasTimeout(command: Any) = timeouts.get(command).isDefined - protected final def cancelTimeout(command: Any) = { + final protected def cancelTimeout(command: Any) = { timeouts.get(command) foreach { case (cancellable, _) => cancellable.cancel() } timeouts = timeouts - command } - private final def resetTimeout(command: Any, to: ActorRef) = { + final private def resetTimeout(command: Any, to: ActorRef) = { val timeout = timeouts.get(command) map { _._2 } cancelTimeout(command) timeout foreach { addTimeout(command, to, _) } } - private [actor] final def generateBackpressureTime: FiniteDuration = { - val effectiveBackoff = backoff.getOrElse({ + final private[actor] def generateBackpressureTime: FiniteDuration = { + val effectiveBackoff = backoff.getOrElse { val firstBackoff = initialBackoff() backoff = Option(firstBackoff) firstBackoff - }) + } val backoffTime = effectiveBackoff.backoffMillis backoff = Option(effectiveBackoff.next) backoffTime.millis diff --git a/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala b/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala index d03853e5585..023a01dc450 100644 --- a/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala +++ b/core/src/main/scala/cromwell/core/actor/StreamActorHelper.scala @@ -13,8 +13,8 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} object StreamActorHelper { - private [actor] case class StreamFailed(failure: Throwable) - private [actor] case object StreamCompleted + private[actor] case class StreamFailed(failure: Throwable) + private[actor] case object StreamCompleted class ActorRestartException(throwable: Throwable) extends RuntimeException(throwable) } @@ -25,71 +25,68 @@ trait StreamActorHelper[T <: StreamContext] { this: Actor with ActorLogging => implicit def materializer: ActorMaterializer private val decider: Supervision.Decider = _ => Supervision.Resume - - private val 
replySink = Sink.foreach[(Any, T)] { - case (response, commandContext) => - val reply = commandContext.clientContext map { (_, response) } getOrElse response - commandContext.replyTo ! reply + + private val replySink = Sink.foreach[(Any, T)] { case (response, commandContext) => + val reply = commandContext.clientContext map { (_, response) } getOrElse response + commandContext.replyTo ! reply } protected def actorReceive: Receive - + protected def streamSource: Source[(Any, T), SourceQueueWithComplete[T]] override def receive = streamReceive.orElse(actorReceive) - + protected def onBackpressure(scale: Option[Double] = None): Unit = {} - private [actor] lazy val stream = { + private[actor] lazy val stream = streamSource .to(replySink) .withAttributes(ActorAttributes.supervisionStrategy(decider)) .run() - } - override def preStart(): Unit = { + override def preStart(): Unit = stream.watchCompletion() onComplete { case Success(_) => self ! StreamCompleted case Failure(failure) => self ! StreamFailed(failure) } - } def sendToStream(commandContext: T) = { val enqueue = stream offer commandContext map { case Enqueued => EnqueueResponse(Enqueued, commandContext) case other => EnqueueResponse(other, commandContext) - } recoverWith { - case t => Future.successful(FailedToEnqueue(t, commandContext)) + } recoverWith { case t => + Future.successful(FailedToEnqueue(t, commandContext)) } pipe(enqueue) to self () } - + private def backpressure(commandContext: StreamContext) = { - val originalRequest = commandContext.clientContext map { _ -> commandContext.request } getOrElse commandContext.request + val originalRequest = commandContext.clientContext map { + _ -> commandContext.request + } getOrElse commandContext.request commandContext.replyTo ! BackPressure(originalRequest) onBackpressure() } private def streamReceive: Receive = { - case ShutdownCommand => + case ShutdownCommand => stream.complete() case EnqueueResponse(Enqueued, _: T @unchecked) => // Good ! - case EnqueueResponse(_, commandContext) => backpressure(commandContext) case FailedToEnqueue(_, commandContext) => backpressure(commandContext) - - case StreamCompleted => + + case StreamCompleted => context stop self - case StreamFailed(failure) => + case StreamFailed(failure) => restart(failure) } /** Throw the exception to force the actor to restart so it can be back in business * IMPORTANT: Make sure the supervision strategy for this actor is Restart */ - private def restart(throwable: Throwable) = { + private def restart(throwable: Throwable) = throw new ActorRestartException(throwable) - } } diff --git a/core/src/main/scala/cromwell/core/actor/ThrottlerActor.scala b/core/src/main/scala/cromwell/core/actor/ThrottlerActor.scala index c1128897445..6ddbb5f8b18 100644 --- a/core/src/main/scala/cromwell/core/actor/ThrottlerActor.scala +++ b/core/src/main/scala/cromwell/core/actor/ThrottlerActor.scala @@ -15,7 +15,7 @@ import scala.concurrent.duration._ abstract class ThrottlerActor[C] extends BatchActor[C](Duration.Zero, 1) { override protected def logOnStartUp = false override def weightFunction(command: C) = 1 - override final def process(data: NonEmptyVector[C]): Future[Int] = { + final override def process(data: NonEmptyVector[C]): Future[Int] = // This ShouldNotBePossible™ but in case it happens, instead of dropping elements process them all anyway // Explanation: batch size is 1 which means as soon as we receive 1 element, the process method should be called. 
// Because the BatchActor calls the process method with vector of elements which total weight is batch size, and because @@ -25,6 +25,5 @@ abstract class ThrottlerActor[C] extends BatchActor[C](Duration.Zero, 1) { log.error("{} is throttled and is not supposed to process more than one element at a time !", self.path.name) data.toVector.traverse(processHead).map(_.length) } else processHead(data.head).map(_ => 1) - } def processHead(head: C): Future[Int] } diff --git a/core/src/main/scala/cromwell/core/callcaching/CallCachingMode.scala b/core/src/main/scala/cromwell/core/callcaching/CallCachingMode.scala index 23d4f396a2c..08527dbcf1d 100644 --- a/core/src/main/scala/cromwell/core/callcaching/CallCachingMode.scala +++ b/core/src/main/scala/cromwell/core/callcaching/CallCachingMode.scala @@ -1,6 +1,7 @@ package cromwell.core.callcaching sealed trait CallCachingMode { + /** * Return an equivalent of this call caching mode with READ disabled. */ @@ -19,11 +20,14 @@ case object CallCachingOff extends CallCachingMode { override val withoutWrite = this } -case class CallCachingActivity(readWriteMode: ReadWriteMode, options: CallCachingOptions = CallCachingOptions()) extends CallCachingMode { +case class CallCachingActivity(readWriteMode: ReadWriteMode, options: CallCachingOptions = CallCachingOptions()) + extends CallCachingMode { override val readFromCache = readWriteMode.r override val writeToCache = readWriteMode.w - override lazy val withoutRead: CallCachingMode = if (!writeToCache) CallCachingOff else this.copy(readWriteMode = WriteCache) - override lazy val withoutWrite: CallCachingMode = if (!readFromCache) CallCachingOff else this.copy(readWriteMode = ReadCache) + override lazy val withoutRead: CallCachingMode = + if (!writeToCache) CallCachingOff else this.copy(readWriteMode = WriteCache) + override lazy val withoutWrite: CallCachingMode = + if (!readFromCache) CallCachingOff else this.copy(readWriteMode = ReadCache) override val toString = readWriteMode.toString } @@ -35,4 +39,6 @@ case object ReadCache extends ReadWriteMode { override val w = false } case object WriteCache extends ReadWriteMode { override val r = false } case object ReadAndWriteCache extends ReadWriteMode -final case class CallCachingOptions(invalidateBadCacheResults: Boolean = true, workflowOptionCallCachePrefixes: Option[Vector[String]] = None) +final case class CallCachingOptions(invalidateBadCacheResults: Boolean = true, + workflowOptionCallCachePrefixes: Option[Vector[String]] = None +) diff --git a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala index 920702280b0..a78c7f20b6a 100644 --- a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala +++ b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala @@ -2,7 +2,6 @@ package cromwell.core.callcaching import cromwell.core.callcaching.HashKey.KeySeparator - object HashKey { private val KeySeparator = ": " def apply(keyComponents: String*) = new HashKey(true, keyComponents.toList) diff --git a/core/src/main/scala/cromwell/core/core.scala b/core/src/main/scala/cromwell/core/core.scala index 68edbaacbe7..2535a58f5c6 100644 --- a/core/src/main/scala/cromwell/core/core.scala +++ b/core/src/main/scala/cromwell/core/core.scala @@ -7,7 +7,6 @@ import mouse.boolean._ import scala.concurrent.duration.FiniteDuration - case class StandardPaths(output: Path, error: Path) case class CallContext(root: Path, standardPaths: StandardPaths, isDocker: Boolean) 
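// ----- editor's illustrative sketch, not part of the patch -----
// The CallCachingMode.scala hunk earlier in this patch defines how the read
// and write halves of call caching are switched off independently; disabling
// the only remaining half collapses the mode to CallCachingOff:
import cromwell.core.callcaching.{CallCachingActivity, CallCachingOff, ReadAndWriteCache, WriteCache}

object CallCachingModeSketch {
  val activity = CallCachingActivity(ReadAndWriteCache) // read and write both enabled

  val writeOnly = activity.withoutRead // still writes to the cache, no longer reads
  assert(writeOnly == CallCachingActivity(WriteCache))

  val off = writeOnly.withoutWrite // no direction left, so caching is fully off
  assert(off == CallCachingOff)
}
// ----- end editor's sketch -----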
@@ -27,26 +26,35 @@ object CromwellFatalException { class CromwellFatalException(val exception: Throwable) extends Exception(exception) with CromwellFatalExceptionMarker case class CromwellAggregatedException(throwables: Seq[Throwable], exceptionContext: String = "") - extends Exception with ThrowableAggregation + extends Exception + with ThrowableAggregation case class CacheConfig(concurrency: Int, size: Long, ttl: FiniteDuration) import net.ceedubs.ficus.Ficus._ object CacheConfig { + /** * From an optional `Config` entry and specified defaults, always return a `CacheConfig` object. */ - def config(caching: Option[Config], defaultConcurrency: Int, defaultSize: Long, defaultTtl: FiniteDuration): CacheConfig = { + def config(caching: Option[Config], + defaultConcurrency: Int, + defaultSize: Long, + defaultTtl: FiniteDuration + ): CacheConfig = caching flatMap { c => optionalConfig(c, defaultConcurrency = defaultConcurrency, defaultSize = defaultSize, defaultTtl = defaultTtl) } getOrElse CacheConfig(concurrency = defaultConcurrency, size = defaultSize, ttl = defaultTtl) - } /** * From a non-optional `Config` and specified defaults, if caching is enabled return a `CacheConfig` object wrapped in a `Some`, * otherwise return `None`. */ - def optionalConfig(caching: Config, defaultConcurrency: Int, defaultSize: Long, defaultTtl: FiniteDuration): Option[CacheConfig] = { + def optionalConfig(caching: Config, + defaultConcurrency: Int, + defaultSize: Long, + defaultTtl: FiniteDuration + ): Option[CacheConfig] = { val cachingEnabled = caching.getOrElse("enabled", false) cachingEnabled.option( diff --git a/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala b/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala index caeb56509fd..ac2fe473572 100644 --- a/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala +++ b/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala @@ -24,23 +24,29 @@ import scala.util.{Failure, Try} */ class CromwellFileSystems(globalConfig: Config) { // Validate the configuration and creates a Map of PathBuilderFactory constructors, along with their optional singleton config - private [filesystem] val factoryBuilders: Map[String, (Constructor[_], Option[AnyRef])] = if (globalConfig.hasPath("filesystems")) { - val rawConfigSet = globalConfig.getObject("filesystems").entrySet.asScala - val configMap = rawConfigSet.toList.map({ entry => entry.getKey -> entry.getValue }) - val constructorMap = configMap.traverse[ErrorOr, (String, (Constructor[_], Option[AnyRef]))]({ - case (key, fsConfig: ConfigObject) => processFileSystem(key, fsConfig) - case (key, _) => s"Invalid filesystem configuration for $key".invalidNel - }).map(_.toMap) - - constructorMap.unsafe("Failed to initialize Cromwell filesystems") - } else Map.empty + private[filesystem] val factoryBuilders: Map[String, (Constructor[_], Option[AnyRef])] = + if (globalConfig.hasPath("filesystems")) { + val rawConfigSet = globalConfig.getObject("filesystems").entrySet.asScala + val configMap = rawConfigSet.toList.map(entry => entry.getKey -> entry.getValue) + val constructorMap = configMap + .traverse[ErrorOr, (String, (Constructor[_], Option[AnyRef]))] { + case (key, fsConfig: ConfigObject) => processFileSystem(key, fsConfig) + case (key, _) => s"Invalid filesystem configuration for $key".invalidNel + } + .map(_.toMap) + + constructorMap.unsafe("Failed to initialize Cromwell filesystems") + } else Map.empty val supportedFileSystems: Iterable[String] = 
factoryBuilders.keys // Generate the appropriate constructor and optional singleton instance for a filesystem - private def processFileSystem(key: String, fsConfig: ConfigObject): ErrorOr[(String, (Constructor[_], Option[AnyRef]))] = { + private def processFileSystem(key: String, + fsConfig: ConfigObject + ): ErrorOr[(String, (Constructor[_], Option[AnyRef]))] = { // This is the (optional) singleton instance shared by all factory instances - val singletonInstance: Checked[Option[AnyRef]] = fsConfig.toConfig.getAs[Config]("global") + val singletonInstance: Checked[Option[AnyRef]] = fsConfig.toConfig + .getAs[Config]("global") .map(c => instantiateSingletonConfig(key, c).toValidated) .sequence[ErrorOr, AnyRef] .toEither @@ -57,70 +63,88 @@ class CromwellFileSystems(globalConfig: Config) { } // Instantiates the singleton config for a filesystem - private def instantiateSingletonConfig(filesystem: String, config: Config): Checked[AnyRef] = { + private def instantiateSingletonConfig(filesystem: String, config: Config): Checked[AnyRef] = for { constructor <- createConstructor(filesystem, config, List(classOf[Config])) instanceConfig = config.getAs[Config]("config").getOrElse(ConfigFactory.empty) instance <- Try(constructor.newInstance(instanceConfig)).toChecked - cast <- instance.cast[AnyRef].toChecked(s"The filesystem global configuration class for $filesystem is not a Java Object") + cast <- instance + .cast[AnyRef] + .toChecked(s"The filesystem global configuration class for $filesystem is not a Java Object") } yield cast - } // Create a constructor from a configuration object - private def createConstructor(key: String, config: Config, parameterTypes: List[Class[_]]): Checked[Constructor[_]] = for { - clazz <- config.as[Option[String]]("class").toChecked(s"Filesystem configuration $key doesn't have a class field") - constructor <- createConstructor(key, clazz, parameterTypes) - } yield constructor + private def createConstructor(key: String, config: Config, parameterTypes: List[Class[_]]): Checked[Constructor[_]] = + for { + clazz <- config.as[Option[String]]("class").toChecked(s"Filesystem configuration $key doesn't have a class field") + constructor <- createConstructor(key, clazz, parameterTypes) + } yield constructor // Getting a constructor from a class name - private def createConstructor(filesystem: String, className: String, parameterTypes: List[Class[_]]): Checked[Constructor[_]] = Try ( + private def createConstructor(filesystem: String, + className: String, + parameterTypes: List[Class[_]] + ): Checked[Constructor[_]] = Try( Class.forName(className).getConstructor(parameterTypes: _*) - ).recoverWith({ - case e: ClassNotFoundException => Failure( - new RuntimeException(s"Class $className for filesystem $filesystem cannot be found in the class path.", e) - ) - case e: NoSuchMethodException => Failure( - new RuntimeException(s"Class $className for filesystem $filesystem does not have the required constructor signature: (${parameterTypes.map(_.getCanonicalName).mkString(", ")})", e) - ) - }).toChecked + ).recoverWith { + case e: ClassNotFoundException => + Failure( + new RuntimeException(s"Class $className for filesystem $filesystem cannot be found in the class path.", e) + ) + case e: NoSuchMethodException => + Failure( + new RuntimeException( + s"Class $className for filesystem $filesystem does not have the required constructor signature: (${parameterTypes.map(_.getCanonicalName).mkString(", ")})", + e + ) + ) + }.toChecked // Instantiate a PathBuilderFactory from its constructor 
and instance config - private def instantiate(name: String, constructor: Constructor[_], instanceConfig: Config, global: Option[AnyRef]): Checked[PathBuilderFactory] = { + private def instantiate(name: String, + constructor: Constructor[_], + instanceConfig: Config, + global: Option[AnyRef] + ): Checked[PathBuilderFactory] = for { instance <- global match { case Some(g) => Try(constructor.newInstance(globalConfig, instanceConfig, g)).toChecked case None => Try(constructor.newInstance(globalConfig, instanceConfig)).toChecked } - cast <- instance.cast[PathBuilderFactory].toChecked(s"The filesystem class for $name is not an instance of PathBuilderFactory") + cast <- instance + .cast[PathBuilderFactory] + .toChecked(s"The filesystem class for $name is not an instance of PathBuilderFactory") } yield cast - } // Look for a constructor in the map of known filesystems private def getConstructor(fileSystemName: String): Checked[(Constructor[_], Option[AnyRef])] = factoryBuilders .get(fileSystemName) - .toChecked(s"Cannot find a filesystem with name $fileSystemName in the configuration. Available filesystems: ${factoryBuilders.keySet.mkString(", ")}") + .toChecked( + s"Cannot find a filesystem with name $fileSystemName in the configuration. Available filesystems: ${factoryBuilders.keySet + .mkString(", ")}" + ) /** * Try to find a configured filesystem with the given name and build a PathFactory for it * @param name name of the filesystem * @param instanceConfig filesystem specific configuration for this instance of the factory to build */ - def buildFactory(name: String, instanceConfig: Config): Checked[PathBuilderFactory] = { + def buildFactory(name: String, instanceConfig: Config): Checked[PathBuilderFactory] = if (DefaultPathBuilderFactory.name.equalsIgnoreCase(name)) DefaultPathBuilderFactory.validNelCheck - else for { - constructorAndGlobal <- getConstructor(name) - factory <- instantiate(name, constructorAndGlobal._1, instanceConfig, constructorAndGlobal._2) - } yield factory - } + else + for { + constructorAndGlobal <- getConstructor(name) + factory <- instantiate(name, constructorAndGlobal._1, instanceConfig, constructorAndGlobal._2) + } yield factory /** * Given a filesystems config, build the PathBuilderFactories */ - def factoriesFromConfig(filesystemsConfig: Config): Checked[Map[String, PathBuilderFactory]] = { + def factoriesFromConfig(filesystemsConfig: Config): Checked[Map[String, PathBuilderFactory]] = if (filesystemsConfig.hasPath("filesystems")) { // Iterate over the config entries under the "filesystems" config val rawConfigSet = filesystemsConfig.getObject("filesystems").entrySet().asScala - val configMap = rawConfigSet.toList.map({ entry => entry.getKey -> entry.getValue }) + val configMap = rawConfigSet.toList.map(entry => entry.getKey -> entry.getValue) import net.ceedubs.ficus.Ficus._ def isFilesystemEnabled(configObject: ConfigObject): Boolean = { @@ -141,7 +165,6 @@ class CromwellFileSystems(globalConfig: Config) { case (key, _) => s"Invalid filesystem backend configuration for $key".invalidNel } map { _.toMap } toEither } else Map.empty[String, PathBuilderFactory].validNelCheck - } } object CromwellFileSystems { diff --git a/core/src/main/scala/cromwell/core/io/AsyncIo.scala b/core/src/main/scala/cromwell/core/io/AsyncIo.scala index 435058d942c..535c7e1a8a4 100644 --- a/core/src/main/scala/cromwell/core/io/AsyncIo.scala +++ b/core/src/main/scala/cromwell/core/io/AsyncIo.scala @@ -23,7 +23,8 @@ object AsyncIo { */ class AsyncIo(ioEndpoint: ActorRef, ioCommandBuilder: 
IoCommandBuilder) { private def asyncCommand[A](commandTry: Try[IoCommand[A]], - timeout: FiniteDuration = AsyncIo.defaultTimeout): Future[A] = { + timeout: FiniteDuration = AsyncIo.defaultTimeout + ): Future[A] = commandTry match { case Failure(throwable) => Future.failed(throwable) @@ -32,46 +33,36 @@ class AsyncIo(ioEndpoint: ActorRef, ioCommandBuilder: IoCommandBuilder) { ioEndpoint ! commandWithPromise commandWithPromise.promise.future } - } /** * IMPORTANT: This loads the entire content of the file into memory ! * Only use for small files ! */ - def contentAsStringAsync(path: Path, maxBytes: Option[Int], failOnOverflow: Boolean): Future[String] = { + def contentAsStringAsync(path: Path, maxBytes: Option[Int], failOnOverflow: Boolean): Future[String] = asyncCommand(ioCommandBuilder.contentAsStringCommand(path, maxBytes, failOnOverflow)) - } - def writeAsync(path: Path, content: String, options: OpenOptions, compressPayload: Boolean = false): Future[Unit] = { + def writeAsync(path: Path, content: String, options: OpenOptions, compressPayload: Boolean = false): Future[Unit] = asyncCommand(ioCommandBuilder.writeCommand(path, content, options, compressPayload)) - } - def sizeAsync(path: Path): Future[Long] = { + def sizeAsync(path: Path): Future[Long] = asyncCommand(ioCommandBuilder.sizeCommand(path)) - } - def hashAsync(path: Path): Future[String] = { + def hashAsync(path: Path): Future[String] = asyncCommand(ioCommandBuilder.hashCommand(path)) - } - def deleteAsync(path: Path, swallowIoExceptions: Boolean = false): Future[Unit] = { + def deleteAsync(path: Path, swallowIoExceptions: Boolean = false): Future[Unit] = asyncCommand(ioCommandBuilder.deleteCommand(path, swallowIoExceptions)) - } - def existsAsync(path: Path): Future[Boolean] = { + def existsAsync(path: Path): Future[Boolean] = asyncCommand(ioCommandBuilder.existsCommand(path)) - } - def readLinesAsync(path: Path): Future[Iterable[String]] = { + def readLinesAsync(path: Path): Future[Iterable[String]] = asyncCommand(ioCommandBuilder.readLines(path)) - } - def isDirectory(path: Path): Future[Boolean] = { + def isDirectory(path: Path): Future[Boolean] = asyncCommand(ioCommandBuilder.isDirectoryCommand(path)) - } - def copyAsync(src: Path, dest: Path): Future[Unit] = { + def copyAsync(src: Path, dest: Path): Future[Unit] = // Allow for a much larger timeout for copies, as large files can take a while (even on gcs, if they are in different locations...) 
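
For orientation, the pattern behind `asyncCommand` above is: build the command up front (which may fail), hand it to the IO endpoint together with a `Promise`, and return only the promise's `Future` to the caller. A minimal, actor-free sketch of that shape — `EchoCommand`, `CommandWithPromise`, and `send` are illustrative stand-ins, not Cromwell types:

    import scala.concurrent.{Future, Promise}
    import scala.util.{Failure, Success, Try}

    final case class EchoCommand(payload: String)
    final case class CommandWithPromise(command: EchoCommand, promise: Promise[String])

    def asyncEcho(commandTry: Try[EchoCommand])(send: CommandWithPromise => Unit): Future[String] =
      commandTry match {
        case Failure(throwable) => Future.failed(throwable) // command construction failed: fail fast
        case Success(command) =>
          val withPromise = CommandWithPromise(command, Promise[String]())
          send(withPromise)          // fire-and-forget, like `ioEndpoint ! commandWithPromise`
          withPromise.promise.future // the caller only ever sees a Future
      }

    // A stand-in "endpoint" that completes the promise immediately:
    val echoed: Future[String] =
      asyncEcho(Success(EchoCommand("hi")))(cwp => cwp.promise.success(cwp.command.payload))

Whoever holds the `CommandWithPromise` decides when and how the `Future` completes, which is what lets the IO actor apply throttling, backpressure, and retries without the caller's involvement.
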
asyncCommand(ioCommandBuilder.copyCommand(src, dest), AsyncIo.copyTimeout) - } } diff --git a/core/src/main/scala/cromwell/core/io/AsyncIoFunctions.scala b/core/src/main/scala/cromwell/core/io/AsyncIoFunctions.scala index 6b4a7763d2a..c7bb7927a00 100644 --- a/core/src/main/scala/cromwell/core/io/AsyncIoFunctions.scala +++ b/core/src/main/scala/cromwell/core/io/AsyncIoFunctions.scala @@ -3,6 +3,7 @@ package cromwell.core.io import wom.expression.IoFunctionSet trait AsyncIoFunctions { this: IoFunctionSet => + /** * Used to perform io functions asynchronously through the ioActorProxy */ diff --git a/core/src/main/scala/cromwell/core/io/CorePathFunctionSet.scala b/core/src/main/scala/cromwell/core/io/CorePathFunctionSet.scala index cf79d06cd2b..59eed184e45 100644 --- a/core/src/main/scala/cromwell/core/io/CorePathFunctionSet.scala +++ b/core/src/main/scala/cromwell/core/io/CorePathFunctionSet.scala @@ -8,7 +8,9 @@ import wom.expression.{IoFunctionSet, PathFunctionSet} import scala.util.Try class WorkflowCorePathFunctionSet(override val pathBuilders: PathBuilders) extends PathFunctionSet with PathFactory { - private def fail(name: String) = throw new UnsupportedOperationException(s"$name is not implemented at the workflow level") + private def fail(name: String) = throw new UnsupportedOperationException( + s"$name is not implemented at the workflow level" + ) override def sibling(of: String, path: String): String = buildPath(of).sibling(path).pathAsString override def isAbsolute(path: String): Boolean = Try(buildPath(path)).map(_.isAbsolute).toOption.contains(true) override def name(path: String) = buildPath(path).name @@ -19,8 +21,10 @@ class WorkflowCorePathFunctionSet(override val pathBuilders: PathBuilders) exten override def stderr: String = fail("stderr") } -class CallCorePathFunctionSet(pathBuilders: PathBuilders, callContext: CallContext) extends WorkflowCorePathFunctionSet(pathBuilders) { - override def relativeToHostCallRoot(path: String) = if (isAbsolute(path)) path else callContext.root.resolve(path).pathAsString +class CallCorePathFunctionSet(pathBuilders: PathBuilders, callContext: CallContext) + extends WorkflowCorePathFunctionSet(pathBuilders) { + override def relativeToHostCallRoot(path: String) = + if (isAbsolute(path)) path else callContext.root.resolve(path).pathAsString override def stdout = callContext.standardPaths.output.pathAsString override def stderr = callContext.standardPaths.error.pathAsString } @@ -29,7 +33,6 @@ trait WorkflowCorePathFunctions extends { this: IoFunctionSet with PathFactory = override lazy val pathFunctions = new WorkflowCorePathFunctionSet(pathBuilders) } - trait CallCorePathFunctions extends { this: IoFunctionSet with PathFactory => def callContext: CallContext override lazy val pathFunctions = new CallCorePathFunctionSet(pathBuilders, callContext) diff --git a/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala b/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala index bb5e5eec973..2faf49696ac 100644 --- a/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala +++ b/core/src/main/scala/cromwell/core/io/DefaultIoCommand.scala @@ -5,13 +5,13 @@ import cromwell.core.io.IoContentAsStringCommand.IoReadOptions import cromwell.core.path.Path object DefaultIoCommand { - case class DefaultIoCopyCommand(override val source: Path, - override val destination: Path, - ) extends IoCopyCommand(source, destination) { + case class DefaultIoCopyCommand(override val source: Path, override val destination: Path) + extends IoCopyCommand(source, 
destination) { override def commandDescription: String = s"DefaultIoCopyCommand source '$source' destination '$destination'" } - case class DefaultIoContentAsStringCommand(override val file: Path, override val options: IoReadOptions) extends IoContentAsStringCommand(file, options) { + case class DefaultIoContentAsStringCommand(override val file: Path, override val options: IoReadOptions) + extends IoContentAsStringCommand(file, options) { override def commandDescription: String = s"DefaultIoContentAsStringCommand file '$file' options '$options'" } @@ -22,18 +22,24 @@ object DefaultIoCommand { case class DefaultIoWriteCommand(override val file: Path, override val content: String, override val openOptions: OpenOptions, - override val compressPayload: Boolean) extends IoWriteCommand( - file, content, openOptions, compressPayload - ) { + override val compressPayload: Boolean + ) extends IoWriteCommand( + file, + content, + openOptions, + compressPayload + ) { override def commandDescription: String = s"DefaultIoWriteCommand file '$file' content length " + s"'${content.length}' openOptions '$openOptions' compressPayload '$compressPayload'" } - case class DefaultIoDeleteCommand(override val file: Path, - override val swallowIOExceptions: Boolean) extends IoDeleteCommand( - file, swallowIOExceptions - ) { - override def commandDescription: String = s"DefaultIoDeleteCommand file '$file' swallowIOExceptions '$swallowIOExceptions'" + case class DefaultIoDeleteCommand(override val file: Path, override val swallowIOExceptions: Boolean) + extends IoDeleteCommand( + file, + swallowIOExceptions + ) { + override def commandDescription: String = + s"DefaultIoDeleteCommand file '$file' swallowIOExceptions '$swallowIOExceptions'" } case class DefaultIoHashCommand(override val file: Path) extends IoHashCommand(file) { diff --git a/core/src/main/scala/cromwell/core/io/IoAck.scala b/core/src/main/scala/cromwell/core/io/IoAck.scala index 430b30db792..41066791fe3 100644 --- a/core/src/main/scala/cromwell/core/io/IoAck.scala +++ b/core/src/main/scala/cromwell/core/io/IoAck.scala @@ -8,6 +8,7 @@ import scala.util.{Failure, Success, Try} * @tparam T type of the returned value if success */ sealed trait IoAck[T] { + /** * Original command */ @@ -20,13 +21,12 @@ case class IoSuccess[T](command: IoCommand[T], result: T) extends IoAck[T] { } object IoFailAck { - def unapply(any: Any): Option[(IoCommand[_], Throwable)] = { + def unapply(any: Any): Option[(IoCommand[_], Throwable)] = any match { case f: IoFailAck[_] => Option((f.command, f.failure)) case _ => None } - } } trait IoFailAck[T] extends IoAck[T] { @@ -36,5 +36,7 @@ trait IoFailAck[T] extends IoAck[T] { /** Failure of an unspecified variety. */ case class IoFailure[T](command: IoCommand[T], override val failure: Throwable) extends IoFailAck[T] + /** Specifically read forbidden failure. 
*/ -case class IoReadForbiddenFailure[T](command: IoCommand[T], override val failure: Throwable, forbiddenPath: String) extends IoFailAck[T] +case class IoReadForbiddenFailure[T](command: IoCommand[T], override val failure: Throwable, forbiddenPath: String) + extends IoFailAck[T] diff --git a/core/src/main/scala/cromwell/core/io/IoClientHelper.scala b/core/src/main/scala/cromwell/core/io/IoClientHelper.scala index 26dd0732d33..169e55a496d 100644 --- a/core/src/main/scala/cromwell/core/io/IoClientHelper.scala +++ b/core/src/main/scala/cromwell/core/io/IoClientHelper.scala @@ -13,9 +13,9 @@ trait IoClientHelper extends RobustClientHelper { this: Actor with ActorLogging def ioActor: ActorRef lazy val defaultIoTimeout = RobustClientHelper.DefaultRequestLostTimeout - + protected def config = ConfigFactory.load().as[Config]("system.io.backpressure-backoff") - + override protected def initialBackoff(): Backoff = SimpleExponentialBackoff(config) protected def ioResponseReceive: Receive = { @@ -26,19 +26,16 @@ trait IoClientHelper extends RobustClientHelper { this: Actor with ActorLogging cancelTimeout(context -> ack.command) receive.apply(context -> ack) } - + def ioReceive = robustReceive orElse ioResponseReceive - - def sendIoCommand(ioCommand: IoCommand[_]) = { + + def sendIoCommand(ioCommand: IoCommand[_]) = sendIoCommandWithCustomTimeout(ioCommand, defaultIoTimeout) - } - def sendIoCommandWithCustomTimeout(ioCommand: IoCommand[_], timeout: FiniteDuration) = { + def sendIoCommandWithCustomTimeout(ioCommand: IoCommand[_], timeout: FiniteDuration) = robustSend(ioCommand, ioActor, timeout) - } - def sendIoCommandWithContext[T](ioCommand: IoCommand[_], context: T, timeout: FiniteDuration = defaultIoTimeout) = { + def sendIoCommandWithContext[T](ioCommand: IoCommand[_], context: T, timeout: FiniteDuration = defaultIoTimeout) = robustSend(context -> ioCommand, ioActor, timeout) - } } diff --git a/core/src/main/scala/cromwell/core/io/IoCommand.scala b/core/src/main/scala/cromwell/core/io/IoCommand.scala index 7d71f9bd307..9db50bc2c87 100644 --- a/core/src/main/scala/cromwell/core/io/IoCommand.scala +++ b/core/src/main/scala/cromwell/core/io/IoCommand.scala @@ -23,7 +23,7 @@ object IoCommand { .setInitialIntervalMillis((1 second).toMillis.toInt) .setMaxIntervalMillis((5 minutes).toMillis.toInt) .setMultiplier(3L) - .setRandomizationFactor(0.2D) + .setRandomizationFactor(0.2d) .setMaxElapsedTimeMillis((10 minutes).toMillis.toInt) .build() @@ -47,7 +47,7 @@ trait IoCommand[+T] { def logIOMsgOverLimit(message: => String): Unit = { val millis: Long = java.time.Duration.between(creation, OffsetDateTime.now).toMillis if (millis > IoCommand.IOCommandWarnLimit.toMillis) { - val seconds = millis / 1000D + val seconds = millis / 1000d /* For now we decided to log this as INFO. In future if needed, we can update this to WARN. @@ -59,8 +59,10 @@ trait IoCommand[+T] { (https://github.com/broadinstitute/firecloud-develop/blob/c77e0f371be0aac545e204f1a134cc6f8ef3c301/run-context/live/configs/cromwell/app.env.ctmpl#L42-L51) - Logback manual (http://logback.qos.ch/manual/index.html) */ - IoCommand.logger.info(f"(IO-$uuid) '$message' is over 5 minutes. It was running for " + - f"$seconds%,.3f seconds. IO command description: '$commandDescription'") + IoCommand.logger.info( + f"(IO-$uuid) '$message' is over 5 minutes. It was running for " + + f"$seconds%,.3f seconds. 
IO command description: '$commandDescription'" + ) } } @@ -82,7 +84,9 @@ trait IoCommand[+T] { } def failReadForbidden[S >: T](failure: Throwable, forbiddenPath: String): IoReadForbiddenFailure[S] = { - logIOMsgOverLimit(s"IOCommand.failReadForbidden '${failure.toPrettyElidedString(limit = 1000)}' path '$forbiddenPath'") + logIOMsgOverLimit( + s"IOCommand.failReadForbidden '${failure.toPrettyElidedString(limit = 1000)}' path '$forbiddenPath'" + ) IoReadForbiddenFailure(this, failure, forbiddenPath) } @@ -118,7 +122,9 @@ object IoContentAsStringCommand { /** * Read file as a string (load the entire content in memory) */ -abstract class IoContentAsStringCommand(val file: Path, val options: IoReadOptions = IoReadOptions(None, failOnOverflow = false)) extends SingleFileIoCommand[String] { +abstract class IoContentAsStringCommand(val file: Path, + val options: IoReadOptions = IoReadOptions(None, failOnOverflow = false) +) extends SingleFileIoCommand[String] { override def toString = s"read content of ${file.pathAsString}" override lazy val name = "read" } @@ -138,7 +144,8 @@ abstract class IoSizeCommand(val file: Path) extends SingleFileIoCommand[Long] { abstract class IoWriteCommand(val file: Path, val content: String, val openOptions: OpenOptions, - val compressPayload: Boolean) extends SingleFileIoCommand[Unit] { + val compressPayload: Boolean +) extends SingleFileIoCommand[Unit] { override def toString = s"write to ${file.pathAsString}" override lazy val name = "write" } diff --git a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala index 43a6f5864b0..c4f5da49959 100644 --- a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala +++ b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala @@ -26,13 +26,11 @@ abstract class PartialIoCommandBuilder { } object IoCommandBuilder { - def apply(partialBuilders: PartialIoCommandBuilder*): IoCommandBuilder = { + def apply(partialBuilders: PartialIoCommandBuilder*): IoCommandBuilder = new IoCommandBuilder(partialBuilders.toList) - } - def apply: IoCommandBuilder = { + def apply: IoCommandBuilder = new IoCommandBuilder(List.empty) - } } /** @@ -49,56 +47,58 @@ class IoCommandBuilder(partialBuilders: List[PartialIoCommandBuilder] = List.emp // Find the first partialBuilder for which the partial function is defined, or use the default private def buildOrDefault[A, B](builder: PartialIoCommandBuilder => PartialFunction[A, Try[B]], params: A, - default: => B): Try[B] = { - partialBuilders.to(LazyList).map(builder(_).lift(params)).collectFirst({ - case Some(command) => command - }).getOrElse(Try(default)) - } + default: => B + ): Try[B] = + partialBuilders + .to(LazyList) + .map(builder(_).lift(params)) + .collectFirst { case Some(command) => + command + } + .getOrElse(Try(default)) def contentAsStringCommand(path: Path, maxBytes: Option[Int], - failOnOverflow: Boolean): Try[IoContentAsStringCommand] = { - buildOrDefault(_.contentAsStringCommand, (path, maxBytes, failOnOverflow), DefaultIoContentAsStringCommand(path, IoReadOptions(maxBytes, failOnOverflow))) - } + failOnOverflow: Boolean + ): Try[IoContentAsStringCommand] = + buildOrDefault(_.contentAsStringCommand, + (path, maxBytes, failOnOverflow), + DefaultIoContentAsStringCommand(path, IoReadOptions(maxBytes, failOnOverflow)) + ) def writeCommand(path: Path, content: String, options: OpenOptions, - compressPayload: Boolean = false): Try[IoWriteCommand] = { - buildOrDefault(_.writeCommand, (path, content, options, 
compressPayload), DefaultIoWriteCommand(path, content, options, compressPayload)) - } - - def sizeCommand(path: Path): Try[IoSizeCommand] = { + compressPayload: Boolean = false + ): Try[IoWriteCommand] = + buildOrDefault(_.writeCommand, + (path, content, options, compressPayload), + DefaultIoWriteCommand(path, content, options, compressPayload) + ) + + def sizeCommand(path: Path): Try[IoSizeCommand] = buildOrDefault(_.sizeCommand, path, DefaultIoSizeCommand(path)) - } - def deleteCommand(path: Path, swallowIoExceptions: Boolean = true): Try[IoDeleteCommand] = { + def deleteCommand(path: Path, swallowIoExceptions: Boolean = true): Try[IoDeleteCommand] = buildOrDefault(_.deleteCommand, (path, swallowIoExceptions), DefaultIoDeleteCommand(path, swallowIoExceptions)) - } - def copyCommand(src: Path, dest: Path): Try[IoCopyCommand] = { + def copyCommand(src: Path, dest: Path): Try[IoCopyCommand] = buildOrDefault(_.copyCommand, (src, dest), DefaultIoCopyCommand(src, dest)) - } - def hashCommand(file: Path): Try[IoHashCommand] = { + def hashCommand(file: Path): Try[IoHashCommand] = buildOrDefault(_.hashCommand, file, DefaultIoHashCommand(file)) - } - def touchCommand(file: Path): Try[IoTouchCommand] = { + def touchCommand(file: Path): Try[IoTouchCommand] = buildOrDefault(_.touchCommand, file, DefaultIoTouchCommand(file)) - } - def existsCommand(file: Path): Try[IoExistsCommand] = { + def existsCommand(file: Path): Try[IoExistsCommand] = buildOrDefault(_.existsCommand, file, DefaultIoExistsCommand(file)) - } - def isDirectoryCommand(file: Path): Try[IoIsDirectoryCommand] = { + def isDirectoryCommand(file: Path): Try[IoIsDirectoryCommand] = buildOrDefault(_.isDirectoryCommand, file, DefaultIoIsDirectoryCommand(file)) - } - def readLines(file: Path): Try[IoReadLinesCommand] = { + def readLines(file: Path): Try[IoReadLinesCommand] = buildOrDefault(_.readLinesCommand, file, DefaultIoReadLinesCommand(file)) - } } /** diff --git a/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala b/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala index dd8f44464f3..e9a8a8934bc 100644 --- a/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala +++ b/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala @@ -25,17 +25,15 @@ object IoPromiseProxyActor { class IoPromiseProxyActor(override val ioActor: ActorRef) extends Actor with ActorLogging with IoClientHelper { override def receive = ioReceive orElse actorReceive - def actorReceive: Receive = { - case withPromise: IoCommandWithPromise[_] => - sendIoCommandWithContext(withPromise.ioCommand, withPromise.promise, withPromise.timeout) + def actorReceive: Receive = { case withPromise: IoCommandWithPromise[_] => + sendIoCommandWithContext(withPromise.ioCommand, withPromise.promise, withPromise.timeout) } - override protected def ioResponseReceive: Receive = { - case (promise: Promise[_], ack: IoAck[Any] @unchecked) => - cancelTimeout(promise -> ack.command) - // This is not typesafe and assumes the Promise context is of the same type as the IoAck response. - promise.asInstanceOf[Promise[Any]].complete(ack.toTry) - () + override protected def ioResponseReceive: Receive = { case (promise: Promise[_], ack: IoAck[Any] @unchecked) => + cancelTimeout(promise -> ack.command) + // This is not typesafe and assumes the Promise context is of the same type as the IoAck response. 
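
The `buildOrDefault` reshaped above implements a first-match-wins lookup over the partial builders, falling back to the default command when none is defined. A self-contained sketch of the same logic, with placeholder types (`String` params, `Int` commands):

    import scala.util.Try

    def buildOrDefault[A, B](partials: List[PartialFunction[A, Try[B]]], params: A, default: => B): Try[B] =
      partials
        .to(LazyList)                                   // evaluate builders lazily
        .map(_.lift(params))                            // None where a builder is not defined
        .collectFirst { case Some(command) => command } // first defined builder wins
        .getOrElse(Try(default))                        // otherwise fall back to the default

    val gcsOnly: PartialFunction[String, Try[Int]] = {
      case path if path.startsWith("gs://") => Try(1)
    }

    buildOrDefault(List(gcsOnly), "gs://bucket/file", 0) // Success(1): the partial builder matched
    buildOrDefault(List(gcsOnly), "/local/file", 0)      // Success(0): fell through to the default
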
+ promise.asInstanceOf[Promise[Any]].complete(ack.toTry) + () } override def onTimeout(message: Any, to: ActorRef): Unit = message match { diff --git a/core/src/main/scala/cromwell/core/io/Throttle.scala b/core/src/main/scala/cromwell/core/io/Throttle.scala index 88246f1d88f..aaa5b5d30f5 100644 --- a/core/src/main/scala/cromwell/core/io/Throttle.scala +++ b/core/src/main/scala/cromwell/core/io/Throttle.scala @@ -11,11 +11,10 @@ case class Throttle(elements: Int, per: FiniteDuration, maximumBurst: Int) { } object Throttle { - implicit val throttleOptionValueReader: ValueReader[Option[Throttle]] = (config: Config, path: String) => { + implicit val throttleOptionValueReader: ValueReader[Option[Throttle]] = (config: Config, path: String) => config.getAs[Config](path) map { throttleConfig => val elements = throttleConfig.as[Int]("number-of-requests") val per = throttleConfig.as[FiniteDuration]("per") Throttle(elements, per, elements) } - } } diff --git a/core/src/main/scala/cromwell/core/labels/Label.scala b/core/src/main/scala/cromwell/core/labels/Label.scala index 759300da5b1..616298840b7 100644 --- a/core/src/main/scala/cromwell/core/labels/Label.scala +++ b/core/src/main/scala/cromwell/core/labels/Label.scala @@ -12,21 +12,19 @@ object Label { val MaxLabelLength = 255 val LabelExpectationsMessage = s"A Label key must be non-empty." - def validateLabelKey(s: String): ErrorOr[String] = { + def validateLabelKey(s: String): ErrorOr[String] = (s.length >= 1, s.length <= MaxLabelLength) match { case (true, true) => s.validNel case (false, _) => s"Invalid label: `$s` can't be empty".invalidNel case (_, false) => s"Invalid label: `$s` is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel } - } - def validateLabelValue(s: String): ErrorOr[String] = { + def validateLabelValue(s: String): ErrorOr[String] = if (s.length <= MaxLabelLength) { s.validNel } else { s"Invalid label: `$s` is ${s.length} characters. 
The maximum is $MaxLabelLength.".invalidNel } - } def validateLabel(key: String, value: String): ErrorOr[Label] = { val validatedKey = validateLabelKey(key) @@ -35,7 +33,6 @@ object Label { (validatedKey, validatedValue) mapN Label.apply } - def apply(key: String, value: String) = { + def apply(key: String, value: String) = new Label(key, value) {} - } } diff --git a/core/src/main/scala/cromwell/core/labels/Labels.scala b/core/src/main/scala/cromwell/core/labels/Labels.scala index 5499fa5b2e8..148a0a7926d 100644 --- a/core/src/main/scala/cromwell/core/labels/Labels.scala +++ b/core/src/main/scala/cromwell/core/labels/Labels.scala @@ -19,13 +19,11 @@ case class Labels(value: Vector[Label]) { } object Labels { - def apply(values: (String, String)*): Labels = { + def apply(values: (String, String)*): Labels = Labels(values.toVector map (Label.apply _).tupled) - } - def validateMapOfLabels(labels: Map[String, String]): ErrorOr[Labels] = { + def validateMapOfLabels(labels: Map[String, String]): ErrorOr[Labels] = labels.toVector traverse { Label.validateLabel _ }.tupled map Labels.apply - } def empty = Labels(Vector.empty) } diff --git a/core/src/main/scala/cromwell/core/logging/EnhancedDateConverter.scala b/core/src/main/scala/cromwell/core/logging/EnhancedDateConverter.scala index dcf62bc0b28..47eb951199a 100644 --- a/core/src/main/scala/cromwell/core/logging/EnhancedDateConverter.scala +++ b/core/src/main/scala/cromwell/core/logging/EnhancedDateConverter.scala @@ -25,9 +25,9 @@ class EnhancedDateConverter extends DateConverter { cachingDateFormatterProtected = Option(getFirstOption) match { case Some(CoreConstants.ISO8601_STR) | None => new CachingDateFormatter(CoreConstants.ISO8601_PATTERN) case Some(datePattern) => - try { + try new CachingDateFormatter(datePattern) - } catch { + catch { case e: IllegalArgumentException => addWarn("Could not instantiate SimpleDateFormat with pattern " + datePattern, e) // default to the ISO8601 format @@ -35,8 +35,7 @@ class EnhancedDateConverter extends DateConverter { } } // if the option list contains a TZ option, then set it. - Option(getOptionList) - .toList + Option(getOptionList).toList .flatMap(_.asScala) .drop(1) .headOption diff --git a/core/src/main/scala/cromwell/core/logging/EnhancedSlf4jLogger.scala b/core/src/main/scala/cromwell/core/logging/EnhancedSlf4jLogger.scala index 0999ec18055..6f9e5f2fdfb 100644 --- a/core/src/main/scala/cromwell/core/logging/EnhancedSlf4jLogger.scala +++ b/core/src/main/scala/cromwell/core/logging/EnhancedSlf4jLogger.scala @@ -3,6 +3,7 @@ package cromwell.core.logging import akka.event.slf4j.Slf4jLogger class EnhancedSlf4jLogger extends Slf4jLogger { + /** * Format the timestamp as a simple long. Allows the akkaTimestamp to be retrieved later from the MDC by custom * converters. diff --git a/core/src/main/scala/cromwell/core/logging/JavaLoggingBridge.scala b/core/src/main/scala/cromwell/core/logging/JavaLoggingBridge.scala index 1dc10fab46d..beff484f941 100644 --- a/core/src/main/scala/cromwell/core/logging/JavaLoggingBridge.scala +++ b/core/src/main/scala/cromwell/core/logging/JavaLoggingBridge.scala @@ -8,6 +8,7 @@ import org.slf4j.bridge.SLF4JBridgeHandler import scala.jdk.CollectionConverters._ object JavaLoggingBridge { + /** * Replace java.util.logging with SLF4J while ensuring Logback is configured with a LevelChangePropogator. 
* diff --git a/core/src/main/scala/cromwell/core/logging/JobLogger.scala b/core/src/main/scala/cromwell/core/logging/JobLogger.scala index 37ab6cfa2da..4af851124cb 100644 --- a/core/src/main/scala/cromwell/core/logging/JobLogger.scala +++ b/core/src/main/scala/cromwell/core/logging/JobLogger.scala @@ -28,14 +28,14 @@ class JobLogger(loggerName: String, rootWorkflowIdForLogging: RootWorkflowId, jobTag: String, akkaLogger: Option[LoggingAdapter] = None, - otherLoggers: Set[Logger] = Set.empty[Logger]) - extends WorkflowLogger( - loggerName = loggerName, - workflowId = workflowIdForLogging, - rootWorkflowId = rootWorkflowIdForLogging, - akkaLogger = akkaLogger, - otherLoggers = otherLoggers - ) { + otherLoggers: Set[Logger] = Set.empty[Logger] +) extends WorkflowLogger( + loggerName = loggerName, + workflowId = workflowIdForLogging, + rootWorkflowId = rootWorkflowIdForLogging, + akkaLogger = akkaLogger, + otherLoggers = otherLoggers + ) { override def tag = s"$loggerName [UUID(${workflowIdForLogging.shortString})$jobTag]" } diff --git a/core/src/main/scala/cromwell/core/logging/LoggerWrapper.scala b/core/src/main/scala/cromwell/core/logging/LoggerWrapper.scala index 5af71c7099c..3f9044aeaa3 100644 --- a/core/src/main/scala/cromwell/core/logging/LoggerWrapper.scala +++ b/core/src/main/scala/cromwell/core/logging/LoggerWrapper.scala @@ -23,9 +23,8 @@ abstract class LoggerWrapper extends MarkerIgnoringBase { * * https://github.com/qos-ch/slf4j/blob/v_1.7.30/slf4j-simple/src/main/java/org/slf4j/impl/SimpleLogger.java#L293-L295 */ - private def format(msg: String, throwable: Throwable): String = { + private def format(msg: String, throwable: Throwable): String = format(msg) + "\n" + ExceptionUtils.getStackTrace(throwable) - } /** * Passes a formatted string to akka similar to slf4j's SimpleLogger @@ -113,7 +112,7 @@ abstract class LoggerWrapper extends MarkerIgnoringBase { lazy val formatted: String = format(pattern) varargsAkkaLog(Logging.ErrorLevel, pattern, arguments) - slf4jLoggers.foreach(_.error(formatted, arguments:_*)) + slf4jLoggers.foreach(_.error(formatted, arguments: _*)) } override def error(pattern: String, arg: Any): Unit = { @@ -130,10 +129,9 @@ abstract class LoggerWrapper extends MarkerIgnoringBase { slf4jLoggers.foreach(_.error(formatted, arg1, arg2: Any)) } - def error(t: Throwable, pattern: String, arguments: Any*): Unit = { + def error(t: Throwable, pattern: String, arguments: Any*): Unit = // slf4j extracts the last variable argument as a throwable. 
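
The comment above refers to slf4j's varargs convention: when the final argument is a `Throwable`, it is logged as the exception rather than substituted into the pattern — which is why the wrapper appends `t` to `arguments`. A small demonstration, assuming any slf4j binding on the classpath:

    import org.slf4j.LoggerFactory

    val log = LoggerFactory.getLogger("example")
    val boom = new RuntimeException("boom")

    // The "{}" placeholders consume "job-1" and 3; `boom` is not substituted --
    // its stack trace is attached to the log event instead.
    log.error("failed processing {} after {} attempts", "job-1", Int.box(3), boom)
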
error(pattern, (arguments :+ t).map(_.asInstanceOf[AnyRef]): _*) - } override def debug(msg: String): Unit = { lazy val formatted: String = format(msg) @@ -153,7 +151,7 @@ abstract class LoggerWrapper extends MarkerIgnoringBase { lazy val formatted: String = format(pattern) varargsAkkaLog(Logging.DebugLevel, pattern, arguments) - slf4jLoggers.foreach(_.debug(formatted, arguments:_*)) + slf4jLoggers.foreach(_.debug(formatted, arguments: _*)) } override def debug(pattern: String, argument: Any): Unit = { @@ -170,25 +168,20 @@ abstract class LoggerWrapper extends MarkerIgnoringBase { slf4jLoggers.foreach(_.debug(formatted, arg1, arg2: Any)) } - override def trace(msg: String): Unit = { + override def trace(msg: String): Unit = slf4jLoggers.foreach(_.trace(format(msg))) - } - override def trace(msg: String, t: Throwable): Unit = { + override def trace(msg: String, t: Throwable): Unit = slf4jLoggers.foreach(_.trace(format(msg), t)) - } - override def trace(pattern: String, arguments: AnyRef*): Unit = { - slf4jLoggers.foreach(_.trace(format(pattern), arguments:_*)) - } + override def trace(pattern: String, arguments: AnyRef*): Unit = + slf4jLoggers.foreach(_.trace(format(pattern), arguments: _*)) - override def trace(pattern: String, arg: Any): Unit = { + override def trace(pattern: String, arg: Any): Unit = slf4jLoggers.foreach(_.trace(format(pattern), arg)) - } - override def trace(pattern: String, arg1: Any, arg2: Any): Unit = { + override def trace(pattern: String, arg1: Any, arg2: Any): Unit = slf4jLoggers.foreach(_.trace(format(pattern), arg1, arg2: Any)) - } override def info(msg: String): Unit = { lazy val formatted: String = format(msg) @@ -208,7 +201,7 @@ abstract class LoggerWrapper extends MarkerIgnoringBase { lazy val formatted: String = format(pattern) varargsAkkaLog(Logging.InfoLevel, pattern, arguments) - slf4jLoggers.foreach(_.info(formatted, arguments:_*)) + slf4jLoggers.foreach(_.info(formatted, arguments: _*)) } override def info(pattern: String, arg: Any): Unit = { @@ -225,14 +218,24 @@ abstract class LoggerWrapper extends MarkerIgnoringBase { slf4jLoggers.foreach(_.info(formatted, arg1, arg2: Any)) } - override def isErrorEnabled: Boolean = throw new UnsupportedOperationException("This logger wraps an arbitrary set of loggers that can each have a different level enabled.") + override def isErrorEnabled: Boolean = throw new UnsupportedOperationException( + "This logger wraps an arbitrary set of loggers that can each have a different level enabled." + ) - override def isInfoEnabled: Boolean = throw new UnsupportedOperationException("This logger wraps an arbitrary set of loggers that can each have a different level enabled.") + override def isInfoEnabled: Boolean = throw new UnsupportedOperationException( + "This logger wraps an arbitrary set of loggers that can each have a different level enabled." + ) - override def isDebugEnabled: Boolean = throw new UnsupportedOperationException("This logger wraps an arbitrary set of loggers that can each have a different level enabled.") + override def isDebugEnabled: Boolean = throw new UnsupportedOperationException( + "This logger wraps an arbitrary set of loggers that can each have a different level enabled." 
+ ) - override def isTraceEnabled: Boolean = throw new UnsupportedOperationException("This logger wraps an arbitrary set of loggers that can each have a different level enabled.") + override def isTraceEnabled: Boolean = throw new UnsupportedOperationException( + "This logger wraps an arbitrary set of loggers that can each have a different level enabled." + ) - override def isWarnEnabled: Boolean = throw new UnsupportedOperationException("This logger wraps an arbitrary set of loggers that can each have a different level enabled.") + override def isWarnEnabled: Boolean = throw new UnsupportedOperationException( + "This logger wraps an arbitrary set of loggers that can each have a different level enabled." + ) } diff --git a/core/src/main/scala/cromwell/core/logging/WorkflowLogger.scala b/core/src/main/scala/cromwell/core/logging/WorkflowLogger.scala index 94028b39407..d404cb36336 100644 --- a/core/src/main/scala/cromwell/core/logging/WorkflowLogger.scala +++ b/core/src/main/scala/cromwell/core/logging/WorkflowLogger.scala @@ -49,41 +49,40 @@ object WorkflowLogger { https://github.com/qos-ch/logback/commit/77128a003a7fd7e8bd7a6ddb12da7a65cf296593#diff-f8cd32379a53986c2e70e2abe86fa0faR145 */ private def makeSynchronizedFileLogger(path: Path, level: Level, ctx: LoggerContext, name: String): Logger = - ctx.synchronized { - Option(ctx.exists(name)) match { - case Some(existingLogger) => existingLogger - case None => - val encoder = new PatternLayoutEncoder() - encoder.setPattern("%date %-5level - %msg%n") - encoder.setContext(ctx) - encoder.start() - - val appender = new FileAppender[ILoggingEvent]() - appender.setFile(path.pathAsString) - appender.setEncoder(encoder) - appender.setName(name) - appender.setContext(ctx) - appender.start() - - val fileLogger = ctx.getLogger(name) - fileLogger.addAppender(appender) - fileLogger.setAdditive(false) - fileLogger.setLevel(level) - fileLogger + ctx.synchronized { + Option(ctx.exists(name)) match { + case Some(existingLogger) => existingLogger + case None => + val encoder = new PatternLayoutEncoder() + encoder.setPattern("%date %-5level - %msg%n") + encoder.setContext(ctx) + encoder.start() + + val appender = new FileAppender[ILoggingEvent]() + appender.setFile(path.pathAsString) + appender.setEncoder(encoder) + appender.setName(name) + appender.setContext(ctx) + appender.start() + + val fileLogger = ctx.getLogger(name) + fileLogger.addAppender(appender) + fileLogger.setAdditive(false) + fileLogger.setLevel(level) + fileLogger + } } - } case class WorkflowLogConfiguration(dir: Path, temporary: Boolean) private val conf = ConfigFactory.load() - val workflowLogConfiguration: Option[WorkflowLogConfiguration] = { + val workflowLogConfiguration: Option[WorkflowLogConfiguration] = for { workflowConfig <- conf.as[Option[Config]]("workflow-options") dir <- workflowConfig.as[Option[String]]("workflow-log-dir") if !dir.isEmpty temporary <- workflowConfig.as[Option[Boolean]]("workflow-log-temporary") orElse Option(true) } yield WorkflowLogConfiguration(DefaultPathBuilder.get(dir).toAbsolutePath, temporary) - } val isEnabled = workflowLogConfiguration.isDefined val isTemporary = workflowLogConfiguration exists { @@ -111,8 +110,8 @@ class WorkflowLogger(loggerName: String, workflowId: PossiblyNotRootWorkflowId, rootWorkflowId: RootWorkflowId, override val akkaLogger: Option[LoggingAdapter], - otherLoggers: Set[Logger] = Set.empty[Logger]) - extends LoggerWrapper { + otherLoggers: Set[Logger] = Set.empty[Logger] +) extends LoggerWrapper { override def getName = 
loggerName @@ -137,7 +136,8 @@ class WorkflowLogger(loggerName: String, import WorkflowLogger._ lazy val workflowLogPath = workflowLogConfiguration.map(workflowLogConfigurationActual => - workflowLogConfigurationActual.dir.createPermissionedDirectories() / s"workflow.$rootWorkflowId.log") + workflowLogConfigurationActual.dir.createPermissionedDirectories() / s"workflow.$rootWorkflowId.log" + ) lazy val fileLogger = workflowLogPath match { case Some(path) => makeFileLogger(path, Level.toLevel(sys.props.getOrElse("LOG_LEVEL", "debug"))) diff --git a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala index f0a6464f12e..ba300da9b83 100644 --- a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala +++ b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala @@ -27,9 +27,9 @@ trait BetterFileMethods { import BetterFileMethods._ - private final def newPath(file: better.files.File): Path = newPath(file.path) + final private def newPath(file: better.files.File): Path = newPath(file.path) - private final def newPathOrNull(file: better.files.File): Path = Option(file).map(newPath).orNull + final private def newPathOrNull(file: better.files.File): Path = Option(file).map(newPath).orNull final def toJava: JFile = betterFile.toJava @@ -44,8 +44,10 @@ trait BetterFileMethods { final def extension: Option[String] = betterFile.extension - final def extension(includeDot: Boolean = true, includeAll: Boolean = false, - toLowerCase: Boolean = true): Option[String] = + final def extension(includeDot: Boolean = true, + includeAll: Boolean = false, + toLowerCase: Boolean = true + ): Option[String] = betterFile.extension(includeDot, includeAll, toLowerCase) final def hasExtension: Boolean = betterFile.hasExtension @@ -60,16 +62,17 @@ trait BetterFileMethods { final def /(child: String): Path = newPath(betterFile./(child)) - final def createChild(child: String, asDirectory: Boolean = false) - (implicit attributes: Attributes = Attributes.default, - linkOptions: LinkOptions = LinkOptions.default): Path = + final def createChild(child: String, asDirectory: Boolean = false)(implicit + attributes: Attributes = Attributes.default, + linkOptions: LinkOptions = LinkOptions.default + ): Path = newPath(betterFile.createChild(child, asDirectory)(attributes, linkOptions)) - final def createIfNotExists(asDirectory: Boolean = false, createParents: Boolean = false) - (implicit attributes: Attributes = Attributes.default, - linkOptions: LinkOptions = LinkOptions.default): Path = { + final def createIfNotExists(asDirectory: Boolean = false, createParents: Boolean = false)(implicit + attributes: Attributes = Attributes.default, + linkOptions: LinkOptions = LinkOptions.default + ): Path = newPath(betterFile.createIfNotExists(asDirectory, createParents)(attributes, linkOptions)) - } final def exists(implicit linkOptions: LinkOptions = LinkOptions.default): Boolean = betterFile.exists(linkOptions) @@ -108,15 +111,17 @@ trait BetterFileMethods { final def lines(implicit charset: Charset = DefaultCharset): Iterable[String] = betterFile.lines(charset) - final def lineIterator(implicit charset: Charset= DefaultCharset): Iterator[String] = betterFile.lineIterator(charset) + final def lineIterator(implicit charset: Charset = DefaultCharset): Iterator[String] = + betterFile.lineIterator(charset) - final def tokens(splitter: StringSplitter = StringSplitter.Default) - (implicit charset: Charset = DefaultCharset): Iterator[String] = + final def 
tokens(splitter: StringSplitter = StringSplitter.Default)(implicit + charset: Charset = DefaultCharset + ): Iterator[String] = betterFile.tokens(splitter)(charset) final def contentAsString(implicit charset: Charset = DefaultCharset): String = betterFile.contentAsString(charset) - final def `!`(implicit charset: Charset= DefaultCharset): String = betterFile.contentAsString(charset) + final def `!`(implicit charset: Charset = DefaultCharset): String = betterFile.contentAsString(charset) final def printLines(lines: Iterator[Any])(implicit openOptions: OpenOptions = OpenOptions.append): this.type = { betterFile.printLines(lines)(openOptions) @@ -173,33 +178,37 @@ trait BetterFileMethods { this } - final def writeText(text: String)(implicit openOptions: OpenOptions = OpenOptions.default, - charset: Charset = DefaultCharset): this.type = { + final def writeText( + text: String + )(implicit openOptions: OpenOptions = OpenOptions.default, charset: Charset = DefaultCharset): this.type = { betterFile.writeText(text)(openOptions, charset) this } - final def write(text: String) - (implicit openOptions: OpenOptions = OpenOptions.default, - charset: Charset = DefaultCharset): this.type = { + final def write( + text: String + )(implicit openOptions: OpenOptions = OpenOptions.default, charset: Charset = DefaultCharset): this.type = { betterFile.write(text)(openOptions, charset) this } - final def overwrite(text: String)(implicit openOptions: OpenOptions = OpenOptions.default, - charset: Charset = DefaultCharset): this.type = { + final def overwrite( + text: String + )(implicit openOptions: OpenOptions = OpenOptions.default, charset: Charset = DefaultCharset): this.type = { betterFile.overwrite(text)(openOptions, charset) this } - final def <(text: String)(implicit openOptions: OpenOptions = OpenOptions.default, - charset: Charset = DefaultCharset): this.type = { + final def <( + text: String + )(implicit openOptions: OpenOptions = OpenOptions.default, charset: Charset = DefaultCharset): this.type = { betterFile.write(text)(openOptions, charset) this } - final def `>:`(text: String)(implicit openOptions: OpenOptions = OpenOptions.default, - charset: Charset = DefaultCharset): this.type = { + final def `>:`( + text: String + )(implicit openOptions: OpenOptions = OpenOptions.default, charset: Charset = DefaultCharset): this.type = { betterFile.write(text)(openOptions, charset) this } @@ -221,12 +230,16 @@ trait BetterFileMethods { final def bufferedReader(implicit charset: Charset = DefaultCharset): Dispose[BufferedReader] = betterFile.bufferedReader(charset) - final def newBufferedWriter(implicit charset: Charset = DefaultCharset, - openOptions: OpenOptions = OpenOptions.default): BufferedWriter = + final def newBufferedWriter(implicit + charset: Charset = DefaultCharset, + openOptions: OpenOptions = OpenOptions.default + ): BufferedWriter = betterFile.newBufferedWriter(charset, openOptions) - final def bufferedWriter(implicit charset: Charset = DefaultCharset, - openOptions: OpenOptions = OpenOptions.default): Dispose[BufferedWriter] = + final def bufferedWriter(implicit + charset: Charset = DefaultCharset, + openOptions: OpenOptions = OpenOptions.default + ): Dispose[BufferedWriter] = betterFile.bufferedWriter(charset, openOptions) final def newFileReader: FileReader = betterFile.newFileReader @@ -237,12 +250,14 @@ trait BetterFileMethods { final def fileWriter(append: Boolean = false): Dispose[FileWriter] = betterFile.fileWriter(append) - final def newPrintWriter(autoFlush: Boolean = false) - (implicit 
openOptions: OpenOptions = OpenOptions.default): PrintWriter = + final def newPrintWriter(autoFlush: Boolean = false)(implicit + openOptions: OpenOptions = OpenOptions.default + ): PrintWriter = betterFile.newPrintWriter(autoFlush) - final def printWriter(autoFlush: Boolean = false) - (implicit openOptions: OpenOptions = OpenOptions.default): Dispose[PrintWriter] = + final def printWriter(autoFlush: Boolean = false)(implicit + openOptions: OpenOptions = OpenOptions.default + ): Dispose[PrintWriter] = betterFile.printWriter(autoFlush) final def newInputStream(implicit openOptions: OpenOptions = OpenOptions.default): InputStream = @@ -251,12 +266,14 @@ trait BetterFileMethods { final def inputStream(implicit openOptions: OpenOptions = OpenOptions.default): Dispose[InputStream] = betterFile.inputStream(openOptions) - final def newScanner(splitter: StringSplitter = StringSplitter.Default) - (implicit charset: Charset = DefaultCharset): Scanner = + final def newScanner(splitter: StringSplitter = StringSplitter.Default)(implicit + charset: Charset = DefaultCharset + ): Scanner = betterFile.newScanner(splitter)(charset) - final def scanner(splitter: StringSplitter = StringSplitter.Default) - (implicit charset: Charset = DefaultCharset): Dispose[Scanner] = + final def scanner(splitter: StringSplitter = StringSplitter.Default)(implicit + charset: Charset = DefaultCharset + ): Dispose[Scanner] = betterFile.scanner(splitter)(charset) final def newOutputStream(implicit openOptions: OpenOptions = OpenOptions.default): OutputStream = @@ -265,19 +282,25 @@ trait BetterFileMethods { final def outputStream(implicit openOptions: OpenOptions = OpenOptions.default): Dispose[OutputStream] = betterFile.outputStream(openOptions) - final def newFileChannel(implicit openOptions: OpenOptions = OpenOptions.default, - attributes: Attributes = Attributes.default): FileChannel = + final def newFileChannel(implicit + openOptions: OpenOptions = OpenOptions.default, + attributes: Attributes = Attributes.default + ): FileChannel = betterFile.newFileChannel(openOptions, attributes) - final def fileChannel(implicit openOptions: OpenOptions = OpenOptions.default, - attributes: Attributes = Attributes.default): Dispose[FileChannel] = + final def fileChannel(implicit + openOptions: OpenOptions = OpenOptions.default, + attributes: Attributes = Attributes.default + ): Dispose[FileChannel] = betterFile.fileChannel(openOptions, attributes) - final def newAsynchronousFileChannel(implicit openOptions: OpenOptions = OpenOptions.default): - AsynchronousFileChannel = betterFile.newAsynchronousFileChannel(openOptions) + final def newAsynchronousFileChannel(implicit + openOptions: OpenOptions = OpenOptions.default + ): AsynchronousFileChannel = betterFile.newAsynchronousFileChannel(openOptions) - final def asynchronousFileChannel(implicit openOptions: OpenOptions = OpenOptions.default): - Dispose[AsynchronousFileChannel] = betterFile.asynchronousFileChannel(openOptions) + final def asynchronousFileChannel(implicit + openOptions: OpenOptions = OpenOptions.default + ): Dispose[AsynchronousFileChannel] = betterFile.asynchronousFileChannel(openOptions) final def digest(algorithmName: String): Array[Byte] = { val messageDigest = MessageDigest.getInstance(algorithmName) @@ -307,8 +330,11 @@ trait BetterFileMethods { final def isHidden: Boolean = betterFile.isHidden - final def isLocked(mode: RandomAccessMode, position: Long = 0L, size: Long = Long.MaxValue, - isShared: Boolean = false): Boolean = betterFile.isLocked(mode, position, size, 
isShared) + final def isLocked(mode: RandomAccessMode, + position: Long = 0L, + size: Long = Long.MaxValue, + isShared: Boolean = false + ): Boolean = betterFile.isLocked(mode, position, size, isShared) final def isReadLocked(position: Long = 0L, size: Long = Long.MaxValue, isShared: Boolean = false): Boolean = betterFile.isReadLocked(position, size, isShared) @@ -359,7 +385,7 @@ trait BetterFileMethods { } // Conflicts with the legacy cromwell.core.path.Obsolete.PathMethodAliases.getFileName(). Uncomment when that's gone. - //final def apply(permission: PosixFilePermission): Boolean = betterFile.apply(permission) + // final def apply(permission: PosixFilePermission): Boolean = betterFile.apply(permission) final def isOwnerReadable: Boolean = betterFile.isOwnerReadable @@ -416,9 +442,9 @@ trait BetterFileMethods { this } - final def touch(time: Instant = Instant.now()) - (implicit attributes: Attributes = Attributes.default, - linkOptions: LinkOptions = LinkOptions.default): this.type = { + final def touch( + time: Instant = Instant.now() + )(implicit attributes: Attributes = Attributes.default, linkOptions: LinkOptions = LinkOptions.default): this.type = { betterFile.touch(time)(attributes, linkOptions) this } @@ -443,14 +469,16 @@ trait BetterFileMethods { destination } - final def symbolicLinkTo(destination: Path) - (implicit attributes: Attributes = Attributes.default): destination.type = { + final def symbolicLinkTo( + destination: Path + )(implicit attributes: Attributes = Attributes.default): destination.type = { betterFile.symbolicLinkTo(destination.betterFile)(attributes) destination } - final def linkTo(destination: Path, symbolic: Boolean = false) - (implicit attributes: Attributes = Attributes.default): destination.type = { + final def linkTo(destination: Path, symbolic: Boolean = false)(implicit + attributes: Attributes = Attributes.default + ): destination.type = { betterFile.linkTo(destination.betterFile, symbolic)(attributes) destination } @@ -477,13 +505,16 @@ trait BetterFileMethods { this } - final def zipTo(destination: Path, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION) - (implicit charset: Charset = DefaultCharset): destination.type = { + final def zipTo(destination: Path, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit + charset: Charset = DefaultCharset + ): destination.type = { betterFile.zipTo(destination.betterFile, compressionLevel)(charset) destination } - final def zip(compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit charset: Charset = DefaultCharset): Path = + final def zip(compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit + charset: Charset = DefaultCharset + ): Path = newPath(betterFile.zip(compressionLevel)(charset)) final def unzipTo(destination: Path)(implicit charset: Charset = DefaultCharset): destination.type = { @@ -515,9 +546,9 @@ object BetterFileMethods { def cwd: Path = pwd - val `..`: Path => Path = _.parent + val `..` : Path => Path = _.parent - val `.`: Path => Path = identity + val `.` : Path => Path = identity implicit class FileDsl(file: Path) { def /(f: Path => Path): Path = f(file) @@ -561,7 +592,8 @@ object BetterFileMethods { def chgrp(group: String, file: Path): Path = file.setGroup(group) - def chmod(permissions: String, file: Path): Path = file.setPermissions(PosixFilePermissions.fromString(permissions).asScala.toSet) + def chmod(permissions: String, file: Path): Path = + file.setPermissions(PosixFilePermissions.fromString(permissions).asScala.toSet) def chmod_+(permission: 
PosixFilePermission, file: Path): Path = file.addPermission(permission) @@ -572,10 +604,12 @@ object BetterFileMethods { def unzip(zipFile: Path)(destination: Path)(implicit charset: Charset = DefaultCharset): destination.type = zipFile.unzipTo(destination)(charset) - def zip(files: better.files.File*)(destination: better.files.File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION) - (implicit charset: Charset = DefaultCharset): destination.type = { + def zip( + files: better.files.File* + )(destination: better.files.File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION)(implicit + charset: Charset = DefaultCharset + ): destination.type = destination.zipIn(files.iterator, compressionLevel)(charset) - } } type PathMatcherSyntax = better.files.File.PathMatcherSyntax diff --git a/core/src/main/scala/cromwell/core/path/CustomRetryParams.scala b/core/src/main/scala/cromwell/core/path/CustomRetryParams.scala index 13f577a1718..5136d7baa20 100644 --- a/core/src/main/scala/cromwell/core/path/CustomRetryParams.scala +++ b/core/src/main/scala/cromwell/core/path/CustomRetryParams.scala @@ -11,7 +11,7 @@ object CustomRetryParams { val Default = CustomRetryParams( timeout = Duration.Inf, maxRetries = Option(3), - backoff = SimpleExponentialBackoff(1 seconds, 3 seconds, 1.5D), + backoff = SimpleExponentialBackoff(1 seconds, 3 seconds, 1.5d), isTransient = throwableToFalse, isFatal = throwableToFalse ) @@ -23,4 +23,5 @@ case class CustomRetryParams(timeout: Duration, maxRetries: Option[Int], backoff: Backoff, isTransient: Throwable => Boolean, - isFatal: Throwable => Boolean) + isFatal: Throwable => Boolean +) diff --git a/core/src/main/scala/cromwell/core/path/DefaultPathBuilder.scala b/core/src/main/scala/cromwell/core/path/DefaultPathBuilder.scala index 4bbefe71d0a..f478d04aa67 100644 --- a/core/src/main/scala/cromwell/core/path/DefaultPathBuilder.scala +++ b/core/src/main/scala/cromwell/core/path/DefaultPathBuilder.scala @@ -19,7 +19,6 @@ case object DefaultPathBuilder extends PathBuilder { val uri = URI.create(UrlEscapers.urlFragmentEscaper().escape(pathAsString)) Option(uri.getScheme) match { case Some("file") | None => - if (pathAsString.startsWith("file://")) { // NOTE: Legacy support for old paths generated as URIs by the old .toRealString val host = Option(uri.getHost) getOrElse "" @@ -44,15 +43,14 @@ case object DefaultPathBuilder extends PathBuilder { def createTempDirectory(prefix: String): DefaultPath = DefaultPath(java.nio.file.Files.createTempDirectory(prefix)) - def createTempFile(prefix: String = "", suffix: String = "", parent: Option[Path] = None): Path = { + def createTempFile(prefix: String = "", suffix: String = "", parent: Option[Path] = None): Path = parent match { case Some(dir) => dir.createTempFile(prefix, suffix) case _ => DefaultPath(java.nio.file.Files.createTempFile(prefix, suffix)) } - } } -case class DefaultPath private[path](nioPath: NioPath) extends Path { +case class DefaultPath private[path] (nioPath: NioPath) extends Path { override protected def newPath(nioPath: NioPath): DefaultPath = DefaultPath(nioPath) override def pathAsString: String = nioPath.toString diff --git a/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala b/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala index 20a2afdd5ce..f986dcfa1a5 100644 --- a/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala +++ b/core/src/main/scala/cromwell/core/path/DefaultPathBuilderFactory.scala @@ -7,7 +7,8 @@ import 
cromwell.core.path.PathBuilderFactory.PriorityDefault import scala.concurrent.{ExecutionContext, Future} case object DefaultPathBuilderFactory extends PathBuilderFactory { - override def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem, ec: ExecutionContext) = Future.successful(DefaultPathBuilder) + override def withOptions(options: WorkflowOptions)(implicit actorSystem: ActorSystem, ec: ExecutionContext) = + Future.successful(DefaultPathBuilder) val name = "local" val tuple = name -> this diff --git a/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala b/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala index ffe862c2df3..e455867e5e6 100644 --- a/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala +++ b/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala @@ -1,6 +1,6 @@ package cromwell.core.path -import java.io.{BufferedInputStream, BufferedReader, ByteArrayOutputStream, IOException, InputStream, InputStreamReader} +import java.io.{BufferedInputStream, BufferedReader, ByteArrayOutputStream, InputStream, InputStreamReader, IOException} import java.nio.file.{FileAlreadyExistsException, Files} import java.nio.file.attribute.{PosixFilePermission, PosixFilePermissions} import java.util.zip.GZIPOutputStream @@ -33,13 +33,11 @@ trait EvenBetterPathMethods { final def plusSuffix(suffix: String): Path = swapSuffix("", suffix) - final def swapSuffix(oldSuffix: String, newSuffix: String): Path = { + final def swapSuffix(oldSuffix: String, newSuffix: String): Path = sibling(s"${name.stripSuffix(oldSuffix)}$newSuffix") - } - final def createTempFile(prefix: String = "", suffix: String = ""): Path = { + final def createTempFile(prefix: String = "", suffix: String = ""): Path = newPath(java.nio.file.Files.createTempFile(nioPathPrivate, prefix, suffix)) - } def chmod(permissions: String): this.type = { setPermissions(PosixFilePermissions.fromString(permissions).asScala.toSet) @@ -49,18 +47,16 @@ trait EvenBetterPathMethods { // betterFile.symbolicLink calls Files.readSymbolicLink, but then implicitly converts the java.nio.Path returned to a better.File // which calls toAbsolutePath. Consequently, if the path was relative, the current directory is used to make it absolute. // This is not the desired behavior to be able to follow relative symbolic links, so bypass better files method and directly use the java one. - final def symbolicLinkRelative: Option[Path] = { + final def symbolicLinkRelative: Option[Path] = if (betterFile.isSymbolicLink) { Option(newPath(Files.readSymbolicLink(betterFile.path))) } else None - } - final def followSymbolicLinks: Path = { + final def followSymbolicLinks: Path = symbolicLinkRelative match { case Some(target) => parent.resolve(target.followSymbolicLinks) case None => this } - } final def createPermissionedDirectories(): this.type = { if (!exists) { @@ -73,8 +69,7 @@ trait EvenBetterPathMethods { addPermission(PosixFilePermission.OTHERS_READ) addPermission(PosixFilePermission.OTHERS_WRITE) addPermission(PosixFilePermission.OTHERS_EXECUTE) - } - catch { + } catch { // Race condition that's particularly likely with scatters. Ignore. case _: FileAlreadyExistsException => // The GCS filesystem does not support setting permissions and will throw an `UnsupportedOperationException`. 
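
`followSymbolicLinks` in the hunk above deliberately bypasses better-files so that a relative link target is resolved against the link's own parent directory rather than the process's working directory. A minimal java.nio sketch of that behavior (the example paths are illustrative):

    import java.nio.file.{Files, Path, Paths}

    // Recursively resolve symlinks, keeping relative targets relative to the
    // link's parent. (A real implementation would also guard against link
    // cycles and a null parent at the filesystem root.)
    def follow(p: Path): Path =
      if (Files.isSymbolicLink(p))
        follow(p.getParent.resolve(Files.readSymbolicLink(p)).normalize())
      else p

    // Given /data/current -> releases/v2 (a relative target),
    // follow(Paths.get("/data/current")) yields /data/releases/v2 --
    // not <cwd>/releases/v2, which a toAbsolutePath-based resolution would produce.
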
@@ -105,7 +100,9 @@ trait EvenBetterPathMethods { byteStream.toByteArray } - def writeContent(content: String)(openOptions: OpenOptions, codec: Codec, compressPayload: Boolean)(implicit ec: ExecutionContext): this.type = { + def writeContent( + content: String + )(openOptions: OpenOptions, codec: Codec, compressPayload: Boolean)(implicit ec: ExecutionContext): this.type = { locally(ec) val contentByteArray = content.getBytes(codec.charSet) writeByteArray { @@ -113,8 +110,8 @@ trait EvenBetterPathMethods { }(openOptions) } - private def fileIoErrorPf[A]: PartialFunction[Throwable, Try[A]] = { - case ex: Throwable => Failure(new IOException(s"Could not read from ${this.pathAsString}: ${ex.getMessage}", ex)) + private def fileIoErrorPf[A]: PartialFunction[Throwable, Try[A]] = { case ex: Throwable => + Failure(new IOException(s"Could not read from ${this.pathAsString}: ${ex.getMessage}", ex)) } /** @@ -122,35 +119,36 @@ trait EvenBetterPathMethods { * The input stream will be closed when this method returns, which means the f function * cannot leak an open stream. */ - def withReader[A](f: BufferedReader => A)(implicit ec: ExecutionContext): A = { + def withReader[A](f: BufferedReader => A)(implicit ec: ExecutionContext): A = // Use an input reader to convert the byte stream to character stream. Buffered reader for efficiency. - tryWithResource(() => new BufferedReader(new InputStreamReader(this.mediaInputStream, Codec.UTF8.name)))(f).recoverWith(fileIoErrorPf).get - } + tryWithResource(() => new BufferedReader(new InputStreamReader(this.mediaInputStream, Codec.UTF8.name)))(f) + .recoverWith(fileIoErrorPf) + .get /** * InputStream's read method reads bytes, whereas InputStreamReader's read method reads characters. * BufferedInputStream can be used to read bytes directly from input stream, without conversion to characters. */ - def withBufferedStream[A](f: BufferedInputStream => A)(implicit ec: ExecutionContext): A = { + def withBufferedStream[A](f: BufferedInputStream => A)(implicit ec: ExecutionContext): A = tryWithResource(() => new BufferedInputStream(this.mediaInputStream))(f).recoverWith(fileIoErrorPf).get - } /** * Returns an Array[Byte] from a Path. Limit the array size to "limit" byte if defined. 
* @throws IOException if failOnOverflow is true and the file is larger than limit */ - def limitFileContent(limit: Option[Int], failOnOverflow: Boolean)(implicit ec: ExecutionContext): Array[Byte] = withBufferedStream { bufferedStream => - val bytesIterator = Iterator.continually(bufferedStream.read).takeWhile(_ != -1).map(_.toByte) - // Take 1 more than the limit so that we can look at the size and know if it's overflowing - val bytesArray = limit.map(l => bytesIterator.take(l + 1)).getOrElse(bytesIterator).toArray - - limit match { - case Some(l) if failOnOverflow && bytesArray.length > l => - throw new IOException(s"File $this is larger than requested maximum of $l Bytes.") - case Some(l) => bytesArray.take(l) - case _ => bytesArray + def limitFileContent(limit: Option[Int], failOnOverflow: Boolean)(implicit ec: ExecutionContext): Array[Byte] = + withBufferedStream { bufferedStream => + val bytesIterator = Iterator.continually(bufferedStream.read).takeWhile(_ != -1).map(_.toByte) + // Take 1 more than the limit so that we can look at the size and know if it's overflowing + val bytesArray = limit.map(l => bytesIterator.take(l + 1)).getOrElse(bytesIterator).toArray + + limit match { + case Some(l) if failOnOverflow && bytesArray.length > l => + throw new IOException(s"File $this is larger than requested maximum of $l Bytes.") + case Some(l) => bytesArray.take(l) + case _ => bytesArray + } } - } /** * Reads the first limitBytes of a file and makes a String. Prepend with an annotation at the start (to say that this is the diff --git a/core/src/main/scala/cromwell/core/path/JavaWriterImplicits.scala b/core/src/main/scala/cromwell/core/path/JavaWriterImplicits.scala index cc1b7f40dde..2b5e36f2086 100644 --- a/core/src/main/scala/cromwell/core/path/JavaWriterImplicits.scala +++ b/core/src/main/scala/cromwell/core/path/JavaWriterImplicits.scala @@ -4,6 +4,7 @@ import java.io.Writer object JavaWriterImplicits { implicit class FlushingAndClosingWriter(writer: Writer) { + /** Convenience method to flush and close in one shot. */ def flushAndClose() = { writer.flush() diff --git a/core/src/main/scala/cromwell/core/path/NioPathMethods.scala b/core/src/main/scala/cromwell/core/path/NioPathMethods.scala index 42c17ca3222..3ce92dddd93 100644 --- a/core/src/main/scala/cromwell/core/path/NioPathMethods.scala +++ b/core/src/main/scala/cromwell/core/path/NioPathMethods.scala @@ -35,9 +35,9 @@ trait NioPathMethods { final def getNameCount: Int = nioPathPrivate.getNameCount /* This method cannot be used safely because it could fail for valid GcsPaths that are not valid URIs - * See https://github.com/GoogleCloudPlatform/google-cloud-java/issues/1343 + * See https://github.com/GoogleCloudPlatform/google-cloud-java/issues/1343 */ - //final def toUri: URI = nioPathPrivate.toUri + // final def toUri: URI = nioPathPrivate.toUri final def compareTo(other: Path): Int = nioPathPrivate.compareTo(other.nioPathPrivate) @@ -73,5 +73,5 @@ trait NioPathMethods { * Default implementation assumes symlinks are supported, and that toRealPath may return a valid path. 
* This implementation may be overridden for NIO implementations that do not support symbolic links (For example the Azure NIO library) */ - def getSymlinkSafePath(options: LinkOption*): Path = toRealPath(options: _*) + def getSymlinkSafePath(options: LinkOption*): Path = toRealPath(options: _*) } diff --git a/core/src/main/scala/cromwell/core/path/Obsolete.scala b/core/src/main/scala/cromwell/core/path/Obsolete.scala index 77fd32b54a7..78c346ce09d 100644 --- a/core/src/main/scala/cromwell/core/path/Obsolete.scala +++ b/core/src/main/scala/cromwell/core/path/Obsolete.scala @@ -47,16 +47,14 @@ object Obsolete { val File = ObsoleteFile object ObsoleteFile { - def newTemporaryDirectory(prefix: String = ""): DefaultPath = { + def newTemporaryDirectory(prefix: String = ""): DefaultPath = DefaultPath(better.files.File.newTemporaryDirectory(prefix).path) - } - def newTemporaryFile(prefix: String = "", suffix: String = "", parent: Option[Path] = None): Path = { + def newTemporaryFile(prefix: String = "", suffix: String = "", parent: Option[Path] = None): Path = parent match { case Some(dir) => dir.createTempFile(prefix, suffix) case _ => DefaultPathBuilder.createTempFile(prefix, suffix) } - } def apply(path: String, fragments: String*) = DefaultPath(better.files.File(path, fragments: _*).path) diff --git a/core/src/main/scala/cromwell/core/path/PathBuilder.scala b/core/src/main/scala/cromwell/core/path/PathBuilder.scala index 371c9e98157..fecfaebd507 100644 --- a/core/src/main/scala/cromwell/core/path/PathBuilder.scala +++ b/core/src/main/scala/cromwell/core/path/PathBuilder.scala @@ -40,6 +40,7 @@ trait PreResolvePathBuilder extends PathBuilder { * @see [[cromwell.core.path.EvenBetterPathMethods]] */ trait Path extends PathObjectMethods with NioPathMethods with BetterFileMethods with EvenBetterPathMethods { + /** * A reference to the underlying nioPath, used to create new java.nio.Path's that will then be sent to newPath * for wrapping. @@ -132,11 +133,11 @@ trait Path extends PathObjectMethods with NioPathMethods with BetterFileMethods def pathWithoutScheme: String // Used by various extension traits within this scala package - private[path] final def nioPathPrivate: NioPath = nioPath + final private[path] def nioPathPrivate: NioPath = nioPath // Used within BetterFileMethods - private[path] final def betterFile: better.files.File = nioPathPrivate + final private[path] def betterFile: better.files.File = nioPathPrivate // Some Path methods return null. 
- private[path] final def newPathOrNull(nioPath: NioPath) = Option(nioPath).map(newPath).orNull + final private[path] def newPathOrNull(nioPath: NioPath) = Option(nioPath).map(newPath).orNull } diff --git a/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala b/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala index 79adaae4d6b..cbc750f9f6f 100644 --- a/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala +++ b/core/src/main/scala/cromwell/core/path/PathBuilderFactory.scala @@ -11,15 +11,18 @@ import scala.concurrent.{ExecutionContext, Future} object PathBuilderFactory { // Given a list of factories, instantiates the corresponding path builders - def instantiatePathBuilders(factories: List[PathBuilderFactory], workflowOptions: WorkflowOptions)(implicit as: ActorSystem): Future[List[PathBuilder]] = { + def instantiatePathBuilders(factories: List[PathBuilderFactory], workflowOptions: WorkflowOptions)(implicit + as: ActorSystem + ): Future[List[PathBuilder]] = { implicit val ec: ExecutionContext = as.dispatchers.lookup(Dispatcher.IoDispatcher) val sortedFactories = factories.sortBy(_.priority) sortedFactories.traverse(_.withOptions(workflowOptions)) } - val PriorityBlob = 100 // High priority to evaluate first, because blob files may inadvertently match other filesystems + val PriorityBlob = + 100 // High priority to evaluate first, because blob files may inadvertently match other filesystems val PriorityStandard = 1000 - val PriorityDefault = 10000 // "Default" is a fallback, evaluate last + val PriorityDefault = 10000 // "Default" is a fallback, evaluate last } /** diff --git a/core/src/main/scala/cromwell/core/path/PathCopier.scala b/core/src/main/scala/cromwell/core/path/PathCopier.scala index b9352cb2082..6848ab3d24b 100644 --- a/core/src/main/scala/cromwell/core/path/PathCopier.scala +++ b/core/src/main/scala/cromwell/core/path/PathCopier.scala @@ -18,7 +18,7 @@ object PathCopier { val tokens2 = string2.split(regexIncludingSlashes) val matchingTokens: Array[(String, String)] = tokens1.zip(tokens2).takeWhile(Function.tupled(_ == _)) - val matchingPrefix = matchingTokens.map({ case (str, _) => str }).mkString + val matchingPrefix = matchingTokens.map { case (str, _) => str }.mkString string2.stripPrefix(matchingPrefix).replaceAll("^/+", "") } @@ -39,13 +39,12 @@ object PathCopier { /** * Copies from source to destination. NOTE: Copies are not atomic, and may create a partial copy. */ - def copy(sourceFilePath: Path, destinationFilePath: Path): Try[Unit] = { + def copy(sourceFilePath: Path, destinationFilePath: Path): Try[Unit] = Try { Option(destinationFilePath.parent).foreach(_.createDirectories()) sourceFilePath.copyTo(destinationFilePath, overwrite = true) () - } recoverWith { - case ex => Failure(new IOException(s"Failed to copy $sourceFilePath to $destinationFilePath", ex)) + } recoverWith { case ex => + Failure(new IOException(s"Failed to copy $sourceFilePath to $destinationFilePath", ex)) } - } } diff --git a/core/src/main/scala/cromwell/core/path/PathFactory.scala b/core/src/main/scala/cromwell/core/path/PathFactory.scala index a9e074afcc7..b2777b0fc83 100644 --- a/core/src/main/scala/cromwell/core/path/PathFactory.scala +++ b/core/src/main/scala/cromwell/core/path/PathFactory.scala @@ -15,6 +15,7 @@ import scala.util.{Failure, Success, Try} * Convenience trait delegating to the PathFactory singleton */ trait PathFactory { + /** * Path builders to be applied (in order) to attempt to build a Path from a string. 
*/ @@ -43,11 +44,13 @@ object PathFactory { private def findFirstSuccess(string: String, allPathBuilders: PathBuilders, restPathBuilders: PathBuilders, - failures: Vector[String]): ErrorOr[Path] = restPathBuilders match { - case Nil => NonEmptyList.fromList(failures.toList) match { - case Some(errors) => Invalid(errors) - case None => s"Could not parse '$string' to path. No PathBuilders were provided".invalidNel - } + failures: Vector[String] + ): ErrorOr[Path] = restPathBuilders match { + case Nil => + NonEmptyList.fromList(failures.toList) match { + case Some(errors) => Invalid(errors) + case None => s"Could not parse '$string' to path. No PathBuilders were provided".invalidNel + } case pb :: rest => pb.build(string, allPathBuilders) match { case Success(path) => @@ -64,7 +67,8 @@ object PathFactory { def buildPath(string: String, pathBuilders: PathBuilders, preMapping: String => String = identity[String], - postMapping: Path => Path = identity[Path]): Path = { + postMapping: Path => Path = identity[Path] + ): Path = { lazy val pathBuilderNames: String = pathBuilders map { _.name } mkString ", " @@ -77,12 +81,12 @@ object PathFactory { path match { case Valid(v) => v case Invalid(errors) => - throw PathParsingException( - s"""Could not build the path "$string". It may refer to a filesystem not supported by this instance of Cromwell.""" + - s" Supported filesystems are: $pathBuilderNames." + - s" Failures: ${errors.toList.mkString(System.lineSeparator, System.lineSeparator, System.lineSeparator)}" + - s" Please refer to the documentation for more information on how to configure filesystems: http://cromwell.readthedocs.io/en/develop/backends/HPC/#filesystems" - ) + throw PathParsingException( + s"""Could not build the path "$string". It may refer to a filesystem not supported by this instance of Cromwell.""" + + s" Supported filesystems are: $pathBuilderNames." + + s" Failures: ${errors.toList.mkString(System.lineSeparator, System.lineSeparator, System.lineSeparator)}" + + s" Please refer to the documentation for more information on how to configure filesystems: http://cromwell.readthedocs.io/en/develop/backends/HPC/#filesystems" + ) } } } diff --git a/core/src/main/scala/cromwell/core/path/PathObjectMethods.scala b/core/src/main/scala/cromwell/core/path/PathObjectMethods.scala index d109faa797c..ab3d981316c 100644 --- a/core/src/main/scala/cromwell/core/path/PathObjectMethods.scala +++ b/core/src/main/scala/cromwell/core/path/PathObjectMethods.scala @@ -8,12 +8,11 @@ trait PathObjectMethods { override def toString: String = pathAsString - override def equals(obj: Any) = { + override def equals(obj: Any) = obj match { case other: Path => nioPathPrivate == other.nioPathPrivate case _ => false } - } override def hashCode = nioPathPrivate.hashCode() } diff --git a/core/src/main/scala/cromwell/core/path/PathWriter.scala b/core/src/main/scala/cromwell/core/path/PathWriter.scala index 5a602810183..ee7717b6765 100644 --- a/core/src/main/scala/cromwell/core/path/PathWriter.scala +++ b/core/src/main/scala/cromwell/core/path/PathWriter.scala @@ -59,7 +59,7 @@ case class TailedWriter(path: Path, tailedSize: Int) extends PathWriter { * * @return a descriptive tail of the `path` and the last `tailedLines` written. */ - def tailString: String = { + def tailString: String = if (tailedLines.isEmpty) { s"Contents of $path were empty." 
} else if (isTailed) { @@ -67,5 +67,4 @@ case class TailedWriter(path: Path, tailedSize: Int) extends PathWriter { } else { s"Contents of $path:\n${tailedLines.mkString("\n")}" } - } } diff --git a/core/src/main/scala/cromwell/core/retry/GoogleBackoff.scala b/core/src/main/scala/cromwell/core/retry/GoogleBackoff.scala index 0a50b79e091..c901bc0d1ec 100644 --- a/core/src/main/scala/cromwell/core/retry/GoogleBackoff.scala +++ b/core/src/main/scala/cromwell/core/retry/GoogleBackoff.scala @@ -8,47 +8,59 @@ import net.ceedubs.ficus.Ficus._ import scala.concurrent.duration.{Duration, FiniteDuration} object InitialGapBackoff { - def apply(initialGap: FiniteDuration, initialInterval: FiniteDuration, maxInterval: FiniteDuration, multiplier: Double) = { - new InitialGapBackoff(initialGap, new ExponentialBackOff.Builder() - .setInitialIntervalMillis(initialInterval.toMillis.toInt) - .setMaxIntervalMillis(maxInterval.toMillis.toInt) - .setMultiplier(multiplier) - .setMaxElapsedTimeMillis(Int.MaxValue) - .build()) - } + def apply(initialGap: FiniteDuration, + initialInterval: FiniteDuration, + maxInterval: FiniteDuration, + multiplier: Double + ) = + new InitialGapBackoff( + initialGap, + new ExponentialBackOff.Builder() + .setInitialIntervalMillis(initialInterval.toMillis.toInt) + .setMaxIntervalMillis(maxInterval.toMillis.toInt) + .setMultiplier(multiplier) + .setMaxElapsedTimeMillis(Int.MaxValue) + .build() + ) } case class InitialGapBackoff(initialGapMillis: FiniteDuration, googleBackoff: ExponentialBackOff) extends Backoff { assert(initialGapMillis.compareTo(Duration.Zero) != 0, "Initial gap cannot be null, use SimpleBackoff instead.") override val backoffMillis = initialGapMillis.toMillis + /** Switch to a SimpleExponentialBackoff after the initial gap has been used */ override def next = new SimpleExponentialBackoff(googleBackoff) } object SimpleExponentialBackoff { - def apply(initialInterval: FiniteDuration, maxInterval: FiniteDuration, multiplier: Double, randomizationFactor: Double = ExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR) = { - new SimpleExponentialBackoff(new ExponentialBackOff.Builder() - .setInitialIntervalMillis(initialInterval.toMillis.toInt) - .setMaxIntervalMillis(maxInterval.toMillis.toInt) - .setMultiplier(multiplier) - .setMaxElapsedTimeMillis(Int.MaxValue) - .setRandomizationFactor(randomizationFactor) - .build()) - } - - def apply(config: Config): SimpleExponentialBackoff = { + def apply(initialInterval: FiniteDuration, + maxInterval: FiniteDuration, + multiplier: Double, + randomizationFactor: Double = ExponentialBackOff.DEFAULT_RANDOMIZATION_FACTOR + ) = + new SimpleExponentialBackoff( + new ExponentialBackOff.Builder() + .setInitialIntervalMillis(initialInterval.toMillis.toInt) + .setMaxIntervalMillis(maxInterval.toMillis.toInt) + .setMultiplier(multiplier) + .setMaxElapsedTimeMillis(Int.MaxValue) + .setRandomizationFactor(randomizationFactor) + .build() + ) + + def apply(config: Config): SimpleExponentialBackoff = SimpleExponentialBackoff( config.as[FiniteDuration]("min"), config.as[FiniteDuration]("max"), config.as[Double]("multiplier"), config.as[Double]("randomization-factor") ) - } } case class SimpleExponentialBackoff(googleBackoff: ExponentialBackOff) extends Backoff { override def backoffMillis = googleBackoff.nextBackOffMillis() + /** google ExponentialBackOff is mutable so we can keep returning the same instance */ override def next = this } diff --git a/core/src/main/scala/cromwell/core/retry/Retry.scala 
b/core/src/main/scala/cromwell/core/retry/Retry.scala index 5e5ba4fe6b1..a7c10cd48dd 100644 --- a/core/src/main/scala/cromwell/core/retry/Retry.scala +++ b/core/src/main/scala/cromwell/core/retry/Retry.scala @@ -34,11 +34,11 @@ object Retry extends StrictLogging { */ def withRetry[A](f: () => Future[A], maxRetries: Option[Int] = Option(10), - backoff: Backoff = SimpleExponentialBackoff(5 seconds, 10 seconds, 1.1D), + backoff: Backoff = SimpleExponentialBackoff(5 seconds, 10 seconds, 1.1d), isTransient: Throwable => Boolean = throwableToFalse, isFatal: Throwable => Boolean = throwableToFalse, - onRetry: Throwable => Unit = noopOnRetry) - (implicit actorSystem: ActorSystem): Future[A] = { + onRetry: Throwable => Unit = noopOnRetry + )(implicit actorSystem: ActorSystem): Future[A] = { // In the future we might want EC passed in separately but at the moment it caused more issues than it solved to do so implicit val ec: ExecutionContext = actorSystem.dispatcher val delay = backoff.backoffMillis.millis @@ -47,10 +47,18 @@ object Retry extends StrictLogging { case throwable if isFatal(throwable) => Future.failed(CromwellFatalException(throwable)) case throwable if !isFatal(throwable) => val retriesLeft = if (isTransient(throwable)) maxRetries else maxRetries map { _ - 1 } - + if (retriesLeft.forall(_ > 0)) { onRetry(throwable) - after(delay, actorSystem.scheduler)(withRetry(f, backoff = backoff.next, maxRetries = retriesLeft, isTransient = isTransient, isFatal = isFatal, onRetry = onRetry)) + after(delay, actorSystem.scheduler)( + withRetry(f, + backoff = backoff.next, + maxRetries = retriesLeft, + isTransient = isTransient, + isFatal = isFatal, + onRetry = onRetry + ) + ) } else { Future.failed(new CromwellFatalException(throwable)) } @@ -69,8 +77,8 @@ object Retry extends StrictLogging { */ def withRetryForTransactionRollback[A](f: () => Future[A], maxRetries: Int = 5, - backoff: Backoff = SimpleExponentialBackoff(5 seconds, 10 seconds, 1.1D)) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[A] = { + backoff: Backoff = SimpleExponentialBackoff(5 seconds, 10 seconds, 1.1d) + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[A] = { val delay = backoff.backoffMillis.millis f() recoverWith { @@ -85,4 +93,3 @@ object Retry extends StrictLogging { } } } - diff --git a/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala b/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala index bfae7232d64..d2387b5062a 100644 --- a/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala +++ b/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala @@ -8,7 +8,6 @@ import wom.values._ import scala.language.postfixOps - /** * Builds arbitrary `WomValues` from `WomValueSimpletons`. **/ @@ -63,28 +62,26 @@ object WomValueBuilder { private val MapElementPattern = raw"^:((?:\\[]\[:]|[^]\[:])+)(.*)".r // Group tuples by key using a Map with key type `K`. - private def group[K](tuples: Iterable[(K, SimpletonComponent)]): Map[K, Iterable[SimpletonComponent]] = { - tuples groupBy { case (i, _) => i } map { case (k, v) => k -> (v map { case (_, s) => s}) } - } + private def group[K](tuples: Iterable[(K, SimpletonComponent)]): Map[K, Iterable[SimpletonComponent]] = + tuples groupBy { case (i, _) => i } map { case (k, v) => k -> (v map { case (_, s) => s }) } // Returns a tuple of the index into the outermost array and a `SimpletonComponent` whose path reflects the "descent" // into the array. e.g. 
for a component // SimpletonComponent("[0][1]", v) this would return (0 -> SimpletonComponent("[1]", v)). - private def descendIntoArray(component: SimpletonComponent): (Int, SimpletonComponent) = { - component.path match { case ArrayElementPattern(index, more) => index.toInt -> component.copy(path = more)} - } + private def descendIntoArray(component: SimpletonComponent): (Int, SimpletonComponent) = + component.path match { case ArrayElementPattern(index, more) => index.toInt -> component.copy(path = more) } // Returns a tuple of the key into the outermost map and a `SimpletonComponent` whose path reflects the "descent" // into the map. e.g. for a component // SimpletonComponent(":bar:baz", v) this would return ("bar" -> SimpletonComponent(":baz", v)). // Map keys are treated as Strings by this method, the caller must ultimately do the appropriate coercion to the // actual map key type. - private def descendIntoMap(component: SimpletonComponent): (String, SimpletonComponent) = { - component.path match { case MapElementPattern(key, more) => key.unescapeMeta -> component.copy(path = more)} - } + private def descendIntoMap(component: SimpletonComponent): (String, SimpletonComponent) = + component.path match { case MapElementPattern(key, more) => key.unescapeMeta -> component.copy(path = more) } - private implicit class EnhancedSimpletonComponents(val components: Iterable[SimpletonComponent]) extends AnyVal { - def asArray: List[Iterable[SimpletonComponent]] = group(components map descendIntoArray).toList.sortBy(_._1).map(_._2) + implicit private class EnhancedSimpletonComponents(val components: Iterable[SimpletonComponent]) extends AnyVal { + def asArray: List[Iterable[SimpletonComponent]] = + group(components map descendIntoArray).toList.sortBy(_._1).map(_._2) def asMap: Map[String, Iterable[SimpletonComponent]] = group(components map descendIntoMap) def asPrimitive: WomValue = components.head.value def asString: String = asPrimitive.valueString @@ -92,51 +89,50 @@ object WomValueBuilder { private def toWomValue(outputType: WomType, components: Iterable[SimpletonComponent]): WomValue = { - - // Returns a tuple of the key into the pair (i.e. left or right) and a `SimpletonComponent` whose path reflects the "descent" // into the pair. e.g. for a component // SimpletonComponent(":left:foo", someValue) this would return (PairLeft -> SimpletonComponent(":baz", someValue)). sealed trait PairLeftOrRight case object PairLeft extends PairLeftOrRight case object PairRight extends PairLeftOrRight - def descendIntoPair(component: SimpletonComponent): (PairLeftOrRight, SimpletonComponent) = { + def descendIntoPair(component: SimpletonComponent): (PairLeftOrRight, SimpletonComponent) = component.path match { case MapElementPattern("left", more) => PairLeft -> component.copy(path = more) case MapElementPattern("right", more) => PairRight -> component.copy(path = more) } - } - def toWomFile(components: Iterable[SimpletonComponent]) = { + def toWomFile(components: Iterable[SimpletonComponent]) = // If there's just one simpleton, it's a primitive (file or directory) if (components.size == 1) components.asPrimitive else { // Otherwise make a map of the components and detect the type of file from the class field val groupedListing = components.asMap - def isClass(className: String) = { - groupedListing.get(ClassKey) - /* If the class field is in an array it will be prefixed with a ':', so check for that as well. 
- * e.g: secondaryFiles[0]:class -> "File" - * secondaryFiles[0]:value -> "file/path" - * would produce a Map( - * ":class" -> List(Simpleton("File")), - * ":value" -> List(Simpleton("file/path")) - * ) - */ - .orElse(groupedListing.get(s":$ClassKey")) - .map(_.asPrimitive.valueString) - .contains(className) - } + def isClass(className: String) = + groupedListing + .get(ClassKey) + /* If the class field is in an array it will be prefixed with a ':', so check for that as well. + * e.g: secondaryFiles[0]:class -> "File" + * secondaryFiles[0]:value -> "file/path" + * would produce a Map( + * ":class" -> List(Simpleton("File")), + * ":value" -> List(Simpleton("file/path")) + * ) + */ + .orElse(groupedListing.get(s":$ClassKey")) + .map(_.asPrimitive.valueString) + .contains(className) def isDirectory = isClass(WomValueSimpleton.DirectoryClass) def isFile = isClass(WomValueSimpleton.FileClass) if (isDirectory) toWomValue(WomMaybeListedDirectoryType, components) else if (isFile) toWomValue(WomMaybePopulatedFileType, components) - else throw new IllegalArgumentException(s"There is no WomFile that can be built from simpletons: ${groupedListing.toList.mkString(", ")}") + else + throw new IllegalArgumentException( + s"There is no WomFile that can be built from simpletons: ${groupedListing.toList.mkString(", ")}" + ) } - } outputType match { case _: WomPrimitiveType => @@ -151,19 +147,31 @@ object WomValueBuilder { WomArray(arrayType, components.asArray map { toWomValue(arrayType.memberType, _) }) case mapType: WomMapType => // map keys are guaranteed by WOM to be primitives, so the "coerceRawValue(..).get" is safe. - WomMap(mapType, components.asMap map { case (k, ss) => mapType.keyType.coerceRawValue(k).get -> toWomValue(mapType.valueType, ss) }) + WomMap(mapType, + components.asMap map { case (k, ss) => + mapType.keyType.coerceRawValue(k).get -> toWomValue(mapType.valueType, ss) + } + ) case pairType: WomPairType => - val groupedByLeftOrRight: Map[PairLeftOrRight, Iterable[SimpletonComponent]] = group(components map descendIntoPair) - WomPair(toWomValue(pairType.leftType, groupedByLeftOrRight(PairLeft)), toWomValue(pairType.rightType, groupedByLeftOrRight(PairRight))) + val groupedByLeftOrRight: Map[PairLeftOrRight, Iterable[SimpletonComponent]] = group( + components map descendIntoPair + ) + WomPair(toWomValue(pairType.leftType, groupedByLeftOrRight(PairLeft)), + toWomValue(pairType.rightType, groupedByLeftOrRight(PairRight)) + ) case WomObjectType => // map keys are guaranteed by WOM to be primitives, so the "coerceRawValue(..).get" is safe. val map: Map[String, WomValue] = components.asMap map { case (k, ss) => k -> toWomValue(WomAnyType, ss) } WomObject(map) case composite: WomCompositeType => val map: Map[String, WomValue] = components.asMap map { case (k, ss) => - val valueType = composite - .typeMap - .getOrElse(k, throw new RuntimeException(s"Field $k is not a declared field of composite type $composite. Cannot build a WomValue from the simpletons.")) + val valueType = composite.typeMap + .getOrElse( + k, + throw new RuntimeException( + s"Field $k is not a declared field of composite type $composite. Cannot build a WomValue from the simpletons." 
+ ) + ) k -> toWomValue(valueType, ss) } WomObject.withTypeUnsafe(map, composite) @@ -171,8 +179,9 @@ object WomValueBuilder { val directoryValues = components.asMap val value = directoryValues.get("value").map(_.asString) - val listing = directoryValues.get("listing") - .map({ _.asArray.map(toWomFile).collect({ case womFile: WomFile => womFile }) }) + val listing = directoryValues + .get("listing") + .map(_.asArray.map(toWomFile).collect { case womFile: WomFile => womFile }) WomMaybeListedDirectory(value, listing) case WomMaybePopulatedFileType => @@ -183,9 +192,9 @@ object WomValueBuilder { val size = populatedValues.get("size").map(_.asString.toLong) val format = populatedValues.get("format").map(_.asString) val contents = populatedValues.get("contents").map(_.asString) - val secondaryFiles = populatedValues.get("secondaryFiles").toList.flatMap({ - _.asArray.map(toWomFile).collect({ case womFile: WomFile => womFile }) - }) + val secondaryFiles = populatedValues.get("secondaryFiles").toList.flatMap { + _.asArray.map(toWomFile).collect { case womFile: WomFile => womFile } + } WomMaybePopulatedFile( valueOption = value, @@ -234,23 +243,28 @@ object WomValueBuilder { */ private case class SimpletonComponent(path: String, value: WomValue) - def toJobOutputs(taskOutputs: Iterable[OutputPort], simpletons: Iterable[WomValueSimpleton]): CallOutputs = { + def toJobOutputs(taskOutputs: Iterable[OutputPort], simpletons: Iterable[WomValueSimpleton]): CallOutputs = CallOutputs(toWomValues(taskOutputs, simpletons)) - } - def toWomValues(taskOutputs: Iterable[OutputPort], simpletons: Iterable[WomValueSimpleton]): Map[OutputPort, WomValue] = { + def toWomValues(taskOutputs: Iterable[OutputPort], + simpletons: Iterable[WomValueSimpleton] + ): Map[OutputPort, WomValue] = { - def simpletonToComponent(name: String)(simpleton: WomValueSimpleton): SimpletonComponent = { + def simpletonToComponent(name: String)(simpleton: WomValueSimpleton): SimpletonComponent = SimpletonComponent(simpleton.simpletonKey.drop(name.length), simpleton.simpletonValue) - } // This is meant to "rehydrate" simpletonized WomValues back to WomValues. It is assumed that these WomValues were // "dehydrated" to WomValueSimpletons correctly. This code is not robust to corrupt input whatsoever. 
val types = taskOutputs map { o => o -> o.womType } toMap - val simpletonsByOutputName = simpletons groupBy { _.simpletonKey match { case IdentifierAndPathPattern(i, _) => i } } + val simpletonsByOutputName = simpletons groupBy { + _.simpletonKey match { case IdentifierAndPathPattern(i, _) => i } + } val simpletonComponentsByOutputName: Map[String, Iterable[SimpletonComponent]] = simpletonsByOutputName map { case (name, ss) => name -> (ss map simpletonToComponent(name)) } - types map { case (outputPort, outputType) => outputPort -> toWomValue(outputType, simpletonComponentsByOutputName.getOrElse(outputPort.internalName, Seq.empty))} + types map { case (outputPort, outputType) => + outputPort -> toWomValue(outputType, + simpletonComponentsByOutputName.getOrElse(outputPort.internalName, Seq.empty) + ) + } } } - diff --git a/core/src/main/scala/cromwell/core/simpleton/WomValueSimpleton.scala b/core/src/main/scala/cromwell/core/simpleton/WomValueSimpleton.scala index 01ec7ddf8c8..35e7accdf07 100644 --- a/core/src/main/scala/cromwell/core/simpleton/WomValueSimpleton.scala +++ b/core/src/main/scala/cromwell/core/simpleton/WomValueSimpleton.scala @@ -15,7 +15,7 @@ case class WomValueSimpleton(simpletonKey: String, simpletonValue: WomPrimitive) * `WomValueSimpleton`s are transformed back to `WomValue`s. */ object WomValueSimpleton { - + val ClassKey = "class" val DirectoryClass = "Directory" val FileClass = "File" @@ -35,23 +35,30 @@ object WomValueSimpleton { private def toNumberSimpleton(key: String)(value: Long) = WomValueSimpleton(key, WomInteger(value.toInt)) // Pass the simplifyMode down to recursive calls without having to sling the parameter around explicitly. - def simplify(name: String)(implicit simplifyMode: SimplifyMode = SimplifyMode(forCaching = false)): Iterable[WomValueSimpleton] = { + def simplify( + name: String + )(implicit simplifyMode: SimplifyMode = SimplifyMode(forCaching = false)): Iterable[WomValueSimpleton] = { def suffix(suffix: String) = s"$name:$suffix" val fileValueSimplifier: String => String => WomValueSimpleton = if (simplifyMode.forCaching) key => value => WomValueSimpleton(key, WomSingleFile(value)) else toStringSimpleton // What should this even do? Maybe just pick out the last bit of the path and store that as a String? 
val directoryValueSimplifier: String => String => WomValueSimpleton = - if (simplifyMode.forCaching) key => value => WomValueSimpleton(key, WomString(value.substring(value.lastIndexOf("/") + 1))) else toStringSimpleton + if (simplifyMode.forCaching) + key => value => WomValueSimpleton(key, WomString(value.substring(value.lastIndexOf("/") + 1))) + else toStringSimpleton womValue match { case prim: WomPrimitive => List(WomValueSimpleton(name, prim)) case opt: WomOptionalValue => opt.value.map(_.simplify(name)).getOrElse(Seq.empty) - case WomArray(_, arrayValue) => arrayValue.zipWithIndex flatMap { case (arrayItem, index) => arrayItem.simplify(s"$name[$index]") } - case WomMap(_, mapValue) => mapValue flatMap { case (key, value) => value.simplify(s"$name:${key.valueString.escapeMeta}") } + case WomArray(_, arrayValue) => + arrayValue.zipWithIndex flatMap { case (arrayItem, index) => arrayItem.simplify(s"$name[$index]") } + case WomMap(_, mapValue) => + mapValue flatMap { case (key, value) => value.simplify(s"$name:${key.valueString.escapeMeta}") } case WomPair(left, right) => left.simplify(s"$name:left") ++ right.simplify(s"$name:right") - case womObjectLike: WomObjectLike => womObjectLike.values flatMap { - case (key, value) => value.simplify(s"$name:${key.escapeMeta}") - } + case womObjectLike: WomObjectLike => + womObjectLike.values flatMap { case (key, value) => + value.simplify(s"$name:${key.escapeMeta}") + } case WomMaybeListedDirectory(valueOption, listingOption, _, _) => // This simpleton is not strictly part of the WomFile but is used to record the type of this WomValue so it can // be re-built appropriately in the WomValueBuilder @@ -82,10 +89,14 @@ object WomValueSimpleton { } implicit class WomValuesSimplifier(womValues: Map[String, WomValue]) { - def simplifyForCaching: Iterable[WomValueSimpleton] = womValues flatMap { case (name, value) => value.simplify(name)(simplifyMode = SimplifyMode(forCaching = true)) } + def simplifyForCaching: Iterable[WomValueSimpleton] = womValues flatMap { case (name, value) => + value.simplify(name)(simplifyMode = SimplifyMode(forCaching = true)) + } } implicit class WomValuesSimplifierPort(womValues: Map[OutputPort, WomValue]) { - def simplify: Iterable[WomValueSimpleton] = womValues flatMap { case (port, value) => value.simplify(port.internalName) } + def simplify: Iterable[WomValueSimpleton] = womValues flatMap { case (port, value) => + value.simplify(port.internalName) + } } } diff --git a/core/src/main/scala/cromwell/util/DatabaseUtil.scala b/core/src/main/scala/cromwell/util/DatabaseUtil.scala index d5033415df1..af1b7c176d2 100644 --- a/core/src/main/scala/cromwell/util/DatabaseUtil.scala +++ b/core/src/main/scala/cromwell/util/DatabaseUtil.scala @@ -16,7 +16,7 @@ object DatabaseUtil { } def withRetry[A](f: () => Future[A])(implicit actorSystem: ActorSystem): Future[A] = { - val RetryBackoff = SimpleExponentialBackoff(50 millis, 1 seconds, 1D) + val RetryBackoff = SimpleExponentialBackoff(50 millis, 1 seconds, 1d) Retry.withRetry(f, maxRetries = Option(10), backoff = RetryBackoff, isTransient = isTransient) } } diff --git a/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala b/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala index 5ed66fb5e9c..5a1ea5ef2d9 100644 --- a/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala +++ b/core/src/main/scala/cromwell/util/GracefulShutdownHelper.scala @@ -12,10 +12,10 @@ object GracefulShutdownHelper { trait GracefulShutdownHelper extends GracefulStopSupport { this: Actor 
with ActorLogging => private var shuttingDown: Boolean = false private var shutdownList: Set[ActorRef] = Set.empty - + def isShuttingDown: Boolean = shuttingDown - def waitForActorsAndShutdown(actorsLists: NonEmptyList[ActorRef]): Unit = { + def waitForActorsAndShutdown(actorsLists: NonEmptyList[ActorRef]): Unit = if (shuttingDown) { log.error("Programmer error, this actor has already initiated its shutdown. Only call this once per actor !") } else { @@ -23,12 +23,11 @@ trait GracefulShutdownHelper extends GracefulStopSupport { this: Actor with Acto shutdownList = actorsLists.toList.toSet shutdownList foreach context.watch shutdownList foreach { _ ! ShutdownCommand } - + context become { case Terminated(actor) if shuttingDown && shutdownList.contains(actor) => shutdownList = shutdownList - actor if (shutdownList.isEmpty) context stop self } } - } } diff --git a/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala b/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala index 811f9dce61f..1de0da45989 100644 --- a/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala +++ b/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala @@ -12,15 +12,15 @@ object WomValueJsonFormatter extends DefaultJsonProtocol { case f: WomFloat => JsNumber(f.value) case b: WomBoolean => JsBoolean(b.value) case f: WomSingleFile => JsString(f.value) - case o: WomObjectLike => new JsObject(o.values map {case(k, v) => k -> write(v)}) + case o: WomObjectLike => new JsObject(o.values map { case (k, v) => k -> write(v) }) case a: WomArray => new JsArray(a.value.map(write).toVector) - case m: WomMap => new JsObject(m.value map {case(k,v) => k.valueString -> write(v)}) + case m: WomMap => new JsObject(m.value map { case (k, v) => k.valueString -> write(v) }) case q: WomPair => new JsObject(Map("left" -> write(q.left), "right" -> write(q.right))) case WomOptionalValue(_, Some(innerValue)) => write(innerValue) case WomOptionalValue(_, None) => JsNull case WomCoproductValue(_, innerValue) => write(innerValue) case WomEnumerationValue(_, innerValue) => JsString(innerValue) - // handles WdlExpression + // handles WdlExpression case v: WomValue => JsString(v.toWomString) } @@ -31,7 +31,7 @@ object WomValueJsonFormatter extends DefaultJsonProtocol { // In addition, we make a lot of assumptions about what type of WomValue to create. Oh well... it should all fall out in the coercion (fingercrossed)! 
def read(value: JsValue): WomValue = value match { case JsObject(fields) => - val wdlFields: Map[WomValue, WomValue] = fields map {case (k, v) => WomString(k) -> read(v)} + val wdlFields: Map[WomValue, WomValue] = fields map { case (k, v) => WomString(k) -> read(v) } if (fields.isEmpty) WomMap(WomMapType(WomStringType, WomStringType), Map.empty[WomValue, WomValue]) else WomMap(WomMapType(wdlFields.head._1.womType, wdlFields.head._2.womType), wdlFields) case JsArray(vector) if vector.nonEmpty => WomArray(WomArrayType(read(vector.head).womType), vector map read) @@ -53,4 +53,3 @@ object WomSingleFileJsonFormatter extends DefaultJsonProtocol { } } } - diff --git a/core/src/main/scala/cromwell/util/PromiseActor.scala b/core/src/main/scala/cromwell/util/PromiseActor.scala index bd5efa5b0c4..efe3f0cb217 100644 --- a/core/src/main/scala/cromwell/util/PromiseActor.scala +++ b/core/src/main/scala/cromwell/util/PromiseActor.scala @@ -17,7 +17,10 @@ private class PromiseActor(promise: Promise[Any], sendTo: ActorRef, msg: Any) ex if (actorRef == sendTo) { promise.tryFailure(new RuntimeException("Promise-watched actor completed before sending back a message")) } else { - log.error("Spooky happenstances! A Terminated({}) message was sent to a private Promise actor which wasn't watching it!?", actorRef) + log.error( + "Spooky happenstances! A Terminated({}) message was sent to a private Promise actor which wasn't watching it!?", + actorRef + ) } context.stop(self) case success => @@ -27,6 +30,7 @@ private class PromiseActor(promise: Promise[Any], sendTo: ActorRef, msg: Any) ex } object PromiseActor { + /** * Sends a message to an actor and returns the future associated with the fullfilment of the reply * Can be used instead of the akka `ask` semantics, without any timeout @@ -42,11 +46,11 @@ object PromiseActor { promise.future } - def props(promise: Promise[Any], sendTo: ActorRef, msg: Any): Props = Props(new PromiseActor(promise, sendTo, msg)).withDispatcher(EngineDispatcher) + def props(promise: Promise[Any], sendTo: ActorRef, msg: Any): Props = + Props(new PromiseActor(promise, sendTo, msg)).withDispatcher(EngineDispatcher) implicit class EnhancedActorRef(val actorRef: ActorRef) extends AnyVal { - def askNoTimeout(message: Any)(implicit actorRefFactory: ActorRefFactory): Future[Any] = { + def askNoTimeout(message: Any)(implicit actorRefFactory: ActorRefFactory): Future[Any] = PromiseActor.askNoTimeout(message, actorRef) - } } } diff --git a/core/src/main/scala/cromwell/util/StopAndLogSupervisor.scala b/core/src/main/scala/cromwell/util/StopAndLogSupervisor.scala index 238cdd70573..50a43cf2987 100644 --- a/core/src/main/scala/cromwell/util/StopAndLogSupervisor.scala +++ b/core/src/main/scala/cromwell/util/StopAndLogSupervisor.scala @@ -8,13 +8,12 @@ trait StopAndLogSupervisor { this: Actor => protected def onFailure(actorRef: ActorRef, throwable: => Throwable): Unit final val stopAndLogStrategy: SupervisorStrategy = { - def stoppingDecider: Decider = { - case e: Exception => - onFailure(sender(), e) - Stop + def stoppingDecider: Decider = { case e: Exception => + onFailure(sender(), e) + Stop } OneForOneStrategy(loggingEnabled = false)(stoppingDecider) } - override final val supervisorStrategy = stopAndLogStrategy + final override val supervisorStrategy = stopAndLogStrategy } diff --git a/core/src/main/scala/cromwell/util/TryWithResource.scala b/core/src/main/scala/cromwell/util/TryWithResource.scala index fdbfbda1882..6909232150d 100644 --- a/core/src/main/scala/cromwell/util/TryWithResource.scala 
+++ b/core/src/main/scala/cromwell/util/TryWithResource.scala @@ -20,17 +20,17 @@ object TryWithResource { case x: Throwable => t = Option(x) throw x - } finally { + } finally resource foreach { r => - try { + try r.close() - } catch { - case y: Throwable => t match { - case Some(_t) => _t.addSuppressed(y) - case None => throw y - } + catch { + case y: Throwable => + t match { + case Some(_t) => _t.addSuppressed(y) + case None => throw y + } } } - } } } diff --git a/core/src/test/scala/cromwell/core/DockerCredentialsSpec.scala b/core/src/test/scala/cromwell/core/DockerCredentialsSpec.scala index 25418a88fa3..4a3dc8c4929 100644 --- a/core/src/test/scala/cromwell/core/DockerCredentialsSpec.scala +++ b/core/src/test/scala/cromwell/core/DockerCredentialsSpec.scala @@ -18,11 +18,11 @@ class DockerCredentialsSpec extends AnyFlatSpec with Matchers { val credentials: Any = new DockerCredentials(Base64.getEncoder.encodeToString(tokenString.getBytes()), None, None) credentials match { - case DockerCredentialUsernameAndPassword(u, p) => { + case DockerCredentialUsernameAndPassword(u, p) => u should be(expectedUsername) p should be(expectedPassword) - } - case _ => fail(s"Expected to decompose ${tokenString} into username=$expectedPassword and password=$expectedPassword") + case _ => + fail(s"Expected to decompose ${tokenString} into username=$expectedPassword and password=$expectedPassword") } } } diff --git a/core/src/test/scala/cromwell/core/LoadConfigSpec.scala b/core/src/test/scala/cromwell/core/LoadConfigSpec.scala index 0ac4947aa1c..b2008ef3aa0 100644 --- a/core/src/test/scala/cromwell/core/LoadConfigSpec.scala +++ b/core/src/test/scala/cromwell/core/LoadConfigSpec.scala @@ -8,7 +8,7 @@ import scala.concurrent.duration._ class LoadConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "LoadConfig" - + it should "parse load config" in { LoadConfig.JobStoreReadThreshold shouldBe 10000 LoadConfig.JobStoreWriteThreshold shouldBe 10000 diff --git a/core/src/test/scala/cromwell/core/MockIoActor.scala b/core/src/test/scala/cromwell/core/MockIoActor.scala index 3c1831ae8d3..dd2e8d114ac 100644 --- a/core/src/test/scala/cromwell/core/MockIoActor.scala +++ b/core/src/test/scala/cromwell/core/MockIoActor.scala @@ -18,13 +18,14 @@ class MockIoActor(returnCode: String, stderrSize: Long) extends Actor { case command: IoSizeCommand => sender() ! IoSuccess(command, 0L) case command: IoContentAsStringCommand => sender() ! IoSuccess(command, "0") case command: IoExistsCommand => sender() ! IoSuccess(command, false) - - // With context + + // With context case (requestContext: Any, command: IoCopyCommand) => sender() ! (requestContext -> IoSuccess(command, ())) case (requestContext: Any, command: IoWriteCommand) => sender() ! (requestContext -> IoSuccess(command, ())) case (requestContext: Any, command: IoDeleteCommand) => sender() ! (requestContext -> IoSuccess(command, ())) case (requestContext: Any, command: IoSizeCommand) => sender() ! (requestContext -> IoSuccess(command, stderrSize)) - case (requestContext: Any, command: IoContentAsStringCommand) => sender() ! (requestContext -> IoSuccess(command, returnCode)) + case (requestContext: Any, command: IoContentAsStringCommand) => + sender() ! (requestContext -> IoSuccess(command, returnCode)) case (requestContext: Any, command: IoExistsCommand) => sender() ! (requestContext -> IoSuccess(command, false)) case withPromise: IoCommandWithPromise[_] => self ! 
((withPromise.promise, withPromise.ioCommand)) diff --git a/core/src/test/scala/cromwell/core/SimpleIoActor.scala b/core/src/test/scala/cromwell/core/SimpleIoActor.scala index 589065758c5..4ca3d3c1bdf 100644 --- a/core/src/test/scala/cromwell/core/SimpleIoActor.scala +++ b/core/src/test/scala/cromwell/core/SimpleIoActor.scala @@ -14,49 +14,44 @@ object SimpleIoActor { } class SimpleIoActor extends Actor { - + override def receive: Receive = { case command: IoCopyCommand => - Try(command.source.copyTo(command.destination)) match { case Success(_) => sender() ! IoSuccess(command, ()) case Failure(failure) => sender() ! IoFailure(command, failure) } - + case command: IoWriteCommand => - Try(command.file.write(command.content)(command.openOptions, StandardCharsets.UTF_8)) match { case Success(_) => sender() ! IoSuccess(command, ()) case Failure(failure) => sender() ! IoFailure(command, failure) } - + case command: IoDeleteCommand => - Try(command.file.delete(command.swallowIOExceptions)) match { case Success(_) => sender() ! IoSuccess(command, ()) case Failure(failure) => sender() ! IoFailure(command, failure) } - + case command: IoSizeCommand => - Try(command.file.size) match { case Success(size) => sender() ! IoSuccess(command, size) case Failure(failure) => sender() ! IoFailure(command, failure) } - + case command: IoContentAsStringCommand => - Try(command.file.contentAsString) match { case Success(content) => sender() ! IoSuccess(command, content) case Failure(failure) => sender() ! IoFailure(command, failure) } - + case command: IoHashCommand => Try(command.file.md5) match { case Success(hash) => sender() ! IoSuccess(command, hash) case Failure(failure) => sender() ! IoFailure(command, failure) } - + case command: IoExistsCommand => Try(command.file.exists) match { case Success(exists) => sender() ! IoSuccess(command, exists) @@ -65,49 +60,42 @@ class SimpleIoActor extends Actor { // With context case (requestContext: Any, command: IoCopyCommand) => - Try(command.source.copyTo(command.destination, overwrite = true)) match { case Success(_) => sender() ! (requestContext -> IoSuccess(command, ())) case Failure(failure) => sender() ! (requestContext -> IoFailure(command, failure)) } - - case (requestContext: Any, command: IoWriteCommand) => + case (requestContext: Any, command: IoWriteCommand) => Try(command.file.write(command.content)) match { case Success(_) => sender() ! (requestContext -> IoSuccess(command, ())) case Failure(failure) => sender() ! (requestContext -> IoFailure(command, failure)) } - - case (requestContext: Any, command: IoDeleteCommand) => + case (requestContext: Any, command: IoDeleteCommand) => Try(command.file.delete(command.swallowIOExceptions)) match { case Success(_) => sender() ! (requestContext -> IoSuccess(command, ())) case Failure(failure) => sender() ! (requestContext -> IoFailure(command, failure)) } - + case (requestContext: Any, command: IoSizeCommand) => - Try(command.file.size) match { case Success(size) => sender() ! (requestContext -> IoSuccess(command, size)) case Failure(failure) => sender() ! (requestContext -> IoFailure(command, failure)) } - - case (requestContext: Any, command: IoContentAsStringCommand) => + case (requestContext: Any, command: IoContentAsStringCommand) => Try(command.file.contentAsString) match { case Success(content) => sender() ! (requestContext -> IoSuccess(command, content)) case Failure(failure) => sender() ! 
(requestContext -> IoFailure(command, failure)) } - + case (requestContext: Any, command: IoHashCommand) => - Try(command.file.md5) match { case Success(hash) => sender() ! (requestContext -> IoSuccess(command, hash)) case Failure(failure) => sender() ! (requestContext -> IoFailure(command, failure)) } case (requestContext: Any, command: IoExistsCommand) => - Try(command.file.exists) match { case Success(exists) => sender() ! (requestContext -> IoSuccess(command, exists)) case Failure(failure) => sender() ! (requestContext -> IoFailure(command, failure)) diff --git a/core/src/test/scala/cromwell/core/TestKitSuite.scala b/core/src/test/scala/cromwell/core/TestKitSuite.scala index d9fe5c3d56c..8febaa3edd8 100644 --- a/core/src/test/scala/cromwell/core/TestKitSuite.scala +++ b/core/src/test/scala/cromwell/core/TestKitSuite.scala @@ -18,9 +18,8 @@ abstract class TestKitSuite extends TestKitBase with Suite with BeforeAndAfterAl implicit lazy val system: ActorSystem = ActorSystem(actorSystemName, actorSystemConfig) - override protected def afterAll(): Unit = { + override protected def afterAll(): Unit = shutdown() - } // 'BlackHoleActor' swallows messages without logging them (thus reduces log file overhead): val emptyActor: ActorRef = system.actorOf(TestActors.blackholeProps, "TestKitSuiteEmptyActor") diff --git a/core/src/test/scala/cromwell/core/WorkflowOptionsSpec.scala b/core/src/test/scala/cromwell/core/WorkflowOptionsSpec.scala index 22aa4577c63..dd4de0a4773 100644 --- a/core/src/test/scala/cromwell/core/WorkflowOptionsSpec.scala +++ b/core/src/test/scala/cromwell/core/WorkflowOptionsSpec.scala @@ -21,11 +21,11 @@ class WorkflowOptionsSpec extends Matchers with AnyWordSpecLike { WorkflowOptions.fromJsonObject(workflowOptionsJson) match { case Success(options) => options.get("key") shouldEqual Success("value") - options.get("bad_key") shouldBe a [Failure[_]] + options.get("bad_key") shouldBe a[Failure[_]] options.clearEncryptedValues.asPrettyJson shouldEqual """{ - | "key": "value" - |}""".stripMargin + | "key": "value" + |}""".stripMargin case _ => fail("Expecting workflow options to be parseable") } } diff --git a/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala b/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala index e426a349448..1fe0ca6658b 100644 --- a/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala +++ b/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala @@ -189,24 +189,23 @@ class BatchActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wit } } - class BatchActorTest(processingTime: FiniteDuration = Duration.Zero, fail: Boolean = false) extends BatchActor[String](10.hours, 10) { + class BatchActorTest(processingTime: FiniteDuration = Duration.Zero, fail: Boolean = false) + extends BatchActor[String](10.hours, 10) { var processed: Vector[String] = Vector.empty - override def commandToData(snd: ActorRef) = { - case command: String => command + override def commandToData(snd: ActorRef) = { case command: String => + command } override protected def weightFunction(command: String) = command.length - override protected def process(data: NonEmptyVector[String]) = { + override protected def process(data: NonEmptyVector[String]) = if (processingTime != Duration.Zero) { processed = processed ++ data.toVector - val promise = Promise[Int]() - system.scheduler.scheduleOnce(processingTime) { promise.success(data.map(weightFunction).toVector.sum) } + val promise = Promise[Int]() + 
system.scheduler.scheduleOnce(processingTime)(promise.success(data.map(weightFunction).toVector.sum)) promise.future } else if (!fail) { processed = processed ++ data.toVector Future.successful(data.map(weightFunction).toVector.sum) - } - else Future.failed(new Exception("Oh nose ! (This is a test failure and is expected !)") with NoStackTrace) - } + } else Future.failed(new Exception("Oh nose ! (This is a test failure and is expected !)") with NoStackTrace) } } diff --git a/core/src/test/scala/cromwell/core/actor/RobustClientHelperSpec.scala b/core/src/test/scala/cromwell/core/actor/RobustClientHelperSpec.scala index 939a1aa1834..dc29807d061 100644 --- a/core/src/test/scala/cromwell/core/actor/RobustClientHelperSpec.scala +++ b/core/src/test/scala/cromwell/core/actor/RobustClientHelperSpec.scala @@ -14,39 +14,39 @@ import scala.language.postfixOps class RobustClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender { behavior of "RobustClientHelper" - + it should "handle Backpressure responses" in { val remoteActor = TestProbe() val delegateActor = TestProbe() - + val margin = 2.second - val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2D, 0D) + val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2d, 0d) val noResponseTimeout = 10 seconds val testActor = TestActorRef(new TestActor(delegateActor.ref, backoff, noResponseTimeout)) - + val messageToSend = TestActor.TestMessage("hello") - - //send message + + // send message testActor.underlyingActor.sendMessage(messageToSend, remoteActor.ref) - + // remote actor receives message remoteActor.expectMsg(messageToSend) - + // remote actor sends a backpressure message remoteActor.reply(BackPressure(messageToSend)) - + // remote actor expects request again after backpressureTimeout remoteActor.expectMsg(1.second + margin, messageToSend) - + // remote actor replies remoteActor.reply("world") - + // delegate actor receives response delegateActor.expectMsg("world") - + // remote actor doesn't receives new messages remoteActor.expectNoMessage() - + // Wait long enough that to make sure that we won't receive a ServiceUnreachable message, meaning the timeout timer // has been cancelled. Note that it is the responsibility of the actor to cancel it, the RobustClientHelper does not // handle that part. 
@@ -57,7 +57,7 @@ class RobustClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matc val remoteActor = TestProbe() val delegateActor = TestProbe() - val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2D, 0D) + val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2d, 0d) val noResponseTimeout = 20 seconds val testActor = TestActorRef(new TestActor(delegateActor.ref, backoff, noResponseTimeout)) @@ -68,13 +68,13 @@ class RobustClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matc // remote actor receives message remoteActor.expectMsg(messageToSend) - + // remote actor replies remoteActor.reply("world") - + // delegate receives response delegateActor.expectMsg("world") - + // remote actor doesn't receives new messages remoteActor.expectNoMessage() delegateActor.expectNoMessage() @@ -84,7 +84,7 @@ class RobustClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matc val remoteActor = TestProbe() val delegateActor = TestProbe() - val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2D, 0D) + val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2d, 0d) val noResponseTimeout = 2 seconds val testActor = TestActorRef(new TestActor(delegateActor.ref, backoff, noResponseTimeout)) @@ -109,9 +109,9 @@ class RobustClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matc it should "reset timeout when backpressured is received" in { val remoteActor = TestProbe() val delegateActor = TestProbe() - + val margin = 1 second - val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2D, 0D) + val backoff = SimpleExponentialBackoff(1.second, 10.seconds, 2d, 0d) val noResponseTimeout = 3 seconds val testActor = TestActorRef(new TestActor(delegateActor.ref, backoff, noResponseTimeout)) @@ -141,32 +141,31 @@ class RobustClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matc delegateActor.expectNoMessage(4 seconds) } - private [actor] object TestActor { + private[actor] object TestActor { case class TestMessage(v: String) case object ServiceUnreachable } - private class TestActor(delegateTo: ActorRef, - backoff: Backoff, - noResponseTimeout: FiniteDuration) extends Actor with ActorLogging with RobustClientHelper { + private class TestActor(delegateTo: ActorRef, backoff: Backoff, noResponseTimeout: FiniteDuration) + extends Actor + with ActorLogging + with RobustClientHelper { override def initialBackoff(): Backoff = backoff context.become(robustReceive orElse receive) var messageSent: Any = _ - - override def receive: Receive = { - case message => - cancelTimeout(messageSent) - delegateTo ! message + + override def receive: Receive = { case message => + cancelTimeout(messageSent) + delegateTo ! message } - + def sendMessage(message: Any, to: ActorRef) = { messageSent = message robustSend(message, to, noResponseTimeout) } - override protected def onTimeout(message: Any, to: ActorRef): Unit = { + override protected def onTimeout(message: Any, to: ActorRef): Unit = delegateTo ! 
TestActor.ServiceUnreachable - } } } diff --git a/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala b/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala index a7d42d7ffdd..b45a5e6fba7 100644 --- a/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala +++ b/core/src/test/scala/cromwell/core/actor/StreamActorHelperSpec.scala @@ -13,10 +13,9 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.ExecutionContext - class StreamActorHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender { behavior of "StreamActorHelper" - + implicit val materializer = ActorMaterializer() it should "catch EnqueueResponse message" in { @@ -26,7 +25,7 @@ class StreamActorHelperSpec extends TestKitSuite with AnyFlatSpecLike with Match expectMsg("hello") system stop actor } - + it should "send a backpressure message when messages are dropped by the queue" in { val actor = TestActorRef(new TestStreamActor(1)) val command = new TestStreamActorCommand @@ -50,14 +49,19 @@ class StreamActorHelperSpec extends TestKitSuite with AnyFlatSpecLike with Match } } - private object TestStreamActor { class TestStreamActorCommand - case class TestStreamActorContext(request: TestStreamActorCommand, replyTo: ActorRef, override val clientContext: Option[Any]) extends StreamContext + case class TestStreamActorContext(request: TestStreamActorCommand, + replyTo: ActorRef, + override val clientContext: Option[Any] + ) extends StreamContext } -private class TestStreamActor(queueSize: Int)(implicit override val materializer: ActorMaterializer) extends Actor with ActorLogging with StreamActorHelper[TestStreamActorContext] { - +private class TestStreamActor(queueSize: Int)(implicit override val materializer: ActorMaterializer) + extends Actor + with ActorLogging + with StreamActorHelper[TestStreamActorContext] { + override protected def actorReceive: Receive = { case command: TestStreamActorCommand => val replyTo = sender() @@ -69,8 +73,9 @@ private class TestStreamActor(queueSize: Int)(implicit override val materializer sendToStream(commandContext) } - override protected val streamSource = Source.queue[TestStreamActorContext](queueSize, OverflowStrategy.dropNew) - .map{ ("hello", _) } + override protected val streamSource = Source + .queue[TestStreamActorContext](queueSize, OverflowStrategy.dropNew) + .map(("hello", _)) - override implicit def ec: ExecutionContext = context.dispatcher + implicit override def ec: ExecutionContext = context.dispatcher } diff --git a/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala b/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala index 82bc6bacb25..dbd8bd4b82c 100644 --- a/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala +++ b/core/src/test/scala/cromwell/core/callcaching/HashKeySpec.scala @@ -4,7 +4,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class HashKeySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "HashKey" should "produce consistent key value" in { @@ -20,7 +19,7 @@ class HashKeySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { HashKey("output", "String myOutput"), HashKey("runtime attribute", "docker") ) - + keys map { _.key } should contain theSameElementsAs Set( "command template", "backend name", @@ -34,5 +33,5 @@ class HashKeySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "runtime attribute: docker" ) } - + } 
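The timing assertions in the specs above all hinge on the 1s-to-10s doubling schedule passed as SimpleExponentialBackoff(1.second, 10.seconds, 2d, 0d). A minimal self-contained sketch of that schedule follows; DemoBackoff is a hypothetical stand-in for illustration, not the real SimpleExponentialBackoff, which delegates to Google's ExponentialBackOff:

import scala.concurrent.duration._

object BackoffScheduleDemo extends App {
  // Hypothetical stand-in: each step multiplies the current delay by
  // `multiplier`, capped at `max`, with zero randomization.
  final case class DemoBackoff(current: FiniteDuration, max: FiniteDuration, multiplier: Double) {
    def backoffMillis: Long = current.toMillis
    def next: DemoBackoff =
      copy(current = (current.toMillis * multiplier).toLong.millis.min(max))
  }

  // With (1.second, 10.seconds, 2.0) the delays run 1s, 2s, 4s, 8s, then pin
  // at 10s — which is why the first backpressured resend is expected within
  // "1.second + margin" in the spec above.
  val delays = Iterator
    .iterate(DemoBackoff(1.second, 10.seconds, 2.0))(_.next)
    .map(_.backoffMillis)
    .take(6)
    .toList
  println(delays) // List(1000, 2000, 4000, 8000, 10000, 10000)
}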
diff --git a/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala b/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala index 8e958cce518..caacfd54ed8 100644 --- a/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala +++ b/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala @@ -15,13 +15,12 @@ import scala.concurrent.ExecutionContext class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "CromwellFileSystems" - val globalConfig = ConfigFactory.parseString( - """ - |filesystems { - | fs1.class = "cromwell.core.path.MockPathBuilderFactory" - | fs2.class = "cromwell.core.path.MockPathBuilderFactory" - | fs3.class = "cromwell.core.filesystem.MockNotPathBuilderFactory" - |} + val globalConfig = ConfigFactory.parseString(""" + |filesystems { + | fs1.class = "cromwell.core.path.MockPathBuilderFactory" + | fs2.class = "cromwell.core.path.MockPathBuilderFactory" + | fs3.class = "cromwell.core.filesystem.MockNotPathBuilderFactory" + |} """.stripMargin) val cromwellFileSystems = new CromwellFileSystems(globalConfig) @@ -29,12 +28,11 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with it should "build factory builders and factories for valid configuration" in { cromwellFileSystems.factoryBuilders.keySet shouldBe Set("fs1", "fs2", "fs3") - val factoriesConfig = ConfigFactory.parseString( - """ - |filesystems { - | fs1.somekey = "somevalue" - | fs2.someotherkey = "someothervalue" - |} + val factoriesConfig = ConfigFactory.parseString(""" + |filesystems { + | fs1.somekey = "somevalue" + | fs2.someotherkey = "someothervalue" + |} """.stripMargin) val pathFactories = cromwellFileSystems.factoriesFromConfig(factoriesConfig) @@ -48,16 +46,16 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with } it should "build singleton instance if specified" in { - val rootConf = ConfigFactory.parseString( - """ - |filesystems { - | fs1 { - | class = "cromwell.core.filesystem.MockPathBuilderFactoryCustomSingletonConfig" - | global { - | class = "cromwell.core.filesystem.MockSingletonConfig" - | } - | } - |} + val rootConf = + ConfigFactory.parseString(""" + |filesystems { + | fs1 { + | class = "cromwell.core.filesystem.MockPathBuilderFactoryCustomSingletonConfig" + | global { + | class = "cromwell.core.filesystem.MockSingletonConfig" + | } + | } + |} """.stripMargin) val cromwellFileSystems = new CromwellFileSystems(rootConf) @@ -69,21 +67,33 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with val factory2 = cromwellFileSystems.buildFactory("fs1", ConfigFactory.empty) // The singleton configs should be the same for different factories - assert(factory1.toOption.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig == - factory2.toOption.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig) + assert( + factory1.toOption.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig == + factory2.toOption.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig + ) } List( - ("if the filesystem does not exist", "filesystems.fs4.key = value", NonEmptyList.one("Cannot find a filesystem with name fs4 in the configuration. 
Available filesystems: fs1, fs2, fs3")), - ("if the config is invalid", "filesystems.fs1 = true", NonEmptyList.one("Invalid filesystem backend configuration for fs1")), - ("the class is not a PathBuilderFactory", "filesystems.fs3.key = value", NonEmptyList.one("The filesystem class for fs3 is not an instance of PathBuilderFactory")) - ) foreach { - case (description, config, expected) => - it should s"fail to build factories $description" in { - val result = cromwellFileSystems.factoriesFromConfig(ConfigFactory.parseString(config)) - result.isLeft shouldBe true - result.swap.toOption.get shouldBe expected - } + ("if the filesystem does not exist", + "filesystems.fs4.key = value", + NonEmptyList.one( + "Cannot find a filesystem with name fs4 in the configuration. Available filesystems: fs1, fs2, fs3" + ) + ), + ("if the config is invalid", + "filesystems.fs1 = true", + NonEmptyList.one("Invalid filesystem backend configuration for fs1") + ), + ("the class is not a PathBuilderFactory", + "filesystems.fs3.key = value", + NonEmptyList.one("The filesystem class for fs3 is not an instance of PathBuilderFactory") + ) + ) foreach { case (description, config, expected) => + it should s"fail to build factories $description" in { + val result = cromwellFileSystems.factoriesFromConfig(ConfigFactory.parseString(config)) + result.isLeft shouldBe true + result.swap.toOption.get shouldBe expected + } } val classNotFoundException = AggregatedMessageException( @@ -93,7 +103,9 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with val wrongSignatureException = AggregatedMessageException( "Failed to initialize Cromwell filesystems", - List("Class cromwell.core.filesystem.MockPathBuilderFactoryWrongSignature for filesystem fs1 does not have the required constructor signature: (com.typesafe.config.Config, com.typesafe.config.Config)") + List( + "Class cromwell.core.filesystem.MockPathBuilderFactoryWrongSignature for filesystem fs1 does not have the required constructor signature: (com.typesafe.config.Config, com.typesafe.config.Config)" + ) ) val invalidConfigException = AggregatedMessageException( @@ -110,13 +122,15 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with ("is invalid", "filesystems.gcs = true", invalidConfigException), ("is missing class fields", "filesystems.fs1.notclass = hello", missingClassFieldException), ("can't find class", "filesystems.fs1.class = do.not.exists", classNotFoundException), - ("has invalid class signature", "filesystems.fs1.class = cromwell.core.filesystem.MockPathBuilderFactoryWrongSignature", wrongSignatureException) - ) foreach { - case (description, config, expected) => - it should s"fail if global filesystems config $description" in { - val ex = the[Exception] thrownBy { new CromwellFileSystems(ConfigFactory.parseString(config)) } - ex shouldBe expected - } + ("has invalid class signature", + "filesystems.fs1.class = cromwell.core.filesystem.MockPathBuilderFactoryWrongSignature", + wrongSignatureException + ) + ) foreach { case (description, config, expected) => + it should s"fail if global filesystems config $description" in { + val ex = the[Exception] thrownBy new CromwellFileSystems(ConfigFactory.parseString(config)) + ex shouldBe expected + } } } @@ -124,6 +138,10 @@ class MockPathBuilderFactoryWrongSignature() class MockNotPathBuilderFactory(globalConfig: Config, val instanceConfig: Config) class MockSingletonConfig(config: Config) -class MockPathBuilderFactoryCustomSingletonConfig(globalConfig: Config, val 
instanceConfig: Config, val singletonConfig: MockSingletonConfig) extends cromwell.core.path.PathBuilderFactory { - override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext) = throw new UnsupportedOperationException +class MockPathBuilderFactoryCustomSingletonConfig(globalConfig: Config, + val instanceConfig: Config, + val singletonConfig: MockSingletonConfig +) extends cromwell.core.path.PathBuilderFactory { + override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext) = + throw new UnsupportedOperationException } diff --git a/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala b/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala index 83521ea3432..07ad722ead2 100644 --- a/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala +++ b/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala @@ -95,7 +95,7 @@ class AsyncIoSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers { } // Honor swallow exception false - //noinspection RedundantDefaultArgument + // noinspection RedundantDefaultArgument recoverToSucceededIf[NoSuchFileException] { testActor.underlyingActor.asyncIo.deleteAsync(testPath, swallowIoExceptions = false) } @@ -103,8 +103,8 @@ class AsyncIoSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers { it should "handle command creation errors asynchronously" in { val partialIoCommandBuilder = new PartialIoCommandBuilder { - override def existsCommand: PartialFunction[Path, Try[IoExistsCommand]] = { - case _ => Failure(new Exception("everything's fine, I am an expected exists fail") with NoStackTrace) + override def existsCommand: PartialFunction[Path, Try[IoExistsCommand]] = { case _ => + Failure(new Exception("everything's fine, I am an expected exists fail") with NoStackTrace) } } val testActor = @@ -120,10 +120,11 @@ class AsyncIoSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers { private class AsyncIoTestActor(override val ioActor: ActorRef, override val ioCommandBuilder: IoCommandBuilder = DefaultIoCommandBuilder - ) extends Actor with ActorLogging with AsyncIoActorClient { + ) extends Actor + with ActorLogging + with AsyncIoActorClient { - override def receive: Receive = { - case _ => + override def receive: Receive = { case _ => } } diff --git a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala index 06132ba152b..ed07aeb3fc5 100644 --- a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala +++ b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala @@ -21,10 +21,11 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers it should "intercept IoAcks and cancel timers" in { val ioActorProbe = TestProbe() val delegateProbe = TestProbe() - val backoff = SimpleExponentialBackoff(100 seconds, 10.hours, 2D, 0D) + val backoff = SimpleExponentialBackoff(100 seconds, 10.hours, 2d, 0d) val noResponseTimeout = 3 seconds - val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backoff, noResponseTimeout)) + val testActor = + TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backoff, noResponseTimeout)) val command = DefaultIoSizeCommand(mock[Path]) val response = IoSuccess(command, 5L) @@ -51,10 +52,11 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers it should "intercept IoAcks and cancel timers for a command with context" in { val ioActorProbe = 
TestProbe() val delegateProbe = TestProbe() - val backoff = SimpleExponentialBackoff(100 seconds, 10.hours, 2D, 0D) + val backoff = SimpleExponentialBackoff(100 seconds, 10.hours, 2d, 0d) val noResponseTimeout = 3 seconds - val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backoff, noResponseTimeout)) + val testActor = + TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backoff, noResponseTimeout)) val commandContext = "context" val command = DefaultIoSizeCommand(mock[Path]) @@ -71,7 +73,7 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers // delegate should receive the response delegateProbe.expectMsgPF(1 second) { - case (contextReceived, responseReceived) if contextReceived == "context" && responseReceived == response => + case (contextReceived, responseReceived) if contextReceived == "context" && responseReceived == response => } // And nothing else, meaning the timeout timer has been cancelled @@ -84,9 +86,12 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers private case object ServiceUnreachable private class IoClientHelperTestActor(override val ioActor: ActorRef, - delegateTo: ActorRef, - backoff: Backoff, - noResponseTimeout: FiniteDuration) extends Actor with ActorLogging with IoClientHelper { + delegateTo: ActorRef, + backoff: Backoff, + noResponseTimeout: FiniteDuration + ) extends Actor + with ActorLogging + with IoClientHelper { implicit val ioCommandBuilder: DefaultIoCommandBuilder.type = DefaultIoCommandBuilder @@ -94,21 +99,18 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers context.become(ioReceive orElse receive) - override def receive: Receive = { - case message => delegateTo ! message + override def receive: Receive = { case message => + delegateTo ! message } - def sendMessage(command: IoCommand[_]): Unit = { + def sendMessage(command: IoCommand[_]): Unit = sendIoCommandWithCustomTimeout(command, noResponseTimeout) - } - def sendMessageWithContext(context: Any, command: IoCommand[_]): Unit = { + def sendMessageWithContext(context: Any, command: IoCommand[_]): Unit = sendIoCommandWithContext(command, context, noResponseTimeout) - } - override protected def onTimeout(message: Any, to: ActorRef): Unit = { + override protected def onTimeout(message: Any, to: ActorRef): Unit = delegateTo ! 
ServiceUnreachable - } } } diff --git a/core/src/test/scala/cromwell/core/labels/LabelSpec.scala b/core/src/test/scala/cromwell/core/labels/LabelSpec.scala index 6bf4e046ed2..bdb443a91ac 100644 --- a/core/src/test/scala/cromwell/core/labels/LabelSpec.scala +++ b/core/src/test/scala/cromwell/core/labels/LabelSpec.scala @@ -29,7 +29,7 @@ class LabelSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "11f2468c-39d6-4be3-85c8-32735c01e66b", "", "!@#$%^&*()_+={}[]:;'<>?,./`~", - "now valid 255 character value-at vero eosd accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa", + "now valid 255 character value-at vero eosd accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa" ) val badLabelKeys = List( diff --git a/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala b/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala index d40456e0128..d3d99289a35 100644 --- a/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala +++ b/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala @@ -13,8 +13,12 @@ import org.slf4j.Logger import org.slf4j.event.Level import common.mock.MockSugar -class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar - with TableDrivenPropertyChecks { +class LoggerWrapperSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with MockSugar + with TableDrivenPropertyChecks { behavior of "LoggerWrapper" @@ -25,7 +29,6 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche "slf4jMessages", "akkaMessages" ), - ( "log error with no args", _.error("Hello {} {} {} {}"), @@ -80,7 +83,6 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche List(Slf4jMessage(Level.ERROR, List("tag: Hello {} {} {} {}", "arg1", exception))), List(AkkaMessage(Logging.ErrorLevel, s"tag: Hello arg1 {} {} {}", Option(exception))) ), - ( "log warn with no args", _.warn("Hello {} {} {} {}"), @@ -123,7 +125,6 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche List(Slf4jMessage(Level.WARN, List("tag: Hello {} {} {} {}", exception))), List(AkkaMessage(Logging.WarningLevel, s"tag: Hello {} {} {} {}\n$exceptionMessage")) ), - ( "log info with no args", _.info("Hello {} {} {} {}"), @@ -138,7 +139,7 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ), ( "log info with one arg", - _.info("Hello {} {} {} {}", arg ="arg1"), + _.info("Hello {} {} {} {}", arg = "arg1"), List(Slf4jMessage(Level.INFO, List("tag: Hello {} {} {} {}", "arg1"))), List(AkkaMessage(Logging.InfoLevel, "tag: Hello arg1 {} {} {}")) ), @@ -166,7 +167,6 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche List(Slf4jMessage(Level.INFO, List("tag: Hello {} {} {} {}", exception))), List(AkkaMessage(Logging.InfoLevel, s"tag: Hello {} {} {} {}\n$exceptionMessage")) ), - ( "log debug with no args", _.debug("Hello {} {} {} {}"), @@ -181,7 +181,7 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ), ( "log debug with one arg", - _.debug("Hello {} {} {} {}", argument ="arg1"), + _.debug("Hello {} {} {} {}", argument = "arg1"), 
List(Slf4jMessage(Level.DEBUG, List("tag: Hello {} {} {} {}", "arg1"))), List(AkkaMessage(Logging.DebugLevel, "tag: Hello arg1 {} {} {}")) ), @@ -209,7 +209,6 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche List(Slf4jMessage(Level.DEBUG, List("tag: Hello {} {} {} {}", exception))), List(AkkaMessage(Logging.DebugLevel, s"tag: Hello {} {} {} {}\n$exceptionMessage")) ), - ( "log trace with no args", _.trace("Hello {} {} {} {}"), @@ -260,40 +259,36 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche var actualAkkaMessages = List.empty[AkkaMessage] - def toList(arguments: Any): List[Any] = { + def toList(arguments: Any): List[Any] = arguments match { case array: Array[_] => toLastFlattened(array) case seq: Seq[_] => seq.toList case any => List(any) } - } /* - * Flatten the last element of the array if the last element is itself an array. - * - * org.mockito.ArgumentMatchers#anyVararg() is deprecated, but works, sending in an empty array in the tail - * position. If we tried to use org.mockito.ArgumentMatchers#any(), it ends up mocking the wrong overloaded - * method. At each logging level there are two methods with very similar signatures: - * - * cromwell.core.logging.LoggerWrapper.error(pattern: String, arguments: AnyRef*) - * cromwell.core.logging.LoggerWrapper.error(pattern: String, arg: Any) - * - * As is, the Any vs. AnyRef overloads are barely dodging the issue https://issues.scala-lang.org/browse/SI-2991. - */ - def toLastFlattened(array: Array[_]): List[Any] = { + * Flatten the last element of the array if the last element is itself an array. + * + * org.mockito.ArgumentMatchers#anyVararg() is deprecated, but works, sending in an empty array in the tail + * position. If we tried to use org.mockito.ArgumentMatchers#any(), it ends up mocking the wrong overloaded + * method. At each logging level there are two methods with very similar signatures: + * + * cromwell.core.logging.LoggerWrapper.error(pattern: String, arguments: AnyRef*) + * cromwell.core.logging.LoggerWrapper.error(pattern: String, arg: Any) + * + * As is, the Any vs. AnyRef overloads are barely dodging the issue https://issues.scala-lang.org/browse/SI-2991. 
+ */ + def toLastFlattened(array: Array[_]): List[Any] = array.toList.reverse match { case (array: Array[_]) :: tail => tail.reverse ++ array.toList case other => other.reverse } - } - def updateSlf4jMessages(level: Level, arguments: Any): Unit = { + def updateSlf4jMessages(level: Level, arguments: Any): Unit = actualSlf4jMessages :+= Slf4jMessage(level, toList(arguments)) - } - def updateAkkaMessages(logLevel: LogLevel, message: String, causeOption: Option[Throwable] = None): Unit = { + def updateAkkaMessages(logLevel: LogLevel, message: String, causeOption: Option[Throwable] = None): Unit = actualAkkaMessages :+= AkkaMessage(logLevel, message, causeOption) - } val mockLogger = mock[Logger] @@ -333,25 +328,20 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche override val isInfoEnabled: Boolean = true override val isDebugEnabled: Boolean = true - override protected def notifyError(message: String): Unit = { + override protected def notifyError(message: String): Unit = updateAkkaMessages(Logging.ErrorLevel, message) - } - override protected def notifyError(cause: Throwable, message: String): Unit = { + override protected def notifyError(cause: Throwable, message: String): Unit = updateAkkaMessages(Logging.ErrorLevel, message, Option(cause)) - } - override protected def notifyWarning(message: String): Unit = { + override protected def notifyWarning(message: String): Unit = updateAkkaMessages(Logging.WarningLevel, message) - } - override protected def notifyInfo(message: String): Unit = { + override protected def notifyInfo(message: String): Unit = updateAkkaMessages(Logging.InfoLevel, message) - } - override protected def notifyDebug(message: String): Unit = { + override protected def notifyDebug(message: String): Unit = updateAkkaMessages(Logging.DebugLevel, message) - } } val wrapper = new LoggerWrapper { diff --git a/core/src/test/scala/cromwell/core/path/DefaultPathBuilderSpec.scala b/core/src/test/scala/cromwell/core/path/DefaultPathBuilderSpec.scala index 63a5a8604cf..5235f711063 100644 --- a/core/src/test/scala/cromwell/core/path/DefaultPathBuilderSpec.scala +++ b/core/src/test/scala/cromwell/core/path/DefaultPathBuilderSpec.scala @@ -6,7 +6,12 @@ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers import org.scalatest.prop.Tables.Table -class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers with PathBuilderSpecUtils with TestFileUtil { +class DefaultPathBuilderSpec + extends Suite + with AnyFlatSpecLike + with Matchers + with PathBuilderSpecUtils + with TestFileUtil { private val pwd = BetterFileMethods.Cmds.pwd private val parentOption = Option(pwd.parent) @@ -57,7 +62,6 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi ) private def goodPaths = Seq( - // Normal paths, not normalized GoodPath( @@ -72,8 +76,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "world", getFileName = "world", getNameCount = 2, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a relative path", path = "hello/world", @@ -86,8 +90,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "world", getFileName = "world", getNameCount = 2, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a path with spaces", path = "/hello/world/with spaces", @@ -100,8 +104,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "with 
spaces", getFileName = "with spaces", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path with encode spaces", path = "/hello/world/encoded%20spaces", @@ -114,8 +118,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "encoded%20spaces", getFileName = "encoded%20spaces", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path with non-ascii characters", path = "/hello/world/with non ascii £€", @@ -128,7 +132,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "with non ascii £€", getFileName = "with non ascii £€", getNameCount = 3, - isAbsolute = true), + isAbsolute = true + ), // Special paths @@ -144,8 +149,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = pwdName, getFileName = "", getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a path from /", path = "/", @@ -158,8 +163,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "", getFileName = null, getNameCount = 0, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path from .", path = ".", @@ -172,8 +177,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = pwdName, getFileName = ".", getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a path from ..", path = "..", @@ -186,8 +191,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = parentName, getFileName = "..", getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a path including .", path = "/hello/world/with/./dots", @@ -200,8 +205,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "dots", getFileName = "dots", getNameCount = 5, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path including ..", path = "/hello/world/with/../dots", @@ -214,7 +219,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "dots", getFileName = "dots", getNameCount = 5, - isAbsolute = true), + isAbsolute = true + ), // Normalized @@ -230,8 +236,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = pwdName, getFileName = "", getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a normalized path from /", path = "/", @@ -244,8 +250,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "", getFileName = null, getNameCount = 0, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a normalized path from .", path = ".", @@ -258,8 +264,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = pwdName, getFileName = "", getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a normalized path from ..", path = "..", @@ -272,8 +278,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = parentName, getFileName = "..", getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a normalized path including a .", path = "/hello/world/with/./dots", @@ -286,8 +292,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = 
"dots", getFileName = "dots", getNameCount = 4, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a normalized path including ..", path = "/hello/world/with/../dots", @@ -300,7 +306,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "dots", getFileName = "dots", getNameCount = 3, - isAbsolute = true), + isAbsolute = true + ), // URI @@ -316,8 +323,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "world", getFileName = "world", getNameCount = 2, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path from a file uri with encoded spaces", path = "file:///hello/world/encoded%20spaces", @@ -330,7 +337,8 @@ class DefaultPathBuilderSpec extends Suite with AnyFlatSpecLike with Matchers wi name = "encoded%20spaces", getFileName = "encoded%20spaces", getNameCount = 3, - isAbsolute = true) + isAbsolute = true + ) ) private def badPaths = Seq( diff --git a/core/src/test/scala/cromwell/core/path/PathBuilderFactorySpec.scala b/core/src/test/scala/cromwell/core/path/PathBuilderFactorySpec.scala index 7fea4f7b9f3..7119a62b1f6 100644 --- a/core/src/test/scala/cromwell/core/path/PathBuilderFactorySpec.scala +++ b/core/src/test/scala/cromwell/core/path/PathBuilderFactorySpec.scala @@ -12,24 +12,28 @@ import scala.concurrent.{ExecutionContext, Future} class PathBuilderFactorySpec extends TestKitSuite with AnyFlatSpecLike with ScalaFutures with Matchers { behavior of "PathBuilderFactory" implicit val ec = system.dispatcher - + it should "sort factories when instantiating path builders" in { val factory1 = new MockPathBuilderFactory(ConfigFactory.empty(), ConfigFactory.parseString("name=factory1")) val factory2 = new MockPathBuilderFactory(ConfigFactory.empty(), ConfigFactory.parseString("name=factory2")) PathBuilderFactory - .instantiatePathBuilders(List(DefaultPathBuilderFactory, factory1, factory2), WorkflowOptions.empty).map({ pathBuilders => - pathBuilders.last shouldBe DefaultPathBuilder - // check that the order of the other factories has not been changed - pathBuilders.map(_.name) shouldBe List("factory1", "factory2", DefaultPathBuilder.name) - }).futureValue + .instantiatePathBuilders(List(DefaultPathBuilderFactory, factory1, factory2), WorkflowOptions.empty) + .map { pathBuilders => + pathBuilders.last shouldBe DefaultPathBuilder + // check that the order of the other factories has not been changed + pathBuilders.map(_.name) shouldBe List("factory1", "factory2", DefaultPathBuilder.name) + } + .futureValue } } -class MockPathBuilderFactory(globalConfig: Config, val instanceConfig: Config) extends cromwell.core.path.PathBuilderFactory { - override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext) = Future.successful( - new PathBuilder { - override def name = instanceConfig.getString("name") - override def build(pathAsString: String) = throw new UnsupportedOperationException - } - ) +class MockPathBuilderFactory(globalConfig: Config, val instanceConfig: Config) + extends cromwell.core.path.PathBuilderFactory { + override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext) = + Future.successful( + new PathBuilder { + override def name = instanceConfig.getString("name") + override def build(pathAsString: String) = throw new UnsupportedOperationException + } + ) } diff --git a/core/src/test/scala/cromwell/core/path/PathBuilderSpecUtils.scala 
b/core/src/test/scala/cromwell/core/path/PathBuilderSpecUtils.scala index 92bfd77233f..324928d2cb4 100644 --- a/core/src/test/scala/cromwell/core/path/PathBuilderSpecUtils.scala +++ b/core/src/test/scala/cromwell/core/path/PathBuilderSpecUtils.scala @@ -17,7 +17,8 @@ case class GoodPath(description: String, name: String, getFileName: String, getNameCount: Int, - isAbsolute: Boolean) + isAbsolute: Boolean +) case class BadPath(description: String, path: String, exceptionMessage: String) @@ -29,7 +30,8 @@ trait PathBuilderSpecUtils { def truncateCommonRoots(builder: => PathBuilder, pathsToTruncate: TableFor3[String, String, String], - tag: Tag = PathBuilderSpecUtils.PathTest): Unit = { + tag: Tag = PathBuilderSpecUtils.PathTest + ): Unit = { behavior of s"PathCopier" it should "truncate common roots" taggedAs tag in { diff --git a/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala b/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala index c6a008feb02..e5d947da83d 100644 --- a/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala +++ b/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala @@ -18,11 +18,10 @@ class BackoffSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { .setInitialIntervalMillis(1.second.toMillis.toInt) .setMaxIntervalMillis(2.seconds.toMillis.toInt) .setMaxElapsedTimeMillis(Integer.MAX_VALUE) - .setRandomizationFactor(0D) + .setRandomizationFactor(0d) .build() ) - exponentialBackoff.backoffMillis shouldBe 3.seconds.toMillis exponentialBackoff.next.backoffMillis shouldBe 1.second.toMillis } @@ -33,7 +32,7 @@ class BackoffSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { .setInitialIntervalMillis(1.second.toMillis.toInt) .setMaxIntervalMillis(2.seconds.toMillis.toInt) .setMaxElapsedTimeMillis(Integer.MAX_VALUE) - .setRandomizationFactor(0D) + .setRandomizationFactor(0d) .build() ).backoffMillis shouldBe 1.second.toMillis } @@ -46,7 +45,7 @@ class BackoffSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { .setInitialIntervalMillis(1.second.toMillis.toInt) .setMaxIntervalMillis(2.seconds.toMillis.toInt) .setMaxElapsedTimeMillis(Integer.MAX_VALUE) - .setRandomizationFactor(0D) + .setRandomizationFactor(0d) .build() ) } @@ -57,16 +56,16 @@ class BackoffSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { Map[String, Any]( "min" -> "5 seconds", "max" -> "30 seconds", - "multiplier" -> 6D, - "randomization-factor" -> 0D + "multiplier" -> 6d, + "randomization-factor" -> 0d ).asJava ) val backoff = SimpleExponentialBackoff(config) backoff.googleBackoff.getCurrentIntervalMillis shouldBe 5.seconds.toMillis.toInt backoff.googleBackoff.getMaxIntervalMillis shouldBe 30.seconds.toMillis.toInt - backoff.googleBackoff.getMultiplier shouldBe 6D - backoff.googleBackoff.getRandomizationFactor shouldBe 0D + backoff.googleBackoff.getMultiplier shouldBe 6d + backoff.googleBackoff.getRandomizationFactor shouldBe 0d } } diff --git a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala index 83516f3328b..134b1c1db25 100644 --- a/core/src/test/scala/cromwell/core/retry/RetrySpec.scala +++ b/core/src/test/scala/cromwell/core/retry/RetrySpec.scala @@ -16,7 +16,7 @@ class RetrySpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Sca var counter: Int = n - def doIt(): Future[Int] = { + def doIt(): Future[Int] = if (counter == 0) Future.successful(9) else { @@ -24,23 +24,22 @@ class RetrySpec extends TestKitSuite with AnyFlatSpecLike with Matchers with 
Sca val ex = if (counter <= transients) new TransientException else new IllegalArgumentException("Failed") Future.failed(ex) } - } } - implicit val defaultPatience: PatienceConfig = PatienceConfig(timeout = Span(30, Seconds), interval = Span(100, Millis)) + implicit val defaultPatience: PatienceConfig = + PatienceConfig(timeout = Span(30, Seconds), interval = Span(100, Millis)) private def runRetry(retries: Int, work: MockWork, isTransient: Throwable => Boolean = Retry.throwableToFalse, - isFatal: Throwable => Boolean = Retry.throwableToFalse): Future[Int] = { - + isFatal: Throwable => Boolean = Retry.throwableToFalse + ): Future[Int] = withRetry( f = () => work.doIt(), maxRetries = Option(retries), isTransient = isTransient, isFatal = isFatal ) - } "Retry" should "retry a function until it works" in { val work = new MockWork(2) @@ -53,7 +52,7 @@ class RetrySpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Sca it should "fail if it hits the max retry count" in { whenReady(runRetry(1, new MockWork(3)).failed) { x => - x shouldBe an [CromwellFatalException] + x shouldBe an[CromwellFatalException] } } @@ -61,18 +60,19 @@ class RetrySpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Sca val work = new MockWork(3) whenReady(runRetry(3, work, isFatal = (t: Throwable) => t.isInstanceOf[IllegalArgumentException]).failed) { x => - x shouldBe an [CromwellFatalException] + x shouldBe an[CromwellFatalException] work.counter shouldBe 2 } val work2 = new MockWork(4, 2) val retry = runRetry(4, - work2, - isFatal = (t: Throwable) => t.isInstanceOf[IllegalArgumentException], - isTransient = (t: Throwable) => t.isInstanceOf[TransientException]) + work2, + isFatal = (t: Throwable) => t.isInstanceOf[IllegalArgumentException], + isTransient = (t: Throwable) => t.isInstanceOf[TransientException] + ) whenReady(retry.failed) { x => - x shouldBe an [CromwellFatalException] + x shouldBe an[CromwellFatalException] work2.counter shouldBe 3 } } diff --git a/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala b/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala index 8db76caab1d..fb347e43441 100644 --- a/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala +++ b/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala @@ -23,110 +23,165 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc case class SimpletonConversion(name: String, womValue: WomValue, simpletons: Seq[WomValueSimpleton]) val simpletonConversions = List( SimpletonConversion("foo", WomString("none"), List(WomValueSimpleton("foo", WomString("none")))), - SimpletonConversion("bar", WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))), List(WomValueSimpleton("bar[0]", WomInteger(1)), WomValueSimpleton("bar[1]", WomInteger(2)))), + SimpletonConversion( + "bar", + WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))), + List(WomValueSimpleton("bar[0]", WomInteger(1)), WomValueSimpleton("bar[1]", WomInteger(2))) + ), SimpletonConversion("empty_array", WomArray(WomArrayType(WomIntegerType), List.empty), List()), SimpletonConversion( "baz", - WomArray(WomArrayType(WomArrayType(WomIntegerType)), List( - WomArray(WomArrayType(WomIntegerType), List(WomInteger(0), WomInteger(1))), - WomArray(WomArrayType(WomIntegerType), List(WomInteger(2), WomInteger(3))))), - List(WomValueSimpleton("baz[0][0]", WomInteger(0)), WomValueSimpleton("baz[0][1]", WomInteger(1)), WomValueSimpleton("baz[1][0]", 
WomInteger(2)), WomValueSimpleton("baz[1][1]", WomInteger(3))) + WomArray( + WomArrayType(WomArrayType(WomIntegerType)), + List(WomArray(WomArrayType(WomIntegerType), List(WomInteger(0), WomInteger(1))), + WomArray(WomArrayType(WomIntegerType), List(WomInteger(2), WomInteger(3))) + ) + ), + List( + WomValueSimpleton("baz[0][0]", WomInteger(0)), + WomValueSimpleton("baz[0][1]", WomInteger(1)), + WomValueSimpleton("baz[1][0]", WomInteger(2)), + WomValueSimpleton("baz[1][1]", WomInteger(3)) + ) ), SimpletonConversion( "map", - WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("foo") -> WomString("foo"), - WomString("bar") -> WomString("bar"))), + WomMap(WomMapType(WomStringType, WomStringType), + Map(WomString("foo") -> WomString("foo"), WomString("bar") -> WomString("bar")) + ), List(WomValueSimpleton("map:foo", WomString("foo")), WomValueSimpleton("map:bar", WomString("bar"))) ), SimpletonConversion( "mapOfMaps", - WomMap(WomMapType(WomStringType, WomMapType(WomStringType, WomStringType)), Map( - WomString("foo") -> WomMap(WomMapType(WomStringType, WomStringType), Map(WomString("foo2") -> WomString("foo"))), - WomString("bar") ->WomMap(WomMapType(WomStringType, WomStringType), Map(WomString("bar2") -> WomString("bar"))))), - List(WomValueSimpleton("mapOfMaps:foo:foo2", WomString("foo")), WomValueSimpleton("mapOfMaps:bar:bar2", WomString("bar"))) + WomMap( + WomMapType(WomStringType, WomMapType(WomStringType, WomStringType)), + Map( + WomString("foo") -> WomMap(WomMapType(WomStringType, WomStringType), + Map(WomString("foo2") -> WomString("foo")) + ), + WomString("bar") -> WomMap(WomMapType(WomStringType, WomStringType), + Map(WomString("bar2") -> WomString("bar")) + ) + ) + ), + List(WomValueSimpleton("mapOfMaps:foo:foo2", WomString("foo")), + WomValueSimpleton("mapOfMaps:bar:bar2", WomString("bar")) + ) ), SimpletonConversion( "simplePair1", WomPair(WomInteger(1), WomString("hello")), - List(WomValueSimpleton("simplePair1:left", WomInteger(1)), WomValueSimpleton("simplePair1:right", WomString("hello"))) + List(WomValueSimpleton("simplePair1:left", WomInteger(1)), + WomValueSimpleton("simplePair1:right", WomString("hello")) + ) ), SimpletonConversion( "simplePair2", WomPair(WomString("left"), WomInteger(5)), - List(WomValueSimpleton("simplePair2:left", WomString("left")), WomValueSimpleton("simplePair2:right", WomInteger(5))) + List(WomValueSimpleton("simplePair2:left", WomString("left")), + WomValueSimpleton("simplePair2:right", WomInteger(5)) + ) ), SimpletonConversion( "pairOfPairs", - WomPair( - WomPair(WomInteger(1), WomString("one")), - WomPair(WomString("two"), WomInteger(2))), + WomPair(WomPair(WomInteger(1), WomString("one")), WomPair(WomString("two"), WomInteger(2))), List( WomValueSimpleton("pairOfPairs:left:left", WomInteger(1)), WomValueSimpleton("pairOfPairs:left:right", WomString("one")), WomValueSimpleton("pairOfPairs:right:left", WomString("two")), - WomValueSimpleton("pairOfPairs:right:right", WomInteger(2))) + WomValueSimpleton("pairOfPairs:right:right", WomInteger(2)) + ) ), SimpletonConversion( "pairOfArrayAndMap", WomPair( WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))), - WomMap(WomMapType(WomStringType, WomIntegerType), Map(WomString("left") -> WomInteger(100), WomString("right") -> WomInteger(200)))), + WomMap(WomMapType(WomStringType, WomIntegerType), + Map(WomString("left") -> WomInteger(100), WomString("right") -> WomInteger(200)) + ) + ), List( WomValueSimpleton("pairOfArrayAndMap:left[0]", WomInteger(1)), 
WomValueSimpleton("pairOfArrayAndMap:left[1]", WomInteger(2)), WomValueSimpleton("pairOfArrayAndMap:right:left", WomInteger(100)), - WomValueSimpleton("pairOfArrayAndMap:right:right", WomInteger(200))) + WomValueSimpleton("pairOfArrayAndMap:right:right", WomInteger(200)) + ) ), SimpletonConversion( "mapOfArrays", - WomMap(WomMapType(WomStringType, WomArrayType(WomIntegerType)), Map( - WomString("foo") -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(0), WomInteger(1))), - WomString("bar") -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(2), WomInteger(3))))), - List(WomValueSimpleton("mapOfArrays:foo[0]", WomInteger(0)), WomValueSimpleton("mapOfArrays:foo[1]", WomInteger(1)), - WomValueSimpleton("mapOfArrays:bar[0]", WomInteger(2)), WomValueSimpleton("mapOfArrays:bar[1]", WomInteger(3))) + WomMap( + WomMapType(WomStringType, WomArrayType(WomIntegerType)), + Map( + WomString("foo") -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(0), WomInteger(1))), + WomString("bar") -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(2), WomInteger(3))) + ) + ), + List( + WomValueSimpleton("mapOfArrays:foo[0]", WomInteger(0)), + WomValueSimpleton("mapOfArrays:foo[1]", WomInteger(1)), + WomValueSimpleton("mapOfArrays:bar[0]", WomInteger(2)), + WomValueSimpleton("mapOfArrays:bar[1]", WomInteger(3)) + ) ), SimpletonConversion( "escapology", - WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("foo[1]") -> WomString("foo"), - WomString("bar[[") -> WomString("bar"), - WomString("baz:qux") -> WomString("baz:qux"))), - List(WomValueSimpleton("escapology:foo\\[1\\]", WomString("foo")), + WomMap( + WomMapType(WomStringType, WomStringType), + Map(WomString("foo[1]") -> WomString("foo"), + WomString("bar[[") -> WomString("bar"), + WomString("baz:qux") -> WomString("baz:qux") + ) + ), + List( + WomValueSimpleton("escapology:foo\\[1\\]", WomString("foo")), WomValueSimpleton("escapology:bar\\[\\[", WomString("bar")), - WomValueSimpleton("escapology:baz\\:qux", WomString("baz:qux"))) + WomValueSimpleton("escapology:baz\\:qux", WomString("baz:qux")) + ) ), SimpletonConversion( "flat_object", - WomObject(Map( - "a" -> WomString("aardvark"), - "b" -> WomInteger(25), - "c" -> WomBoolean(false) - )), - List(WomValueSimpleton("flat_object:a", WomString("aardvark")), + WomObject( + Map( + "a" -> WomString("aardvark"), + "b" -> WomInteger(25), + "c" -> WomBoolean(false) + ) + ), + List( + WomValueSimpleton("flat_object:a", WomString("aardvark")), WomValueSimpleton("flat_object:b", WomInteger(25)), - WomValueSimpleton("flat_object:c", WomBoolean(false))) + WomValueSimpleton("flat_object:c", WomBoolean(false)) + ) ), SimpletonConversion( "object_with_array", - WomObject(Map( - "a" -> WomArray(WomArrayType(WomStringType), Seq(WomString("aardvark"), WomString("beetle"))) - )), + WomObject( + Map( + "a" -> WomArray(WomArrayType(WomStringType), Seq(WomString("aardvark"), WomString("beetle"))) + ) + ), List(WomValueSimpleton("object_with_array:a[0]", WomString("aardvark")), - WomValueSimpleton("object_with_array:a[1]", WomString("beetle"))) + WomValueSimpleton("object_with_array:a[1]", WomString("beetle")) + ) ), SimpletonConversion( "object_with_object", - WomObject(Map( - "a" -> WomObject(Map( - "aa" -> WomArray(WomArrayType(WomStringType), Seq(WomString("aardvark"), WomString("aaron"))), - "ab" -> WomArray(WomArrayType(WomStringType), Seq(WomString("abacus"), WomString("a bee"))) - )), - "b" -> WomObject(Map( - "ba" -> WomArray(WomArrayType(WomStringType), Seq(WomString("baa"), 
WomString("battle"))), - "bb" -> WomArray(WomArrayType(WomStringType), Seq(WomString("bbrrrr"), WomString("bb gun"))) - )) - )), + WomObject( + Map( + "a" -> WomObject( + Map( + "aa" -> WomArray(WomArrayType(WomStringType), Seq(WomString("aardvark"), WomString("aaron"))), + "ab" -> WomArray(WomArrayType(WomStringType), Seq(WomString("abacus"), WomString("a bee"))) + ) + ), + "b" -> WomObject( + Map( + "ba" -> WomArray(WomArrayType(WomStringType), Seq(WomString("baa"), WomString("battle"))), + "bb" -> WomArray(WomArrayType(WomStringType), Seq(WomString("bbrrrr"), WomString("bb gun"))) + ) + ) + ) + ), List( WomValueSimpleton("object_with_object:a:aa[0]", WomString("aardvark")), WomValueSimpleton("object_with_object:a:aa[1]", WomString("aaron")), @@ -135,58 +190,59 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc WomValueSimpleton("object_with_object:b:ba[0]", WomString("baa")), WomValueSimpleton("object_with_object:b:ba[1]", WomString("battle")), WomValueSimpleton("object_with_object:b:bb[0]", WomString("bbrrrr")), - WomValueSimpleton("object_with_object:b:bb[1]", WomString("bb gun")), + WomValueSimpleton("object_with_object:b:bb[1]", WomString("bb gun")) ) ), /* - * Wom object representing a directory listing - * - a single file - * - a "maybe populated file" with some properties (checksum etc..) and secondary files: - * - another single file - * - a directory listing a single file - * - an unlisted directory - * - a glob file - * - an unlisted directory - * - a glob file - * - * Note: glob files technically are never simpletonized but as WomFiles they *can* be + * Wom object representing a directory listing + * - a single file + * - a "maybe populated file" with some properties (checksum etc..) and secondary files: + * - another single file + * - a directory listing a single file + * - an unlisted directory + * - a glob file + * - an unlisted directory + * - a glob file + * + * Note: glob files technically are never simpletonized but as WomFiles they *can* be */ SimpletonConversion( "directory", WomMaybeListedDirectory( Option("outerValueName"), - Option(List( - WomSingleFile("outerSingleFile"), - WomMaybeListedDirectory(Option("innerValueName"), Option(List(WomSingleFile("innerSingleFile")))), - WomMaybePopulatedFile( - Option("populatedInnerValueName"), - Option("innerChecksum"), - Option(10L), - Option("innerFormat"), - Option("innerContents"), - List( - WomSingleFile("populatedInnerSingleFile"), - WomMaybeListedDirectory(Option("innerDirectoryValueName"), Option(List(WomSingleFile("innerDirectorySingleFile")))), - WomUnlistedDirectory("innerUnlistedDirectory"), - WomGlobFile("innerGlobFile") - ) - ), - WomUnlistedDirectory("outerUnlistedDirectory"), - WomGlobFile("outerGlobFile") - ))), + Option( + List( + WomSingleFile("outerSingleFile"), + WomMaybeListedDirectory(Option("innerValueName"), Option(List(WomSingleFile("innerSingleFile")))), + WomMaybePopulatedFile( + Option("populatedInnerValueName"), + Option("innerChecksum"), + Option(10L), + Option("innerFormat"), + Option("innerContents"), + List( + WomSingleFile("populatedInnerSingleFile"), + WomMaybeListedDirectory(Option("innerDirectoryValueName"), + Option(List(WomSingleFile("innerDirectorySingleFile"))) + ), + WomUnlistedDirectory("innerUnlistedDirectory"), + WomGlobFile("innerGlobFile") + ) + ), + WomUnlistedDirectory("outerUnlistedDirectory"), + WomGlobFile("outerGlobFile") + ) + ) + ), List( WomValueSimpleton("directory:class", WomString("Directory")), WomValueSimpleton("directory:value", 
WomString("outerValueName")), - WomValueSimpleton("directory:listing[0]", WomSingleFile("outerSingleFile")), - WomValueSimpleton("directory:listing[1]:class", WomString("Directory")), WomValueSimpleton("directory:listing[1]:value", WomString("innerValueName")), WomValueSimpleton("directory:listing[1]:listing[0]", WomSingleFile("innerSingleFile")), - WomValueSimpleton("directory:listing[2]:class", WomString("File")), WomValueSimpleton("directory:listing[2]:value", WomString("populatedInnerValueName")), - WomValueSimpleton("directory:listing[2]:checksum", WomString("innerChecksum")), WomValueSimpleton("directory:listing[2]:size", WomInteger(10)), WomValueSimpleton("directory:listing[2]:format", WomString("innerFormat")), @@ -194,10 +250,11 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc WomValueSimpleton("directory:listing[2]:secondaryFiles[0]", WomSingleFile("populatedInnerSingleFile")), WomValueSimpleton("directory:listing[2]:secondaryFiles[1]:class", WomString("Directory")), WomValueSimpleton("directory:listing[2]:secondaryFiles[1]:value", WomString("innerDirectoryValueName")), - WomValueSimpleton("directory:listing[2]:secondaryFiles[1]:listing[0]", WomSingleFile("innerDirectorySingleFile")), + WomValueSimpleton("directory:listing[2]:secondaryFiles[1]:listing[0]", + WomSingleFile("innerDirectorySingleFile") + ), WomValueSimpleton("directory:listing[2]:secondaryFiles[2]", WomUnlistedDirectory("innerUnlistedDirectory")), WomValueSimpleton("directory:listing[2]:secondaryFiles[3]", WomGlobFile("innerGlobFile")), - WomValueSimpleton("directory:listing[3]", WomUnlistedDirectory("outerUnlistedDirectory")), WomValueSimpleton("directory:listing[4]", WomGlobFile("outerGlobFile")) ) @@ -225,7 +282,9 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc it should "round trip everything together with no losses" in { - val wdlValues = (simpletonConversions map { case SimpletonConversion(name, womValue, _) => WomMocks.mockOutputPort(name, womValue.womType) -> womValue }).toMap + val wdlValues = (simpletonConversions map { case SimpletonConversion(name, womValue, _) => + WomMocks.mockOutputPort(name, womValue.womType) -> womValue + }).toMap val allSimpletons = simpletonConversions flatMap { case SimpletonConversion(_, _, simpletons) => simpletons } val actualSimpletons = wdlValues.simplify @@ -239,17 +298,23 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc // coerceable back into the original type: it should "decompose then reconstruct a map in an object into a coerceable value" in { - val aMap = WomMap(WomMapType(WomStringType, WomArrayType(WomStringType)), Map( - WomString("aa") -> WomArray(WomArrayType(WomStringType), Seq(WomString("aardvark"), WomString("aaron"))), - WomString("ab") -> WomArray(WomArrayType(WomStringType), Seq(WomString("abacus"), WomString("a bee"))) - )) + val aMap = WomMap( + WomMapType(WomStringType, WomArrayType(WomStringType)), + Map( + WomString("aa") -> WomArray(WomArrayType(WomStringType), Seq(WomString("aardvark"), WomString("aaron"))), + WomString("ab") -> WomArray(WomArrayType(WomStringType), Seq(WomString("abacus"), WomString("a bee"))) + ) + ) - val bMap = WomMap(WomMapType(WomStringType, WomArrayType(WomStringType)), Map( - WomString("ba") -> WomArray(WomArrayType(WomStringType), Seq(WomString("baa"), WomString("battle"))), - WomString("bb") -> WomArray(WomArrayType(WomStringType), Seq(WomString("bbrrrr"), WomString("bb gun"))) - )) + val bMap = WomMap( + 
WomMapType(WomStringType, WomArrayType(WomStringType)), + Map( + WomString("ba") -> WomArray(WomArrayType(WomStringType), Seq(WomString("baa"), WomString("battle"))), + WomString("bb") -> WomArray(WomArrayType(WomStringType), Seq(WomString("bbrrrr"), WomString("bb gun"))) + ) + ) - val initial = WomObject(Map("a" -> aMap, "b" -> bMap )) + val initial = WomObject(Map("a" -> aMap, "b" -> bMap)) val map = Map(WomMocks.mockOutputPort("map_in_object") -> initial) @@ -263,9 +328,10 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc WomValueSimpleton("map_in_object:b:ba[0]", WomString("baa")), WomValueSimpleton("map_in_object:b:ba[1]", WomString("battle")), WomValueSimpleton("map_in_object:b:bb[0]", WomString("bbrrrr")), - WomValueSimpleton("map_in_object:b:bb[1]", WomString("bb gun")), + WomValueSimpleton("map_in_object:b:bb[1]", WomString("bb gun")) ), - actualSimpletons) + actualSimpletons + ) // Reconstruct: val outputPort = WomMocks.mockOutputPort(OutputDefinition("map_in_object", initial.womType, IgnoredExpression)) @@ -283,7 +349,8 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc } private def assertSimpletonsEqual(expectedSimpletons: Iterable[WomValueSimpleton], - actualSimpletons: Iterable[WomValueSimpleton]): Unit = { + actualSimpletons: Iterable[WomValueSimpleton] + ): Unit = { // Sanity check, make sure we don't lose anything when we "toSet": actualSimpletons.toSet should contain theSameElementsAs actualSimpletons diff --git a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala index ef1183e9a07..c237ee8c127 100644 --- a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala +++ b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala @@ -39,7 +39,9 @@ object AkkaTestUtil { class DeathTestActor extends Actor { protected def stoppingReceive: Actor.Receive = { case InternalStop => context.stop(self) - case ThrowException => throw new Exception("Don't panic, dear debugger! This was a deliberate exception for the test case.") with NoStackTrace + case ThrowException => + throw new Exception("Don't panic, dear debugger! This was a deliberate exception for the test case.") + with NoStackTrace } override def receive = stoppingReceive orElse Actor.ignoringBehavior } @@ -55,6 +57,6 @@ object AkkaTestUtil { def loggedReceive: Receive - override final def receive: Receive = logMessage orElse loggedReceive + final override def receive: Receive = logMessage orElse loggedReceive } } diff --git a/core/src/test/scala/cromwell/util/EncryptionSpec.scala b/core/src/test/scala/cromwell/util/EncryptionSpec.scala index d1d64f3725d..f9b91107c96 100644 --- a/core/src/test/scala/cromwell/util/EncryptionSpec.scala +++ b/core/src/test/scala/cromwell/util/EncryptionSpec.scala @@ -14,7 +14,8 @@ object EncryptionSpec { """| |Did you install the Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files? 
|http://www.oracle.com/technetwork/java/javase/downloads/jce8-download-2133166.html - |""".stripMargin) + |""".stripMargin + ) } class EncryptionSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { diff --git a/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala b/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala index 875ffd48e3d..214ca6c8454 100644 --- a/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala +++ b/core/src/test/scala/cromwell/util/GracefulShutdownHelperSpec.scala @@ -10,17 +10,17 @@ import org.scalatest.matchers.should.Matchers class GracefulShutdownHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers { behavior of "GracefulShutdownHelper" - + it should "send ShutdownCommand to actors, wait for them to shutdown, then shut itself down" in { val testProbeA = TestProbe() val testProbeB = TestProbe() - + val testActor = system.actorOf(Props(new Actor with GracefulShutdownHelper with ActorLogging { - override def receive: Receive = { - case ShutdownCommand => waitForActorsAndShutdown(NonEmptyList.of(testProbeA.ref, testProbeB.ref)) + override def receive: Receive = { case ShutdownCommand => + waitForActorsAndShutdown(NonEmptyList.of(testProbeA.ref, testProbeB.ref)) } })) - + watch(testActor) testActor ! ShutdownCommand @@ -37,7 +37,7 @@ class GracefulShutdownHelperSpec extends TestKitSuite with AnyFlatSpecLike with expectNoMessage() system stop testProbeB.ref - + expectTerminated(testActor) } } diff --git a/core/src/test/scala/cromwell/util/SampleWdl.scala b/core/src/test/scala/cromwell/util/SampleWdl.scala index 6e790ca67be..8180049fc73 100644 --- a/core/src/test/scala/cromwell/util/SampleWdl.scala +++ b/core/src/test/scala/cromwell/util/SampleWdl.scala @@ -2,7 +2,12 @@ package cromwell.util import java.util.UUID import cromwell.core.path.{DefaultPath, DefaultPathBuilder, Path} -import cromwell.core.{WorkflowOptions, WorkflowSourceFilesCollection, WorkflowSourceFilesWithDependenciesZip, WorkflowSourceFilesWithoutImports} +import cromwell.core.{ + WorkflowOptions, + WorkflowSourceFilesCollection, + WorkflowSourceFilesWithDependenciesZip, + WorkflowSourceFilesWithoutImports +} import spray.json._ import wom.core.{ExecutableInputMap, WorkflowJson, WorkflowSource} import wom.values._ @@ -30,7 +35,8 @@ trait SampleWdl extends TestFileUtil { labels: String = "{}", workflowType: Option[String] = Option("WDL"), workflowTypeVersion: Option[String] = None, - workflowOnHold: Boolean = false): WorkflowSourceFilesCollection = { + workflowOnHold: Boolean = false + ): WorkflowSourceFilesCollection = importsZip match { case Some(zip) => WorkflowSourceFilesWithDependenciesZip( @@ -45,7 +51,8 @@ trait SampleWdl extends TestFileUtil { warnings = Vector.empty, workflowOnHold = workflowOnHold, importsZip = zip, - requestedWorkflowId = None) + requestedWorkflowId = None + ) case None => WorkflowSourceFilesWithoutImports( workflowSource = Option(workflowSource(runtime)), @@ -58,9 +65,9 @@ trait SampleWdl extends TestFileUtil { workflowTypeVersion = workflowTypeVersion, warnings = Vector.empty, workflowOnHold = workflowOnHold, - requestedWorkflowId = None) + requestedWorkflowId = None + ) } - } val rawInputs: ExecutableInputMap @@ -84,8 +91,8 @@ trait SampleWdl extends TestFileUtil { def write(x: Any): JsValue = x match { case n: Int => JsNumber(n) case s: String => JsString(s) - case b: Boolean => if(b) JsTrue else JsFalse - case s: Seq[Any] => JsArray(s map {_.toJson} toVector) + case b: Boolean => if (b) JsTrue else JsFalse + 
case s: Seq[Any] => JsArray(s map { _.toJson } toVector) case a: WomArray => write(a.value) case s: WomString => JsString(s.value) case i: WomInteger => JsNumber(i.value) @@ -111,20 +118,20 @@ object SampleWdl { object HelloWorld extends SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s""" - |task hello { - | String addressee - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | RUNTIME - |} - | - |workflow wf_hello { - | call hello - |} + |task hello { + | String addressee + | command { + | echo "Hello $${addressee}!" + | } + | output { + | String salutation = read_string(stdout()) + | } + | RUNTIME + |} + | + |workflow wf_hello { + | call hello + |} """.stripMargin.replace("RUNTIME", runtime) val Addressee = "wf_hello.hello.addressee" @@ -157,35 +164,35 @@ object SampleWdl { object EmptyString extends SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s""" - |task hello { - | command { - | echo "Hello!" - | } - | output { - | String empty = "" - | } - | RUNTIME - |} - | - |task goodbye { - | String emptyInputString - | command { - | echo "$${emptyInputString}" - | } - | output { - | String empty = read_string(stdout()) - | } - | RUNTIME - |} - | - |workflow wf_hello { - | call hello - | call goodbye {input: emptyInputString=hello.empty } - | output { - | hello.empty - | goodbye.empty - | } - |} + |task hello { + | command { + | echo "Hello!" + | } + | output { + | String empty = "" + | } + | RUNTIME + |} + | + |task goodbye { + | String emptyInputString + | command { + | echo "$${emptyInputString}" + | } + | output { + | String empty = read_string(stdout()) + | } + | RUNTIME + |} + | + |workflow wf_hello { + | call hello + | call goodbye {input: emptyInputString=hello.empty } + | output { + | hello.empty + | goodbye.empty + | } + |} """.stripMargin.replace("RUNTIME", runtime) val rawInputs = Map.empty[String, Any] @@ -196,32 +203,31 @@ object SampleWdl { } object CoercionNotDefined extends SampleWdl { - override def workflowSource(runtime: String = ""): WorkflowSource = { + override def workflowSource(runtime: String = ""): WorkflowSource = s""" - |task summary { - | String bfile - | command { - | ~/plink --bfile $${bfile} --missing --hardy --out foo --allow-no-sex - | } - | output { - | File hwe = "foo.hwe" - | File log = "foo.log" - | File imiss = "foo.imiss" - | File lmiss = "foo.lmiss" - | } - | meta { - | author: "Jackie Goldstein" - | email: "jigold@broadinstitute.org" - | } - |} - | - |workflow test1 { - | call summary { - | input: bfile = bfile - | } - |} + |task summary { + | String bfile + | command { + | ~/plink --bfile $${bfile} --missing --hardy --out foo --allow-no-sex + | } + | output { + | File hwe = "foo.hwe" + | File log = "foo.log" + | File imiss = "foo.imiss" + | File lmiss = "foo.lmiss" + | } + | meta { + | author: "Jackie Goldstein" + | email: "jigold@broadinstitute.org" + | } + |} + | + |workflow test1 { + | call summary { + | input: bfile = bfile + | } + |} """.stripMargin - } override val rawInputs: ExecutableInputMap = Map("test1.bfile" -> "data/example1") } @@ -281,67 +287,66 @@ object SampleWdl { withPlaceholders.stripMargin.replace(outputSectionPlaceholder, outputsSection) } - val PatternKey ="three_step.cgrep.pattern" + val PatternKey = "three_step.cgrep.pattern" override lazy val rawInputs = Map(PatternKey -> "...") } object ThreeStep extends ThreeStepTemplate object ThreeStepWithOutputsSection extends ThreeStepTemplate { - 
override def workflowSource(runtime: String = ""): WorkflowJson = sourceString(outputsSection = - """ - |output { - | cgrep.count - | wc.count - |} + override def workflowSource(runtime: String = ""): WorkflowJson = sourceString(outputsSection = """ + |output { + | cgrep.count + | wc.count + |} """.stripMargin).replaceAll("RUNTIME", runtime) } object DeclarationsWorkflow extends SampleWdl { override def workflowSource(runtime: String): WorkflowSource = s""" - |task cat { - | File file - | String? flags - | String? flags2 # This should be a workflow input - | command { - | cat $${flags} $${flags2} $${file} - | } - | output { - | File procs = stdout() - | } - |} - | - |task cgrep { - | String str_decl - | String pattern - | File in_file - | command { - | grep '$${pattern}' $${in_file} | wc -l - | } - | output { - | Int count = read_int(stdout()) - | String str = str_decl - | } - |} - | - |workflow two_step { - | String flags_suffix - | String flags = "-" + flags_suffix - | String static_string = "foobarbaz" - | call cat { - | input: flags = flags - | } - | call cgrep { - | input: in_file = cat.procs - | } - |} + |task cat { + | File file + | String? flags + | String? flags2 # This should be a workflow input + | command { + | cat $${flags} $${flags2} $${file} + | } + | output { + | File procs = stdout() + | } + |} + | + |task cgrep { + | String str_decl + | String pattern + | File in_file + | command { + | grep '$${pattern}' $${in_file} | wc -l + | } + | output { + | Int count = read_int(stdout()) + | String str = str_decl + | } + |} + | + |workflow two_step { + | String flags_suffix + | String flags = "-" + flags_suffix + | String static_string = "foobarbaz" + | call cat { + | input: flags = flags + | } + | call cgrep { + | input: in_file = cat.procs + | } + |} """.stripMargin private val fileContents = s"""first line - |second line - |third line + |second line + |third line """.stripMargin override val rawInputs: ExecutableInputMap = Map( @@ -446,94 +451,94 @@ object SampleWdl { object ArrayIO extends SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s""" - |task serialize { - | Array[String] strs - | command { - | cat $${write_lines(strs)} - | } - | output { - | String contents = read_string(stdout()) - | } - | RUNTIME - |} - | - |workflow wf { - | Array[String] strings = ["str1", "str2", "str3"] - | call serialize { - | input: strs = strings - | } - |} + |task serialize { + | Array[String] strs + | command { + | cat $${write_lines(strs)} + | } + | output { + | String contents = read_string(stdout()) + | } + | RUNTIME + |} + | + |workflow wf { + | Array[String] strings = ["str1", "str2", "str3"] + | call serialize { + | input: strs = strings + | } + |} """.stripMargin.replace("RUNTIME", runtime) override val rawInputs: Map[String, Any] = Map.empty } class ScatterWdl extends SampleWdl { val tasks: String = s"""task A { - | command { - | echo -n -e "jeff\nchris\nmiguel\nthibault\nkhalid\nruchi" - | } - | RUNTIME - | output { - | Array[String] A_out = read_lines(stdout()) - | } - |} - | - |task B { - | String B_in - | command { - | python -c "print(len('$${B_in}'))" - | } - | RUNTIME - | output { - | Int B_out = read_int(stdout()) - | } - |} - | - |task C { - | Int C_in - | command { - | python -c "print($${C_in}*100)" - | } - | RUNTIME - | output { - | Int C_out = read_int(stdout()) - | } - |} - | - |task D { - | Array[Int] D_in - | command { - | python -c "print($${sep = '+' D_in})" - | } - | RUNTIME - | output { - | Int D_out = read_int(stdout()) - | } - |} - 
| - |task E { - | command { - | python -c "print(9)" - | } - | RUNTIME - | output { - | Int E_out = read_int(stdout()) - | } - |} + | command { + | echo -n -e "jeff\nchris\nmiguel\nthibault\nkhalid\nruchi" + | } + | RUNTIME + | output { + | Array[String] A_out = read_lines(stdout()) + | } + |} + | + |task B { + | String B_in + | command { + | python -c "print(len('$${B_in}'))" + | } + | RUNTIME + | output { + | Int B_out = read_int(stdout()) + | } + |} + | + |task C { + | Int C_in + | command { + | python -c "print($${C_in}*100)" + | } + | RUNTIME + | output { + | Int C_out = read_int(stdout()) + | } + |} + | + |task D { + | Array[Int] D_in + | command { + | python -c "print($${sep = '+' D_in})" + | } + | RUNTIME + | output { + | Int D_out = read_int(stdout()) + | } + |} + | + |task E { + | command { + | python -c "print(9)" + | } + | RUNTIME + | output { + | Int E_out = read_int(stdout()) + | } + |} """.stripMargin override def workflowSource(runtime: String = ""): WorkflowSource = s"""$tasks - | - |workflow w { - | call A - | scatter (item in A.A_out) { - | call B {input: B_in = item} - | call C {input: C_in = B.B_out} - | call E - | } - | call D {input: D_in = B.B_out} - |} + | + |workflow w { + | call A + | scatter (item in A.A_out) { + | call B {input: B_in = item} + | call C {input: C_in = B.B_out} + | call E + | } + | call D {input: D_in = B.B_out} + |} """.stripMargin.replace("RUNTIME", runtime) override lazy val rawInputs = Map.empty[String, String] @@ -542,21 +547,21 @@ object SampleWdl { object SimpleScatterWdl extends SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s"""task echo_int { - | Int int - | command {echo $${int}} - | output {Int out = read_int(stdout())} - | RUNTIME_PLACEHOLDER - |} - | - |workflow scatter0 { - | Array[Int] ints = [1,2,3,4,5] - | call echo_int as outside_scatter {input: int = 8000} - | scatter(i in ints) { - | call echo_int as inside_scatter { - | input: int = i - | } - | } - |} + | Int int + | command {echo $${int}} + | output {Int out = read_int(stdout())} + | RUNTIME_PLACEHOLDER + |} + | + |workflow scatter0 { + | Array[Int] ints = [1,2,3,4,5] + | call echo_int as outside_scatter {input: int = 8000} + | scatter(i in ints) { + | call echo_int as inside_scatter { + | input: int = i + | } + | } + |} """.stripMargin.replace("RUNTIME_PLACEHOLDER", runtime) override lazy val rawInputs = Map.empty[String, String] @@ -565,108 +570,108 @@ object SampleWdl { object SimpleScatterWdlWithOutputs extends SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s"""task echo_int { - | Int int - | command {echo $${int}} - | output {Int out = read_int(stdout())} - |} - | - |workflow scatter0 { - | Array[Int] ints = [1,2,3,4,5] - | call echo_int as outside_scatter {input: int = 8000} - | scatter(i in ints) { - | call echo_int as inside_scatter { - | input: int = i - | } - | } - | output { - | inside_scatter.* - | } - |} + | Int int + | command {echo $${int}} + | output {Int out = read_int(stdout())} + |} + | + |workflow scatter0 { + | Array[Int] ints = [1,2,3,4,5] + | call echo_int as outside_scatter {input: int = 8000} + | scatter(i in ints) { + | call echo_int as inside_scatter { + | input: int = i + | } + | } + | output { + | inside_scatter.* + | } + |} """.stripMargin override lazy val rawInputs = Map.empty[String, String] } case class PrepareScatterGatherWdl(salt: String = UUID.randomUUID().toString) extends SampleWdl { - override def workflowSource(runtime: String = ""): WorkflowSource = { + override def 
workflowSource(runtime: String = ""): WorkflowSource = s""" - |# - |# Goal here is to split up the input file into files of 1 line each (in the prepare) then in parallel call wc -w on each newly created file and count the words into another file then in the gather, sum the results of each parallel call to come up with - |# the word-count for the fil - |# - |# splits each line into a file with the name temp_?? (shuffle) - |task do_prepare { - | File input_file - | command { - | split -l 1 $${input_file} temp_ && ls -1 temp_?? > files.list - | } - | output { - | Array[File] split_files = read_lines("files.list") - | } - | RUNTIME - |} - |# count the number of words in the input file, writing the count to an output file overkill in this case, but simulates a real scatter-gather that would just return an Int (map) - |task do_scatter { - | String salt - | File input_file - | command { - | # $${salt} - | wc -w $${input_file} > output.txt - | } - | output { - | File count_file = "output.txt" - | } - | RUNTIME - |} - |# aggregate the results back together (reduce) - |task do_gather { - | Array[File] input_files - | command <<< - | cat $${sep = ' ' input_files} | awk '{s+=$$1} END {print s}' - | >>> - | output { - | Int sum = read_int(stdout()) - | } - | RUNTIME - |} - |workflow sc_test { - | call do_prepare - | scatter(f in do_prepare.split_files) { - | call do_scatter { - | input: input_file = f - | } - | } - | call do_gather { - | input: input_files = do_scatter.count_file - | } - |} + |# + |# Goal here is to split up the input file into files of 1 line each (in the prepare) then in parallel call wc -w on each newly created file and count the words into another file then in the gather, sum the results of each parallel call to come up with + |# the word-count for the fil + |# + |# splits each line into a file with the name temp_?? (shuffle) + |task do_prepare { + | File input_file + | command { + | split -l 1 $${input_file} temp_ && ls -1 temp_?? 
> files.list + | } + | output { + | Array[File] split_files = read_lines("files.list") + | } + | RUNTIME + |} + |# count the number of words in the input file, writing the count to an output file overkill in this case, but simulates a real scatter-gather that would just return an Int (map) + |task do_scatter { + | String salt + | File input_file + | command { + | # $${salt} + | wc -w $${input_file} > output.txt + | } + | output { + | File count_file = "output.txt" + | } + | RUNTIME + |} + |# aggregate the results back together (reduce) + |task do_gather { + | Array[File] input_files + | command <<< + | cat $${sep = ' ' input_files} | awk '{s+=$$1} END {print s}' + | >>> + | output { + | Int sum = read_int(stdout()) + | } + | RUNTIME + |} + |workflow sc_test { + | call do_prepare + | scatter(f in do_prepare.split_files) { + | call do_scatter { + | input: input_file = f + | } + | } + | call do_gather { + | input: input_files = do_scatter.count_file + | } + |} """.stripMargin.replace("RUNTIME", runtime) - } val contents: String = - """|the - |total number - |of words in this - |text file is 11 - |""".stripMargin + """|the + |total number + |of words in this + |text file is 11 + |""".stripMargin override lazy val rawInputs = Map( "sc_test.do_prepare.input_file" -> createCannedFile("scatter", contents).pathAsString, - "sc_test.do_scatter.salt" -> salt) + "sc_test.do_scatter.salt" -> salt + ) } object FileClobber extends SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s"""task read_line { - | File in - | command { cat $${in} } - | output { String out = read_string(stdout()) } - |} - | - |workflow two { - | call read_line as x - | call read_line as y - |} + | File in + | command { cat $${in} } + | output { String out = read_string(stdout()) } + |} + | + |workflow two { + | call read_line as x + | call read_line as y + |} """.stripMargin val tempDir1: DefaultPath = DefaultPathBuilder.createTempDirectory("FileClobber1") @@ -683,26 +688,26 @@ object SampleWdl { object FilePassingWorkflow extends SampleWdl { override def workflowSource(runtime: String): WorkflowSource = s"""task a { - | File in - | String out_name = "out" - | - | command { - | cat $${in} > $${out_name} - | } - | RUNTIME - | output { - | File out = "out" - | File out_interpolation = "$${out_name}" - | String contents = read_string("$${out_name}") - | } - |} - | - |workflow file_passing { - | File f - | - | call a {input: in = f} - | call a as b {input: in = a.out} - |} + | File in + | String out_name = "out" + | + | command { + | cat $${in} > $${out_name} + | } + | RUNTIME + | output { + | File out = "out" + | File out_interpolation = "$${out_name}" + | String contents = read_string("$${out_name}") + | } + |} + | + |workflow file_passing { + | File f + | + | call a {input: in = f} + | call a as b {input: in = a.out} + |} """.stripMargin.replace("RUNTIME", runtime) private val fileContents = s"foo bar baz" @@ -723,30 +728,30 @@ object SampleWdl { case class CallCachingWorkflow(salt: String) extends SampleWdl { override def workflowSource(runtime: String): WorkflowSource = s"""task a { - | File in - | String out_name = "out" - | String salt - | - | command { - | # $${salt} - | echo "Something" - | cat $${in} > $${out_name} - | } - | RUNTIME - | output { - | File out = "out" - | File out_interpolation = "$${out_name}" - | String contents = read_string("$${out_name}") - | Array[String] stdoutContent = read_lines(stdout()) - | } - |} - | - |workflow file_passing { - | File f - | - | call a {input: in = f} - 
| call a as b {input: in = a.out} - |} + | File in + | String out_name = "out" + | String salt + | + | command { + | # $${salt} + | echo "Something" + | cat $${in} > $${out_name} + | } + | RUNTIME + | output { + | File out = "out" + | File out_interpolation = "$${out_name}" + | String contents = read_string("$${out_name}") + | Array[String] stdoutContent = read_lines(stdout()) + | } + |} + | + |workflow file_passing { + | File f + | + | call a {input: in = f} + | call a as b {input: in = a.out} + |} """.stripMargin.replace("RUNTIME", runtime) private val fileContents = s"foo bar baz" @@ -761,29 +766,29 @@ object SampleWdl { object CallCachingHashingWdl extends SampleWdl { override def workflowSource(runtime: String): WorkflowSource = s"""task t { - | Int a - | Float b - | String c - | File d - | - | command { - | echo "$${a}" > a - | echo "$${b}" > b - | echo "$${c}" > c - | cat $${d} > d - | } - | output { - | Int w = read_int("a") + 2 - | Float x = read_float("b") - | String y = read_string("c") - | File z = "d" - | } - | RUNTIME - |} - | - |workflow w { - | call t - |} + | Int a + | Float b + | String c + | File d + | + | command { + | echo "$${a}" > a + | echo "$${b}" > b + | echo "$${c}" > c + | cat $${d} > d + | } + | output { + | Int w = read_int("a") + 2 + | Float x = read_float("b") + | String y = read_string("c") + | File z = "d" + | } + | RUNTIME + |} + | + |workflow w { + | call t + |} """.stripMargin.replace("RUNTIME", runtime) val tempDir: DefaultPath = DefaultPathBuilder.createTempDirectory("CallCachingHashingWdl") @@ -799,26 +804,26 @@ object SampleWdl { object ExpressionsInInputs extends SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s"""task echo { - | String inString - | command { - | echo $${inString} - | } - | - | output { - | String outString = read_string(stdout()) - | } - |} - | - |workflow wf { - | String a1 - | String a2 - | call echo { - | input: inString = a1 + " " + a2 - | } - | call echo as echo2 { - | input: inString = a1 + " " + echo.outString + " " + a2 - | } - |} + | String inString + | command { + | echo $${inString} + | } + | + | output { + | String outString = read_string(stdout()) + | } + |} + | + |workflow wf { + | String a1 + | String a2 + | call echo { + | input: inString = a1 + " " + a2 + | } + | call echo as echo2 { + | input: inString = a1 + " " + echo.outString + " " + a2 + | } + |} """.stripMargin override val rawInputs = Map( "wf.a1" -> WomString("hello"), @@ -830,61 +835,61 @@ object SampleWdl { override def workflowSource(runtime: String = ""): WorkflowSource = s""" task shouldCompleteFast { - | Int a - | command { - | echo "The number was: $${a}" - | } - | output { - | Int echo = a - | } - |} - | - |task shouldCompleteSlow { - | Int a - | command { - | echo "The number was: $${a}" - | # More than 1 so this should finish second - | sleep 2 - | } - | output { - | Int echo = a - | } - |} - | - |task failMeSlowly { - | Int a - | command { - | echo "The number was: $${a}" - | # Less than 2 so this should finish first - | sleep 1 - | ./NOOOOOO - | } - | output { - | Int echo = a - | } - |} - | - |task shouldNeverRun { - | Int a - | Int b - | command { - | echo "You can't fight in here - this is the war room $${a + b}" - | } - | output { - | Int echo = a - | } - |} - | - |workflow wf { - | call shouldCompleteFast as A { input: a = 5 } - | call shouldCompleteFast as B { input: a = 5 } - | - | call failMeSlowly as ohNOOOOOOOO { input: a = A.echo } - | call shouldCompleteSlow as C { input: a = B.echo } - | - | call 
shouldNeverRun as D { input: a = ohNOOOOOOOO.echo, b = C.echo } - | call shouldCompleteSlow as E { input: a = C.echo } - |} + | Int a + | command { + | echo "The number was: $${a}" + | } + | output { + | Int echo = a + | } + |} + | + |task shouldCompleteSlow { + | Int a + | command { + | echo "The number was: $${a}" + | # More than 1 so this should finish second + | sleep 2 + | } + | output { + | Int echo = a + | } + |} + | + |task failMeSlowly { + | Int a + | command { + | echo "The number was: $${a}" + | # Less than 2 so this should finish first + | sleep 1 + | ./NOOOOOO + | } + | output { + | Int echo = a + | } + |} + | + |task shouldNeverRun { + | Int a + | Int b + | command { + | echo "You can't fight in here - this is the war room $${a + b}" + | } + | output { + | Int echo = a + | } + |} + | + |workflow wf { + | call shouldCompleteFast as A { input: a = 5 } + | call shouldCompleteFast as B { input: a = 5 } + | + | call failMeSlowly as ohNOOOOOOOO { input: a = A.echo } + | call shouldCompleteSlow as C { input: a = B.echo } + | + | call shouldNeverRun as D { input: a = ohNOOOOOOOO.echo, b = C.echo } + | call shouldCompleteSlow as E { input: a = C.echo } + |} """.stripMargin val rawInputs = Map( diff --git a/core/src/test/scala/cromwell/util/TestFileUtil.scala b/core/src/test/scala/cromwell/util/TestFileUtil.scala index dbdad8a47ed..f13fb57891b 100644 --- a/core/src/test/scala/cromwell/util/TestFileUtil.scala +++ b/core/src/test/scala/cromwell/util/TestFileUtil.scala @@ -17,9 +17,8 @@ trait TestFileUtil { tempFile.write(contents) } - def createFile(name: String, dir: Path, contents: String): Path = { + def createFile(name: String, dir: Path, contents: String): Path = dir.createPermissionedDirectories()./(name).write(contents) - } } trait HashUtil extends TestFileUtil { @@ -36,6 +35,7 @@ trait HashUtil extends TestFileUtil { object ErrorOrUtil { implicit class EnhancedErrorOr[A](val value: ErrorOr[A]) extends AnyVal { + /** Extract a value from an `ErrorOr` box if the box is `Valid`, throw an exception if the box is `Invalid`. * For test code only. */ def get: A = value match { diff --git a/core/src/test/scala/cromwell/util/TryWithResourceSpec.scala b/core/src/test/scala/cromwell/util/TryWithResourceSpec.scala index 0764bdd9826..b15cdcebec7 100644 --- a/core/src/test/scala/cromwell/util/TryWithResourceSpec.scala +++ b/core/src/test/scala/cromwell/util/TryWithResourceSpec.scala @@ -11,31 +11,34 @@ class TryWithResourceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc behavior of "tryWithResource" it should "catch instantiation errors" in { - val triedMyBest = tryWithResource(() => if (1 == 1) throw InstantiationException else null) { _ => 5 } + val triedMyBest = tryWithResource(() => if (1 == 1) throw InstantiationException else null)(_ => 5) triedMyBest should be(Failure(InstantiationException)) } it should "close the closeable" in { val myCloseable = new MyCloseable - val triedMyBest = tryWithResource(() => myCloseable) { _.value } // Nothing special about 5... Just need to return something! + val triedMyBest = tryWithResource(() => myCloseable) { + _.value + } // Nothing special about 5... Just need to return something! 
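    // Illustrative note, not part of this change: tryWithResource is expected to close the
    // resource even when the body succeeds, which the assertions below verify via both the
    // returned Try and the closeable's state.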
triedMyBest should be(Success(5)) myCloseable.isClosed should be(true) } it should "catch errors and still close the closeable" in { val myCloseable = new MyCloseable - val triedMyBest = tryWithResource(() => myCloseable) { _.badValue } + val triedMyBest = tryWithResource(() => myCloseable)(_.badValue) triedMyBest should be(Failure(ReadValueException)) myCloseable.isClosed should be(true) } it should "be robust to failures in close methods" in { val myCloseable = new FailingCloseable - val triedMyBest = tryWithResource(() => myCloseable) { _.value } + val triedMyBest = tryWithResource(() => myCloseable)(_.value) triedMyBest should be(Failure(CloseCloseableException)) - val triedMyBest2 = tryWithResource(() => myCloseable) { _.badValue } + val triedMyBest2 = tryWithResource(() => myCloseable)(_.badValue) triedMyBest2 match { - case Failure(ReadValueException) => ReadValueException.getSuppressed.headOption should be(Some(CloseCloseableException)) + case Failure(ReadValueException) => + ReadValueException.getSuppressed.headOption should be(Some(CloseCloseableException)) case x => fail(s"$x was not equal to $ReadValueException") } } @@ -47,9 +50,8 @@ class MyCloseable extends AutoCloseable { val value = if (isClosed) throw ReadValueException else 5 // Ensures we aren't closed when .value is called def badValue = throw ReadValueException - override def close() = { + override def close() = isClosed = true - } } class FailingCloseable extends MyCloseable { diff --git a/core/src/test/scala/cromwell/util/WomMocks.scala b/core/src/test/scala/cromwell/util/WomMocks.scala index 2dc75385658..07111be17ce 100644 --- a/core/src/test/scala/cromwell/util/WomMocks.scala +++ b/core/src/test/scala/cromwell/util/WomMocks.scala @@ -6,44 +6,71 @@ import wom.RuntimeAttributes import wom.callable.Callable.OutputDefinition import wom.callable.{CallableTaskDefinition, CommandTaskDefinition, WorkflowDefinition} import wom.graph.GraphNodePort.{GraphNodeOutputPort, OutputPort} -import wom.graph.{Graph, CommandCallNode, WomIdentifier, WorkflowCallNode} +import wom.graph.{CommandCallNode, Graph, WomIdentifier, WorkflowCallNode} import wom.types.{WomStringType, WomType} import wom.values.WomValue object WomMocks { - val EmptyTaskDefinition = CallableTaskDefinition("emptyTask", Function.const(List.empty.validNel), RuntimeAttributes(Map.empty), - Map.empty, Map.empty, List.empty, List.empty, Set.empty, Map.empty, sourceLocation = None) + val EmptyTaskDefinition = CallableTaskDefinition( + "emptyTask", + Function.const(List.empty.validNel), + RuntimeAttributes(Map.empty), + Map.empty, + Map.empty, + List.empty, + List.empty, + Set.empty, + Map.empty, + sourceLocation = None + ) val EmptyWorkflowDefinition = mockWorkflowDefinition("emptyWorkflow") - def mockTaskCall(identifier: WomIdentifier, definition: CommandTaskDefinition = EmptyTaskDefinition) = { - CommandCallNode(identifier, definition, Set.empty, List.empty, Set.empty, (_, localName) => WomIdentifier(localName = localName), None) - } + def mockTaskCall(identifier: WomIdentifier, definition: CommandTaskDefinition = EmptyTaskDefinition) = + CommandCallNode(identifier, + definition, + Set.empty, + List.empty, + Set.empty, + (_, localName) => WomIdentifier(localName = localName), + None + ) - def mockWorkflowCall(identifier: WomIdentifier, definition: WorkflowDefinition = EmptyWorkflowDefinition) = { - WorkflowCallNode(identifier, definition, Set.empty, List.empty, Set.empty, (_, localName) => identifier.combine(localName), None) - } + def mockWorkflowCall(identifier: 
WomIdentifier, definition: WorkflowDefinition = EmptyWorkflowDefinition) = + WorkflowCallNode(identifier, + definition, + Set.empty, + List.empty, + Set.empty, + (_, localName) => identifier.combine(localName), + None + ) - def mockWorkflowDefinition(name: String) = { + def mockWorkflowDefinition(name: String) = WorkflowDefinition(name, Graph(Set.empty), Map.empty, Map.empty, None) - } - def mockTaskDefinition(name: String) = { - CallableTaskDefinition(name, Function.const(List.empty.validNel), RuntimeAttributes(Map.empty), - Map.empty, Map.empty, List.empty, List.empty, Set.empty, Map.empty, sourceLocation = None) - } + def mockTaskDefinition(name: String) = + CallableTaskDefinition( + name, + Function.const(List.empty.validNel), + RuntimeAttributes(Map.empty), + Map.empty, + Map.empty, + List.empty, + List.empty, + Set.empty, + Map.empty, + sourceLocation = None + ) - def mockOutputPort(name: String, womType: WomType = WomStringType): OutputPort = { + def mockOutputPort(name: String, womType: WomType = WomStringType): OutputPort = GraphNodeOutputPort(WomIdentifier(name, name), womType, null) - } - def mockOutputPort(outputDefinition: OutputDefinition): OutputPort = { + def mockOutputPort(outputDefinition: OutputDefinition): OutputPort = GraphNodeOutputPort(WomIdentifier(outputDefinition.name, outputDefinition.name), outputDefinition.womType, null) - } - def mockOutputExpectations(outputs: Map[String, WomValue]): CallOutputs = { - CallOutputs(outputs.map { - case (key, value) => WomMocks.mockOutputPort(key, value.womType) -> value + def mockOutputExpectations(outputs: Map[String, WomValue]): CallOutputs = + CallOutputs(outputs.map { case (key, value) => + WomMocks.mockOutputPort(key, value.womType) -> value }) - } } diff --git a/core/src/test/scala/cromwell/util/WomValueJsonFormatterSpec.scala b/core/src/test/scala/cromwell/util/WomValueJsonFormatterSpec.scala index 1c4cc15dfdd..69739d9868f 100644 --- a/core/src/test/scala/cromwell/util/WomValueJsonFormatterSpec.scala +++ b/core/src/test/scala/cromwell/util/WomValueJsonFormatterSpec.scala @@ -4,7 +4,7 @@ import common.assertion.CromwellTimeoutSpec import cromwell.util.JsonFormatting.WomValueJsonFormatter.WomValueJsonFormat import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import spray.json.{JsObject, enrichString} +import spray.json.{enrichString, JsObject} import wom.types._ import wom.values._ @@ -15,7 +15,7 @@ class WomValueJsonFormatterSpec extends AnyFlatSpec with CromwellTimeoutSpec wit it should "write WdlPair to left/right structured JsObject" in { val left = "sanders" val right = Vector("rubio", "carson", "cruz") - val wdlPair = WomPair(WomString(left), WomArray(WomArrayType(WomStringType), right.map { WomString(_) })) + val wdlPair = WomPair(WomString(left), WomArray(WomArrayType(WomStringType), right.map(WomString(_)))) val ExpectedJson: JsObject = """|{ | "left": "sanders", diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala index 3b7be2b38bd..2dbe3fa6c27 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala @@ -4,7 +4,6 @@ import common.util.VersionUtil import drs.localizer.CommandLineParser.AccessTokenStrategy._ import drs.localizer.CommandLineParser.Usage - class CommandLineParser extends scopt.OptionParser[CommandLineArguments](Usage) { lazy val 
localizerVersion: String = VersionUtil.getVersion("cromwell-drs-localizer") @@ -14,31 +13,30 @@ class CommandLineParser extends scopt.OptionParser[CommandLineArguments](Usage) head("cromwell-drs-localizer", localizerVersion) - arg[String]("drs-object-id").text("DRS object ID").optional(). - action((s, c) => - c.copy(drsObject = Option(s))) - arg[String]("container-path").text("Container path").optional(). - action((s, c) => - c.copy(containerPath = Option(s))) - arg[String]("requester-pays-project").text(s"Requester pays project (only valid with '$Google' auth strategy)").optional(). - action((s, c) => - c.copy(googleRequesterPaysProject = Option(s))) - opt[String]('m', "manifest-path").text("File path of manifest containing multiple files to localize"). - action((s, c) => - c.copy(manifestPath = Option(s))) - opt[String]('r', "requester-pays-project").text(s"Requester pays project (only valid with '$Google' auth strategy)").optional(). - action((s, c) => { + arg[String]("drs-object-id").text("DRS object ID").optional().action((s, c) => c.copy(drsObject = Option(s))) + arg[String]("container-path").text("Container path").optional().action((s, c) => c.copy(containerPath = Option(s))) + arg[String]("requester-pays-project") + .text(s"Requester pays project (only valid with '$Google' auth strategy)") + .optional() + .action((s, c) => c.copy(googleRequesterPaysProject = Option(s))) + opt[String]('m', "manifest-path") + .text("File path of manifest containing multiple files to localize") + .action((s, c) => c.copy(manifestPath = Option(s))) + opt[String]('r', "requester-pays-project") + .text(s"Requester pays project (only valid with '$Google' auth strategy)") + .optional() + .action { (s, c) => c.copy( googleRequesterPaysProject = Option(s), googleRequesterPaysProjectConflict = c.googleRequesterPaysProject.exists(_ != s) ) - }) - opt[String]('t', "access-token-strategy").text(s"Access token strategy, must be one of '$Azure' or '$Google' (default '$Google')"). - action((s, c) => - c.copy(accessTokenStrategy = Option(s.toLowerCase()))) - opt[String]('i', "identity-client-id").text("Azure identity client id"). - action((s, c) => - c.copy(azureIdentityClientId = Option(s))) + } + opt[String]('t', "access-token-strategy") + .text(s"Access token strategy, must be one of '$Azure' or '$Google' (default '$Google')") + .action((s, c) => c.copy(accessTokenStrategy = Option(s.toLowerCase()))) + opt[String]('i', "identity-client-id") + .text("Azure identity client id") + .action((s, c) => c.copy(azureIdentityClientId = Option(s))) checkConfig(c => if (c.googleRequesterPaysProjectConflict) failure("Requester pays project differs between positional argument and option flag") @@ -66,6 +64,7 @@ class CommandLineParser extends scopt.OptionParser[CommandLineArguments](Usage) } object CommandLineParser { + /** * These access token strategies are named simplistically as there is currently only one access token strategy being * used for each of these cloud vendors. 
But it is certainly possible that multiple strategies could come into use @@ -95,4 +94,5 @@ case class CommandLineArguments(accessTokenStrategy: Option[String] = Option(Goo googleRequesterPaysProject: Option[String] = None, azureIdentityClientId: Option[String] = None, manifestPath: Option[String] = None, - googleRequesterPaysProjectConflict: Boolean = false) + googleRequesterPaysProjectConflict: Boolean = false +) diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerDrsPathResolver.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerDrsPathResolver.scala index 944d418acbb..94cc30e103c 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerDrsPathResolver.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerDrsPathResolver.scala @@ -3,7 +3,7 @@ package drs.localizer import cloud.nio.impl.drs.{DrsConfig, DrsCredentials, DrsPathResolver} import common.validation.ErrorOr.ErrorOr - -class DrsLocalizerDrsPathResolver(drsConfig: DrsConfig, drsCredentials: DrsCredentials) extends DrsPathResolver(drsConfig) { +class DrsLocalizerDrsPathResolver(drsConfig: DrsConfig, drsCredentials: DrsCredentials) + extends DrsPathResolver(drsConfig) { override def getAccessToken: ErrorOr[String] = drsCredentials.getAccessToken } diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerMain.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerMain.scala index 3d99538f614..29ad9d25bd1 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerMain.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/DrsLocalizerMain.scala @@ -43,16 +43,19 @@ object DrsLocalizerMain extends IOApp with StrictLogging { // Default retry parameters for resolving a DRS url val defaultNumRetries: Int = 5 - val defaultBackoff: CloudNioBackoff = CloudNioSimpleExponentialBackoff( - initialInterval = 1 seconds, maxInterval = 60 seconds, multiplier = 2) + val defaultBackoff: CloudNioBackoff = + CloudNioSimpleExponentialBackoff(initialInterval = 1 seconds, maxInterval = 60 seconds, multiplier = 2) val defaultDownloaderFactory: DownloaderFactory = new DownloaderFactory { - override def buildGcsUriDownloader(gcsPath: String, serviceAccountJsonOption: Option[String], downloadLoc: String, requesterPaysProjectOption: Option[String]): Downloader = + override def buildGcsUriDownloader(gcsPath: String, + serviceAccountJsonOption: Option[String], + downloadLoc: String, + requesterPaysProjectOption: Option[String] + ): Downloader = GcsUriDownloader(gcsPath, serviceAccountJsonOption, downloadLoc, requesterPaysProjectOption) - override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = { + override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = BulkAccessUrlDownloader(urlsToDownload) - } } private def printUsage: IO[ExitCode] = { @@ -64,52 +67,55 @@ object DrsLocalizerMain extends IOApp with StrictLogging { * Helper function to read a CSV file as pairs of drsURL -> local download destination. 
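   * For illustration only (hypothetical values, not part of this change), a two-row manifest:
   * {{{
   * drs://drs.example.org/aaaa-bbbb, /cromwell_root/inputs/sample1.bam
   * drs://drs.example.org/cccc-dddd, /cromwell_root/inputs/sample2.bam
   * }}}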
* @param csvManifestPath Path to a CSV file where each row is something like: drs://asdf.ghj, path/to/my/directory */ - def loadCSVManifest(csvManifestPath: String): IO[List[UnresolvedDrsUrl]] = { + def loadCSVManifest(csvManifestPath: String): IO[List[UnresolvedDrsUrl]] = IO { val openFile = new File(csvManifestPath) val csvParser = CSVParser.parse(openFile, Charset.defaultCharset(), CSVFormat.DEFAULT) - try{ + try csvParser.getRecords.asScala.map(record => UnresolvedDrsUrl(record.get(0), record.get(1))).toList - } finally { + finally csvParser.close() - } } - } - def runLocalizer(commandLineArguments: CommandLineArguments, drsCredentials: DrsCredentials) : IO[ExitCode] = { - val urlList = (commandLineArguments.manifestPath, commandLineArguments.drsObject, commandLineArguments.containerPath) match { - case (Some(manifestPath), _, _) => { - loadCSVManifest(manifestPath) - } - case (_, Some(drsObject), Some(containerPath)) => { - IO.pure(List(UnresolvedDrsUrl(drsObject, containerPath))) - } - case(_,_,_) => { - throw new RuntimeException("Illegal command line arguments supplied to drs localizer.") + def runLocalizer(commandLineArguments: CommandLineArguments, drsCredentials: DrsCredentials): IO[ExitCode] = { + val urlList = + (commandLineArguments.manifestPath, commandLineArguments.drsObject, commandLineArguments.containerPath) match { + case (Some(manifestPath), _, _) => + loadCSVManifest(manifestPath) + case (_, Some(drsObject), Some(containerPath)) => + IO.pure(List(UnresolvedDrsUrl(drsObject, containerPath))) + case (_, _, _) => + throw new RuntimeException("Illegal command line arguments supplied to drs localizer.") } - } - val main = new DrsLocalizerMain(urlList, defaultDownloaderFactory, drsCredentials, commandLineArguments.googleRequesterPaysProject) + val main = new DrsLocalizerMain(urlList, + defaultDownloaderFactory, + drsCredentials, + commandLineArguments.googleRequesterPaysProject + ) main.resolveAndDownload().map(_.exitCode) - } + } /** * Helper function to decide which downloader to use based on data from the DRS response. * Throws a runtime exception if the DRS response is invalid. 
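   * For illustration only (not part of this change; argument values are hypothetical):
   * {{{
   * toValidatedUriType(Some(AccessUrl("https://signed.example.org/obj", None)), None) // URIType.ACCESS
   * toValidatedUriType(None, Some("gs://my-bucket/obj"))                              // URIType.GCS
   * toValidatedUriType(None, None)                                                    // throws RuntimeException
   * }}}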
*/ - def toValidatedUriType(accessUrl: Option[AccessUrl], gsUri: Option[String]): URIType = { + def toValidatedUriType(accessUrl: Option[AccessUrl], gsUri: Option[String]): URIType = // if both are provided, prefer using access urls (accessUrl, gsUri) match { case (Some(_), _) => - if(!accessUrl.get.url.startsWith("https://")) { throw new RuntimeException("Resolved Access URL does not start with https://")} + if (!accessUrl.get.url.startsWith("https://")) { + throw new RuntimeException("Resolved Access URL does not start with https://") + } URIType.ACCESS case (_, Some(_)) => - if(!gsUri.get.startsWith("gs://")) { throw new RuntimeException("Resolved Google URL does not start with gs://")} + if (!gsUri.get.startsWith("gs://")) { + throw new RuntimeException("Resolved Google URL does not start with gs://") + } URIType.GCS case (_, _) => throw new RuntimeException("DRS response did not contain any URLs") } - } - } +} object URIType extends Enumeration { type URIType = Value @@ -119,7 +125,8 @@ object URIType extends Enumeration { class DrsLocalizerMain(toResolveAndDownload: IO[List[UnresolvedDrsUrl]], downloaderFactory: DownloaderFactory, drsCredentials: DrsCredentials, - requesterPaysProjectIdOption: Option[String]) extends StrictLogging { + requesterPaysProjectIdOption: Option[String] +) extends StrictLogging { /** * This will: @@ -132,18 +139,17 @@ class DrsLocalizerMain(toResolveAndDownload: IO[List[UnresolvedDrsUrl]], val downloadResults = buildDownloaders().flatMap { downloaderList => downloaderList.map(downloader => downloader.download).traverse(identity) } - downloadResults.map{list => + downloadResults.map { list => list.find(result => result != DownloadSuccess).getOrElse(DownloadSuccess) } } - def getDrsPathResolver: IO[DrsLocalizerDrsPathResolver] = { + def getDrsPathResolver: IO[DrsLocalizerDrsPathResolver] = IO { val drsConfig = DrsConfig.fromEnv(sys.env) logger.info(s"Using ${drsConfig.drsResolverUrl} to resolve DRS Objects") new DrsLocalizerDrsPathResolver(drsConfig, drsCredentials) } - } /** * After resolving all of the URLs, this sorts them into an "Access" or "GCS" bucket. @@ -151,80 +157,97 @@ class DrsLocalizerMain(toResolveAndDownload: IO[List[UnresolvedDrsUrl]], * All google URLs will be downloaded individually in their own google downloader. * @return List of all downloaders required to fulfill the request. 
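   * Illustrative example (hypothetical counts, not part of this change): three resolved ACCESS
   * urls and two resolved GCS urls yield three downloaders in total, one bulk access downloader
   * covering all three ACCESS urls plus one GCS downloader per GCS url.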
*/ - def buildDownloaders() : IO[List[Downloader]] = { + def buildDownloaders(): IO[List[Downloader]] = resolveUrls(toResolveAndDownload).map { pendingDownloads => val accessUrls = pendingDownloads.filter(url => url.uriType == URIType.ACCESS) val googleUrls = pendingDownloads.filter(url => url.uriType == URIType.GCS) - val bulkDownloader: List[Downloader] = if (accessUrls.isEmpty) List() else List(buildBulkAccessUrlDownloader(accessUrls)) + val bulkDownloader: List[Downloader] = + if (accessUrls.isEmpty) List() else List(buildBulkAccessUrlDownloader(accessUrls)) val googleDownloaders: List[Downloader] = if (googleUrls.isEmpty) List() else buildGoogleDownloaders(googleUrls) bulkDownloader ++ googleDownloaders } - } - def buildGoogleDownloaders(resolvedGoogleUrls: List[ResolvedDrsUrl]) : List[Downloader] = { - resolvedGoogleUrls.map{url=> + def buildGoogleDownloaders(resolvedGoogleUrls: List[ResolvedDrsUrl]): List[Downloader] = + resolvedGoogleUrls.map { url => downloaderFactory.buildGcsUriDownloader( gcsPath = url.drsResponse.gsUri.get, serviceAccountJsonOption = url.drsResponse.googleServiceAccount.map(_.data.spaces2), downloadLoc = url.downloadDestinationPath, - requesterPaysProjectOption = requesterPaysProjectIdOption) + requesterPaysProjectOption = requesterPaysProjectIdOption + ) } - } - def buildBulkAccessUrlDownloader(resolvedUrls: List[ResolvedDrsUrl]) : Downloader = { + def buildBulkAccessUrlDownloader(resolvedUrls: List[ResolvedDrsUrl]): Downloader = downloaderFactory.buildBulkAccessUrlDownloader(resolvedUrls) - } /** * Runs a synchronous HTTP request to resolve the provided DRS URL with the provided resolver. */ - def resolveSingleUrl(resolverObject: DrsLocalizerDrsPathResolver, drsUrlToResolve: UnresolvedDrsUrl): IO[ResolvedDrsUrl] = { - val fields = NonEmptyList.of(DrsResolverField.GsUri, DrsResolverField.GoogleServiceAccount, DrsResolverField.AccessUrl, DrsResolverField.Hashes) + def resolveSingleUrl(resolverObject: DrsLocalizerDrsPathResolver, + drsUrlToResolve: UnresolvedDrsUrl + ): IO[ResolvedDrsUrl] = { + val fields = NonEmptyList.of(DrsResolverField.GsUri, + DrsResolverField.GoogleServiceAccount, + DrsResolverField.AccessUrl, + DrsResolverField.Hashes + ) val drsResponse = resolverObject.resolveDrs(drsUrlToResolve.drsUrl, fields) - drsResponse.map(resp => ResolvedDrsUrl(resp, drsUrlToResolve.downloadDestinationPath, toValidatedUriType(resp.accessUrl, resp.gsUri))) + drsResponse.map(resp => + ResolvedDrsUrl(resp, drsUrlToResolve.downloadDestinationPath, toValidatedUriType(resp.accessUrl, resp.gsUri)) + ) } - - val defaultBackoff: CloudNioBackoff = CloudNioSimpleExponentialBackoff( - initialInterval = 10 seconds, maxInterval = 60 seconds, multiplier = 2) + val defaultBackoff: CloudNioBackoff = + CloudNioSimpleExponentialBackoff(initialInterval = 10 seconds, maxInterval = 60 seconds, multiplier = 2) /** * Runs synchronous HTTP requests to resolve all the DRS urls. 
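   * (Illustrative note, not part of this change: the per-url effects are combined with
   * `traverse`, so the urls resolve one at a time and the first url whose retries are
   * exhausted fails the whole batch.)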
*/ - def resolveUrls(unresolvedUrls: IO[List[UnresolvedDrsUrl]]): IO[List[ResolvedDrsUrl]] = { + def resolveUrls(unresolvedUrls: IO[List[UnresolvedDrsUrl]]): IO[List[ResolvedDrsUrl]] = unresolvedUrls.flatMap { unresolvedList => getDrsPathResolver.flatMap { resolver => - unresolvedList.map { unresolvedUrl => - resolveWithRetries(resolver, unresolvedUrl, defaultNumRetries, Option(defaultBackoff)) - }.traverse(identity) + unresolvedList + .map { unresolvedUrl => + resolveWithRetries(resolver, unresolvedUrl, defaultNumRetries, Option(defaultBackoff)) + } + .traverse(identity) } } - } def resolveWithRetries(resolverObject: DrsLocalizerDrsPathResolver, drsUrlToResolve: UnresolvedDrsUrl, resolutionRetries: Int, backoff: Option[CloudNioBackoff], - resolutionAttempt: Int = 0) : IO[ResolvedDrsUrl] = { + resolutionAttempt: Int = 0 + ): IO[ResolvedDrsUrl] = { - def maybeRetryForResolutionFailure(t: Throwable): IO[ResolvedDrsUrl] = { + def maybeRetryForResolutionFailure(t: Throwable): IO[ResolvedDrsUrl] = if (resolutionAttempt < resolutionRetries) { backoff foreach { b => Thread.sleep(b.backoffMillis) } - logger.warn(s"Attempting retry $resolutionAttempt of $resolutionRetries drs resolution retries to resolve ${drsUrlToResolve.drsUrl}", t) - resolveWithRetries(resolverObject, drsUrlToResolve, resolutionRetries, backoff map { _.next }, resolutionAttempt+1) + logger.warn( + s"Attempting retry $resolutionAttempt of $resolutionRetries drs resolution retries to resolve ${drsUrlToResolve.drsUrl}", + t + ) + resolveWithRetries(resolverObject, + drsUrlToResolve, + resolutionRetries, + backoff map { _.next }, + resolutionAttempt + 1 + ) } else { - IO.raiseError(new RuntimeException(s"Exhausted $resolutionRetries resolution retries to resolve $drsUrlToResolve.drsUrl", t)) + IO.raiseError( + new RuntimeException(s"Exhausted $resolutionRetries resolution retries to resolve $drsUrlToResolve.drsUrl", t) + ) } - } resolveSingleUrl(resolverObject, drsUrlToResolve).redeemWith( recover = maybeRetryForResolutionFailure, bind = { - case f: FatalRetryDisposition => - IO.raiseError(new RuntimeException(s"Fatal error resolving DRS URL: $f")) - case _: RegularRetryDisposition => - resolveWithRetries(resolverObject, drsUrlToResolve, resolutionRetries, backoff, resolutionAttempt+1) - case o => IO.pure(o) - }) + case f: FatalRetryDisposition => + IO.raiseError(new RuntimeException(s"Fatal error resolving DRS URL: $f")) + case _: RegularRetryDisposition => + resolveWithRetries(resolverObject, drsUrlToResolve, resolutionRetries, backoff, resolutionAttempt + 1) + case o => IO.pure(o) + } + ) } } - diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala index f47265d7730..00957083bc9 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/BulkAccessUrlDownloader.scala @@ -1,7 +1,7 @@ package drs.localizer.downloaders import cats.effect.{ExitCode, IO} -import cloud.nio.impl.drs.{AccessUrl} +import cloud.nio.impl.drs.AccessUrl import com.typesafe.scalalogging.StrictLogging import java.nio.charset.StandardCharsets @@ -9,19 +9,21 @@ import java.nio.file.{Files, Path, Paths} import scala.sys.process.{Process, ProcessLogger} import scala.util.matching.Regex import drs.localizer.ResolvedDrsUrl -import spray.json.DefaultJsonProtocol.{StringJsonFormat, listFormat, mapFormat} 
+import spray.json.DefaultJsonProtocol.{listFormat, mapFormat, StringJsonFormat} import spray.json._ case class GetmResult(returnCode: Int, stderr: String) + /** * Getm is a python tool that is used to download resolved DRS uris quickly and in parallel. * This class builds a getm-manifest.json file that it uses for input, and builds/executes a shell command * to invoke the Getm tool, which is expected to already be installed in the local environment. * @param resolvedUrls */ -case class BulkAccessUrlDownloader(resolvedUrls : List[ResolvedDrsUrl]) extends Downloader with StrictLogging { +case class BulkAccessUrlDownloader(resolvedUrls: List[ResolvedDrsUrl]) extends Downloader with StrictLogging { val getmManifestPath: Path = Paths.get("getm-manifest.json") + /** * Write a json manifest to disk that looks like: * // [ @@ -42,39 +44,42 @@ case class BulkAccessUrlDownloader(resolvedUrls : List[ResolvedDrsUrl]) extends * @param resolvedUrls * @return Filepath of a getm-manifest.json that Getm can use to download multiple files in parallel. */ - def generateJsonManifest(resolvedUrls : List[ResolvedDrsUrl]): IO[Path] = { - def resolvedUrlToJsonMap(resolvedUrl: ResolvedDrsUrl): Map[String,String] = { + def generateJsonManifest(resolvedUrls: List[ResolvedDrsUrl]): IO[Path] = { + def resolvedUrlToJsonMap(resolvedUrl: ResolvedDrsUrl): Map[String, String] = { val accessUrl: AccessUrl = resolvedUrl.drsResponse.accessUrl.getOrElse(AccessUrl("missing", None)) - resolvedUrl.drsResponse.hashes.map{_ => - val checksum = GetmChecksum(resolvedUrl.drsResponse.hashes, accessUrl).value.getOrElse("error_calculating_checksum") - val checksumAlgorithm = GetmChecksum(resolvedUrl.drsResponse.hashes, accessUrl).getmAlgorithm - Map( - ("url", accessUrl.url), - ("filepath", resolvedUrl.downloadDestinationPath), - ("checksum", checksum), - ("checksum-algorithm", checksumAlgorithm) + resolvedUrl.drsResponse.hashes + .map { _ => + val checksum = + GetmChecksum(resolvedUrl.drsResponse.hashes, accessUrl).value.getOrElse("error_calculating_checksum") + val checksumAlgorithm = GetmChecksum(resolvedUrl.drsResponse.hashes, accessUrl).getmAlgorithm + Map( + ("url", accessUrl.url), + ("filepath", resolvedUrl.downloadDestinationPath), + ("checksum", checksum), + ("checksum-algorithm", checksumAlgorithm) + ) + } + .getOrElse( + Map( + ("url", accessUrl.url), + ("filepath", resolvedUrl.downloadDestinationPath) + ) ) - }.getOrElse(Map( - ("url", accessUrl.url), - ("filepath", resolvedUrl.downloadDestinationPath) - )) } val jsonArray: String = resolvedUrls.map(resolved => resolvedUrlToJsonMap(resolved)).toJson.prettyPrint IO(Files.write(getmManifestPath, jsonArray.getBytes(StandardCharsets.UTF_8))) } - def deleteJsonManifest() = { + def deleteJsonManifest() = Files.deleteIfExists(getmManifestPath) - } - def generateGetmCommand(pathToMainfestJson : Path) : String = { + def generateGetmCommand(pathToMainfestJson: Path): String = s"""getm --manifest ${pathToMainfestJson.toString}""" - } - def runGetm: IO[GetmResult] = { - generateJsonManifest(resolvedUrls).flatMap{ manifestPath => + def runGetm: IO[GetmResult] = + generateJsonManifest(resolvedUrls).flatMap { manifestPath => val script = generateGetmCommand(manifestPath) - val copyCommand : Seq[String] = Seq("bash", "-c", script) + val copyCommand: Seq[String] = Seq("bash", "-c", script) logger.info(script) val copyProcess = Process(copyCommand) val stderr = new StringBuilder() @@ -84,7 +89,6 @@ case class BulkAccessUrlDownloader(resolvedUrls : List[ResolvedDrsUrl]) extends 
logger.info(stderr.toString().trim()) IO(GetmResult(returnCode, stderr.toString().trim())) } - } override def download: IO[DownloadResult] = { // We don't want to log the unmasked signed URL here. On a PAPI backend this log will end up under the user's @@ -94,7 +98,7 @@ case class BulkAccessUrlDownloader(resolvedUrls : List[ResolvedDrsUrl]) extends runGetm map toDownloadResult } - def toDownloadResult(getmResult: GetmResult): DownloadResult = { + def toDownloadResult(getmResult: GetmResult): DownloadResult = getmResult match { case GetmResult(0, stderr) if stderr.isEmpty => DownloadSuccess @@ -122,10 +126,9 @@ case class BulkAccessUrlDownloader(resolvedUrls : List[ResolvedDrsUrl]) extends UnrecognizedRetryableDownloadFailure(ExitCode(rc)) } } - } } -object BulkAccessUrlDownloader{ +object BulkAccessUrlDownloader { type Hashes = Option[Map[String, String]] val ChecksumFailureMessage: Regex = raw""".*AssertionError: Checksum failed!.*""".r diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/DownloaderFactory.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/DownloaderFactory.scala index 6c7f27e8a6e..c35a2b1634e 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/DownloaderFactory.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/DownloaderFactory.scala @@ -3,10 +3,11 @@ package drs.localizer.downloaders import drs.localizer.ResolvedDrsUrl trait DownloaderFactory { - def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]) : Downloader + def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader def buildGcsUriDownloader(gcsPath: String, serviceAccountJsonOption: Option[String], downloadLoc: String, - requesterPaysProjectOption: Option[String]): Downloader + requesterPaysProjectOption: Option[String] + ): Downloader } diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GcsUriDownloader.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GcsUriDownloader.scala index 8991e79f5fd..74f5bc64621 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GcsUriDownloader.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GcsUriDownloader.scala @@ -12,15 +12,16 @@ import scala.sys.process.{Process, ProcessLogger} case class GcsUriDownloader(gcsUrl: String, serviceAccountJson: Option[String], downloadLoc: String, - requesterPaysProjectIdOption: Option[String]) extends Downloader with StrictLogging { + requesterPaysProjectIdOption: Option[String] +) extends Downloader + with StrictLogging { val defaultNumRetries: Int = 5 - val defaultBackoff: CloudNioBackoff = CloudNioSimpleExponentialBackoff( - initialInterval = 1 seconds, maxInterval = 60 seconds, multiplier = 2) + val defaultBackoff: CloudNioBackoff = + CloudNioSimpleExponentialBackoff(initialInterval = 1 seconds, maxInterval = 60 seconds, multiplier = 2) - override def download: IO[DownloadResult] = { + override def download: IO[DownloadResult] = downloadWithRetries(defaultNumRetries, Option(defaultBackoff)) - } def runDownloadCommand: IO[DownloadResult] = { @@ -45,27 +46,30 @@ case class GcsUriDownloader(gcsUrl: String, // run the multiple bash script to download file and log stream sent to stdout and stderr using ProcessLogger val returnCode = copyProcess ! 
ProcessLogger(logger.underlying.info, logger.underlying.error) - val result = if (returnCode == 0) DownloadSuccess else RecognizedRetryableDownloadFailure(exitCode = ExitCode(returnCode)) + val result = + if (returnCode == 0) DownloadSuccess else RecognizedRetryableDownloadFailure(exitCode = ExitCode(returnCode)) IO.pure(result) } def downloadWithRetries(downloadRetries: Int, backoff: Option[CloudNioBackoff], - downloadAttempt: Int = 0): IO[DownloadResult] = - { + downloadAttempt: Int = 0 + ): IO[DownloadResult] = { - def maybeRetryForDownloadFailure(t: Throwable): IO[DownloadResult] = { + def maybeRetryForDownloadFailure(t: Throwable): IO[DownloadResult] = if (downloadAttempt < downloadRetries) { backoff foreach { b => Thread.sleep(b.backoffMillis) } logger.warn(s"Attempting download retry $downloadAttempt of $downloadRetries for a GCS url", t) - downloadWithRetries(downloadRetries, backoff map { - _.next - }, downloadAttempt + 1) + downloadWithRetries(downloadRetries, + backoff map { + _.next + }, + downloadAttempt + 1 + ) } else { IO.raiseError(new RuntimeException(s"Exhausted $downloadRetries resolution retries to download GCS file", t)) } - } runDownloadCommand.redeemWith( recover = maybeRetryForDownloadFailure, @@ -73,12 +77,13 @@ case class GcsUriDownloader(gcsUrl: String, case s: DownloadSuccess.type => IO.pure(s) case _: RecognizedRetryableDownloadFailure => - downloadWithRetries(downloadRetries, backoff, downloadAttempt+1) + downloadWithRetries(downloadRetries, backoff, downloadAttempt + 1) case _: UnrecognizedRetryableDownloadFailure => - downloadWithRetries(downloadRetries, backoff, downloadAttempt+1) + downloadWithRetries(downloadRetries, backoff, downloadAttempt + 1) case _ => - downloadWithRetries(downloadRetries, backoff, downloadAttempt+1) - }) + downloadWithRetries(downloadRetries, backoff, downloadAttempt + 1) + } + ) } /** @@ -88,7 +93,7 @@ case class GcsUriDownloader(gcsUrl: String, def gcsCopyCommand(flag: String = ""): String = s"gsutil $flag cp $gcsUrl $downloadLoc" - def setServiceAccount(): String = { + def setServiceAccount(): String = saJsonPathOption match { case Some(saJsonPath) => s"""# Set gsutil to use the service account returned from the DRS Resolver @@ -103,9 +108,8 @@ case class GcsUriDownloader(gcsUrl: String, |""".stripMargin case None => "" } - } - def recoverWithRequesterPays(): String = { + def recoverWithRequesterPays(): String = requesterPaysProjectIdOption match { case Some(userProject) => s"""if [ "$$RC_GSUTIL" != "0" ]; then @@ -119,7 +123,6 @@ case class GcsUriDownloader(gcsUrl: String, |""".stripMargin case None => "" } - } // bash to download the GCS file using gsutil s"""set -euo pipefail @@ -145,5 +148,5 @@ case class GcsUriDownloader(gcsUrl: String, } object GcsUriDownloader { - private final val RequesterPaysErrorMsg = "requester pays bucket but no user project" + final private val RequesterPaysErrorMsg = "requester pays bucket but no user project" } diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GetmChecksum.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GetmChecksum.scala index 2ca1bd3d2e3..a72459cdb7b 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GetmChecksum.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/downloaders/GetmChecksum.scala @@ -8,12 +8,11 @@ import org.apache.commons.codec.binary.Base64.encodeBase64String import org.apache.commons.codec.binary.Hex.decodeHex import org.apache.commons.text.StringEscapeUtils - sealed trait 
GetmChecksum { def getmAlgorithm: String def rawValue: String def value: ErrorOr[String] = rawValue.validNel - def args: ErrorOr[String] = { + def args: ErrorOr[String] = // The value for `--checksum-algorithm` is constrained by the algorithm names in the `sealed` hierarchy of // `GetmChecksum`, but the value for `--checksum` is largely a function of data returned by the DRS server. // Shell escape this to avoid injection. @@ -21,7 +20,6 @@ sealed trait GetmChecksum { val escapedValue = StringEscapeUtils.escapeXSI(v) s"--checksum-algorithm '$getmAlgorithm' --checksum $escapedValue" } - } } case class Md5(override val rawValue: String) extends GetmChecksum { @@ -33,7 +31,8 @@ case class Crc32c(override val rawValue: String) extends GetmChecksum { // The DRS spec says that all hash values should be hex strings, // but getm expects crc32c values to be base64. override def value: ErrorOr[String] = - GetmChecksum.validateHex(rawValue) + GetmChecksum + .validateHex(rawValue) .map(decodeHex) .map(encodeBase64String) @@ -52,7 +51,7 @@ case class Unsupported(override val rawValue: String) extends GetmChecksum { } object GetmChecksum { - def apply(hashes: Hashes, accessUrl: AccessUrl): GetmChecksum = { + def apply(hashes: Hashes, accessUrl: AccessUrl): GetmChecksum = hashes match { case Some(hashes) if hashes.nonEmpty => // `hashes` is keyed by the DRS Resolver names for these hash algorithms, which in turn are the forwarded DRS @@ -61,8 +60,7 @@ object GetmChecksum { // but all of the other algorithm names currently differ between DRS providers and `getm`. if (hashes.contains("md5")) { Md5(hashes("md5")) - } - else if (hashes.contains("crc32c")) { + } else if (hashes.contains("crc32c")) { Crc32c(hashes("crc32c")) } // etags could be anything; only ask `getm` to check s3 etags if this actually looks like an s3 signed url. @@ -81,7 +79,6 @@ object GetmChecksum { } case _ => Null // None or an empty hashes map. 
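      // Worked example (illustrative; mirrors GetmChecksumSpec below): for
      // hashes = Map("etag" -> "abcdefg", "crc32c" -> "012345") the crc32c
      // branch is selected, and Crc32c("012345").value hex-decodes the string
      // to the bytes 0x01 0x23 0x45 and base64-encodes them to "ASNF", so the
      // resulting getm args are: --checksum-algorithm 'gs_crc32c' --checksum ASNF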
} - } def validateHex(s: String): ErrorOr[String] = { val trimmed = s.trim diff --git a/cromwell-drs-localizer/src/test/scala/drs/localizer/CommandLineParserSpec.scala b/cromwell-drs-localizer/src/test/scala/drs/localizer/CommandLineParserSpec.scala index 7be30be8ac0..6658428e650 100644 --- a/cromwell-drs-localizer/src/test/scala/drs/localizer/CommandLineParserSpec.scala +++ b/cromwell-drs-localizer/src/test/scala/drs/localizer/CommandLineParserSpec.scala @@ -63,7 +63,9 @@ class CommandLineParserSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "successfully parse with three arguments and requester pays project" in { - val args = parser.parse(Array(drsObject, containerPath, requesterPaysProject, "-r", requesterPaysProject), CommandLineArguments()).get + val args = parser + .parse(Array(drsObject, containerPath, requesterPaysProject, "-r", requesterPaysProject), CommandLineArguments()) + .get args.drsObject.get shouldBe drsObject args.containerPath.get shouldBe containerPath @@ -74,7 +76,9 @@ class CommandLineParserSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "fail if requester pays argument and flag specify different projects" in { - parser.parse(Array(drsObject, containerPath, requesterPaysProject, "-r", "boom!"), CommandLineArguments()) shouldBe None + parser.parse(Array(drsObject, containerPath, requesterPaysProject, "-r", "boom!"), + CommandLineArguments() + ) shouldBe None } it should "successfully parse args with a manifest file" in { @@ -99,12 +103,18 @@ class CommandLineParserSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "successfully parse an explicit Google access token strategy invocation" in { - val args = parser.parse(Array( - "--access-token-strategy", "google", - drsObject, - containerPath, - "--requester-pays-project", requesterPaysProject - ), CommandLineArguments()).get + val args = parser + .parse(Array( + "--access-token-strategy", + "google", + drsObject, + containerPath, + "--requester-pays-project", + requesterPaysProject + ), + CommandLineArguments() + ) + .get args.drsObject.get shouldBe drsObject args.containerPath.get shouldBe containerPath @@ -115,19 +125,26 @@ class CommandLineParserSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "fail to parse an Azure invocation that specifies requester pays" in { - val args = parser.parse(Array( - "--access-token-strategy", AccessTokenStrategy.Azure, - drsObject, - containerPath, - "--requester-pays-project", requesterPaysProject), CommandLineArguments()) + val args = parser.parse( + Array("--access-token-strategy", + AccessTokenStrategy.Azure, + drsObject, + containerPath, + "--requester-pays-project", + requesterPaysProject + ), + CommandLineArguments() + ) args shouldBe None } it should "successfully parse an Azure invocation" in { - val args = parser.parse(Array( - "--access-token-strategy", AccessTokenStrategy.Azure, - drsObject, containerPath), CommandLineArguments()).get + val args = parser + .parse(Array("--access-token-strategy", AccessTokenStrategy.Azure, drsObject, containerPath), + CommandLineArguments() + ) + .get args.drsObject.get shouldBe drsObject args.containerPath.get shouldBe containerPath @@ -138,10 +155,18 @@ class CommandLineParserSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "successfully parse an Azure invocation with identity" in { - val args = parser.parse(Array( - "--access-token-strategy", AccessTokenStrategy.Azure, - "--identity-client-id", azureIdentityClientId, - drsObject, 
containerPath), CommandLineArguments()).get + val args = parser + .parse( + Array("--access-token-strategy", + AccessTokenStrategy.Azure, + "--identity-client-id", + azureIdentityClientId, + drsObject, + containerPath + ), + CommandLineArguments() + ) + .get args.drsObject.get shouldBe drsObject args.containerPath.get shouldBe containerPath @@ -152,7 +177,8 @@ class CommandLineParserSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } it should "fail to parse with an unrecognized access token strategy" in { - val args = parser.parse(Array("--access-token-strategy", "nebulous", drsObject, containerPath), CommandLineArguments()) + val args = + parser.parse(Array("--access-token-strategy", "nebulous", drsObject, containerPath), CommandLineArguments()) args shouldBe None } } diff --git a/cromwell-drs-localizer/src/test/scala/drs/localizer/DrsLocalizerMainSpec.scala b/cromwell-drs-localizer/src/test/scala/drs/localizer/DrsLocalizerMainSpec.scala index 52fa4c99330..1f3bfcfc623 100644 --- a/cromwell-drs-localizer/src/test/scala/drs/localizer/DrsLocalizerMainSpec.scala +++ b/cromwell-drs-localizer/src/test/scala/drs/localizer/DrsLocalizerMainSpec.scala @@ -12,33 +12,40 @@ import drs.localizer.downloaders._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class DrsLocalizerMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val fakeDownloadLocation = "/root/foo/foo-123.bam" val fakeRequesterPaysId = "fake-billing-project" - val fakeGoogleInput : IO[List[UnresolvedDrsUrl]] = IO(List( - UnresolvedDrsUrl(fakeDrsUrlWithGcsResolutionOnly, "/path/to/nowhere") - )) + val fakeGoogleInput: IO[List[UnresolvedDrsUrl]] = IO( + List( + UnresolvedDrsUrl(fakeDrsUrlWithGcsResolutionOnly, "/path/to/nowhere") + ) + ) - val fakeAccessInput: IO[List[UnresolvedDrsUrl]] = IO(List( - UnresolvedDrsUrl("https://my-fake-access-url.com", "/path/to/somewhereelse") - )) + val fakeAccessInput: IO[List[UnresolvedDrsUrl]] = IO( + List( + UnresolvedDrsUrl("https://my-fake-access-url.com", "/path/to/somewhereelse") + ) + ) - val fakeBulkGoogleInput: IO[List[UnresolvedDrsUrl]] = IO(List( - UnresolvedDrsUrl("drs://my-fake-google-url.com", "/path/to/nowhere"), - UnresolvedDrsUrl("drs://my-fake-google-url.com2", "/path/to/nowhere2"), - UnresolvedDrsUrl("drs://my-fake-google-url.com3", "/path/to/nowhere3"), - UnresolvedDrsUrl("drs://my-fake-google-url.com4", "/path/to/nowhere4") - )) + val fakeBulkGoogleInput: IO[List[UnresolvedDrsUrl]] = IO( + List( + UnresolvedDrsUrl("drs://my-fake-google-url.com", "/path/to/nowhere"), + UnresolvedDrsUrl("drs://my-fake-google-url.com2", "/path/to/nowhere2"), + UnresolvedDrsUrl("drs://my-fake-google-url.com3", "/path/to/nowhere3"), + UnresolvedDrsUrl("drs://my-fake-google-url.com4", "/path/to/nowhere4") + ) + ) - val fakeBulkAccessInput: IO[List[UnresolvedDrsUrl]] = IO(List( - UnresolvedDrsUrl("drs://my-fake-access-url.com", "/path/to/somewhereelse"), - UnresolvedDrsUrl("drs://my-fake-access-url2.com", "/path/to/somewhereelse2"), - UnresolvedDrsUrl("drs://my-fake-access-url3.com", "/path/to/somewhereelse3"), - UnresolvedDrsUrl("drs://my-fake-access-url4.com", "/path/to/somewhereelse4") - )) + val fakeBulkAccessInput: IO[List[UnresolvedDrsUrl]] = IO( + List( + UnresolvedDrsUrl("drs://my-fake-access-url.com", "/path/to/somewhereelse"), + UnresolvedDrsUrl("drs://my-fake-access-url2.com", "/path/to/somewhereelse2"), + UnresolvedDrsUrl("drs://my-fake-access-url3.com", "/path/to/somewhereelse3"), + 
UnresolvedDrsUrl("drs://my-fake-access-url4.com", "/path/to/somewhereelse4") + ) + ) behavior of "DrsLocalizerMain" @@ -52,34 +59,43 @@ class DrsLocalizerMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "tolerate no URLs being provided" in { val mockDownloadFactory = new DownloaderFactory { - override def buildGcsUriDownloader(gcsPath: String, serviceAccountJsonOption: Option[String], downloadLoc: String, requesterPaysProjectOption: Option[String]): Downloader = { + override def buildGcsUriDownloader(gcsPath: String, + serviceAccountJsonOption: Option[String], + downloadLoc: String, + requesterPaysProjectOption: Option[String] + ): Downloader = // This test path should never ask for the Google downloader throw new RuntimeException("test failure111") - } - override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = { + override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = // This test path should never ask for the Bulk downloader throw new RuntimeException("test failure111") - } } - val mockdrsLocalizer = new MockDrsLocalizerMain(IO(List()), mockDownloadFactory, FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) + val mockdrsLocalizer = + new MockDrsLocalizerMain(IO(List()), mockDownloadFactory, FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) val downloaders: List[Downloader] = mockdrsLocalizer.buildDownloaders().unsafeRunSync() downloaders.length shouldBe 0 } it should "build correct downloader(s) for a single google URL" in { val mockDownloadFactory = new DownloaderFactory { - override def buildGcsUriDownloader(gcsPath: String, serviceAccountJsonOption: Option[String], downloadLoc: String, requesterPaysProjectOption: Option[String]): Downloader = { + override def buildGcsUriDownloader(gcsPath: String, + serviceAccountJsonOption: Option[String], + downloadLoc: String, + requesterPaysProjectOption: Option[String] + ): Downloader = GcsUriDownloader(gcsPath, serviceAccountJsonOption, downloadLoc, requesterPaysProjectOption) - } - override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = { + override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = // This test path should never ask for the Bulk downloader throw new RuntimeException("test failure111") - } } - val mockdrsLocalizer = new MockDrsLocalizerMain(IO(List(fakeGoogleUrls.head._1)), mockDownloadFactory,FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) + val mockdrsLocalizer = new MockDrsLocalizerMain(IO(List(fakeGoogleUrls.head._1)), + mockDownloadFactory, + FakeAccessTokenStrategy, + Option(fakeRequesterPaysId) + ) val downloaders: List[Downloader] = mockdrsLocalizer.buildDownloaders().unsafeRunSync() downloaders.length shouldBe 1 @@ -92,17 +108,23 @@ class DrsLocalizerMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "build correct downloader(s) for a single access URL" in { val mockDownloadFactory = new DownloaderFactory { - override def buildGcsUriDownloader(gcsPath: String, serviceAccountJsonOption: Option[String], downloadLoc: String, requesterPaysProjectOption: Option[String]): Downloader = { + override def buildGcsUriDownloader(gcsPath: String, + serviceAccountJsonOption: Option[String], + downloadLoc: String, + requesterPaysProjectOption: Option[String] + ): Downloader = // This test path should never ask for the GCS downloader throw new RuntimeException("test failure") - } - override def 
buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = { + override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = BulkAccessUrlDownloader(urlsToDownload) - } } - val mockdrsLocalizer = new MockDrsLocalizerMain(IO(List(fakeAccessUrls.head._1)), mockDownloadFactory, FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) + val mockdrsLocalizer = new MockDrsLocalizerMain(IO(List(fakeAccessUrls.head._1)), + mockDownloadFactory, + FakeAccessTokenStrategy, + Option(fakeRequesterPaysId) + ) val downloaders: List[Downloader] = mockdrsLocalizer.buildDownloaders().unsafeRunSync() downloaders.length shouldBe 1 @@ -114,48 +136,64 @@ class DrsLocalizerMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "build correct downloader(s) for multiple google URLs" in { val mockDownloadFactory = new DownloaderFactory { - override def buildGcsUriDownloader(gcsPath: String, serviceAccountJsonOption: Option[String], downloadLoc: String, requesterPaysProjectOption: Option[String]): Downloader = { + override def buildGcsUriDownloader(gcsPath: String, + serviceAccountJsonOption: Option[String], + downloadLoc: String, + requesterPaysProjectOption: Option[String] + ): Downloader = GcsUriDownloader(gcsPath, serviceAccountJsonOption, downloadLoc, requesterPaysProjectOption) - } - override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = { + override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = // This test path should never ask for the GCS downloader throw new RuntimeException("test failure") - } } - val unresolvedUrls : List[UnresolvedDrsUrl] = fakeGoogleUrls.map(pair => pair._1).toList - val mockdrsLocalizer = new MockDrsLocalizerMain(IO(unresolvedUrls), mockDownloadFactory, FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) + val unresolvedUrls: List[UnresolvedDrsUrl] = fakeGoogleUrls.map(pair => pair._1).toList + val mockdrsLocalizer = new MockDrsLocalizerMain(IO(unresolvedUrls), + mockDownloadFactory, + FakeAccessTokenStrategy, + Option(fakeRequesterPaysId) + ) val downloaders: List[Downloader] = mockdrsLocalizer.buildDownloaders().unsafeRunSync() downloaders.length shouldBe unresolvedUrls.length - val countGoogleDownloaders = downloaders.count(downloader => downloader match { - case _: GcsUriDownloader => true - case _ => false - }) + val countGoogleDownloaders = downloaders.count(downloader => + downloader match { + case _: GcsUriDownloader => true + case _ => false + } + ) // We expect one GCS downloader for each GCS uri provided countGoogleDownloaders shouldBe downloaders.length } it should "build a single bulk downloader for multiple access URLs" in { val mockDownloadFactory = new DownloaderFactory { - override def buildGcsUriDownloader(gcsPath: String, serviceAccountJsonOption: Option[String], downloadLoc: String, requesterPaysProjectOption: Option[String]): Downloader = { + override def buildGcsUriDownloader(gcsPath: String, + serviceAccountJsonOption: Option[String], + downloadLoc: String, + requesterPaysProjectOption: Option[String] + ): Downloader = // This test path should never ask for the GCS downloader throw new RuntimeException("test failure") - } - override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = { + override def buildBulkAccessUrlDownloader(urlsToDownload: List[ResolvedDrsUrl]): Downloader = BulkAccessUrlDownloader(urlsToDownload) - } } val unresolvedUrls: List[UnresolvedDrsUrl] = 
fakeAccessUrls.map(pair => pair._1).toList - val mockdrsLocalizer = new MockDrsLocalizerMain(IO(unresolvedUrls), mockDownloadFactory, FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) + val mockdrsLocalizer = new MockDrsLocalizerMain(IO(unresolvedUrls), + mockDownloadFactory, + FakeAccessTokenStrategy, + Option(fakeRequesterPaysId) + ) val downloaders: List[Downloader] = mockdrsLocalizer.buildDownloaders().unsafeRunSync() downloaders.length shouldBe 1 - val countBulkDownloaders = downloaders.count(downloader => downloader match { - case _: BulkAccessUrlDownloader => true - case _ => false - }) + val countBulkDownloaders = downloaders.count(downloader => + downloader match { + case _: BulkAccessUrlDownloader => true + case _ => false + } + ) // We expect one total Bulk downloader for all access URIs to share countBulkDownloaders shouldBe 1 val expected = BulkAccessUrlDownloader( @@ -165,23 +203,32 @@ class DrsLocalizerMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat } it should "build 1 bulk downloader and 5 google downloaders for a mix of URLs" in { - val unresolvedUrls: List[UnresolvedDrsUrl] = fakeAccessUrls.map(pair => pair._1).toList ++ fakeGoogleUrls.map(pair => pair._1).toList - val mockdrsLocalizer = new MockDrsLocalizerMain(IO(unresolvedUrls), DrsLocalizerMain.defaultDownloaderFactory, FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) + val unresolvedUrls: List[UnresolvedDrsUrl] = + fakeAccessUrls.map(pair => pair._1).toList ++ fakeGoogleUrls.map(pair => pair._1).toList + val mockdrsLocalizer = new MockDrsLocalizerMain(IO(unresolvedUrls), + DrsLocalizerMain.defaultDownloaderFactory, + FakeAccessTokenStrategy, + Option(fakeRequesterPaysId) + ) val downloaders: List[Downloader] = mockdrsLocalizer.buildDownloaders().unsafeRunSync() downloaders.length shouldBe 6 - //we expect a single bulk downloader despite 5 access URLs being provided - val countBulkDownloaders = downloaders.count(downloader => downloader match { - case _: BulkAccessUrlDownloader => true - case _ => false - }) + // we expect a single bulk downloader despite 5 access URLs being provided + val countBulkDownloaders = downloaders.count(downloader => + downloader match { + case _: BulkAccessUrlDownloader => true + case _ => false + } + ) // We expect one GCS downloader for each GCS uri provided countBulkDownloaders shouldBe 1 - val countGoogleDownloaders = downloaders.count(downloader => downloader match { - case _: GcsUriDownloader => true - case _ => false - }) + val countGoogleDownloaders = downloaders.count(downloader => + downloader match { + case _: GcsUriDownloader => true + case _ => false + } + ) // We expect one GCS downloader for each GCS uri provided countBulkDownloaders shouldBe 1 countGoogleDownloaders shouldBe 5 @@ -189,24 +236,34 @@ class DrsLocalizerMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "accept arguments and run successfully without Requester Pays ID" in { val unresolved = fakeGoogleUrls.head._1 - val mockDrsLocalizer = new MockDrsLocalizerMain(IO(List(unresolved)), DrsLocalizerMain.defaultDownloaderFactory, FakeAccessTokenStrategy, None) + val mockDrsLocalizer = new MockDrsLocalizerMain(IO(List(unresolved)), + DrsLocalizerMain.defaultDownloaderFactory, + FakeAccessTokenStrategy, + None + ) val expected = GcsUriDownloader( gcsUrl = fakeGoogleUrls.get(unresolved).get.drsResponse.gsUri.get, serviceAccountJson = None, downloadLoc = unresolved.downloadDestinationPath, - requesterPaysProjectIdOption = None) + requesterPaysProjectIdOption = None + ) val 
downloader: Downloader = mockDrsLocalizer.buildDownloaders().unsafeRunSync().head downloader shouldBe expected } it should "run successfully with all 3 arguments" in { val unresolved = fakeGoogleUrls.head._1 - val mockDrsLocalizer = new MockDrsLocalizerMain(IO(List(unresolved)), DrsLocalizerMain.defaultDownloaderFactory, FakeAccessTokenStrategy, Option(fakeRequesterPaysId)) + val mockDrsLocalizer = new MockDrsLocalizerMain(IO(List(unresolved)), + DrsLocalizerMain.defaultDownloaderFactory, + FakeAccessTokenStrategy, + Option(fakeRequesterPaysId) + ) val expected = GcsUriDownloader( gcsUrl = fakeGoogleUrls.get(unresolved).get.drsResponse.gsUri.get, serviceAccountJson = None, downloadLoc = unresolved.downloadDestinationPath, - requesterPaysProjectIdOption = Option(fakeRequesterPaysId)) + requesterPaysProjectIdOption = Option(fakeRequesterPaysId) + ) val downloader: Downloader = mockDrsLocalizer.buildDownloaders().unsafeRunSync().head downloader shouldBe expected } @@ -214,10 +271,18 @@ class DrsLocalizerMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "successfully identify uri types, preferring access" in { val exampleAccessResponse = DrsResolverResponse(accessUrl = Option(AccessUrl("https://something.com", FakeHashes))) val exampleGoogleResponse = DrsResolverResponse(gsUri = Option("gs://something")) - val exampleMixedResponse = DrsResolverResponse(accessUrl = Option(AccessUrl("https://something.com", FakeHashes)), gsUri = Option("gs://something")) - DrsLocalizerMain.toValidatedUriType(exampleAccessResponse.accessUrl, exampleAccessResponse.gsUri) shouldBe URIType.ACCESS - DrsLocalizerMain.toValidatedUriType(exampleGoogleResponse.accessUrl, exampleGoogleResponse.gsUri) shouldBe URIType.GCS - DrsLocalizerMain.toValidatedUriType(exampleMixedResponse.accessUrl, exampleMixedResponse.gsUri) shouldBe URIType.ACCESS + val exampleMixedResponse = DrsResolverResponse(accessUrl = Option(AccessUrl("https://something.com", FakeHashes)), + gsUri = Option("gs://something") + ) + DrsLocalizerMain.toValidatedUriType(exampleAccessResponse.accessUrl, + exampleAccessResponse.gsUri + ) shouldBe URIType.ACCESS + DrsLocalizerMain.toValidatedUriType(exampleGoogleResponse.accessUrl, + exampleGoogleResponse.gsUri + ) shouldBe URIType.GCS + DrsLocalizerMain.toValidatedUriType(exampleMixedResponse.accessUrl, + exampleMixedResponse.gsUri + ) shouldBe URIType.ACCESS } it should "throw an exception if the DRS Resolver response is invalid" in { @@ -246,48 +311,87 @@ object MockDrsPaths { val fakeDrsUrlWithoutAnyResolution = "drs://foo/bar/no-gcs-path" val fakeGoogleUrls: Map[UnresolvedDrsUrl, ResolvedDrsUrl] = Map( - (UnresolvedDrsUrl("drs://abc/foo-123/google/0", "/path/to/google/local0"), ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri0")), "/path/to/google/local0", URIType.GCS)), - (UnresolvedDrsUrl("drs://abc/foo-123/google/1", "/path/to/google/local1"), ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri1")), "/path/to/google/local1", URIType.GCS)), - (UnresolvedDrsUrl("drs://abc/foo-123/google/2", "/path/to/google/local2"), ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri2")), "/path/to/google/local2", URIType.GCS)), - (UnresolvedDrsUrl("drs://abc/foo-123/google/3", "/path/to/google/local3"), ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri3")), "/path/to/google/local3", URIType.GCS)), - (UnresolvedDrsUrl("drs://abc/foo-123/google/4", "/path/to/google/local4"), ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri4")), 
"/path/to/google/local4", URIType.GCS)) + (UnresolvedDrsUrl("drs://abc/foo-123/google/0", "/path/to/google/local0"), + ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri0")), "/path/to/google/local0", URIType.GCS) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/google/1", "/path/to/google/local1"), + ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri1")), "/path/to/google/local1", URIType.GCS) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/google/2", "/path/to/google/local2"), + ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri2")), "/path/to/google/local2", URIType.GCS) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/google/3", "/path/to/google/local3"), + ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri3")), "/path/to/google/local3", URIType.GCS) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/google/4", "/path/to/google/local4"), + ResolvedDrsUrl(DrsResolverResponse(gsUri = Option("gs://some/uri4")), "/path/to/google/local4", URIType.GCS) + ) ) val fakeAccessUrls: Map[UnresolvedDrsUrl, ResolvedDrsUrl] = Map( - (UnresolvedDrsUrl("drs://abc/foo-123/access/0", "/path/to/access/local0"), ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/0", FakeHashes))), "/path/to/access/local0", URIType.ACCESS)), - (UnresolvedDrsUrl("drs://abc/foo-123/access/1", "/path/to/access/local1"), ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/1", FakeHashes))), "/path/to/access/local1", URIType.ACCESS)), - (UnresolvedDrsUrl("drs://abc/foo-123/access/2", "/path/to/access/local2"), ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/2", FakeHashes))), "/path/to/access/local2", URIType.ACCESS)), - (UnresolvedDrsUrl("drs://abc/foo-123/access/3", "/path/to/access/local3"), ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/3", FakeHashes))), "/path/to/access/local3", URIType.ACCESS)), - (UnresolvedDrsUrl("drs://abc/foo-123/access/4", "/path/to/access/local4"), ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/4", FakeHashes))), "/path/to/access/local4", URIType.ACCESS)) + (UnresolvedDrsUrl("drs://abc/foo-123/access/0", "/path/to/access/local0"), + ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/0", FakeHashes))), + "/path/to/access/local0", + URIType.ACCESS + ) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/access/1", "/path/to/access/local1"), + ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/1", FakeHashes))), + "/path/to/access/local1", + URIType.ACCESS + ) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/access/2", "/path/to/access/local2"), + ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/2", FakeHashes))), + "/path/to/access/local2", + URIType.ACCESS + ) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/access/3", "/path/to/access/local3"), + ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/3", FakeHashes))), + "/path/to/access/local3", + URIType.ACCESS + ) + ), + (UnresolvedDrsUrl("drs://abc/foo-123/access/4", "/path/to/access/local4"), + ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://abc/foo-123/access/4", FakeHashes))), + "/path/to/access/local4", + URIType.ACCESS + ) + ) ) } - class MockDrsLocalizerMain(toResolveAndDownload: IO[List[UnresolvedDrsUrl]], downloaderFactory: 
DownloaderFactory, drsCredentials: DrsCredentials, requesterPaysProjectIdOption: Option[String] - ) +) extends DrsLocalizerMain(toResolveAndDownload, + downloaderFactory, + FakeAccessTokenStrategy, + requesterPaysProjectIdOption + ) { - extends DrsLocalizerMain(toResolveAndDownload, downloaderFactory, FakeAccessTokenStrategy, requesterPaysProjectIdOption) { - - override def getDrsPathResolver: IO[DrsLocalizerDrsPathResolver] = { + override def getDrsPathResolver: IO[DrsLocalizerDrsPathResolver] = IO { new MockDrsLocalizerDrsPathResolver(cloud.nio.impl.drs.MockDrsPaths.mockDrsConfig) } - } - override def resolveSingleUrl(resolverObject: DrsLocalizerDrsPathResolver, drsUrlToResolve: UnresolvedDrsUrl): IO[ResolvedDrsUrl] = { + override def resolveSingleUrl(resolverObject: DrsLocalizerDrsPathResolver, + drsUrlToResolve: UnresolvedDrsUrl + ): IO[ResolvedDrsUrl] = IO { if (!fakeAccessUrls.contains(drsUrlToResolve) && !fakeGoogleUrls.contains(drsUrlToResolve)) { throw new RuntimeException("Unexpected URI during testing") } - fakeAccessUrls.getOrElse(drsUrlToResolve, fakeGoogleUrls.getOrElse(drsUrlToResolve, ResolvedDrsUrl(DrsResolverResponse(),"/12/3/", URIType.UNKNOWN))) + fakeAccessUrls.getOrElse( + drsUrlToResolve, + fakeGoogleUrls.getOrElse(drsUrlToResolve, ResolvedDrsUrl(DrsResolverResponse(), "/12/3/", URIType.UNKNOWN)) + ) } - } } -class MockDrsLocalizerDrsPathResolver(drsConfig: DrsConfig) extends - DrsLocalizerDrsPathResolver(drsConfig, FakeAccessTokenStrategy) { +class MockDrsLocalizerDrsPathResolver(drsConfig: DrsConfig) + extends DrsLocalizerDrsPathResolver(drsConfig, FakeAccessTokenStrategy) { override def resolveDrs(drsPath: String, fields: NonEmptyList[DrsResolverField.Value]): IO[DrsResolverResponse] = { @@ -298,17 +402,15 @@ class MockDrsLocalizerDrsPathResolver(drsConfig: DrsConfig) extends IO.pure(drsPath) map { case MockDrsPaths.fakeDrsUrlWithGcsResolutionOnly => - drsResolverResponse.copy( - gsUri = Option("gs://abc/foo-123/abc123")) + drsResolverResponse.copy(gsUri = Option("gs://abc/foo-123/abc123")) case MockDrsPaths.fakeDrsUrlWithoutAnyResolution => drsResolverResponse case MockDrsPaths.fakeDrsUrlWithAccessUrlResolutionOnly => - drsResolverResponse.copy( - accessUrl = Option(AccessUrl(url = "http://abc/def/ghi.bam", headers = None))) + drsResolverResponse.copy(accessUrl = Option(AccessUrl(url = "http://abc/def/ghi.bam", headers = None))) case MockDrsPaths.fakeDrsUrlWithAccessUrlAndGcsResolution => - drsResolverResponse.copy( - accessUrl = Option(AccessUrl(url = "http://abc/def/ghi.bam", headers = None)), - gsUri = Option("gs://some/uri")) + drsResolverResponse.copy(accessUrl = Option(AccessUrl(url = "http://abc/def/ghi.bam", headers = None)), + gsUri = Option("gs://some/uri") + ) case e => throw new RuntimeException(s"Unexpected exception in DRS localization test code: $e") } } diff --git a/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala b/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala index 0e0714febb8..4255b407b1a 100644 --- a/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala +++ b/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/BulkAccessUrlDownloaderSpec.scala @@ -12,10 +12,19 @@ import org.scalatest.matchers.should.Matchers import java.nio.file.Path class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - val ex1 = ResolvedDrsUrl(DrsResolverResponse(accessUrl = 
Option(AccessUrl("https://my.fake/url123", None))), "path/to/local/download/dest", URIType.ACCESS) - val ex2 = ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://my.fake/url1234", None))), "path/to/local/download/dest2", URIType.ACCESS) - val ex3 = ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://my.fake/url1235", None))), "path/to/local/download/dest3", URIType.ACCESS) - val emptyList : List[ResolvedDrsUrl] = List() + val ex1 = ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://my.fake/url123", None))), + "path/to/local/download/dest", + URIType.ACCESS + ) + val ex2 = ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://my.fake/url1234", None))), + "path/to/local/download/dest2", + URIType.ACCESS + ) + val ex3 = ResolvedDrsUrl(DrsResolverResponse(accessUrl = Option(AccessUrl("https://my.fake/url1235", None))), + "path/to/local/download/dest3", + URIType.ACCESS + ) + val emptyList: List[ResolvedDrsUrl] = List() val oneElement: List[ResolvedDrsUrl] = List(ex1) val threeElements: List[ResolvedDrsUrl] = List(ex1, ex2, ex3) @@ -35,7 +44,9 @@ class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec w val filepath: IO[Path] = downloader.generateJsonManifest(threeElements) val source = scala.io.Source.fromFile(filepath.unsafeRunSync().toString) - val lines = try source.mkString finally source.close() + val lines = + try source.mkString + finally source.close() lines shouldBe expected } @@ -44,7 +55,9 @@ class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec w val downloader = BulkAccessUrlDownloader(emptyList) val filepath: IO[Path] = downloader.generateJsonManifest(emptyList) val source = scala.io.Source.fromFile(filepath.unsafeRunSync().toString) - val lines = try source.mkString finally source.close() + val lines = + try source.mkString + finally source.close() lines shouldBe expected } @@ -58,7 +71,9 @@ class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec w val downloader = BulkAccessUrlDownloader(oneElement) val filepath: IO[Path] = downloader.generateJsonManifest(oneElement) val source = scala.io.Source.fromFile(filepath.unsafeRunSync().toString) - val lines = try source.mkString finally source.close() + val lines = + try source.mkString + finally source.close() lines shouldBe expected } @@ -82,15 +97,30 @@ class BulkAccessUrlDownloaderSpec extends AnyFlatSpec with CromwellTimeoutSpec w // Unrecognized because of non-zero exit code without an HTTP status. (1, " foobar ", UnrecognizedRetryableDownloadFailure(ExitCode(1))), // Unrecognized because of zero exit status with stderr that does not look like a checksum failure. - (0, """ERROR:getm.cli possibly some words "status_code": 503 words""", UnrecognizedRetryableDownloadFailure(ExitCode(0))), + (0, + """ERROR:getm.cli possibly some words "status_code": 503 words""", + UnrecognizedRetryableDownloadFailure(ExitCode(0)) + ), // Recognized because of non-zero exit status and an HTTP status. - (1, """ERROR:getm.cli possibly some words "status_code": 503 words""", RecognizedRetryableDownloadFailure(ExitCode(1))), + (1, + """ERROR:getm.cli possibly some words "status_code": 503 words""", + RecognizedRetryableDownloadFailure(ExitCode(1)) + ), // Recognized because of non-zero exit status and an HTTP status. 
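    // (This row's status is 408 Request Timeout; like the 503 rows above it is
    // retryable. The 404 rows below are fatal when paired with a non-zero exit
    // code and merely unrecognized-retryable when paired with a zero exit code.)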
- (1, """ERROR:getm.cli possibly some words "status_code": 408 more words""", RecognizedRetryableDownloadFailure(ExitCode(1))), + (1, + """ERROR:getm.cli possibly some words "status_code": 408 more words""", + RecognizedRetryableDownloadFailure(ExitCode(1)) + ), // Recognized and non-retryable because of non-zero exit status and 404 HTTP status. - (1, """ERROR:getm.cli possibly some words "status_code": 404 even more words""", FatalDownloadFailure(ExitCode(1))), + (1, + """ERROR:getm.cli possibly some words "status_code": 404 even more words""", + FatalDownloadFailure(ExitCode(1)) + ), // Unrecognized because of zero exit status and 404 HTTP status. - (0, """ERROR:getm.cli possibly some words "status_code": 404 even more words""", UnrecognizedRetryableDownloadFailure(ExitCode(0))), + (0, + """ERROR:getm.cli possibly some words "status_code": 404 even more words""", + UnrecognizedRetryableDownloadFailure(ExitCode(0)) + ) ) val bulkDownloader = BulkAccessUrlDownloader(null) diff --git a/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/GetmChecksumSpec.scala b/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/GetmChecksumSpec.scala index 69c063ff616..a8ac76fa6c0 100644 --- a/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/GetmChecksumSpec.scala +++ b/cromwell-drs-localizer/src/test/scala/drs/localizer/downloaders/GetmChecksumSpec.scala @@ -17,9 +17,15 @@ class GetmChecksumSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher (Option(Map("something weird" -> "012345", "md5" -> "abcdefg")), "https://whatever", Md5("abcdefg")), (Option(Map("something weird" -> "abcdefg", "crc32c" -> "012345")), "https://whatever", Crc32c("012345")), (Option(Map("etag" -> "abcdefg", "crc32c" -> "012345")), "https://whatever", Crc32c("012345")), - (Option(Map("etag" -> "abcdefg", "something weird" -> "012345")), "https://whatever", Unsupported("etag, something weird")), - (Option(Map("etag" -> "abcdefg", "something weird" -> "012345")), "https://whatever.s3.amazonaws.com/foo", AwsEtag("abcdefg")), - (None, "https://whatever.s3.amazonaws.com/foo", Null), + (Option(Map("etag" -> "abcdefg", "something weird" -> "012345")), + "https://whatever", + Unsupported("etag, something weird") + ), + (Option(Map("etag" -> "abcdefg", "something weird" -> "012345")), + "https://whatever.s3.amazonaws.com/foo", + AwsEtag("abcdefg") + ), + (None, "https://whatever.s3.amazonaws.com/foo", Null) ) forAll(results) { (hashes, url, expected) => @@ -31,14 +37,23 @@ class GetmChecksumSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val results = Table( ("description", "algorithm", "expected"), ("md5 hex", Md5("abcdef"), "--checksum-algorithm 'md5' --checksum abcdef".validNel), - ("md5 base64", Md5("cR84lXY1y17c3q7/7riLEA=="), "Invalid checksum value, expected hex but got: cR84lXY1y17c3q7/7riLEA==".invalidNel), - ("md5 gibberish", Md5("what is this???"), "Invalid checksum value, expected hex but got: what is this???".invalidNel), + ("md5 base64", + Md5("cR84lXY1y17c3q7/7riLEA=="), + "Invalid checksum value, expected hex but got: cR84lXY1y17c3q7/7riLEA==".invalidNel + ), + ("md5 gibberish", + Md5("what is this???"), + "Invalid checksum value, expected hex but got: what is this???".invalidNel + ), ("crc32c", Crc32c("012345"), "--checksum-algorithm 'gs_crc32c' --checksum ASNF".validNel), ("crc32c gibberish", Crc32c("????"), "Invalid checksum value, expected hex but got: ????".invalidNel), ("AWS ETag", AwsEtag("012345"), "--checksum-algorithm 's3_etag' --checksum 
012345".validNel), // Escape checksum values constructed from unvalidated data returned by DRS servers. - ("Unsupported", Unsupported("Robert'); DROP TABLE Students;\n --\\"), raw"--checksum-algorithm 'null' --checksum Robert\'\)\;\ DROP\ TABLE\ Students\;\ --\\".validNel), - ("Null", Null, "--checksum-algorithm 'null' --checksum null".validNel), + ("Unsupported", + Unsupported("Robert'); DROP TABLE Students;\n --\\"), + raw"--checksum-algorithm 'null' --checksum Robert\'\)\;\ DROP\ TABLE\ Students\;\ --\\".validNel + ), + ("Null", Null, "--checksum-algorithm 'null' --checksum null".validNel) ) forAll(results) { (description, algorithm, expected) => @@ -55,7 +70,7 @@ class GetmChecksumSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher (" ", "Invalid checksum value, expected hex but got: ".invalidNel), ("myfavoritestring", "Invalid checksum value, expected hex but got: myfavoritestring".invalidNel), (" AbC123 ", "AbC123".validNel), - ("456", "456".validNel), + ("456", "456".validNel) ) forAll(results) { (testString, expected) => diff --git a/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala index b2a088e681b..18df2ff5244 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/CromwellClient.scala @@ -20,10 +20,10 @@ import scala.concurrent.ExecutionContext class CromwellClient(val cromwellUrl: URL, val apiVersion: String, - val defaultCredentials: Option[HttpCredentials]=None) - (implicit actorSystem: ActorSystem, materializer: ActorMaterializer) { + val defaultCredentials: Option[HttpCredentials] = None +)(implicit actorSystem: ActorSystem, materializer: ActorMaterializer) { - lazy val defaultAuthorization: Option[Authorization] = defaultCredentials.map { Authorization(_) } + lazy val defaultAuthorization: Option[Authorization] = defaultCredentials.map(Authorization(_)) lazy val defaultHeaders: List[HttpHeader] = defaultAuthorization.toList lazy val engineEndpoint = s"$cromwellUrl/engine/$apiVersion" @@ -33,7 +33,7 @@ class CromwellClient(val cromwellUrl: URL, lazy val submitEndpoint = workflowsEndpoint lazy val batchSubmitEndpoint = s"$submitEndpoint/batch" - def describeEndpoint= s"$womtoolEndpoint/describe" + def describeEndpoint = s"$womtoolEndpoint/describe" def queryEndpoint(args: List[(String, String)]): Uri = { val base = s"$workflowsEndpoint/query" @@ -49,11 +49,20 @@ class CromwellClient(val cromwellUrl: URL, def abortEndpoint(workflowId: WorkflowId): Uri = workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "abort") def statusEndpoint(workflowId: WorkflowId): Uri = workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "status") - def metadataEndpoint(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None): Uri = workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "metadata", args) - def outputsEndpoint(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None): Uri = workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "outputs", args) + def metadataEndpoint(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None): Uri = + workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "metadata", args) + def outputsEndpoint(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None): Uri = + workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "outputs", args) def labelsEndpoint(workflowId: WorkflowId): Uri = 
workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "labels") - def logsEndpoint(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None): Uri = workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "logs", args) - def diffEndpoint(workflowA: WorkflowId, callA: String, indexA: ShardIndex, workflowB: WorkflowId, callB: String, indexB: ShardIndex): String = { + def logsEndpoint(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None): Uri = + workflowSpecificGetEndpoint(workflowsEndpoint, workflowId, "logs", args) + def diffEndpoint(workflowA: WorkflowId, + callA: String, + indexA: ShardIndex, + workflowB: WorkflowId, + callB: String, + indexB: ShardIndex + ): String = { def shardParam(aOrB: String, s: ShardIndex) = s.index.map(i => s"&index$aOrB=$i.toString").getOrElse("") s"$workflowsEndpoint/callcaching/diff?workflowA=$workflowA&callA=$callA&workflowB=$workflowB&callB=$callB${shardParam("A", indexA)}${shardParam("B", indexB)}" } @@ -69,40 +78,48 @@ class CromwellClient(val cromwellUrl: URL, import model.WorkflowDescriptionJsonSupport._ import model.CromwellQueryResultJsonSupport._ - def submit(workflow: WorkflowSubmission) - (implicit ec: ExecutionContext): FailureResponseOrT[SubmittedWorkflow] = { + def submit(workflow: WorkflowSubmission)(implicit ec: ExecutionContext): FailureResponseOrT[SubmittedWorkflow] = { val requestEntity = requestEntityForSubmit(workflow) - makeRequest[CromwellStatus](HttpRequest(HttpMethods.POST, submitEndpoint, List.empty[HttpHeader], requestEntity)) map { status => + makeRequest[CromwellStatus]( + HttpRequest(HttpMethods.POST, submitEndpoint, List.empty[HttpHeader], requestEntity) + ) map { status => SubmittedWorkflow(WorkflowId.fromString(status.id), cromwellUrl, workflow) } } - def describe(workflow: WorkflowDescribeRequest) - (implicit ec: ExecutionContext): FailureResponseOrT[WaasDescription] = { + def describe( + workflow: WorkflowDescribeRequest + )(implicit ec: ExecutionContext): FailureResponseOrT[WaasDescription] = { val requestEntity = requestEntityForDescribe(workflow) makeRequest[WaasDescription](HttpRequest(HttpMethods.POST, describeEndpoint, List.empty[HttpHeader], requestEntity)) } - def submitBatch(workflow: WorkflowBatchSubmission) - (implicit ec: ExecutionContext): FailureResponseOrT[List[SubmittedWorkflow]] = { + def submitBatch( + workflow: WorkflowBatchSubmission + )(implicit ec: ExecutionContext): FailureResponseOrT[List[SubmittedWorkflow]] = { import DefaultJsonProtocol._ val requestEntity = requestEntityForSubmit(workflow) // Make a set of submissions that represent the batch (so we can zip with the results later): - val submissionSet = workflow.inputsBatch.map(inputs => WorkflowSingleSubmission( - workflowSource = workflow.workflowSource, - workflowUrl = workflow.workflowUrl, - workflowRoot = workflow.workflowRoot, - workflowType = workflow.workflowType, - workflowTypeVersion = workflow.workflowTypeVersion, - inputsJson = Option(inputs), - options = workflow.options, - labels = workflow.labels, - zippedImports = workflow.zippedImports)) - - makeRequest[List[CromwellStatus]](HttpRequest(HttpMethods.POST, batchSubmitEndpoint, List.empty[HttpHeader], requestEntity)) map { statuses => + val submissionSet = workflow.inputsBatch.map(inputs => + WorkflowSingleSubmission( + workflowSource = workflow.workflowSource, + workflowUrl = workflow.workflowUrl, + workflowRoot = workflow.workflowRoot, + workflowType = workflow.workflowType, + workflowTypeVersion = workflow.workflowTypeVersion, + inputsJson = 
Option(inputs), + options = workflow.options, + labels = workflow.labels, + zippedImports = workflow.zippedImports + ) + ) + + makeRequest[List[CromwellStatus]]( + HttpRequest(HttpMethods.POST, batchSubmitEndpoint, List.empty[HttpHeader], requestEntity) + ) map { statuses => val zipped = submissionSet.zip(statuses) zipped map { case (submission, status) => SubmittedWorkflow(WorkflowId.fromString(status.id), cromwellUrl, submission) @@ -110,48 +127,42 @@ class CromwellClient(val cromwellUrl: URL, } } - def abort(workflowId: WorkflowId)(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowStatus] = { + def abort(workflowId: WorkflowId)(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowStatus] = simpleRequest[CromwellStatus](uri = abortEndpoint(workflowId), method = HttpMethods.POST) map WorkflowStatus.apply - } - def status(workflowId: WorkflowId)(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowStatus] = { + def status(workflowId: WorkflowId)(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowStatus] = simpleRequest[CromwellStatus](statusEndpoint(workflowId)) map WorkflowStatus.apply - } def metadata(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None, headers: List[HttpHeader] = defaultHeaders - )(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowMetadata] = { - simpleRequest[String](metadataEndpoint(workflowId, args), headers=headers) map WorkflowMetadata - } + )(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowMetadata] = + simpleRequest[String](metadataEndpoint(workflowId, args), headers = headers) map WorkflowMetadata - def outputs(workflowId: WorkflowId, - args: Option[Map[String, List[String]]] = None)(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowOutputs] = { + def outputs(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None)(implicit + ec: ExecutionContext + ): FailureResponseOrT[WorkflowOutputs] = simpleRequest[WorkflowOutputs](outputsEndpoint(workflowId, args)) - } - def labels(workflowId: WorkflowId, - headers: List[HttpHeader] = defaultHeaders) - (implicit ec: ExecutionContext): FailureResponseOrT[WorkflowLabels] = { + def labels(workflowId: WorkflowId, headers: List[HttpHeader] = defaultHeaders)(implicit + ec: ExecutionContext + ): FailureResponseOrT[WorkflowLabels] = simpleRequest[WorkflowLabels](labelsEndpoint(workflowId), headers = headers) - } - def addLabels(workflowId: WorkflowId, - newLabels: List[Label], - headers: List[HttpHeader] = defaultHeaders) - (implicit ec: ExecutionContext): FailureResponseOrT[WorkflowLabels] = { + def addLabels(workflowId: WorkflowId, newLabels: List[Label], headers: List[HttpHeader] = defaultHeaders)(implicit + ec: ExecutionContext + ): FailureResponseOrT[WorkflowLabels] = { val requestEntity = requestEntityForAddLabels(newLabels) makeRequest[WorkflowLabels](HttpRequest(HttpMethods.PATCH, labelsEndpoint(workflowId), headers, requestEntity)) } - def logs(workflowId: WorkflowId, - args: Option[Map[String, List[String]]] = None)(implicit ec: ExecutionContext): FailureResponseOrT[WorkflowMetadata] = { + def logs(workflowId: WorkflowId, args: Option[Map[String, List[String]]] = None)(implicit + ec: ExecutionContext + ): FailureResponseOrT[WorkflowMetadata] = simpleRequest[String](logsEndpoint(workflowId, args)) map WorkflowMetadata - } - def query(workflowId: WorkflowId)(implicit ec: ExecutionContext): FailureResponseOrT[CromwellQueryResults] = { + def query(workflowId: WorkflowId)(implicit ec: ExecutionContext): 
FailureResponseOrT[CromwellQueryResults] = simpleRequest[CromwellQueryResults](queryEndpoint(List(("id", workflowId.id.toString)))) - } def callCacheDiff(workflowA: WorkflowId, callA: String, @@ -159,27 +170,26 @@ class CromwellClient(val cromwellUrl: URL, workflowB: WorkflowId, callB: String, shardIndexB: ShardIndex - )(implicit ec: ExecutionContext): FailureResponseOrT[CallCacheDiff] = { + )(implicit ec: ExecutionContext): FailureResponseOrT[CallCacheDiff] = simpleRequest[CallCacheDiff](diffEndpoint(workflowA, callA, shardIndexA, workflowB, callB, shardIndexB)) - } - def backends(implicit ec: ExecutionContext): FailureResponseOrT[CromwellBackends] = { + def backends(implicit ec: ExecutionContext): FailureResponseOrT[CromwellBackends] = simpleRequest[CromwellBackends](backendsEndpoint) - } - def version(implicit ec: ExecutionContext): FailureResponseOrT[CromwellVersion] = { + def version(implicit ec: ExecutionContext): FailureResponseOrT[CromwellVersion] = simpleRequest[CromwellVersion](versionEndpoint) - } - private [api] def executeRequest(request: HttpRequest, headers: List[HttpHeader]) = Http().singleRequest(request.withHeaders(headers)) + private[api] def executeRequest(request: HttpRequest, headers: List[HttpHeader]) = + Http().singleRequest(request.withHeaders(headers)) /** * * @tparam A The type of response expected. Must be supported by an implicit unmarshaller from ResponseEntity. */ - private def makeRequest[A](request: HttpRequest, headers: List[HttpHeader] = defaultHeaders) - (implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): - FailureResponseOrT[A] = { + private def makeRequest[A](request: HttpRequest, headers: List[HttpHeader] = defaultHeaders)(implicit + um: Unmarshaller[ResponseEntity, A], + ec: ExecutionContext + ): FailureResponseOrT[A] = { implicit def cs = IO.contextShift(ec) for { response <- executeRequest(request, headers).asFailureResponseOrT @@ -191,11 +201,9 @@ class CromwellClient(val cromwellUrl: URL, private def simpleRequest[A](uri: Uri, method: HttpMethod = HttpMethods.GET, - headers: List[HttpHeader] = defaultHeaders) - (implicit um: Unmarshaller[ResponseEntity, A], - ec: ExecutionContext): FailureResponseOrT[A] = { + headers: List[HttpHeader] = defaultHeaders + )(implicit um: Unmarshaller[ResponseEntity, A], ec: ExecutionContext): FailureResponseOrT[A] = makeRequest[A](HttpRequest(uri = uri, method = method), headers) - } private val decoders = Map( HttpEncodings.gzip -> Gzip, @@ -203,15 +211,14 @@ class CromwellClient(val cromwellUrl: URL, HttpEncodings.identity -> NoCoding ) - private def decodeResponse(response: HttpResponse): IO[HttpResponse] = { + private def decodeResponse(response: HttpResponse): IO[HttpResponse] = decoders.get(response.encoding) map { decoder => IO(decoder.decodeMessage(response)) } getOrElse IO.raiseError(UnsuccessfulRequestException(s"No decoder for ${response.encoding}", response)) - } } object CromwellClient { - final implicit class EnhancedHttpResponse(val response: HttpResponse) extends AnyVal { + implicit final class EnhancedHttpResponse(val response: HttpResponse) extends AnyVal { def toEntity: IO[Unmarshal[ResponseEntity]] = response match { case HttpResponse(_: StatusCodes.Success, _, entity, _) => IO(Unmarshal(entity)) @@ -233,18 +240,17 @@ object CromwellClient { "workflowInputs" -> workflowSubmission.inputsJson, "workflowOptions" -> workflowSubmission.options, "labels" -> workflowSubmission.labels.map(_.toJson.toString) - ) collect { - case (name, Some(source: String)) => - 
Multipart.FormData.BodyPart(name, HttpEntity(MediaTypes.`application/json`, ByteString(source))) + ) collect { case (name, Some(source: String)) => + Multipart.FormData.BodyPart(name, HttpEntity(MediaTypes.`application/json`, ByteString(source))) } val zipBodyParts = Map( "workflowDependencies" -> workflowSubmission.zippedImports - ) collect { - case (name, Some(file)) => Multipart.FormData.BodyPart.fromPath(name, MediaTypes.`application/zip`, file.path) + ) collect { case (name, Some(file)) => + Multipart.FormData.BodyPart.fromPath(name, MediaTypes.`application/zip`, file.path) } - val multipartFormData = Multipart.FormData((sourceBodyParts ++ zipBodyParts).toSeq : _*) + val multipartFormData = Multipart.FormData((sourceBodyParts ++ zipBodyParts).toSeq: _*) multipartFormData.toEntity() } @@ -256,12 +262,11 @@ object CromwellClient { "workflowType" -> describeRequest.workflowType, "workflowTypeVersion" -> describeRequest.workflowTypeVersion, "workflowInputs" -> describeRequest.inputsJson - ) collect { - case (name, Some(source: String)) => - Multipart.FormData.BodyPart(name, HttpEntity(MediaTypes.`application/json`, ByteString(source))) + ) collect { case (name, Some(source: String)) => + Multipart.FormData.BodyPart(name, HttpEntity(MediaTypes.`application/json`, ByteString(source))) } - val multipartFormData = Multipart.FormData(sourceBodyParts.toSeq : _*) + val multipartFormData = Multipart.FormData(sourceBodyParts.toSeq: _*) multipartFormData.toEntity() } @@ -273,12 +278,16 @@ object CromwellClient { /** * @param args an optional map of HTTP arguments which will be added to the URL */ - private [api] def workflowSpecificGetEndpoint(submitEndpoint: String, workflowId: WorkflowId, endpoint: String, args: Option[Map[String, List[String]]] = None) = { + private[api] def workflowSpecificGetEndpoint(submitEndpoint: String, + workflowId: WorkflowId, + endpoint: String, + args: Option[Map[String, List[String]]] = None + ) = { val url = s"$submitEndpoint/$workflowId/$endpoint" val queryBuilder = Uri.Query.newBuilder - args.getOrElse(Map.empty).foreach({ - case (key, l) => l.foreach(v => queryBuilder.+=(key -> v)) - }) + args.getOrElse(Map.empty).foreach { case (key, l) => + l.foreach(v => queryBuilder.+=(key -> v)) + } val queryResult = queryBuilder.result() Uri(url).withQuery(queryResult) } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala index fa4e7fb9187..64cafc2b611 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CallCacheDiff.scala @@ -4,9 +4,17 @@ import ShardIndexFormatter._ import WorkflowIdJsonFormatter._ import spray.json.DefaultJsonProtocol -case class CallCacheDiffCallDescription(executionStatus: String, allowResultReuse: Boolean, callFqn: String, jobIndex: ShardIndex, workflowId: WorkflowId) +case class CallCacheDiffCallDescription(executionStatus: String, + allowResultReuse: Boolean, + callFqn: String, + jobIndex: ShardIndex, + workflowId: WorkflowId +) case class HashDifference(hashKey: String, callA: Option[String], callB: Option[String]) -case class CallCacheDiff(callA: CallCacheDiffCallDescription, callB: CallCacheDiffCallDescription, hashDifferential: List[HashDifference]) +case class CallCacheDiff(callA: CallCacheDiffCallDescription, + callB: CallCacheDiffCallDescription, + hashDifferential: List[HashDifference] +) object CallCacheDiffJsonSupport extends DefaultJsonProtocol 
{ implicit val CallCacheDiffCallDescriptionFormat = jsonFormat5(CallCacheDiffCallDescription) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala index 4a54cb58469..658ea72489f 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/CromwellQueryResult.scala @@ -7,7 +7,13 @@ import cromwell.api.model.WorkflowStatusJsonFormatter._ case class CromwellQueryResults(results: Seq[CromwellQueryResult]) -case class CromwellQueryResult(name: Option[String], id: WorkflowId, status: WorkflowStatus, end: Option[OffsetDateTime], start: Option[OffsetDateTime], metadataArchiveStatus: String) +case class CromwellQueryResult(name: Option[String], + id: WorkflowId, + status: WorkflowStatus, + end: Option[OffsetDateTime], + start: Option[OffsetDateTime], + metadataArchiveStatus: String +) object CromwellQueryResultJsonSupport extends DefaultJsonProtocol { implicit val CromwellQueryResultJsonFormat = jsonFormat6(CromwellQueryResult) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala index fd9d88d2177..0d111626f84 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/Label.scala @@ -5,7 +5,7 @@ import scala.language.postfixOps object LabelsJsonFormatter extends DefaultJsonProtocol { implicit object LabelJsonFormat extends RootJsonFormat[List[Label]] { - def write(l: List[Label]) = JsObject(l map { label => label.key -> JsString(label.value)} :_* ) + def write(l: List[Label]) = JsObject(l map { label => label.key -> JsString(label.value) }: _*) def read(value: JsValue) = value.asJsObject.fields map { case (k, JsString(v)) => Label(k, v) case other => throw new UnsupportedOperationException(s"Cannot deserialize $other to a Label") diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/TimeUtil.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/TimeUtil.scala index 1688da92ff7..0db0f1c8f66 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/TimeUtil.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/TimeUtil.scala @@ -4,6 +4,7 @@ import java.time.format.DateTimeFormatter import java.time.{OffsetDateTime, ZoneOffset} object TimeUtil { + /** * Instead of "one of" the valid ISO-8601 formats, standardize on this one: * https://github.com/openjdk/jdk/blob/jdk8-b120/jdk/src/share/classes/java/time/OffsetDateTime.java#L1886 @@ -11,12 +12,15 @@ object TimeUtil { private val Iso8601MillisecondsFormat = DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm:ss.SSSXXXXX") implicit class EnhancedOffsetDateTime(val offsetDateTime: OffsetDateTime) extends AnyVal { + /** * Discards the original timezone and shifts the time to UTC, then returns the ISO-8601 formatted string with * exactly three digits of milliseconds. 
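     * For example (illustrative): an input of 2023-10-04T11:48:35.1-04:00
     * renders as 2023-10-04T15:48:35.100Z -- shifted to UTC, with the
     * milliseconds padded to exactly three digits and the zero offset
     * printed as 'Z'.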
*/ - def toUtcMilliString: String = Option(offsetDateTime).map( - _.atZoneSameInstant(ZoneOffset.UTC).format(Iso8601MillisecondsFormat) - ).orNull + def toUtcMilliString: String = Option(offsetDateTime) + .map( + _.atZoneSameInstant(ZoneOffset.UTC).format(Iso8601MillisecondsFormat) + ) + .orNull } } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WaasDescription.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WaasDescription.scala index e1f679a7b35..f83ca95fb9a 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WaasDescription.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WaasDescription.scala @@ -22,17 +22,20 @@ final case class WaasDescription(valid: Boolean, importedDescriptorTypes: List[WaasWorkflowDescriptorType], meta: JsObject, parameterMeta: JsObject, - isRunnableWorkflow: Boolean) + isRunnableWorkflow: Boolean +) final case class WaasDescriptionInputDefinition(name: String, valueType: WaasDescriptionWomType, optional: Option[Boolean], default: Option[JsValue], - typeDisplayName: String) + typeDisplayName: String +) final case class WaasDescriptionOutputDefinition(name: String, valueType: WaasDescriptionWomType, - typeDisplayName: String) + typeDisplayName: String +) final case class WaasDescriptionWomType(typeName: String) final case class WaasWorkflowDescriptorType(descriptorType: Option[String], descriptorTypeVersion: Option[String]) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowDescribeRequest.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowDescribeRequest.scala index 9ab7ea45ec4..1a46e2117f9 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowDescribeRequest.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowDescribeRequest.scala @@ -4,4 +4,5 @@ final case class WorkflowDescribeRequest(workflowSource: Option[String], workflowUrl: Option[String], workflowType: Option[String], workflowTypeVersion: Option[String], - inputsJson: Option[String]) + inputsJson: Option[String] +) diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala index f52495136c3..20c3a558790 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowId.scala @@ -30,4 +30,3 @@ object WorkflowIdJsonFormatter extends DefaultJsonProtocol { } } } - diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala index 6da1282d2da..70f9aa1efeb 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowStatus.scala @@ -25,7 +25,7 @@ case object Running extends NonTerminalStatus case object Aborting extends NonTerminalStatus object WorkflowStatus { - def apply(status: String): WorkflowStatus = { + def apply(status: String): WorkflowStatus = status match { case "Submitted" => Submitted case "Running" => Running @@ -35,7 +35,6 @@ object WorkflowStatus { case "Succeeded" => Succeeded case bad => throw new IllegalArgumentException(s"No such status: $bad") } - } def apply(workflowStatus: CromwellStatus): WorkflowStatus = apply(workflowStatus.status) } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala 
b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala index 5f59368de13..e22d3e93e2e 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/WorkflowSubmission.scala @@ -22,7 +22,8 @@ final case class WorkflowSingleSubmission(workflowSource: Option[String], inputsJson: Option[String], options: Option[String], labels: Option[List[Label]], - zippedImports: Option[File]) extends WorkflowSubmission + zippedImports: Option[File] +) extends WorkflowSubmission final case class WorkflowBatchSubmission(workflowSource: Option[String], workflowUrl: Option[String], @@ -32,7 +33,8 @@ final case class WorkflowBatchSubmission(workflowSource: Option[String], inputsBatch: List[String], options: Option[String], labels: Option[List[Label]], - zippedImports: Option[File]) extends WorkflowSubmission { + zippedImports: Option[File] +) extends WorkflowSubmission { override val inputsJson: Option[String] = Option(inputsBatch.mkString(start = "[", sep = ",", end = "]")) } diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala index 9067ce7157a..ccd75efac1f 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/package.scala @@ -45,7 +45,7 @@ package object model { } implicit class EnhancedFailureResponseOrHttpResponseT(val responseIoT: FailureResponseOrT[HttpResponse]) - extends AnyVal { + extends AnyVal { def asHttpResponse: Future[HttpResponse] = { val io = responseIoT.value map { case Left(response) => response @@ -55,13 +55,14 @@ package object model { } } - implicit class EnhancedFailureResponseOrT[SuccessType](val responseIoT: FailureResponseOrT[SuccessType]) extends AnyVal { - final def timeout(duration: FiniteDuration) - (implicit timer: Timer[IO], cs: ContextShift[IO]): FailureResponseOrT[SuccessType] = { + implicit class EnhancedFailureResponseOrT[SuccessType](val responseIoT: FailureResponseOrT[SuccessType]) + extends AnyVal { + final def timeout( + duration: FiniteDuration + )(implicit timer: Timer[IO], cs: ContextShift[IO]): FailureResponseOrT[SuccessType] = EitherT(responseIoT.value.timeout(duration)) - } - def asIo(implicit materializer: ActorMaterializer, executionContext: ExecutionContext): IO[SuccessType] = { + def asIo(implicit materializer: ActorMaterializer, executionContext: ExecutionContext): IO[SuccessType] = responseIoT.value flatMap { case Left(response) => implicit def cs = IO.contextShift(executionContext) @@ -72,15 +73,13 @@ package object model { }) case Right(a) => IO.pure(a) } - } /** * Transforms the IO error from one type to another. 
*/ def mapErrorWith(mapper: Throwable => IO[Nothing]): FailureResponseOrT[SuccessType] = { - def handleErrorIo[A](ioIn: IO[A]): IO[A] = { + def handleErrorIo[A](ioIn: IO[A]): IO[A] = ioIn handleErrorWith mapper - } responseIoT.mapK(FunctionK.lift(handleErrorIo)) } diff --git a/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala index 1bfecebc0d5..ab653523801 100644 --- a/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala +++ b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala @@ -11,7 +11,6 @@ import org.scalatest.BeforeAndAfterAll import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers - class CromwellClientSpec extends AsyncFlatSpec with BeforeAndAfterAll with Matchers with TableDrivenPropertyChecks { behavior of "CromwellClient" @@ -40,86 +39,81 @@ class CromwellClientSpec extends AsyncFlatSpec with BeforeAndAfterAll with Match private val okRequestEntityTests = Table( ("description", "workflowSubmission", "expectedJsons", "expectedFiles"), - ("submit a wdl", - WorkflowSingleSubmission(Option("wdl"), None, None, None, None, None, None, None, None), - Map("workflowSource" -> "wdl"), - Map() + WorkflowSingleSubmission(Option("wdl"), None, None, None, None, None, None, None, None), + Map("workflowSource" -> "wdl"), + Map() ), - ("batch submit a wdl", - WorkflowBatchSubmission(Option("wdl"), None, None, None, None, List(), None, None, None), - Map("workflowSource" -> "wdl", "workflowInputs" -> "[]"), - Map() + WorkflowBatchSubmission(Option("wdl"), None, None, None, None, List(), None, None, None), + Map("workflowSource" -> "wdl", "workflowInputs" -> "[]"), + Map() ), - ("submit a wdl with data", - WorkflowSingleSubmission( - Option("wdl"), - None, - None, - Option("wfType"), - Option("wfTypeVersion"), - Option("inputsJson"), - Option("optionsJson"), - Option(List(Label("labelKey", "labelValue"))), - Option(tempFile) - ), - Map( - "workflowSource" -> "wdl", - "workflowType" -> "wfType", - "workflowTypeVersion" -> "wfTypeVersion", - "workflowInputs" -> "inputsJson", - "workflowOptions" -> "optionsJson", - "labels" -> """{"labelKey":"labelValue"}""" - ), - Map("workflowDependencies" -> tempFile) + WorkflowSingleSubmission( + Option("wdl"), + None, + None, + Option("wfType"), + Option("wfTypeVersion"), + Option("inputsJson"), + Option("optionsJson"), + Option(List(Label("labelKey", "labelValue"))), + Option(tempFile) + ), + Map( + "workflowSource" -> "wdl", + "workflowType" -> "wfType", + "workflowTypeVersion" -> "wfTypeVersion", + "workflowInputs" -> "inputsJson", + "workflowOptions" -> "optionsJson", + "labels" -> """{"labelKey":"labelValue"}""" + ), + Map("workflowDependencies" -> tempFile) ), - ("submit a wdl using workflow url", - WorkflowSingleSubmission( - None, - Option("https://link-to-url"), - None, - Option("wfType"), - Option("wfTypeVersion"), - Option("inputsJson"), - Option("optionsJson"), - Option(List(Label("labelKey", "labelValue"))), - Option(tempFile) - ), - Map( - "workflowUrl" -> "https://link-to-url", - "workflowType" -> "wfType", - "workflowTypeVersion" -> "wfTypeVersion", - "workflowInputs" -> "inputsJson", - "workflowOptions" -> "optionsJson", - "labels" -> """{"labelKey":"labelValue"}""" - ), - Map("workflowDependencies" -> tempFile) + WorkflowSingleSubmission( + None, + Option("https://link-to-url"), + None, + Option("wfType"), + Option("wfTypeVersion"), + Option("inputsJson"), + Option("optionsJson"), 
+ Option(List(Label("labelKey", "labelValue"))), + Option(tempFile) + ), + Map( + "workflowUrl" -> "https://link-to-url", + "workflowType" -> "wfType", + "workflowTypeVersion" -> "wfTypeVersion", + "workflowInputs" -> "inputsJson", + "workflowOptions" -> "optionsJson", + "labels" -> """{"labelKey":"labelValue"}""" + ), + Map("workflowDependencies" -> tempFile) ), - ("batch submit a wdl with data", - WorkflowBatchSubmission( - Option("wdl"), - None, - None, - Option("wfType"), - Option("wfTypeVersion"), - List("inputsJson1", "inputsJson2"), - Option("optionsJson"), - Option(List(Label("labelKey", "labelValue"))), - Option(tempFile) - ), - Map( - "workflowSource" -> "wdl", - "workflowType" -> "wfType", - "workflowTypeVersion" -> "wfTypeVersion", - "workflowInputs" -> "[inputsJson1,inputsJson2]", - "workflowOptions" -> "optionsJson", - "labels" -> """{"labelKey":"labelValue"}""" - ), - Map("workflowDependencies" -> tempFile) + WorkflowBatchSubmission( + Option("wdl"), + None, + None, + Option("wfType"), + Option("wfTypeVersion"), + List("inputsJson1", "inputsJson2"), + Option("optionsJson"), + Option(List(Label("labelKey", "labelValue"))), + Option(tempFile) + ), + Map( + "workflowSource" -> "wdl", + "workflowType" -> "wfType", + "workflowTypeVersion" -> "wfTypeVersion", + "workflowInputs" -> "[inputsJson1,inputsJson2]", + "workflowOptions" -> "optionsJson", + "labels" -> """{"labelKey":"labelValue"}""" + ), + Map("workflowDependencies" -> tempFile) ) ) @@ -132,24 +126,22 @@ class CromwellClientSpec extends AsyncFlatSpec with BeforeAndAfterAll with Match contentType.mediaType.isMultipart should be(true) val boundary = contentType.mediaType.params("boundary") - val expectedJsonChunks = expectedJsons map { - case (chunkKey, chunkValue) => - s"""|--$boundary - |Content-Type: application/json - |Content-Disposition: form-data; name="$chunkKey" - | - |$chunkValue - |""".stripMargin.replace("\n", "\r\n").trim + val expectedJsonChunks = expectedJsons map { case (chunkKey, chunkValue) => + s"""|--$boundary + |Content-Type: application/json + |Content-Disposition: form-data; name="$chunkKey" + | + |$chunkValue + |""".stripMargin.replace("\n", "\r\n").trim } - val expectedFileChunks = expectedFiles.iterator map { - case (chunkKey, chunkFile) => - s"""|--$boundary - |Content-Type: application/zip - |Content-Disposition: form-data; filename="${chunkFile.name}"; name="$chunkKey" - |""".stripMargin.replace("\n", "\r\n").trim + val expectedFileChunks = expectedFiles.iterator map { case (chunkKey, chunkFile) => + s"""|--$boundary + |Content-Type: application/zip + |Content-Disposition: form-data; filename="${chunkFile.name}"; name="$chunkKey" + |""".stripMargin.replace("\n", "\r\n").trim } - val expectedFileContents = expectedFiles.iterator map { - case (_, chunkFile) => chunkFile.contentAsString + val expectedFileContents = expectedFiles.iterator map { case (_, chunkFile) => + chunkFile.contentAsString } val boundaryEnd = s"--$boundary--" diff --git a/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala index 06841bfad71..34e901857b7 100644 --- a/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala +++ b/cromwellApiClient/src/test/scala/cromwell/api/CromwellResponseFailedSpec.scala @@ -15,15 +15,18 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ import scala.concurrent.{Await, Future} -class CromwellResponseFailedSpec extends 
TestKit(ActorSystem("CromwellResponseFailedSpec")) - with AsyncFlatSpecLike with Matchers with BeforeAndAfterAll { +class CromwellResponseFailedSpec + extends TestKit(ActorSystem("CromwellResponseFailedSpec")) + with AsyncFlatSpecLike + with Matchers + with BeforeAndAfterAll { override def afterAll(): Unit = { Await.ready(system.terminate(), 10.seconds.dilated) super.afterAll() } - + implicit val materializer: ActorMaterializer = ActorMaterializer() - + "CromwellAPIClient" should "fail the Future if the HttpResponse is unsuccessful" in { val errorMessage = """|{ @@ -32,14 +35,15 @@ class CromwellResponseFailedSpec extends TestKit(ActorSystem("CromwellResponseFa |} |""".stripMargin.trim val client = new CromwellClient(new URL("http://fakeurl"), "v1") { - override def executeRequest(request: HttpRequest, headers: List[HttpHeader]): Future[HttpResponse] = Future.successful( - new HttpResponse( - StatusCodes.ServiceUnavailable, - List.empty[HttpHeader], - HttpEntity(ContentTypes.`application/json`, errorMessage), - HttpProtocols.`HTTP/1.1` + override def executeRequest(request: HttpRequest, headers: List[HttpHeader]): Future[HttpResponse] = + Future.successful( + new HttpResponse( + StatusCodes.ServiceUnavailable, + List.empty[HttpHeader], + HttpEntity(ContentTypes.`application/json`, errorMessage), + HttpProtocols.`HTTP/1.1` + ) ) - ) } for { diff --git a/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala index 9ac224970bf..4eb0a450aca 100644 --- a/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala +++ b/cromwellApiClient/src/test/scala/cromwell/api/model/CromwellQueryResultJsonFormatterSpec.scala @@ -7,50 +7,51 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import spray.json._ - class CromwellQueryResultJsonFormatterSpec extends AnyFlatSpec with Matchers { behavior of "CromwellQueryResultJsonFormat" - val sampleQueryResult = CromwellQueryResults(results = List( - CromwellQueryResult( - Option("switcheroo"), - WorkflowId.fromString("bee51f36-396d-4e22-8a81-33dedff66bf6"), - Failed, - Option(OffsetDateTime.parse("2017-07-24T14:44:34.010Z")), + val sampleQueryResult = CromwellQueryResults(results = + List( + CromwellQueryResult( + Option("switcheroo"), + WorkflowId.fromString("bee51f36-396d-4e22-8a81-33dedff66bf6"), + Failed, + Option(OffsetDateTime.parse("2017-07-24T14:44:34.010Z")), Option(OffsetDateTime.parse("2017-07-24T14:44:33.227Z")), - "Archived" - ), - CromwellQueryResult( - Option("switcheroo"), - WorkflowId.fromString("0071495e-39eb-478e-bc98-8614b986c91e"), - Succeeded, + "Archived" + ), + CromwellQueryResult( + Option("switcheroo"), + WorkflowId.fromString("0071495e-39eb-478e-bc98-8614b986c91e"), + Succeeded, Option(OffsetDateTime.parse("2017-07-24T15:06:45.940Z")), - Option(OffsetDateTime.parse("2017-07-24T15:04:54.372Z")), - "Unarchived" - ), - )) - - val sampleJson = """|{ - | "results": [ - | { - | "name": "switcheroo", - | "id": "bee51f36-396d-4e22-8a81-33dedff66bf6", - | "status": "Failed", - | "end": "2017-07-24T14:44:34.010Z", - | "start": "2017-07-24T14:44:33.227Z", - | "metadataArchiveStatus": "Archived" - | }, - | { - | "name": "switcheroo", - | "id": "0071495e-39eb-478e-bc98-8614b986c91e", - | "status": "Succeeded", - | "end": "2017-07-24T15:06:45.940Z", - | "start": "2017-07-24T15:04:54.372Z", - | "metadataArchiveStatus": "Unarchived" - | } - | 
] - |}""".stripMargin.parseJson.asJsObject + Option(OffsetDateTime.parse("2017-07-24T15:04:54.372Z")), + "Unarchived" + ) + ) + ) + + val sampleJson = """|{ + | "results": [ + | { + | "name": "switcheroo", + | "id": "bee51f36-396d-4e22-8a81-33dedff66bf6", + | "status": "Failed", + | "end": "2017-07-24T14:44:34.010Z", + | "start": "2017-07-24T14:44:33.227Z", + | "metadataArchiveStatus": "Archived" + | }, + | { + | "name": "switcheroo", + | "id": "0071495e-39eb-478e-bc98-8614b986c91e", + | "status": "Succeeded", + | "end": "2017-07-24T15:06:45.940Z", + | "start": "2017-07-24T15:04:54.372Z", + | "metadataArchiveStatus": "Unarchived" + | } + | ] + |}""".stripMargin.parseJson.asJsObject it should "write a query result as a structured JsObject" in { sampleQueryResult.toJson shouldEqual sampleJson diff --git a/cromwellApiClient/src/test/scala/cromwell/api/model/LabelsJsonFormatterSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/model/LabelsJsonFormatterSpec.scala index c1f1e89c08a..ab0c7e08cbc 100644 --- a/cromwellApiClient/src/test/scala/cromwell/api/model/LabelsJsonFormatterSpec.scala +++ b/cromwellApiClient/src/test/scala/cromwell/api/model/LabelsJsonFormatterSpec.scala @@ -4,18 +4,17 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import spray.json._ - class LabelsJsonFormatterSpec extends AnyFlatSpec with Matchers { import cromwell.api.model.LabelsJsonFormatter._ behavior of "WdlValueJsonFormat" val sampleLabels = List(Label("key-1", "value-1"), Label("key-2", "value-2"), Label("key-3", "value-3")) - val sampleJson = """|{ - | "key-1":"value-1", - | "key-2":"value-2", - | "key-3":"value-3" - |}""".stripMargin.parseJson.asJsObject + val sampleJson = """|{ + | "key-1":"value-1", + | "key-2":"value-2", + | "key-3":"value-3" + |}""".stripMargin.parseJson.asJsObject it should "write a Label as a structured JsObject" in { val label = List(Label("test-key", "test-value")) diff --git a/cromwellApiClient/src/test/scala/cromwell/api/model/WaasDescriptionJsonSupportSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/model/WaasDescriptionJsonSupportSpec.scala index 5bad5f8b21f..7e328ee4725 100644 --- a/cromwellApiClient/src/test/scala/cromwell/api/model/WaasDescriptionJsonSupportSpec.scala +++ b/cromwellApiClient/src/test/scala/cromwell/api/model/WaasDescriptionJsonSupportSpec.scala @@ -3,7 +3,6 @@ package cromwell.api.model import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class WaasDescriptionJsonSupportSpec extends AnyFlatSpec with Matchers { it should "deserialize invalid result JSON" in { @@ -25,7 +24,6 @@ class WaasDescriptionJsonSupportSpec extends AnyFlatSpec with Matchers { | "isRunnableWorkflow": false |}""".stripMargin - import cromwell.api.model.WorkflowDescriptionJsonSupport._ import spray.json._ @@ -33,7 +31,11 @@ class WaasDescriptionJsonSupportSpec extends AnyFlatSpec with Matchers { val deserialized = jsonAst.convertTo[WaasDescription] deserialized.valid should be(false) - deserialized.errors should be(List("""Failed to import workflow sub_workflow_aborted_import.wdl.:\nBad import sub_workflow_aborted_import.wdl: Failed to resolve 'sub_workflow_aborted_import.wdl' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Relative path""")) + deserialized.errors should be( + List( + """Failed to import workflow sub_workflow_aborted_import.wdl.:\nBad import sub_workflow_aborted_import.wdl: Failed to resolve 'sub_workflow_aborted_import.wdl' using resolver: 'http importer 
(no 'relative-to' origin)' (reason 1 of 1): Relative path""" + ) + ) deserialized.validWorkflow should be(false) deserialized.name should be("") deserialized.inputs should be(List.empty) diff --git a/database/migration/src/main/scala/cromwell/database/migration/WdlTransformation.scala b/database/migration/src/main/scala/cromwell/database/migration/WdlTransformation.scala index cea987fc89a..4f493492d7e 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/WdlTransformation.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/WdlTransformation.scala @@ -11,15 +11,16 @@ import wom.types.{WomPrimitiveType, WomType} import scala.util.Try -private [migration] object WdlTransformation { +private[migration] object WdlTransformation { def inflate(value: String): Try[String] = Try { Option(value) match { - case Some(v) => IOUtils.toString(new GZIPInputStream(new ByteArrayInputStream(Base64.decodeBase64(v))), Charset.defaultCharset) + case Some(v) => + IOUtils.toString(new GZIPInputStream(new ByteArrayInputStream(Base64.decodeBase64(v))), Charset.defaultCharset) case None => null } - } recover { - case _: IOException => value + } recover { case _: IOException => + value } def coerceStringToWdl(wdlString: String, womType: WomType) = womType match { diff --git a/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala b/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala index 49db7e4446f..4d5eb16de18 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/custom/BatchedTaskChange.scala @@ -9,6 +9,7 @@ import liquibase.exception.CustomChangeException * Runs a migration as a series of batches. */ trait BatchedTaskChange extends MigrationTaskChange { + /** * Returns sql to retrieve the maximum primary key for the table. * @@ -80,14 +81,16 @@ trait BatchedTaskChange extends MigrationTaskChange { override def migrate(connection: JdbcConnection): Unit = { - logger.info(s"Running migration $migrationName with a read batch size of " + - s"$readBatchSize and a write batch size of $writeBatchSize") + logger.info( + s"Running migration $migrationName with a read batch size of " + + s"$readBatchSize and a write batch size of $writeBatchSize" + ) /* - * Keep count of the size of the batch. - * - * @see writeBatchSize - */ + * Keep count of the size of the batch. 
+ * + * @see writeBatchSize + */ var batchMigrationCounter: Int = 0 val readCount = getReadCount(connection) @@ -101,25 +104,24 @@ trait BatchedTaskChange extends MigrationTaskChange { val paginator = new QueryPaginator(readBatchStatement, readBatchSize, readCount) // Loop over pages - paginator.zipWithIndex foreach { - case (resultBatch, page) => - // Loop over rows in page - new ResultSetIterator(resultBatch) foreach { row => - batchMigrationCounter += migrateBatchRow(row, migrateBatchStatements) - // batchMigrationCounter can actually be bigger than writeBatchSize as wdlValues are processed atomically, - // so this is a best effort - if (batchMigrationCounter >= writeBatchSize) { - migrateBatchStatements.foreach(_.executeBatch()) - connection.commit() - batchMigrationCounter = 0 - } + paginator.zipWithIndex foreach { case (resultBatch, page) => + // Loop over rows in page + new ResultSetIterator(resultBatch) foreach { row => + batchMigrationCounter += migrateBatchRow(row, migrateBatchStatements) + // batchMigrationCounter can actually be bigger than writeBatchSize as wdlValues are processed atomically, + // so this is a best effort + if (batchMigrationCounter >= writeBatchSize) { + migrateBatchStatements.foreach(_.executeBatch()) + connection.commit() + batchMigrationCounter = 0 } + } - resultBatch.close() + resultBatch.close() - val progress = Math.min((page + 1) * 100 / pageCount, 100) - val progressMessage = s"[$migrationName] $progress%" - logger.info(progressMessage) + val progress = Math.min((page + 1) * 100 / pageCount, 100) + val progressMessage = s"[$migrationName] $progress%" + logger.info(progressMessage) } if (batchMigrationCounter != 0) { diff --git a/database/migration/src/main/scala/cromwell/database/migration/custom/MigrationTaskChange.scala b/database/migration/src/main/scala/cromwell/database/migration/custom/MigrationTaskChange.scala index 4fa7935e632..f02d0c46d28 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/custom/MigrationTaskChange.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/custom/MigrationTaskChange.scala @@ -24,7 +24,7 @@ trait MigrationTaskChange extends CustomTaskChange with LazyLogging { */ def migrate(connection: JdbcConnection): Unit - override def execute(database: Database): Unit = { + override def execute(database: Database): Unit = try { val dbConn = database.getConnection.asInstanceOf[JdbcConnection] val autoCommit = dbConn.getAutoCommit @@ -36,7 +36,6 @@ trait MigrationTaskChange extends CustomTaskChange with LazyLogging { case exception: Exception => throw new CustomChangeException(s"Could not apply migration script for $migrationName", exception) } - } override def setUp() = {} diff --git a/database/migration/src/main/scala/cromwell/database/migration/custom/QueryPaginator.scala b/database/migration/src/main/scala/cromwell/database/migration/custom/QueryPaginator.scala index 0e6514043c3..cb85ffdbcac 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/custom/QueryPaginator.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/custom/QueryPaginator.scala @@ -2,13 +2,10 @@ package cromwell.database.migration.custom import java.sql.{PreparedStatement, ResultSet} - -class QueryPaginator(statement: PreparedStatement, - batchSize: Int, - count: Int) extends Iterator[ResultSet] { +class QueryPaginator(statement: PreparedStatement, batchSize: Int, count: Int) extends Iterator[ResultSet] { var cursor = 0 - def next(): ResultSet = { + def next(): ResultSet = 
{ statement.setInt(1, cursor) statement.setInt(2, cursor + batchSize) diff --git a/database/migration/src/main/scala/cromwell/database/migration/failuremetadata/DeduplicateFailureMessageIds.scala b/database/migration/src/main/scala/cromwell/database/migration/failuremetadata/DeduplicateFailureMessageIds.scala index 74ab0769b34..9ffea901284 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/failuremetadata/DeduplicateFailureMessageIds.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/failuremetadata/DeduplicateFailureMessageIds.scala @@ -17,8 +17,10 @@ class DeduplicateFailureMessageIds extends BatchedTaskChange { val callFqnColumn = "CALL_FQN" val jobScatterIndexColumn = "JOB_SCATTER_INDEX" val retryAttemptColumn = "JOB_RETRY_ATTEMPT" - val contentEqualityCheck = List(workflowIdColumn, metadataKeyColumn, callFqnColumn, jobScatterIndexColumn, retryAttemptColumn) - .map(s => s"(t2.$s = t1.$s OR (t2.$s IS NULL AND t1.$s IS NULL))").mkString(" AND ") + val contentEqualityCheck = + List(workflowIdColumn, metadataKeyColumn, callFqnColumn, jobScatterIndexColumn, retryAttemptColumn) + .map(s => s"(t2.$s = t1.$s OR (t2.$s IS NULL AND t1.$s IS NULL))") + .mkString(" AND ") val fixableFailureMessageFilter = "METADATA_KEY LIKE '%failures[%]%:message'" diff --git a/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala b/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala index 2365dcf1e4b..e56fa88bc36 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala @@ -1,7 +1,7 @@ package cromwell.database.migration.liquibase import liquibase.database.Database -import liquibase.diff.{DiffResult, Difference, ObjectDifferences} +import liquibase.diff.{Difference, DiffResult, ObjectDifferences} import liquibase.structure.DatabaseObject import liquibase.structure.core._ @@ -11,6 +11,7 @@ import scala.jdk.CollectionConverters._ * Filters liquibase results. */ object DiffResultFilter { + /** * A filter for a database object. */ @@ -42,19 +43,16 @@ object DiffResultFilter { val unexpectedObjects = diffResult.getUnexpectedObjects.asScala val changedObjects = diffResult.getChangedObjects.asScala - val newDiffResult = new DiffResult( - diffResult.getReferenceSnapshot, - diffResult.getComparisonSnapshot, - diffResult.getCompareControl) + val newDiffResult = + new DiffResult(diffResult.getReferenceSnapshot, diffResult.getComparisonSnapshot, diffResult.getCompareControl) missingObjects.filterNot(unchangedFilter(referenceDatabase, _)).foreach(newDiffResult.addMissingObject) unexpectedObjects.filterNot(unchangedFilter(comparisonDatabase, _)).foreach(newDiffResult.addUnexpectedObject) val filteredChangedObjects = changedObjects.filterNot(isSameObject(referenceDatabase, comparisonDatabase, changedFilters)) - for ((obj, difference) <- filteredChangedObjects) { + for ((obj, difference) <- filteredChangedObjects) newDiffResult.addChangedObject(obj, difference) - } newDiffResult } @@ -71,12 +69,14 @@ object DiffResultFilter { * @param objectAndDiff A tuple of the object and the set of differences. * @return True if the object is actually the same. 
*/ - def isSameObject(referenceDatabase: Database, comparisonDatabase: Database, filters: Seq[DiffFilter]) - (objectAndDiff: (DatabaseObject, ObjectDifferences)): Boolean = { + def isSameObject(referenceDatabase: Database, comparisonDatabase: Database, filters: Seq[DiffFilter])( + objectAndDiff: (DatabaseObject, ObjectDifferences) + ): Boolean = { val (obj, objectDifferences) = objectAndDiff val differences = objectDifferences.getDifferences.asScala val filtered = filters.foldLeft(differences)((diffs, diffFilter) => - diffs.filterNot(diffFilter(referenceDatabase, comparisonDatabase, obj, _))) + diffs.filterNot(diffFilter(referenceDatabase, comparisonDatabase, obj, _)) + ) filtered.isEmpty } @@ -89,16 +89,19 @@ object DiffResultFilter { * @param difference The difference reported. * @return True if the object is actually the same with slightly different column widths. */ - def isVarchar255(referenceDatabase: Database, comparisonDatabase: Database, - databaseObject: DatabaseObject, difference: Difference): Boolean = { + def isVarchar255(referenceDatabase: Database, + comparisonDatabase: Database, + databaseObject: DatabaseObject, + difference: Difference + ): Boolean = { val compared = difference.getComparedValue val referenced = difference.getReferenceValue compared.isInstanceOf[DataType] && referenced.isInstanceOf[DataType] && { val comparedDataType = compared.asInstanceOf[DataType] val referencedDataType = referenced.asInstanceOf[DataType] comparedDataType.getTypeName == "VARCHAR" && referencedDataType.getTypeName == "VARCHAR" && - // Our liquibase copypasta defaults VARCHAR to 255. Slick without a value defaults to 254 - (comparedDataType.getColumnSize + referencedDataType.getColumnSize == 255 + 254) + // Our liquibase copypasta defaults VARCHAR to 255. Slick without a value defaults to 254 + (comparedDataType.getColumnSize + referencedDataType.getColumnSize == 255 + 254) } } @@ -114,9 +117,11 @@ object DiffResultFilter { * @param difference The difference reported. * @return True if the object is actually similar based on type. */ - def isTypeSimilar(similarTypes: String*) - (referenceDatabase: Database, comparisonDatabase: Database, - databaseObject: DatabaseObject, difference: Difference): Boolean = { + def isTypeSimilar(similarTypes: String*)(referenceDatabase: Database, + comparisonDatabase: Database, + databaseObject: DatabaseObject, + difference: Difference + ): Boolean = { val compared = difference.getComparedValue val referenced = difference.getReferenceValue compared.isInstanceOf[DataType] && referenced.isInstanceOf[DataType] && { @@ -137,10 +142,12 @@ object DiffResultFilter { * @param difference The difference reported. * @return True if the object is actually similar based on type. */ - def isReordered(referenceDatabase: Database, comparisonDatabase: Database, - databaseObject: DatabaseObject, difference: Difference): Boolean = { + def isReordered(referenceDatabase: Database, + comparisonDatabase: Database, + databaseObject: DatabaseObject, + difference: Difference + ): Boolean = difference.getField == "order" - } /** * Returns true if the object is a change log object. @@ -149,7 +156,7 @@ object DiffResultFilter { * @param databaseObject The database object. * @return True if the object is a change log object. 
*/ - def isChangeLog(database: Database, databaseObject: DatabaseObject): Boolean = { + def isChangeLog(database: Database, databaseObject: DatabaseObject): Boolean = databaseObject match { case table: Table => table.getName.contains("DATABASECHANGELOG") case column: Column => isChangeLog(database, column.getRelation) @@ -157,7 +164,6 @@ object DiffResultFilter { case key: PrimaryKey => isChangeLog(database, key.getTable) case _ => false } - } /** * Returns true if the object is liquibase database object. @@ -166,9 +172,8 @@ object DiffResultFilter { * @param databaseObject The database object. * @return True if the object is a liquibase database object. */ - def isLiquibaseObject(database: Database, databaseObject: DatabaseObject): Boolean = { + def isLiquibaseObject(database: Database, databaseObject: DatabaseObject): Boolean = database.isLiquibaseObject(databaseObject) - } /** * Returns true if the object is a member of the excluded table. @@ -178,23 +183,19 @@ object DiffResultFilter { * @param databaseObject The database object. * @return True if the object is a member of the tables. */ - def isTableObject(tables: Seq[String]) - (database: Database, databaseObject: DatabaseObject): Boolean = { + def isTableObject(tables: Seq[String])(database: Database, databaseObject: DatabaseObject): Boolean = isTableObject(tables, databaseObject) - } - private def isTableObject(tables: Seq[String], databaseObject: DatabaseObject): Boolean = { + private def isTableObject(tables: Seq[String], databaseObject: DatabaseObject): Boolean = tables.exists(table => databaseObject.getName.equalsIgnoreCase(table) || getContainingObjects(databaseObject).exists(isTableObject(tables, _)) ) - } // getContainingObjects is ill-mannered and returns null when really it ought to return an empty array, so wrap // in an `Option` and `getOrElse`. - private def getContainingObjects(databaseObject: DatabaseObject): Array[DatabaseObject] = { + private def getContainingObjects(databaseObject: DatabaseObject): Array[DatabaseObject] = Option(databaseObject.getContainingObjects).getOrElse(Array.empty) - } /** * Adds utility methods to a liquibase diff result. @@ -202,6 +203,7 @@ object DiffResultFilter { * @param diffResult The origin diff result. */ implicit class EnhancedDiffResult(val diffResult: DiffResult) extends AnyVal { + /** * Filters changelogs. * diff --git a/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala b/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala index e4823b8b0ac..d8c32f7b8fa 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala @@ -34,7 +34,7 @@ object LiquibaseUtils { * @param settings The liquibase settings. * @param jdbcConnection A jdbc connection to the database. */ - def updateSchema(settings: LiquibaseSettings)(jdbcConnection: Connection): Unit = { + def updateSchema(settings: LiquibaseSettings)(jdbcConnection: Connection): Unit = mutex.synchronized { val liquibaseConnection = newConnection(jdbcConnection) try { @@ -44,11 +44,9 @@ object LiquibaseUtils { val liquibase = new Liquibase(settings.changeLogResourcePath, new ClassLoaderResourceAccessor(), database) updateSchema(liquibase) - } finally { + } finally closeConnection(liquibaseConnection) - } } - } /** * Wraps a jdbc connection in the database with the appropriate liquibase connection. 
@@ -58,21 +56,19 @@ object LiquibaseUtils { * @param jdbcConnection The liquibase connection. * @return */ - private def newConnection(jdbcConnection: Connection): DatabaseConnection = { + private def newConnection(jdbcConnection: Connection): DatabaseConnection = jdbcConnection.getMetaData.getDatabaseProductName match { case HsqlDatabaseProperties.PRODUCT_NAME => new HsqlConnection(jdbcConnection) case _ => new JdbcConnection(jdbcConnection) } - } /** * Updates the liquibase database. * * @param liquibase The facade for interacting with liquibase. */ - private def updateSchema(liquibase: Liquibase): Unit = { + private def updateSchema(liquibase: Liquibase): Unit = liquibase.update(DefaultContexts, DefaultLabelExpression) - } /** * Converts a liquibase connection to a liquibase database. @@ -80,9 +76,8 @@ object LiquibaseUtils { * @param liquibaseConnection The liquibase connection. * @return The liquibase database. */ - private def toDatabase(liquibaseConnection: DatabaseConnection): Database = { + private def toDatabase(liquibaseConnection: DatabaseConnection): Database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(liquibaseConnection) - } /** * Compares a reference to a comparison liquibase database. @@ -91,9 +86,8 @@ object LiquibaseUtils { * @param comparisonDatabase The comparison liquibase database. * @return The complete diff results. */ - private def compare(referenceDatabase: Database, comparisonDatabase: Database): DiffResult = { + private def compare(referenceDatabase: Database, comparisonDatabase: Database): DiffResult = DiffGeneratorFactory.getInstance().compare(referenceDatabase, comparisonDatabase, CompareControl.STANDARD) - } /** * Compares a reference to a comparison JDBC connection. @@ -103,7 +97,7 @@ object LiquibaseUtils { * @param block Block of code to run before closing the connections. * @return The complete diff results. */ - def compare[T](referenceJdbc: Connection, comparisonJdbc: Connection)(block: DiffResult => T): T = { + def compare[T](referenceJdbc: Connection, comparisonJdbc: Connection)(block: DiffResult => T): T = mutex.synchronized { withConnection(referenceJdbc) { referenceLiquibase => withConnection(comparisonJdbc) { comparisonLiquibase => @@ -112,7 +106,6 @@ object LiquibaseUtils { } } } - } /** * Provides a connection to a block of code, closing the connection afterwards. @@ -124,11 +117,10 @@ object LiquibaseUtils { */ private def withConnection[T](jdbcConnection: Connection)(block: DatabaseConnection => T): T = { val liquibaseConnection = newConnection(jdbcConnection) - try { + try block(liquibaseConnection) - } finally { + finally closeConnection(liquibaseConnection) - } } /** @@ -136,13 +128,12 @@ object LiquibaseUtils { * * @param connection The liquibase connection. */ - private def closeConnection(connection: DatabaseConnection): Unit = { - try { + private def closeConnection(connection: DatabaseConnection): Unit = + try connection.close() - } finally { + finally { /* ignore */ } - } /** * Returns the changelog for a liquibase setting. @@ -165,11 +156,10 @@ object LiquibaseUtils { * @param settings The liquibase settings. * @return The database change sets. */ - def getChangeSets(settings: LiquibaseSettings): Seq[ChangeSet] = { + def getChangeSets(settings: LiquibaseSettings): Seq[ChangeSet] = mutex.synchronized { getChangeLog(settings).getChangeSets.asScala.toList } - } /** * Returns a schema snapshot. @@ -177,7 +167,7 @@ object LiquibaseUtils { * @param jdbcConnection A jdbc connection to the database. 
* @return The database change sets. */ - def getSnapshot(jdbcConnection: Connection): DatabaseSnapshot = { + def getSnapshot(jdbcConnection: Connection): DatabaseSnapshot = mutex.synchronized { withConnection(jdbcConnection) { referenceLiquibase => val database = toDatabase(referenceLiquibase) @@ -191,10 +181,8 @@ object LiquibaseUtils { database, new SnapshotControl(database) ) - } finally { + } finally database.setObjectQuotingStrategy(objectQuotingStrategy) - } } } - } } diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/MetadataCustomSql.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/MetadataCustomSql.scala index 13515de2e8c..84734d37b5a 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/MetadataCustomSql.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/MetadataCustomSql.scala @@ -17,15 +17,13 @@ abstract class MetadataCustomSql extends CustomSqlChange { def queries: Array[String] - override def generateStatements(database: Database): Array[SqlStatement] = { - queries map { query => new RawSqlStatement(query) } - } + override def generateStatements(database: Database): Array[SqlStatement] = + queries map { query => new RawSqlStatement(query) } override def setUp(): Unit = () - override def validate(database: Database): ValidationErrors = { + override def validate(database: Database): ValidationErrors = new ValidationErrors() - } override def setFileOpener(resourceAccessor: ResourceAccessor): Unit = () } diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/ExecutionTableMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/ExecutionTableMigration.scala index 2792c00ccf0..991bcacb0f0 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/ExecutionTableMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/ExecutionTableMigration.scala @@ -32,9 +32,8 @@ class ExecutionTableMigration extends MetadataCustomSql { FROM TMP_EXECUTION_MIGRATION e JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID WHERE - e.START_DT IS NOT NULL;""" - , - """INSERT INTO METADATA_JOURNAL ( + e.START_DT IS NOT NULL;""", + """INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -54,9 +53,8 @@ class ExecutionTableMigration extends MetadataCustomSql { 'string', NOW() FROM TMP_EXECUTION_MIGRATION e - JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""" - , - s"""INSERT INTO METADATA_JOURNAL ( + JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""", + s"""INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -78,9 +76,8 @@ class ExecutionTableMigration extends MetadataCustomSql { FROM TMP_EXECUTION_MIGRATION e JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID WHERE - e.END_DT IS NOT NULL;""" - , - """INSERT INTO METADATA_JOURNAL ( + e.END_DT IS NOT NULL;""", + """INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -100,9 +97,8 @@ class ExecutionTableMigration extends MetadataCustomSql { 'string', NOW() FROM TMP_EXECUTION_MIGRATION e - JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""" - , - """INSERT INTO METADATA_JOURNAL ( + JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = 
e.WORKFLOW_EXECUTION_ID;""", + """INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -123,9 +119,8 @@ class ExecutionTableMigration extends MetadataCustomSql { NOW() FROM TMP_EXECUTION_MIGRATION e JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID - WHERE e.RC IS NOT NULL;""" - , - s"""INSERT INTO METADATA_JOURNAL ( + WHERE e.RC IS NOT NULL;""", + s"""INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -145,9 +140,8 @@ class ExecutionTableMigration extends MetadataCustomSql { 'boolean', NOW() FROM TMP_EXECUTION_MIGRATION e - JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""" - , - s"""INSERT INTO METADATA_JOURNAL ( + JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""", + s"""INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -169,9 +163,8 @@ class ExecutionTableMigration extends MetadataCustomSql { FROM TMP_EXECUTION_MIGRATION e JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID LEFT JOIN RUNTIME_ATTRIBUTES ra ON e.EXECUTION_ID = ra.EXECUTION_ID AND ra.ATTRIBUTE_NAME = 'preemptible' - WHERE ra.ATTRIBUTE_VALUE IS NOT NULL;""" - , - """INSERT INTO METADATA_JOURNAL ( + WHERE ra.ATTRIBUTE_VALUE IS NOT NULL;""", + """INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -187,9 +180,8 @@ class ExecutionTableMigration extends MetadataCustomSql { ATTEMPT, '1900-01-01' FROM TMP_EXECUTION_MIGRATION e - JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""" - , - """INSERT INTO METADATA_JOURNAL ( + JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""", + """INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -205,9 +197,8 @@ class ExecutionTableMigration extends MetadataCustomSql { ATTEMPT, '1900-01-01' FROM TMP_EXECUTION_MIGRATION e - JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""" - , - """INSERT INTO METADATA_JOURNAL ( + JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""", + """INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, @@ -223,9 +214,8 @@ class ExecutionTableMigration extends MetadataCustomSql { ATTEMPT, '1900-01-01' FROM TMP_EXECUTION_MIGRATION e - JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""" - , - """INSERT INTO METADATA_JOURNAL ( + JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID;""", + """INSERT INTO METADATA_JOURNAL ( WORKFLOW_EXECUTION_UUID, METADATA_KEY, CALL_FQN, diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/FailureEventTableMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/FailureEventTableMigration.scala index 60d12342e77..01fa8de0bdf 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/FailureEventTableMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/FailureEventTableMigration.scala @@ -5,7 +5,7 @@ import cromwell.database.migration.metadata.MetadataCustomSql class FailureEventTableMigration extends MetadataCustomSql { import MetadataCustomSql._ - override def queries: Array[String] = { + override def queries: Array[String] = Array( """ |INSERT INTO METADATA_JOURNAL ( @@ -32,31 +32,30 @@ class 
FailureEventTableMigration extends MetadataCustomSql { | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; """.stripMargin, s""" - |INSERT INTO METADATA_JOURNAL ( - | WORKFLOW_EXECUTION_UUID, - | METADATA_KEY, - | CALL_FQN, - | JOB_SCATTER_INDEX, - | JOB_RETRY_ATTEMPT, - | METADATA_VALUE, - | METADATA_VALUE_TYPE, - | METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | CONCAT('failures[', fe.FAILURE_EVENT_ID ,']:timestamp'), - | CALL_FQN, - | IDX, - | ATTEMPT, - | DATE_FORMAT(fe.EVENT_TIMESTAMP, '%Y-%m-%dT%T.%f$Offset'), - | 'string', - | NOW() - |FROM FAILURE_EVENT fe - | LEFT JOIN EXECUTION e ON fe.EXECUTION_ID = e.EXECUTION_ID - | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; + |INSERT INTO METADATA_JOURNAL ( + | WORKFLOW_EXECUTION_UUID, + | METADATA_KEY, + | CALL_FQN, + | JOB_SCATTER_INDEX, + | JOB_RETRY_ATTEMPT, + | METADATA_VALUE, + | METADATA_VALUE_TYPE, + | METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | CONCAT('failures[', fe.FAILURE_EVENT_ID ,']:timestamp'), + | CALL_FQN, + | IDX, + | ATTEMPT, + | DATE_FORMAT(fe.EVENT_TIMESTAMP, '%Y-%m-%dT%T.%f$Offset'), + | 'string', + | NOW() + |FROM FAILURE_EVENT fe + | LEFT JOIN EXECUTION e ON fe.EXECUTION_ID = e.EXECUTION_ID + | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; """.stripMargin ) - } override def getConfirmationMessage: String = "Failure Table migration complete." } diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableDescriptionMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableDescriptionMigration.scala index 8ee97fcc90d..4181d369572 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableDescriptionMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableDescriptionMigration.scala @@ -4,7 +4,7 @@ import cromwell.database.migration.metadata.MetadataCustomSql class ExecutionEventTableDescriptionMigration extends MetadataCustomSql { - override def queries: Array[String] = { + override def queries: Array[String] = Array( """ |INSERT INTO METADATA_JOURNAL ( @@ -31,7 +31,6 @@ class ExecutionEventTableDescriptionMigration extends MetadataCustomSql { | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; """.stripMargin ) - } override def getConfirmationMessage: String = "Execution Event Table (Description field) migration complete." 
} diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableEndMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableEndMigration.scala index d3d80e74f46..669bfd823bc 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableEndMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableEndMigration.scala @@ -5,34 +5,33 @@ import MetadataCustomSql._ class ExecutionEventTableEndMigration extends MetadataCustomSql { - override def queries: Array[String] = { + override def queries: Array[String] = Array( s""" - |INSERT INTO METADATA_JOURNAL ( - | WORKFLOW_EXECUTION_UUID, - | METADATA_KEY, - | CALL_FQN, - | JOB_SCATTER_INDEX, - | JOB_RETRY_ATTEMPT, - | METADATA_VALUE, - | METADATA_VALUE_TYPE, - | METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | CONCAT('executionEvents[', ev.EVENT_ID ,']:endTime'), - | CALL_FQN, - | IDX, - | ATTEMPT, - | DATE_FORMAT(ev.END_DT, '%Y-%m-%dT%T.%f$Offset'), - | 'string', - | NOW() - |FROM EXECUTION_EVENT ev - | LEFT JOIN EXECUTION e ON ev.EXECUTION_ID = e.EXECUTION_ID - | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; + |INSERT INTO METADATA_JOURNAL ( + | WORKFLOW_EXECUTION_UUID, + | METADATA_KEY, + | CALL_FQN, + | JOB_SCATTER_INDEX, + | JOB_RETRY_ATTEMPT, + | METADATA_VALUE, + | METADATA_VALUE_TYPE, + | METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | CONCAT('executionEvents[', ev.EVENT_ID ,']:endTime'), + | CALL_FQN, + | IDX, + | ATTEMPT, + | DATE_FORMAT(ev.END_DT, '%Y-%m-%dT%T.%f$Offset'), + | 'string', + | NOW() + |FROM EXECUTION_EVENT ev + | LEFT JOIN EXECUTION e ON ev.EXECUTION_ID = e.EXECUTION_ID + | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; """.stripMargin ) - } override def getConfirmationMessage: String = "Execution Event Table (End field) migration complete." 
} diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableStartMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableStartMigration.scala index e0c9b418460..86168daddf9 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableStartMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/executionevent/ExecutionEventTableStartMigration.scala @@ -5,33 +5,31 @@ import MetadataCustomSql._ class ExecutionEventTableStartMigration extends MetadataCustomSql { - override def queries: Array[String] = { - Array( - s""" - |INSERT INTO METADATA_JOURNAL ( - | WORKFLOW_EXECUTION_UUID, - | METADATA_KEY, - | CALL_FQN, - | JOB_SCATTER_INDEX, - | JOB_RETRY_ATTEMPT, - | METADATA_VALUE, - | METADATA_VALUE_TYPE, - | METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | CONCAT('executionEvents[', ev.EVENT_ID ,']:startTime'), - | CALL_FQN, - | IDX, - | ATTEMPT, - | DATE_FORMAT(ev.START_DT, '%Y-%m-%dT%T.%f$Offset'), - | 'string', - | NOW() - |FROM EXECUTION_EVENT ev - | LEFT JOIN EXECUTION e ON ev.EXECUTION_ID = e.EXECUTION_ID - | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; + override def queries: Array[String] = + Array(s""" + |INSERT INTO METADATA_JOURNAL ( + | WORKFLOW_EXECUTION_UUID, + | METADATA_KEY, + | CALL_FQN, + | JOB_SCATTER_INDEX, + | JOB_RETRY_ATTEMPT, + | METADATA_VALUE, + | METADATA_VALUE_TYPE, + | METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | CONCAT('executionEvents[', ev.EVENT_ID ,']:startTime'), + | CALL_FQN, + | IDX, + | ATTEMPT, + | DATE_FORMAT(ev.START_DT, '%Y-%m-%dT%T.%f$Offset'), + | 'string', + | NOW() + |FROM EXECUTION_EVENT ev + | LEFT JOIN EXECUTION e ON ev.EXECUTION_ID = e.EXECUTION_ID + | JOIN WORKFLOW_EXECUTION we ON we.WORKFLOW_EXECUTION_ID = e.WORKFLOW_EXECUTION_ID; """.stripMargin) - } override def getConfirmationMessage: String = "Execution Event Table (Start field) migration complete." 
} diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/CallOutputSymbolTableMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/CallOutputSymbolTableMigration.scala index 822e8e15fbe..c148fc2dce5 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/CallOutputSymbolTableMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/CallOutputSymbolTableMigration.scala @@ -11,23 +11,20 @@ class CallOutputSymbolTableMigration extends SymbolTableMigration { symbolScope: String, symbolIndex: Option[Int], symbolAttempt: Option[Int], - womValue: WomValue): Int = { - + womValue: WomValue + ): Int = (symbolIndex, symbolAttempt) match { case (Some(index), Some(attempt)) => - val metadataStatementForCall = new MetadataStatementForCall(statement, - workflowUuid, - symbolScope, - index, - attempt - ) + val metadataStatementForCall = + new MetadataStatementForCall(statement, workflowUuid, symbolScope, index, attempt) addWdlValue(s"outputs:$symbolName", womValue, metadataStatementForCall) case _ => - logger.warn(s"Found output without index or attempt: [$workflowUuid] $symbolScope - $symbolName:$symbolIndex:$symbolAttempt") + logger.warn( + s"Found output without index or attempt: [$workflowUuid] $symbolScope - $symbolName:$symbolIndex:$symbolAttempt" + ) 0 } - } override def getConfirmationMessage: String = "Call outputs from Symbol Table migration complete." } diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/InputSymbolTableMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/InputSymbolTableMigration.scala index 13021ed9c72..6aed6fb870c 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/InputSymbolTableMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/InputSymbolTableMigration.scala @@ -12,17 +12,13 @@ class InputSymbolTableMigration extends SymbolTableMigration { symbolScope: String, symbolIndex: Option[Int], symbolAttempt: Option[Int], - womValue: WomValue): Int = { - + womValue: WomValue + ): Int = (symbolIndex, symbolAttempt) match { - case (Some(index) , Some(attempt)) => + case (Some(index), Some(attempt)) => // Call scoped - val metadataStatementForCall = new MetadataStatementForCall(statement, - workflowUuid, - symbolScope, - index, - attempt - ) + val metadataStatementForCall = + new MetadataStatementForCall(statement, workflowUuid, symbolScope, index, attempt) addWdlValue(s"inputs:$symbolName", womValue, metadataStatementForCall) case (None, None) if !symbolScope.contains('.') => @@ -31,7 +27,6 @@ class InputSymbolTableMigration extends SymbolTableMigration { case _ => 0 } - } override def getConfirmationMessage: String = "Inputs from Symbol Table migration complete." 
} diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/MetadataStatement.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/MetadataStatement.scala index da3decae867..eb2d6b89e52 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/MetadataStatement.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/MetadataStatement.scala @@ -42,7 +42,7 @@ class MetadataStatementForWorkflow(preparedStatement: PreparedStatement, workflo val dawn = OffsetDateTime.of(0, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toSystemTimestamp var batchSizeCounter: Int = 0 - private def metadataType(value: Any) = { + private def metadataType(value: Any) = value match { case WomInteger(_) => "int" case WomFloat(_) => "number" @@ -51,16 +51,14 @@ class MetadataStatementForWorkflow(preparedStatement: PreparedStatement, workflo case _: Int | Long => "int" case _: Double | Float => "number" case _: Boolean => "boolean" - case _ =>"string" + case _ => "string" } - } - private def metadataValue(value: Any) = { + private def metadataValue(value: Any) = value match { - case v: WomValue => v.valueString + case v: WomValue => v.valueString case v => v.toString } - } protected def setStatement() = { preparedStatement.setString(MetadataStatement.WorkflowIdIdx, workflowId) @@ -93,12 +91,11 @@ class MetadataStatementForWorkflow(preparedStatement: PreparedStatement, workflo } /** Adds a non-null value to the metadata journal. */ - override def addKeyValue(key: String, value: Any) = { + override def addKeyValue(key: String, value: Any) = if (value != null) { preparedStatement.setTimestamp(MetadataStatement.TimestampIdx, OffsetDateTime.now().toSystemTimestamp) add(key, value, s"Failed to migrate metadata value $value with key $key for workflow $workflowId") } - } override def addEmptyValue(key: String): Unit = { preparedStatement.setTimestamp(MetadataStatement.TimestampIdx, dawn) @@ -106,11 +103,16 @@ class MetadataStatementForWorkflow(preparedStatement: PreparedStatement, workflo } } -class MetadataStatementForCall(preparedStatement: PreparedStatement, workflowId: String, callFqn: String, index: Int, attempt: Int) extends MetadataStatementForWorkflow(preparedStatement, workflowId) { +class MetadataStatementForCall(preparedStatement: PreparedStatement, + workflowId: String, + callFqn: String, + index: Int, + attempt: Int +) extends MetadataStatementForWorkflow(preparedStatement, workflowId) { override def setStatement() = { preparedStatement.setString(MetadataStatement.WorkflowIdIdx, workflowId) preparedStatement.setString(MetadataStatement.CallFqnIdx, callFqn) preparedStatement.setInt(MetadataStatement.CallIndexIdx, index) preparedStatement.setInt(MetadataStatement.CallAttemptIdx, attempt) } -} \ No newline at end of file +} diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/SymbolTableMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/SymbolTableMigration.scala index f3d148ed0c5..4ca834535d7 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/SymbolTableMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/SymbolTableMigration.scala @@ -25,16 +25,16 @@ trait SymbolTableMigration extends BatchedTaskChange { override val readCountQuery = SymbolTableMigration.NbRowsQuery override val 
readBatchQuery: String = """ - |SELECT - | WORKFLOW_EXECUTION_UUID, - | SYMBOL_NAME, - | SYMBOL_SCOPE, - | SYMBOL_INDEX, - | SYMBOL_ATTEMPT, - | WDL_TYPE, - | WDL_VALUE - | FROM TMP_SYMBOL - | WHERE TMP_SYMBOL_ID >= ? AND TMP_SYMBOL_ID < ?; + |SELECT + | WORKFLOW_EXECUTION_UUID, + | SYMBOL_NAME, + | SYMBOL_SCOPE, + | SYMBOL_INDEX, + | SYMBOL_ATTEMPT, + | WDL_TYPE, + | WDL_VALUE + | FROM TMP_SYMBOL + | WHERE TMP_SYMBOL_ID >= ? AND TMP_SYMBOL_ID < ?; """.stripMargin override val migrateBatchQueries = List(MetadataStatement.InsertSql) @@ -66,7 +66,9 @@ trait SymbolTableMigration extends BatchedTaskChange { case Failure(f) => logger.error( s"""Could not parse symbol of type ${row.getString("WDL_TYPE")} - |for Workflow $workflowUuid - Call $symbolScope:$symbolIndex""".stripMargin, f) + |for Workflow $workflowUuid - Call $symbolScope:$symbolIndex""".stripMargin, + f + ) 0 } } @@ -77,14 +79,18 @@ trait SymbolTableMigration extends BatchedTaskChange { symbolScope: String, symbolIndex: Option[Int], symbolAttempt: Option[Int], - womValue: WomValue): Int + womValue: WomValue + ): Int /** * Add all necessary statements to the batch for the provided WomValue. */ - protected final def addWdlValue(metadataKey: String, womValue: WomValue, metadataStatementForCall: MetadataStatement): Int = { + final protected def addWdlValue(metadataKey: String, + womValue: WomValue, + metadataStatementForCall: MetadataStatement + ): Int = womValue match { - // simplify doesn't handle WdlExpression + // simplify doesn't handle WdlExpression case expr: WdlExpression => metadataStatementForCall.addKeyValue(metadataKey, expr.valueString) 1 @@ -95,5 +101,4 @@ trait SymbolTableMigration extends BatchedTaskChange { } simplified.size } - } } diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/WorkflowOutputSymbolTableMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/WorkflowOutputSymbolTableMigration.scala index 8902739666e..0e7562310c2 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/WorkflowOutputSymbolTableMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/symbol/WorkflowOutputSymbolTableMigration.scala @@ -12,7 +12,8 @@ class WorkflowOutputSymbolTableMigration extends SymbolTableMigration { symbolScope: String, symbolIndex: Option[Int], symbolAttempt: Option[Int], - womValue: WomValue): Int = { + womValue: WomValue + ): Int = { val metadataStatementForWorkflow = new MetadataStatementForWorkflow(statement, workflowUuid) addWdlValue(s"outputs:$symbolScope.$symbolName", womValue, metadataStatementForWorkflow) } diff --git a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/workflowexecution/WorkflowExecutionTableMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/workflowexecution/WorkflowExecutionTableMigration.scala index 411f596e962..6c343e2c420 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/metadata/table/workflowexecution/WorkflowExecutionTableMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/metadata/table/workflowexecution/WorkflowExecutionTableMigration.scala @@ -5,108 +5,102 @@ import MetadataCustomSql._ class WorkflowExecutionTableMigration extends MetadataCustomSql { - override def queries: Array[String] = { + override def queries: Array[String] = Array( - s""" - |INSERT INTO METADATA_JOURNAL ( - 
|WORKFLOW_EXECUTION_UUID, - |METADATA_KEY, - |METADATA_VALUE, - |METADATA_VALUE_TYPE, - |METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | 'submission', - | DATE_FORMAT(START_DT, '%Y-%m-%dT%T.%f$Offset'), - | 'string', - | NOW() - |FROM WORKFLOW_EXECUTION - |WHERE START_DT IS NOT NULL;""".stripMargin - , - s""" - |INSERT INTO METADATA_JOURNAL ( - |WORKFLOW_EXECUTION_UUID, - |METADATA_KEY, - |METADATA_VALUE, - |METADATA_VALUE_TYPE, - |METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | 'start', - | DATE_FORMAT(START_DT, '%Y-%m-%dT%T.%f$Offset'), - | 'string', - | NOW() - |FROM WORKFLOW_EXECUTION - |WHERE START_DT IS NOT NULL;""".stripMargin - , - s""" - |INSERT INTO METADATA_JOURNAL ( - |WORKFLOW_EXECUTION_UUID, - |METADATA_KEY, - |METADATA_VALUE, - |METADATA_VALUE_TYPE, - |METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | 'end', - | DATE_FORMAT(END_DT, '%Y-%m-%dT%T.%f$Offset'), - | 'string', - | NOW() - |FROM WORKFLOW_EXECUTION - |WHERE END_DT IS NOT NULL;""".stripMargin - , - s""" - |INSERT INTO METADATA_JOURNAL ( - |WORKFLOW_EXECUTION_UUID, - |METADATA_KEY, - |METADATA_VALUE, - |METADATA_VALUE_TYPE, - |METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | 'status', - | STATUS, - | 'string', - | NOW() - |FROM WORKFLOW_EXECUTION;""".stripMargin - , - """ - |INSERT INTO METADATA_JOURNAL ( - |WORKFLOW_EXECUTION_UUID, - |METADATA_KEY, - |METADATA_VALUE, - |METADATA_VALUE_TYPE, - |METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | 'workflowName', - | WORKFLOW_NAME, - | 'string', - | NOW() - |FROM WORKFLOW_EXECUTION;""".stripMargin - , - """ - |INSERT INTO METADATA_JOURNAL ( - |WORKFLOW_EXECUTION_UUID, - |METADATA_KEY, - |METADATA_VALUE, - |METADATA_VALUE_TYPE, - |METADATA_TIMESTAMP - |) - |SELECT - | WORKFLOW_EXECUTION_UUID, - | 'outputs', - | NULL, - | NULL, - | '1900-01-01 0.000000' - |FROM WORKFLOW_EXECUTION;""".stripMargin + s""" + |INSERT INTO METADATA_JOURNAL ( + |WORKFLOW_EXECUTION_UUID, + |METADATA_KEY, + |METADATA_VALUE, + |METADATA_VALUE_TYPE, + |METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | 'submission', + | DATE_FORMAT(START_DT, '%Y-%m-%dT%T.%f$Offset'), + | 'string', + | NOW() + |FROM WORKFLOW_EXECUTION + |WHERE START_DT IS NOT NULL;""".stripMargin, + s""" + |INSERT INTO METADATA_JOURNAL ( + |WORKFLOW_EXECUTION_UUID, + |METADATA_KEY, + |METADATA_VALUE, + |METADATA_VALUE_TYPE, + |METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | 'start', + | DATE_FORMAT(START_DT, '%Y-%m-%dT%T.%f$Offset'), + | 'string', + | NOW() + |FROM WORKFLOW_EXECUTION + |WHERE START_DT IS NOT NULL;""".stripMargin, + s""" + |INSERT INTO METADATA_JOURNAL ( + |WORKFLOW_EXECUTION_UUID, + |METADATA_KEY, + |METADATA_VALUE, + |METADATA_VALUE_TYPE, + |METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | 'end', + | DATE_FORMAT(END_DT, '%Y-%m-%dT%T.%f$Offset'), + | 'string', + | NOW() + |FROM WORKFLOW_EXECUTION + |WHERE END_DT IS NOT NULL;""".stripMargin, + s""" + |INSERT INTO METADATA_JOURNAL ( + |WORKFLOW_EXECUTION_UUID, + |METADATA_KEY, + |METADATA_VALUE, + |METADATA_VALUE_TYPE, + |METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | 'status', + | STATUS, + | 'string', + | NOW() + |FROM WORKFLOW_EXECUTION;""".stripMargin, + """ + |INSERT INTO METADATA_JOURNAL ( + |WORKFLOW_EXECUTION_UUID, + |METADATA_KEY, + |METADATA_VALUE, + |METADATA_VALUE_TYPE, + |METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | 'workflowName', + | WORKFLOW_NAME, + | 'string', + | NOW() + 
|FROM WORKFLOW_EXECUTION;""".stripMargin, + """ + |INSERT INTO METADATA_JOURNAL ( + |WORKFLOW_EXECUTION_UUID, + |METADATA_KEY, + |METADATA_VALUE, + |METADATA_VALUE_TYPE, + |METADATA_TIMESTAMP + |) + |SELECT + | WORKFLOW_EXECUTION_UUID, + | 'outputs', + | NULL, + | NULL, + | '1900-01-01 0.000000' + |FROM WORKFLOW_EXECUTION;""".stripMargin ) - } override def getConfirmationMessage: String = "Workflow Execution Table migration complete." } diff --git a/database/migration/src/main/scala/cromwell/database/migration/restart/table/JobStoreSimpletonMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/restart/table/JobStoreSimpletonMigration.scala index a1857aae21e..13c2ba14e3e 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/restart/table/JobStoreSimpletonMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/restart/table/JobStoreSimpletonMigration.scala @@ -12,7 +12,7 @@ class JobStoreSimpletonMigration extends AbstractRestartMigration { // GOTC (substituting COUNT(*) for the projection): 1 row in set (5.22 sec) private val QueryOutputsForDoneCallsInRunningWorkflows = - """ + """ SELECT js.JOB_STORE_ID, -- 1 s.NAME, -- 2 @@ -51,9 +51,10 @@ class JobStoreSimpletonMigration extends AbstractRestartMigration { def buildJobStoreSimpletonEntries(name: String, womValue: WomValue, womType: WomType) = Option(womValue) match { case None => List(JobStoreSimpletonEntry(name, null, womType.stableName)) - case Some(_) => womValue.simplify(name) map { s => - JobStoreSimpletonEntry(s.simpletonKey, s.simpletonValue.valueString, s.simpletonValue.womType.stableName) - } + case Some(_) => + womValue.simplify(name) map { s => + JobStoreSimpletonEntry(s.simpletonKey, s.simpletonValue.valueString, s.simpletonValue.womType.stableName) + } } while (results.next()) { @@ -73,5 +74,3 @@ class JobStoreSimpletonMigration extends AbstractRestartMigration { } } } - - diff --git a/database/migration/src/main/scala/cromwell/database/migration/restart/table/RenameWorkflowOptionKeysMigration.scala b/database/migration/src/main/scala/cromwell/database/migration/restart/table/RenameWorkflowOptionKeysMigration.scala index c0610ae368b..9019b95f1c0 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/restart/table/RenameWorkflowOptionKeysMigration.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/restart/table/RenameWorkflowOptionKeysMigration.scala @@ -11,7 +11,6 @@ object RenameWorkflowOptionKeysMigration { private val UpdateWorkflowStore = " UPDATE WORKFLOW_STORE SET WORKFLOW_OPTIONS = ? WHERE WORKFLOW_STORE_ID = ? " } - class RenameWorkflowOptionKeysMigration extends AbstractRestartMigration { override protected def description: String = "Workflow option renaming" @@ -27,7 +26,8 @@ class RenameWorkflowOptionKeysMigration extends AbstractRestartMigration { val optionsJson = options.parseJson val newOptionsJson = optionsJson match { case JsObject(fields) => JsObject(fields map renameOptionKeys) - case other => other // There really shouldn't be workflow options of other types, but if there are pass them through. + case other => + other // There really shouldn't be workflow options of other types, but if there are pass them through. 
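The JsObject(fields map renameOptionKeys) transform above is easy to verify in isolation; a small spray-json sketch using one mapping taken from WorkflowOptionsRenaming below (call_logs_dir -> final_call_logs_dir), with an illustrative options value:

    import spray.json._

    val renamedOptionKeys = Map("call_logs_dir" -> "final_call_logs_dir")

    def renameOptionKeys(field: (String, JsValue)): (String, JsValue) = field match {
      case (oldName, value) if renamedOptionKeys.contains(oldName) => renamedOptionKeys(oldName) -> value
      case noop => noop
    }

    val migrated = """{"call_logs_dir": "gs://bucket/logs"}""".parseJson match {
      case JsObject(fields) => JsObject(fields map renameOptionKeys)
      case other => other // non-object options pass through unchanged
    }
    // migrated.prettyPrint yields {"final_call_logs_dir": "gs://bucket/logs"}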
} insert.setString(1, newOptionsJson.prettyPrint) diff --git a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/ClearMetadataEntryWorkflowOptions.scala b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/ClearMetadataEntryWorkflowOptions.scala index 2892e9bc830..d07e8be76ef 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/ClearMetadataEntryWorkflowOptions.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/ClearMetadataEntryWorkflowOptions.scala @@ -11,5 +11,6 @@ class ClearMetadataEntryWorkflowOptions extends WorkflowOptionsChange { override val workflowOptionsColumn = "METADATA_VALUE" override val additionalReadBatchFilters = "AND METADATA_KEY = 'submittedFiles:options'" - override def migrateWorkflowOptions(workflowOptions: WorkflowOptions) = workflowOptions.clearEncryptedValues.asPrettyJson + override def migrateWorkflowOptions(workflowOptions: WorkflowOptions) = + workflowOptions.clearEncryptedValues.asPrettyJson } diff --git a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/RenameWorkflowOptionsInMetadata.scala b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/RenameWorkflowOptionsInMetadata.scala index 40dc4533b2d..5f0167b2ac1 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/RenameWorkflowOptionsInMetadata.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/RenameWorkflowOptionsInMetadata.scala @@ -20,12 +20,14 @@ class RenameWorkflowOptionsInMetadata extends BatchedTaskChange { | WHERE $primaryKeyColumn >= ? AND $primaryKeyColumn < ? $additionalReadBatchFilters; |""".stripMargin - override def migrateBatchQueries = List(s"UPDATE $tableName SET $workflowOptionsColumn = ? WHERE $primaryKeyColumn = ?;") + override def migrateBatchQueries = List( + s"UPDATE $tableName SET $workflowOptionsColumn = ? WHERE $primaryKeyColumn = ?;" + ) override def migrateBatchRow(readRow: ResultSet, migrateStatements: List[PreparedStatement]): Int = { val migrateStatement = migrateStatements.head val rowId = readRow.getInt(1) - + val migratedJson = readRow.getString(2).parseJson match { case JsObject(fields) => JsObject(fields map renameOptionKeys) case other => other diff --git a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsChange.scala b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsChange.scala index 1b519fb65fd..aa6e4ed0557 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsChange.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsChange.scala @@ -12,6 +12,7 @@ import scala.util.{Failure, Success} * Edits the workflow options stored in a table. */ trait WorkflowOptionsChange extends BatchedTaskChange { + /** @return name of the table */ def tableName: String @@ -47,7 +48,9 @@ trait WorkflowOptionsChange extends BatchedTaskChange { | WHERE $primaryKeyColumn >= ? AND $primaryKeyColumn < ? $additionalReadBatchFilters; |""".stripMargin - override def migrateBatchQueries = List(s"UPDATE $tableName SET $workflowOptionsColumn = ? WHERE $primaryKeyColumn = ?;") + override def migrateBatchQueries = List( + s"UPDATE $tableName SET $workflowOptionsColumn = ? 
WHERE $primaryKeyColumn = ?;" + ) override def migrateBatchRow(readRow: ResultSet, migrateStatements: List[PreparedStatement]): Int = { val migrateStatement = migrateStatements.head @@ -61,8 +64,7 @@ trait WorkflowOptionsChange extends BatchedTaskChange { migrateStatement.addBatch() 1 case Failure(exception) => - logger.error( - s"Unable to process $tableName pk $rowId\njson:\n$workflowOptionsJson", exception) + logger.error(s"Unable to process $tableName pk $rowId\njson:\n$workflowOptionsJson", exception) 0 } } diff --git a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsRenaming.scala b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsRenaming.scala index d148a570fdf..aca4a9f6925 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsRenaming.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/workflowoptions/WorkflowOptionsRenaming.scala @@ -12,10 +12,9 @@ object WorkflowOptionsRenaming { "call_logs_dir" -> "final_call_logs_dir" ) - def renameOptionKeys(field: JsField): JsField = { + def renameOptionKeys(field: JsField): JsField = field match { case (oldName, value) if RenamedOptionKeys.contains(oldName) => RenamedOptionKeys(oldName) -> value case noop => noop } - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala index 8e1b98a4a1c..f0968750cba 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala @@ -18,28 +18,38 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { import dataAccess.driver.api._ - override def addCallCaching(joins: Seq[CallCachingJoin], batchSize: Int) - (implicit ec: ExecutionContext): Future[Unit] = { + override def addCallCaching(joins: Seq[CallCachingJoin], batchSize: Int)(implicit + ec: ExecutionContext + ): Future[Unit] = { // Construct parallel lists of parent entries, hashes, simpletons, and detritus from `CallCachingJoin`s. val (entries, hashes, simpletons, detritus, aggregations) = joins.toList.foldMap { j => - (List(j.callCachingEntry), List(j.callCachingHashEntries), List(j.callCachingSimpletonEntries), List(j.callCachingDetritusEntries), List(j.callCachingAggregationEntry.toList)) } + (List(j.callCachingEntry), + List(j.callCachingHashEntries), + List(j.callCachingSimpletonEntries), + List(j.callCachingDetritusEntries), + List(j.callCachingAggregationEntry.toList) + ) + } // Use the supplied `assigner` function to assign parent entry row IDs into the parallel `Seq` of children entities. - def assignEntryIdsToChildren[C](ids: Seq[Long], groupingsOfChildren: Seq[Seq[C]], assigner: (Long, C) => C): Seq[C] = { + def assignEntryIdsToChildren[C](ids: Seq[Long], + groupingsOfChildren: Seq[Seq[C]], + assigner: (Long, C) => C + ): Seq[C] = (ids zip groupingsOfChildren) flatMap { case (id, children) => children.map(assigner(id, _)) } - } // Batch insert entities into the appropriate `Table`. 
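The grouped insert defined just below is a recurring idiom in these DAOs and is worth spelling out; a self-contained sketch, assuming Slick's MySQL profile:

    import slick.jdbc.MySQLProfile.api._

    object BatchInsertSketch {
      // Split `entries` into fixed-size groups so each `++=` becomes one bounded
      // multi-row INSERT, then chain the groups into a single DBIO action.
      def batchInsert[E, T <: Table[E]](entries: Seq[E],
                                        tableQuery: TableQuery[T],
                                        batchSize: Int): DBIO[_] =
        DBIO.sequence(entries.grouped(batchSize).map(tableQuery ++= _))
    }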
- def batchInsert[E, T <: Table[E]](entries: Seq[E], tableQuery: TableQuery[T]): DBIO[_] = { - DBIO.sequence(entries.grouped(batchSize).map { tableQuery ++= _ }) - } + def batchInsert[E, T <: Table[E]](entries: Seq[E], tableQuery: TableQuery[T]): DBIO[_] = + DBIO.sequence(entries.grouped(batchSize).map(tableQuery ++= _)) // Functions to assign call cache entry IDs into child hash entry, simpleton, and detritus rows. def hashAssigner(id: Long, hash: CallCachingHashEntry) = hash.copy(callCachingEntryId = Option(id)) - def simpletonAssigner(id: Long, simpleton: CallCachingSimpletonEntry) = simpleton.copy(callCachingEntryId = Option(id)) + def simpletonAssigner(id: Long, simpleton: CallCachingSimpletonEntry) = + simpleton.copy(callCachingEntryId = Option(id)) def detritusAssigner(id: Long, detritus: CallCachingDetritusEntry) = detritus.copy(callCachingEntryId = Option(id)) - def aggregationAssigner(id: Long, aggregation: CallCachingAggregationEntry) = aggregation.copy(callCachingEntryId = Option(id)) + def aggregationAssigner(id: Long, aggregation: CallCachingAggregationEntry) = + aggregation.copy(callCachingEntryId = Option(id)) val action = for { entryIds <- dataAccess.callCachingEntryIdsAutoInc ++= entries @@ -70,87 +80,114 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { (0 to 2).toList map { total(_) map { p => PrefixAndLength(p, p.length) } getOrElse doNotMatch } } - override def hasMatchingCallCachingEntriesForBaseAggregation(baseAggregationHash: String, callCachePrefixes: Option[List[String]] = None) - (implicit ec: ExecutionContext): Future[Boolean] = { + override def hasMatchingCallCachingEntriesForBaseAggregation(baseAggregationHash: String, + callCachePrefixes: Option[List[String]] = None + )(implicit ec: ExecutionContext): Future[Boolean] = { val action = callCachePrefixes match { case None => dataAccess.existsCallCachingEntriesForBaseAggregationHash(baseAggregationHash).result case Some(ps) => val one :: two :: three :: _ = prefixesAndLengths(ps) - dataAccess.existsCallCachingEntriesForBaseAggregationHashWithCallCachePrefix( - (baseAggregationHash, - one.prefix, one.length, - two.prefix, two.length, - three.prefix, three.length)).result + dataAccess + .existsCallCachingEntriesForBaseAggregationHashWithCallCachePrefix( + (baseAggregationHash, one.prefix, one.length, two.prefix, two.length, three.prefix, three.length) + ) + .result } runTransaction(action) } - override def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Long]) - (implicit ec: ExecutionContext): Future[Option[Long]] = { + override def findCacheHitForAggregation(baseAggregationHash: String, + inputFilesAggregationHash: Option[String], + callCachePathPrefixes: Option[List[String]], + excludedIds: Set[Long] + )(implicit ec: ExecutionContext): Future[Option[Long]] = { val action = callCachePathPrefixes match { case None => - dataAccess.callCachingEntriesForAggregatedHashes(baseAggregationHash, inputFilesAggregationHash, excludedIds).result.headOption + dataAccess + .callCachingEntriesForAggregatedHashes(baseAggregationHash, inputFilesAggregationHash, excludedIds) + .result + .headOption case Some(ps) => val one :: two :: three :: _ = prefixesAndLengths(ps) - dataAccess.callCachingEntriesForAggregatedHashesWithPrefixes( - baseAggregationHash, inputFilesAggregationHash, - one.prefix, one.length, - two.prefix, two.length, - three.prefix, three.length, - excludedIds).result.headOption + 
dataAccess + .callCachingEntriesForAggregatedHashesWithPrefixes(baseAggregationHash, + inputFilesAggregationHash, + one.prefix, + one.length, + two.prefix, + two.length, + three.prefix, + three.length, + excludedIds + ) + .result + .headOption } runTransaction(action) } - override def queryResultsForCacheId(callCachingEntryId: Long) - (implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] = { + override def queryResultsForCacheId( + callCachingEntryId: Long + )(implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] = { val action = for { - callCachingEntryOption <- dataAccess. - callCachingEntriesForId(callCachingEntryId).result.headOption - callCachingSimpletonEntries <- dataAccess. - callCachingSimpletonEntriesForCallCachingEntryId(callCachingEntryId).result - callCachingDetritusEntries <- dataAccess. - callCachingDetritusEntriesForCallCachingEntryId(callCachingEntryId).result + callCachingEntryOption <- dataAccess.callCachingEntriesForId(callCachingEntryId).result.headOption + callCachingSimpletonEntries <- dataAccess + .callCachingSimpletonEntriesForCallCachingEntryId(callCachingEntryId) + .result + callCachingDetritusEntries <- dataAccess + .callCachingDetritusEntriesForCallCachingEntryId(callCachingEntryId) + .result } yield callCachingEntryOption.map( - CallCachingJoin(_, Seq.empty, None, callCachingSimpletonEntries, callCachingDetritusEntries)) + CallCachingJoin(_, Seq.empty, None, callCachingSimpletonEntries, callCachingDetritusEntries) + ) runTransaction(action) } - private def callCacheJoinFromEntryQuery(callCachingEntry: CallCachingEntry) - (implicit ec: ExecutionContext): DBIO[CallCachingJoin] = { + private def callCacheJoinFromEntryQuery( + callCachingEntry: CallCachingEntry + )(implicit ec: ExecutionContext): DBIO[CallCachingJoin] = { val callCachingEntryId = callCachingEntry.callCachingEntryId.get - val callCachingSimpletonEntries: DBIO[Seq[CallCachingSimpletonEntry]] = dataAccess. - callCachingSimpletonEntriesForCallCachingEntryId(callCachingEntryId).result - val callCachingDetritusEntries: DBIO[Seq[CallCachingDetritusEntry]] = dataAccess. - callCachingDetritusEntriesForCallCachingEntryId(callCachingEntryId).result - val callCachingHashEntries: DBIO[Seq[CallCachingHashEntry]] = dataAccess. - callCachingHashEntriesForCallCachingEntryId(callCachingEntryId).result - val callCachingAggregationEntries: DBIO[Option[CallCachingAggregationEntry]] = dataAccess. 
- callCachingAggregationForCacheEntryId(callCachingEntryId).result.headOption - - (callCachingHashEntries, callCachingAggregationEntries, callCachingSimpletonEntries, callCachingDetritusEntries) mapN { - case (hashes, aggregation, simpletons, detrituses) => - CallCachingJoin(callCachingEntry, hashes, aggregation, simpletons, detrituses) + val callCachingSimpletonEntries: DBIO[Seq[CallCachingSimpletonEntry]] = + dataAccess.callCachingSimpletonEntriesForCallCachingEntryId(callCachingEntryId).result + val callCachingDetritusEntries: DBIO[Seq[CallCachingDetritusEntry]] = + dataAccess.callCachingDetritusEntriesForCallCachingEntryId(callCachingEntryId).result + val callCachingHashEntries: DBIO[Seq[CallCachingHashEntry]] = + dataAccess.callCachingHashEntriesForCallCachingEntryId(callCachingEntryId).result + val callCachingAggregationEntries: DBIO[Option[CallCachingAggregationEntry]] = + dataAccess.callCachingAggregationForCacheEntryId(callCachingEntryId).result.headOption + + (callCachingHashEntries, + callCachingAggregationEntries, + callCachingSimpletonEntries, + callCachingDetritusEntries + ) mapN { case (hashes, aggregation, simpletons, detrituses) => + CallCachingJoin(callCachingEntry, hashes, aggregation, simpletons, detrituses) } } - override def callCacheJoinForCall(workflowExecutionUuid: String, callFqn: String, index: Int) - (implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] = { + override def callCacheJoinForCall(workflowExecutionUuid: String, callFqn: String, index: Int)(implicit + ec: ExecutionContext + ): Future[Option[CallCachingJoin]] = { val action = for { - callCachingEntryOption <- dataAccess. - callCachingEntriesForWorkflowFqnIndex((workflowExecutionUuid, callFqn, index)).result.headOption + callCachingEntryOption <- dataAccess + .callCachingEntriesForWorkflowFqnIndex((workflowExecutionUuid, callFqn, index)) + .result + .headOption callCacheJoin <- callCachingEntryOption - .fold[DBIOAction[Option[CallCachingJoin], NoStream, Effect.All]](DBIO.successful(None))(callCacheJoinFromEntryQuery(_).map(Option.apply)) + .fold[DBIOAction[Option[CallCachingJoin], NoStream, Effect.All]](DBIO.successful(None))( + callCacheJoinFromEntryQuery(_).map(Option.apply) + ) } yield callCacheJoin runTransaction(action) } - override def invalidateCall(callCachingEntryId: Long) - (implicit ec: ExecutionContext): Future[Option[CallCachingEntry]] = { + override def invalidateCall( + callCachingEntryId: Long + )(implicit ec: ExecutionContext): Future[Option[CallCachingEntry]] = { val action = for { _ <- dataAccess.allowResultReuseForCallCachingEntryId(callCachingEntryId).update(false) callCachingEntryOption <- dataAccess.callCachingEntriesForId(callCachingEntryId).result.headOption @@ -159,13 +196,16 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { runTransaction(action) } - override def invalidateCallCacheEntryIdsForWorkflowId(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Unit] = { + override def invalidateCallCacheEntryIdsForWorkflowId( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Unit] = { val action = dataAccess.allowResultReuseForWorkflowId(workflowExecutionUuid).update(false) runTransaction(action).void } - override def callCacheEntryIdsForWorkflowId(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Seq[Long]] = { + override def callCacheEntryIdsForWorkflowId( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Seq[Long]] = { val action = 
dataAccess.callCachingEntryIdsForWorkflowId(workflowExecutionUuid).result runTransaction(action) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/DockerHashStoreSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/DockerHashStoreSlickDatabase.scala index 1b70e187083..5109ad17476 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/DockerHashStoreSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/DockerHashStoreSlickDatabase.scala @@ -15,8 +15,9 @@ trait DockerHashStoreSlickDatabase extends DockerHashStoreSqlDatabase { /** * Adds a docker hash entry to the store. */ - override def addDockerHashStoreEntry(dockerHashStoreEntry: DockerHashStoreEntry) - (implicit ec: ExecutionContext): Future[Unit] = { + override def addDockerHashStoreEntry( + dockerHashStoreEntry: DockerHashStoreEntry + )(implicit ec: ExecutionContext): Future[Unit] = { val action = dataAccess.dockerHashStoreEntries += dockerHashStoreEntry runTransaction(action) void } @@ -25,8 +26,9 @@ trait DockerHashStoreSlickDatabase extends DockerHashStoreSqlDatabase { * Retrieves docker hash entries for a workflow. * */ - override def queryDockerHashStoreEntries(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Seq[DockerHashStoreEntry]] = { + override def queryDockerHashStoreEntries( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Seq[DockerHashStoreEntry]] = { val action = dataAccess.dockerHashStoreEntriesForWorkflowExecutionUuid(workflowExecutionUuid).result runTransaction(action) } @@ -34,7 +36,9 @@ trait DockerHashStoreSlickDatabase extends DockerHashStoreSqlDatabase { /** * Deletes docker hash entries related to a workflow, returning the number of rows affected. */ - override def removeDockerHashStoreEntries(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Int] = { + override def removeDockerHashStoreEntries( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Int] = { val action = dataAccess.dockerHashStoreEntriesForWorkflowExecutionUuid(workflowExecutionUuid).delete runTransaction(action) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/EngineSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/EngineSlickDatabase.scala index bacc10da1ee..fde45e29d6f 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/EngineSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/EngineSlickDatabase.scala @@ -12,7 +12,7 @@ object EngineSlickDatabase { } class EngineSlickDatabase(originalDatabaseConfig: Config) - extends SlickDatabase(originalDatabaseConfig) + extends SlickDatabase(originalDatabaseConfig) with EngineSqlDatabase with WorkflowStoreSlickDatabase with JobKeyValueSlickDatabase diff --git a/database/sql/src/main/scala/cromwell/database/slick/JobKeyValueSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/JobKeyValueSlickDatabase.scala index 4e28cdb33d5..ec416791ad6 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/JobKeyValueSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/JobKeyValueSlickDatabase.scala @@ -16,8 +16,7 @@ trait JobKeyValueSlickDatabase extends JobKeyValueSqlDatabase { runTransaction(action) } - override def addJobKeyValueEntry(jobKeyValueEntry: JobKeyValueEntry) - (implicit ec: ExecutionContext): Future[Unit] = { + override def addJobKeyValueEntry(jobKeyValueEntry: JobKeyValueEntry)(implicit ec: ExecutionContext): 
Future[Unit] = { val action = if (useSlickUpserts) { for { _ <- dataAccess.jobKeyValueEntryIdsAutoInc.insertOrUpdate(jobKeyValueEntry) @@ -31,24 +30,26 @@ trait JobKeyValueSlickDatabase extends JobKeyValueSqlDatabase { // !!!!!!! updates running in a single transaction. !!!!!!!! // !!!!!!! https://broadworkbench.atlassian.net/browse/BA-6262 !!!!!!!! // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! - private def manualUpsertQuery(jobKeyValueEntry: JobKeyValueEntry) - (implicit ec: ExecutionContext) = for { - updateCount <- dataAccess. - storeValuesForJobKeyAndStoreKey(( - jobKeyValueEntry.workflowExecutionUuid, - jobKeyValueEntry.callFullyQualifiedName, - jobKeyValueEntry.jobIndex, - jobKeyValueEntry.jobAttempt, - jobKeyValueEntry.storeKey)). - update(jobKeyValueEntry.storeValue) + private def manualUpsertQuery(jobKeyValueEntry: JobKeyValueEntry)(implicit ec: ExecutionContext) = for { + updateCount <- dataAccess + .storeValuesForJobKeyAndStoreKey( + (jobKeyValueEntry.workflowExecutionUuid, + jobKeyValueEntry.callFullyQualifiedName, + jobKeyValueEntry.jobIndex, + jobKeyValueEntry.jobAttempt, + jobKeyValueEntry.storeKey + ) + ) + .update(jobKeyValueEntry.storeValue) _ <- updateCount match { case 0 => dataAccess.jobKeyValueEntryIdsAutoInc += jobKeyValueEntry case _ => assertUpdateCount("addJobKeyValueEntry", updateCount, 1) } } yield () - def addJobKeyValueEntries(jobKeyValueEntries: Iterable[JobKeyValueEntry]) - (implicit ec: ExecutionContext): Future[Unit] = { + def addJobKeyValueEntries( + jobKeyValueEntries: Iterable[JobKeyValueEntry] + )(implicit ec: ExecutionContext): Future[Unit] = { val action = if (useSlickUpserts) { createBatchUpsert("KeyValueStore", dataAccess.jobKeyValueTableQueryCompiled, jobKeyValueEntries) } else { @@ -57,18 +58,23 @@ trait JobKeyValueSlickDatabase extends JobKeyValueSqlDatabase { runTransaction(action).void } - override def queryJobKeyValueEntries(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Seq[JobKeyValueEntry]] = { + override def queryJobKeyValueEntries( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Seq[JobKeyValueEntry]] = { val action = dataAccess.jobKeyValueEntriesForWorkflowExecutionUuid(workflowExecutionUuid).result runTransaction(action) } - override def queryStoreValue(workflowExecutionUuid: String, callFqn: String, jobScatterIndex: Int, - jobRetryAttempt: Int, storeKey: String) - (implicit ec: ExecutionContext): Future[Option[String]] = { - val action = dataAccess. - storeValuesForJobKeyAndStoreKey((workflowExecutionUuid, callFqn, jobScatterIndex, jobRetryAttempt, storeKey)). 
- result.headOption + override def queryStoreValue(workflowExecutionUuid: String, + callFqn: String, + jobScatterIndex: Int, + jobRetryAttempt: Int, + storeKey: String + )(implicit ec: ExecutionContext): Future[Option[String]] = { + val action = dataAccess + .storeValuesForJobKeyAndStoreKey((workflowExecutionUuid, callFqn, jobScatterIndex, jobRetryAttempt, storeKey)) + .result + .headOption runTransaction(action) } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/JobStoreSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/JobStoreSlickDatabase.scala index 5c13de6dbe3..08c8e637908 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/JobStoreSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/JobStoreSlickDatabase.scala @@ -11,8 +11,9 @@ trait JobStoreSlickDatabase extends JobStoreSqlDatabase { import dataAccess.driver.api._ - override def addJobStores(jobStoreJoins: Seq[JobStoreJoin], batchSize: Int) - (implicit ec: ExecutionContext): Future[Unit] = { + override def addJobStores(jobStoreJoins: Seq[JobStoreJoin], batchSize: Int)(implicit + ec: ExecutionContext + ): Future[Unit] = { def assignJobStoreIdsToSimpletons(jobStoreIds: Seq[Long]): Seq[JobStoreSimpletonEntry] = { val simpletonsByJobStoreEntry = jobStoreJoins map { _.jobStoreSimpletonEntries } @@ -32,13 +33,17 @@ trait JobStoreSlickDatabase extends JobStoreSqlDatabase { runTransaction(action) } - override def queryJobStores(workflowExecutionUuid: String, callFqn: String, jobScatterIndex: Int, - jobScatterAttempt: Int)(implicit ec: ExecutionContext): - Future[Option[JobStoreJoin]] = { + override def queryJobStores(workflowExecutionUuid: String, + callFqn: String, + jobScatterIndex: Int, + jobScatterAttempt: Int + )(implicit ec: ExecutionContext): Future[Option[JobStoreJoin]] = { val action = for { - jobStoreEntryOption <- dataAccess. 
- jobStoreEntriesForJobKey((workflowExecutionUuid, callFqn, jobScatterIndex, jobScatterAttempt)).result.headOption + jobStoreEntryOption <- dataAccess + .jobStoreEntriesForJobKey((workflowExecutionUuid, callFqn, jobScatterIndex, jobScatterAttempt)) + .result + .headOption jobStoreSimpletonEntries <- jobStoreEntryOption match { case Some(jobStoreEntry) => dataAccess.jobStoreSimpletonEntriesForJobStoreEntryId(jobStoreEntry.jobStoreEntryId.get).result @@ -49,10 +54,9 @@ trait JobStoreSlickDatabase extends JobStoreSqlDatabase { runTransaction(action) } - override def removeJobStores(workflowExecutionUuids: Seq[String]) - (implicit ec: ExecutionContext): Future[Seq[Int]] = { - val actions = workflowExecutionUuids map { - workflowExecutionUuid => dataAccess.jobStoreEntriesForWorkflowExecutionUuid(workflowExecutionUuid).delete + override def removeJobStores(workflowExecutionUuids: Seq[String])(implicit ec: ExecutionContext): Future[Seq[Int]] = { + val actions = workflowExecutionUuids map { workflowExecutionUuid => + dataAccess.jobStoreEntriesForWorkflowExecutionUuid(workflowExecutionUuid).delete } runTransaction(DBIO.sequence(actions)) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala index eb87f88d101..c0723af6795 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala @@ -8,7 +8,12 @@ import cromwell.database.slick.tables.MetadataDataAccessComponent import cromwell.database.sql.MetadataSqlDatabase import cromwell.database.sql.SqlConverters._ import cromwell.database.sql.joins.{CallOrWorkflowQuery, CallQuery, MetadataJobQueryValue, WorkflowQuery} -import cromwell.database.sql.tables.{CustomLabelEntry, InformationSchemaEntry, MetadataEntry, WorkflowMetadataSummaryEntry} +import cromwell.database.sql.tables.{ + CustomLabelEntry, + InformationSchemaEntry, + MetadataEntry, + WorkflowMetadataSummaryEntry +} import net.ceedubs.ficus.Ficus._ import slick.basic.DatabasePublisher import slick.jdbc.{ResultSetConcurrency, ResultSetType} @@ -23,7 +28,8 @@ object MetadataSlickDatabase { } case class SummarizationPartitionedMetadata(nonSummarizableMetadata: Seq[MetadataEntry], - summarizableMetadata: Seq[MetadataEntry]) + summarizableMetadata: Seq[MetadataEntry] + ) def partitionSummarizationMetadata(rawMetadataEntries: Seq[MetadataEntry], startMetadataKey: String, @@ -33,14 +39,24 @@ object MetadataSlickDatabase { submissionMetadataKey: String, parentWorkflowIdKey: String, rootWorkflowIdKey: String, - labelMetadataKey: String): SummarizationPartitionedMetadata = { - - val exactMatchMetadataKeys = Set(startMetadataKey, endMetadataKey, nameMetadataKey, statusMetadataKey, submissionMetadataKey, parentWorkflowIdKey, rootWorkflowIdKey) + labelMetadataKey: String + ): SummarizationPartitionedMetadata = { + + val exactMatchMetadataKeys = Set(startMetadataKey, + endMetadataKey, + nameMetadataKey, + statusMetadataKey, + submissionMetadataKey, + parentWorkflowIdKey, + rootWorkflowIdKey + ) val startsWithMetadataKeys = Set(labelMetadataKey) val (summarizable, nonSummarizable) = rawMetadataEntries partition { entry => entry.callFullyQualifiedName.isEmpty && entry.jobIndex.isEmpty && entry.jobAttempt.isEmpty && - (exactMatchMetadataKeys.contains(entry.metadataKey) || startsWithMetadataKeys.exists(entry.metadataKey.startsWith)) + 
(exactMatchMetadataKeys.contains(entry.metadataKey) || startsWithMetadataKeys.exists( + entry.metadataKey.startsWith + )) } SummarizationPartitionedMetadata( @@ -51,7 +67,7 @@ object MetadataSlickDatabase { } class MetadataSlickDatabase(originalDatabaseConfig: Config) - extends SlickDatabase(originalDatabaseConfig) + extends SlickDatabase(originalDatabaseConfig) with MetadataSqlDatabase with SummaryStatusSlickDatabase with SummaryQueueSlickDatabase { @@ -75,24 +91,26 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) submissionMetadataKey: String, parentWorkflowIdKey: String, rootWorkflowIdKey: String, - labelMetadataKey: String) - (implicit ec: ExecutionContext): Future[Unit] = { + labelMetadataKey: String + )(implicit ec: ExecutionContext): Future[Unit] = { val partitioned = partitionSummarizationMetadata( - rawMetadataEntries = metadataEntries.toSeq, - startMetadataKey, - endMetadataKey, - nameMetadataKey, - statusMetadataKey, - submissionMetadataKey, - parentWorkflowIdKey, - rootWorkflowIdKey, - labelMetadataKey) + rawMetadataEntries = metadataEntries.toSeq, + startMetadataKey, + endMetadataKey, + nameMetadataKey, + statusMetadataKey, + submissionMetadataKey, + parentWorkflowIdKey, + rootWorkflowIdKey, + labelMetadataKey + ) val roleSet = pgLargeObjectWriteRole.map(role => sqlu"""SET ROLE TO "#$role"""") // These entries also require a write to the summary queue. - def writeSummarizable(): Future[Unit] = if (partitioned.summarizableMetadata.isEmpty) Future.successful(()) else { + def writeSummarizable(): Future[Unit] = if (partitioned.summarizableMetadata.isEmpty) Future.successful(()) + else { val batchesToWrite = partitioned.summarizableMetadata.grouped(insertBatchSize).toList val insertActions = batchesToWrite.map { batch => val insertMetadata = dataAccess.metadataEntryIdsAutoInc ++= batch @@ -103,8 +121,11 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) // Non-summarizable metadata that only needs to go to the metadata table can be written much more efficiently // than summarizable metadata. 
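The partition above hinges on a single predicate: an entry is summarizable only when it is workflow-scoped (no call FQN, job index, or attempt) and its key is either one of the exact-match summary keys or a label-prefix match. Restated as a standalone helper (names mirror the diff; the helper itself is illustrative):

    // Sketch of the summarizability test used by partitionSummarizationMetadata.
    def isSummarizable(entry: MetadataEntry,
                       exactMatchMetadataKeys: Set[String],
                       startsWithMetadataKeys: Set[String]): Boolean =
      entry.callFullyQualifiedName.isEmpty &&
        entry.jobIndex.isEmpty &&
        entry.jobAttempt.isEmpty &&
        (exactMatchMetadataKeys.contains(entry.metadataKey) ||
          startsWithMetadataKeys.exists(entry.metadataKey.startsWith))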
- def writeNonSummarizable(): Future[Unit] = if (partitioned.nonSummarizableMetadata.isEmpty) Future.successful(()) else { - val action = DBIO.sequence(roleSet ++ partitioned.nonSummarizableMetadata.grouped(insertBatchSize).map(dataAccess.metadataEntries ++= _)) + def writeNonSummarizable(): Future[Unit] = if (partitioned.nonSummarizableMetadata.isEmpty) Future.successful(()) + else { + val action = DBIO.sequence( + roleSet ++ partitioned.nonSummarizableMetadata.grouped(insertBatchSize).map(dataAccess.metadataEntries ++= _) + ) runLobAction(action).void } @@ -119,42 +140,45 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) runTransaction(action) } - override def metadataSummaryEntryExists(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Boolean] = { + override def metadataSummaryEntryExists( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Boolean] = { val action = dataAccess.workflowMetadataSummaryEntryExistsForWorkflowExecutionUuid(workflowExecutionUuid).result runTransaction(action) } - override def queryMetadataEntries(workflowExecutionUuid: String, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { + override def queryMetadataEntries(workflowExecutionUuid: String, timeout: Duration)(implicit + ec: ExecutionContext + ): Future[Seq[MetadataEntry]] = { val action = dataAccess.metadataEntriesForWorkflowExecutionUuid(workflowExecutionUuid).result runTransaction(action, timeout = timeout) } override def streamMetadataEntries(workflowExecutionUuid: String): DatabasePublisher[MetadataEntry] = { - val action = dataAccess.metadataEntriesForWorkflowSortedById(workflowExecutionUuid) + val action = dataAccess + .metadataEntriesForWorkflowSortedById(workflowExecutionUuid) .result .withStatementParameters( rsType = ResultSetType.ForwardOnly, rsConcurrency = ResultSetConcurrency.ReadOnly, // Magic number alert: fetchSize is set to MIN_VALUE for MySQL to stream rather than cache in memory first. 
// Inspired by: https://github.com/slick/slick/issues/1218 - fetchSize = Integer.MIN_VALUE) + fetchSize = Integer.MIN_VALUE + ) database.stream(action) } - override def countMetadataEntries(workflowExecutionUuid: String, - expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] = { - val action = dataAccess.countMetadataEntriesForWorkflowExecutionUuid((workflowExecutionUuid, expandSubWorkflows)).result + override def countMetadataEntries(workflowExecutionUuid: String, expandSubWorkflows: Boolean, timeout: Duration)( + implicit ec: ExecutionContext + ): Future[Int] = { + val action = + dataAccess.countMetadataEntriesForWorkflowExecutionUuid((workflowExecutionUuid, expandSubWorkflows)).result runTransaction(action, timeout = timeout) } - override def queryMetadataEntries(workflowExecutionUuid: String, - metadataKey: String, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { + override def queryMetadataEntries(workflowExecutionUuid: String, metadataKey: String, timeout: Duration)(implicit + ec: ExecutionContext + ): Future[Seq[MetadataEntry]] = { val action = dataAccess.metadataEntriesForWorkflowExecutionUuidAndMetadataKey((workflowExecutionUuid, metadataKey)).result runTransaction(action, timeout = timeout) @@ -163,10 +187,14 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) override def countMetadataEntries(workflowExecutionUuid: String, metadataKey: String, expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] = { + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = { val action = - dataAccess.countMetadataEntriesForWorkflowExecutionUuidAndMetadataKey((workflowExecutionUuid, metadataKey, expandSubWorkflows)).result + dataAccess + .countMetadataEntriesForWorkflowExecutionUuidAndMetadataKey( + (workflowExecutionUuid, metadataKey, expandSubWorkflows) + ) + .result runTransaction(action, timeout = timeout) } @@ -174,10 +202,10 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { - val action = dataAccess. - metadataEntriesForJobKey((workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt)).result + timeout: Duration + )(implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { + val action = + dataAccess.metadataEntriesForJobKey((workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt)).result runTransaction(action, timeout = timeout) } @@ -186,10 +214,13 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) jobIndex: Option[Int], jobAttempt: Option[Int], expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] = { - val action = dataAccess. 
- countMetadataEntriesForJobKey((workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, expandSubWorkflows)).result + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = { + val action = dataAccess + .countMetadataEntriesForJobKey( + (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, expandSubWorkflows) + ) + .result runTransaction(action, timeout = timeout) } @@ -198,10 +229,11 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { - val action = dataAccess.metadataEntriesForJobKeyAndMetadataKey(( - workflowUuid, metadataKey, callFullyQualifiedName, jobIndex, jobAttempt)).result + timeout: Duration + )(implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { + val action = dataAccess + .metadataEntriesForJobKeyAndMetadataKey((workflowUuid, metadataKey, callFullyQualifiedName, jobIndex, jobAttempt)) + .result runTransaction(action, timeout = timeout) } @@ -211,10 +243,13 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) jobIndex: Option[Int], jobAttempt: Option[Int], expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] = { - val action = dataAccess.countMetadataEntriesForJobKeyAndMetadataKey(( - workflowUuid, metadataKey, callFullyQualifiedName, jobIndex, jobAttempt, expandSubWorkflows)).result + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = { + val action = dataAccess + .countMetadataEntriesForJobKeyAndMetadataKey( + (workflowUuid, metadataKey, callFullyQualifiedName, jobIndex, jobAttempt, expandSubWorkflows) + ) + .result runTransaction(action, timeout = timeout) } @@ -222,15 +257,35 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) metadataKeysToFilterFor: List[String], metadataKeysToFilterOut: List[String], metadataJobQueryValue: MetadataJobQueryValue, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { + timeout: Duration + )(implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] = { val action = metadataJobQueryValue match { case CallQuery(callFqn, jobIndex, jobAttempt) => - dataAccess.metadataEntriesForJobWithKeyConstraints(workflowExecutionUuid, metadataKeysToFilterFor, metadataKeysToFilterOut, callFqn, jobIndex, jobAttempt).result + dataAccess + .metadataEntriesForJobWithKeyConstraints(workflowExecutionUuid, + metadataKeysToFilterFor, + metadataKeysToFilterOut, + callFqn, + jobIndex, + jobAttempt + ) + .result case WorkflowQuery => - dataAccess.metadataEntriesWithKeyConstraints(workflowExecutionUuid, metadataKeysToFilterFor, metadataKeysToFilterOut, requireEmptyJobKey = true).result + dataAccess + .metadataEntriesWithKeyConstraints(workflowExecutionUuid, + metadataKeysToFilterFor, + metadataKeysToFilterOut, + requireEmptyJobKey = true + ) + .result case CallOrWorkflowQuery => - dataAccess.metadataEntriesWithKeyConstraints(workflowExecutionUuid, metadataKeysToFilterFor, metadataKeysToFilterOut, requireEmptyJobKey = false).result + dataAccess + .metadataEntriesWithKeyConstraints(workflowExecutionUuid, + metadataKeysToFilterFor, + metadataKeysToFilterOut, + requireEmptyJobKey = false + ) + .result } runTransaction(action, timeout = timeout) } @@ -240,88 +295,115 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) metadataKeysToFilterOut: List[String], metadataJobQueryValue: MetadataJobQueryValue, expandSubWorkflows: 
Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] = { + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = { val action = metadataJobQueryValue match { case CallQuery(callFqn, jobIndex, jobAttempt) => - dataAccess.countMetadataEntriesForJobWithKeyConstraints(workflowExecutionUuid, metadataKeysToFilterFor, metadataKeysToFilterOut, callFqn, jobIndex, jobAttempt, expandSubWorkflows).result + dataAccess + .countMetadataEntriesForJobWithKeyConstraints(workflowExecutionUuid, + metadataKeysToFilterFor, + metadataKeysToFilterOut, + callFqn, + jobIndex, + jobAttempt, + expandSubWorkflows + ) + .result case WorkflowQuery => - dataAccess.countMetadataEntriesWithKeyConstraints(workflowExecutionUuid, metadataKeysToFilterFor, metadataKeysToFilterOut, requireEmptyJobKey = true, expandSubWorkflows = expandSubWorkflows).result + dataAccess + .countMetadataEntriesWithKeyConstraints(workflowExecutionUuid, + metadataKeysToFilterFor, + metadataKeysToFilterOut, + requireEmptyJobKey = true, + expandSubWorkflows = expandSubWorkflows + ) + .result case CallOrWorkflowQuery => - dataAccess.countMetadataEntriesWithKeyConstraints(workflowExecutionUuid, metadataKeysToFilterFor, metadataKeysToFilterOut, requireEmptyJobKey = false, expandSubWorkflows = expandSubWorkflows).result + dataAccess + .countMetadataEntriesWithKeyConstraints(workflowExecutionUuid, + metadataKeysToFilterFor, + metadataKeysToFilterOut, + requireEmptyJobKey = false, + expandSubWorkflows = expandSubWorkflows + ) + .result } runTransaction(action, timeout = timeout) } - private def updateWorkflowMetadataSummaryEntry(buildUpdatedWorkflowMetadataSummaryEntry: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => - WorkflowMetadataSummaryEntry) - (workflowExecutionUuuidAndMetadataEntries: (String, Seq[MetadataEntry])) - (implicit ec: ExecutionContext): DBIO[Unit] = { + private def updateWorkflowMetadataSummaryEntry( + buildUpdatedWorkflowMetadataSummaryEntry: (Option[WorkflowMetadataSummaryEntry], + Seq[MetadataEntry] + ) => WorkflowMetadataSummaryEntry + )( + workflowExecutionUuuidAndMetadataEntries: (String, Seq[MetadataEntry]) + )(implicit ec: ExecutionContext): DBIO[Unit] = { val (workflowExecutionUuid, metadataEntries) = workflowExecutionUuuidAndMetadataEntries for { - // There might not be a preexisting summary for a given UUID, so `headOption` the result - existingWorkflowMetadataSummaryEntry <- dataAccess. 
- workflowMetadataSummaryEntriesForWorkflowExecutionUuid(workflowExecutionUuid).result.headOption + // There might not be a preexisting summary for a given UUID, so `headOption` the result + existingWorkflowMetadataSummaryEntry <- dataAccess + .workflowMetadataSummaryEntriesForWorkflowExecutionUuid(workflowExecutionUuid) + .result + .headOption updatedWorkflowMetadataSummaryEntry = buildUpdatedWorkflowMetadataSummaryEntry( - existingWorkflowMetadataSummaryEntry, metadataEntries) + existingWorkflowMetadataSummaryEntry, + metadataEntries + ) _ <- upsertWorkflowMetadataSummaryEntry(updatedWorkflowMetadataSummaryEntry) } yield () } private def toCustomLabelEntry(metadataEntry: MetadataEntry): CustomLabelEntry = { - //Extracting the label key from the MetadataEntry key + // Extracting the label key from the MetadataEntry key val labelKey = metadataEntry.metadataKey.split("\\:", 2)(1) val labelValue = metadataEntry.metadataValue.toRawString val customLabelEntry = CustomLabelEntry(labelKey, labelValue, metadataEntry.workflowExecutionUuid) customLabelEntry } - private def upsertCustomLabelEntry(customLabelEntry: CustomLabelEntry) - (implicit ec: ExecutionContext): DBIO[Unit] = { + private def upsertCustomLabelEntry(customLabelEntry: CustomLabelEntry)(implicit ec: ExecutionContext): DBIO[Unit] = if (useSlickUpserts) { for { _ <- dataAccess.customLabelEntryIdsAutoInc.insertOrUpdate(customLabelEntry) } yield () } else { for { - updateCount <- dataAccess. - customLabelEntriesForWorkflowExecutionUuidAndLabelKey( + updateCount <- dataAccess + .customLabelEntriesForWorkflowExecutionUuidAndLabelKey( (customLabelEntry.workflowExecutionUuid, customLabelEntry.customLabelKey) - ).update(customLabelEntry) + ) + .update(customLabelEntry) _ <- updateCount match { case 0 => dataAccess.customLabelEntryIdsAutoInc += customLabelEntry case _ => assertUpdateCount("upsertCustomLabelEntry", updateCount, 1) } } yield () } - } - private def upsertWorkflowMetadataSummaryEntry(workflowMetadataSummaryEntry: WorkflowMetadataSummaryEntry) - (implicit ec: ExecutionContext): DBIO[Unit] = { + private def upsertWorkflowMetadataSummaryEntry( + workflowMetadataSummaryEntry: WorkflowMetadataSummaryEntry + )(implicit ec: ExecutionContext): DBIO[Unit] = if (useSlickUpserts) { for { _ <- dataAccess.workflowMetadataSummaryEntryIdsAutoInc.insertOrUpdate(workflowMetadataSummaryEntry) } yield () } else { for { - updateCount <- dataAccess. - workflowMetadataSummaryEntriesForWorkflowExecutionUuid(workflowMetadataSummaryEntry.workflowExecutionUuid). 
- update(workflowMetadataSummaryEntry) + updateCount <- dataAccess + .workflowMetadataSummaryEntriesForWorkflowExecutionUuid(workflowMetadataSummaryEntry.workflowExecutionUuid) + .update(workflowMetadataSummaryEntry) _ <- updateCount match { case 0 => dataAccess.workflowMetadataSummaryEntryIdsAutoInc += workflowMetadataSummaryEntry case _ => assertUpdateCount("upsertWorkflowMetadataSummaryEntry", updateCount, 1) } } yield () } - } - override def summarizeIncreasing(labelMetadataKey: String, - limit: Int, - buildUpdatedSummary: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) - => WorkflowMetadataSummaryEntry) - (implicit ec: ExecutionContext): Future[Long] = { + override def summarizeIncreasing( + labelMetadataKey: String, + limit: Int, + buildUpdatedSummary: (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry + )(implicit ec: ExecutionContext): Future[Long] = { val action = for { rawMetadataEntries <- dataAccess.metadataEntriesToSummarizeQuery(limit.toLong).result _ <- @@ -337,14 +419,13 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) runTransaction(action) } - override def summarizeDecreasing(summaryNameDecreasing: String, - summaryNameIncreasing: String, - labelMetadataKey: String, - limit: Int, - buildUpdatedSummary: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) - => WorkflowMetadataSummaryEntry) - (implicit ec: ExecutionContext): Future[(Long, Long)] = { + override def summarizeDecreasing( + summaryNameDecreasing: String, + summaryNameIncreasing: String, + labelMetadataKey: String, + limit: Int, + buildUpdatedSummary: (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry + )(implicit ec: ExecutionContext): Future[(Long, Long)] = { val action = for { previousExistingMetadataEntryIdOption <- getSummaryStatusEntrySummaryPosition(summaryNameDecreasing) previousInitializedMetadataEntryIdOption <- previousExistingMetadataEntryIdOption match { @@ -372,13 +453,14 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) runTransaction(action) } - private def buildMetadataSummaryFromRawMetadataAndWriteToDb(rawMetadataEntries: Seq[MetadataEntry], - labelMetadataKey: String, - buildUpdatedSummary: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry - )(implicit ec: ExecutionContext): DBIO[Unit] = { + private def buildMetadataSummaryFromRawMetadataAndWriteToDb( + rawMetadataEntries: Seq[MetadataEntry], + labelMetadataKey: String, + buildUpdatedSummary: (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry + )(implicit ec: ExecutionContext): DBIO[Unit] = { - val (summarizableLabelsMetadata, summarizableRegularMetadata) = rawMetadataEntries.partition(_.metadataKey.contains(labelMetadataKey)) + val (summarizableLabelsMetadata, summarizableRegularMetadata) = + rawMetadataEntries.partition(_.metadataKey.contains(labelMetadataKey)) val groupedSummarizableRegularMetadata = summarizableRegularMetadata.groupBy(_.workflowExecutionUuid) for { @@ -387,25 +469,32 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) } yield () } - override def updateMetadataArchiveStatus(workflowExecutionUuid: String, newArchiveStatus: Option[String]): Future[Int] = { + override def updateMetadataArchiveStatus(workflowExecutionUuid: String, + newArchiveStatus: Option[String] + ): Future[Int] = { val action = dataAccess.metadataArchiveStatusByWorkflowId(workflowExecutionUuid).update(newArchiveStatus) 
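For context on the summarization hunks above: `buildMetadataSummaryFromRawMetadataAndWriteToDb` first splits the raw rows into label entries and regular entries, then groups the regular entries per workflow before folding them into summary updates. A minimal, database-free sketch of that split, with a hypothetical `Row` standing in for `MetadataEntry`:

    // Partition metadata rows into label vs. regular entries, then group the
    // regular entries by workflow UUID, mirroring the shape of
    // buildMetadataSummaryFromRawMetadataAndWriteToDb.
    final case class Row(workflowUuid: String, key: String, value: String) // hypothetical stand-in

    object SummarizeSketch extends App {
      val labelMetadataKey = "labels" // illustrative; Cromwell passes this in

      val rows = Seq(
        Row("wf-1", "status", "Running"),
        Row("wf-1", "labels:team", "blue"),
        Row("wf-2", "status", "Succeeded")
      )

      // Label rows are those whose key contains the label metadata key.
      val (labelRows, regularRows) = rows.partition(_.key.contains(labelMetadataKey))

      // Regular rows are summarized per workflow.
      val grouped: Map[String, Seq[Row]] = regularRows.groupBy(_.workflowUuid)

      println(labelRows.size) // 1
      println(grouped.keySet) // Set(wf-1, wf-2)
    }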
runTransaction(action) } - override def getWorkflowStatus(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Option[String]] = { + override def getWorkflowStatus( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Option[String]] = { val action = dataAccess.workflowStatusesForWorkflowExecutionUuid(workflowExecutionUuid).result.headOption // The workflow might not exist, so `headOption`. But even if the workflow does exist, the status might be None. // So flatten the Option[Option[String]] to Option[String]. runTransaction(action).map(_.flatten) } - override def getWorkflowLabels(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Map[String, String]] = { + override def getWorkflowLabels( + workflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Map[String, String]] = { val action = dataAccess.labelsForWorkflowExecutionUuid(workflowExecutionUuid).result runTransaction(action).map(_.toMap) } - override def getRootAndSubworkflowLabels(rootWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Map[String, Map[String, String]]] = { + override def getRootAndSubworkflowLabels( + rootWorkflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Map[String, Map[String, String]]] = { val action = dataAccess.labelsForWorkflowAndSubworkflows(rootWorkflowExecutionUuid).result // An empty Map of String workflow IDs to an inner Map of label keys to label values. // The outer Map has a default value so any request for a workflow ID not already present @@ -424,10 +513,10 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) workflowStatuses: Set[String], workflowNames: Set[String], workflowExecutionUuids: Set[String], - labelAndKeyLabelValues: Set[(String,String)], - labelOrKeyLabelValues: Set[(String,String)], - excludeLabelAndValues: Set[(String,String)], - excludeLabelOrValues: Set[(String,String)], + labelAndKeyLabelValues: Set[(String, String)], + labelOrKeyLabelValues: Set[(String, String)], + excludeLabelAndValues: Set[(String, String)], + excludeLabelOrValues: Set[(String, String)], submissionTimestampOption: Option[Timestamp], startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], @@ -435,12 +524,27 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) includeSubworkflows: Boolean, page: Option[Int], pageSize: Option[Int], - newestFirst: Boolean) - (implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = { - - val action = dataAccess.queryWorkflowMetadataSummaryEntries(parentIdWorkflowMetadataKey, workflowStatuses, workflowNames, workflowExecutionUuids, - labelAndKeyLabelValues, labelOrKeyLabelValues, excludeLabelAndValues, excludeLabelOrValues, submissionTimestampOption, startTimestampOption, - endTimestampOption, metadataArchiveStatus, includeSubworkflows, page, pageSize, newestFirst) + newestFirst: Boolean + )(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = { + + val action = dataAccess.queryWorkflowMetadataSummaryEntries( + parentIdWorkflowMetadataKey, + workflowStatuses, + workflowNames, + workflowExecutionUuids, + labelAndKeyLabelValues, + labelOrKeyLabelValues, + excludeLabelAndValues, + excludeLabelOrValues, + submissionTimestampOption, + startTimestampOption, + endTimestampOption, + metadataArchiveStatus, + includeSubworkflows, + page, + pageSize, + newestFirst + ) runTransaction(action) } @@ -448,79 +552,110 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) workflowStatuses: 
Set[String], workflowNames: Set[String], workflowExecutionUuids: Set[String], - labelAndKeyLabelValues: Set[(String,String)], - labelOrKeyLabelValues: Set[(String,String)], - excludeLabelAndValues: Set[(String,String)], - excludeLabelOrValues: Set[(String,String)], + labelAndKeyLabelValues: Set[(String, String)], + labelOrKeyLabelValues: Set[(String, String)], + excludeLabelAndValues: Set[(String, String)], + excludeLabelOrValues: Set[(String, String)], submissionTimestampOption: Option[Timestamp], startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], metadataArchiveStatus: Set[Option[String]], - includeSubworkflows: Boolean) - (implicit ec: ExecutionContext): Future[Int] = { - val action = dataAccess.countWorkflowMetadataSummaryEntries(parentIdWorkflowMetadataKey, workflowStatuses, workflowNames, workflowExecutionUuids, - labelAndKeyLabelValues, labelOrKeyLabelValues, excludeLabelAndValues, excludeLabelOrValues, submissionTimestampOption, startTimestampOption, - endTimestampOption, metadataArchiveStatus, includeSubworkflows) + includeSubworkflows: Boolean + )(implicit ec: ExecutionContext): Future[Int] = { + val action = dataAccess.countWorkflowMetadataSummaryEntries( + parentIdWorkflowMetadataKey, + workflowStatuses, + workflowNames, + workflowExecutionUuids, + labelAndKeyLabelValues, + labelOrKeyLabelValues, + excludeLabelAndValues, + excludeLabelOrValues, + submissionTimestampOption, + startTimestampOption, + endTimestampOption, + metadataArchiveStatus, + includeSubworkflows + ) runTransaction(action) } - override def deleteAllMetadataForWorkflowAndUpdateArchiveStatus(workflowId: String, newArchiveStatus: Option[String])(implicit ec: ExecutionContext): Future[Int] = { + override def deleteAllMetadataForWorkflowAndUpdateArchiveStatus(workflowId: String, newArchiveStatus: Option[String])( + implicit ec: ExecutionContext + ): Future[Int] = runTransaction { for { numDeleted <- dataAccess.metadataEntriesForWorkflowSortedById(workflowId).delete _ <- dataAccess.metadataArchiveStatusByWorkflowId(workflowId).update(newArchiveStatus) } yield numDeleted } - } - override def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]] = { + override def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]] = runAction( dataAccess.rootWorkflowId(workflowId).result.headOption ) - } - override def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], thresholdTimestamp: Timestamp, batchSize: Long)(implicit ec: ExecutionContext): Future[Seq[String]] = { + override def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], + thresholdTimestamp: Timestamp, + batchSize: Long + )(implicit ec: ExecutionContext): Future[Seq[String]] = runAction( - dataAccess.workflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp((archiveStatus, thresholdTimestamp, batchSize)).result + dataAccess + .workflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp((archiveStatus, thresholdTimestamp, batchSize)) + .result ) - } override def getSummaryQueueSize()(implicit ec: ExecutionContext): Future[Int] = runAction( countSummaryQueueEntries() ) - override def getMetadataArchiveStatusAndEndTime(workflowId: String)(implicit ec: ExecutionContext): Future[(Option[String], Option[Timestamp])] = { + override def getMetadataArchiveStatusAndEndTime( + workflowId: String + )(implicit ec: ExecutionContext): Future[(Option[String], 
Option[Timestamp])] = { val action = dataAccess.metadataArchiveStatusAndEndTimeByWorkflowId(workflowId).result.headOption runTransaction(action).map(_.getOrElse((None, None))) } override def queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], workflowEndTimestampThreshold: Timestamp, - batchSize: Long)(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = { + batchSize: Long + )(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = runAction( - dataAccess.workflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses, workflowEndTimestampThreshold, batchSize).result + dataAccess + .workflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses, + workflowEndTimestampThreshold, + batchSize + ) + .result ) - } override def countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], - workflowEndTimestampThreshold: Timestamp)(implicit ec: ExecutionContext): Future[Int] = { + workflowEndTimestampThreshold: Timestamp + )(implicit ec: ExecutionContext): Future[Int] = runAction( - dataAccess.countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses, workflowEndTimestampThreshold).result + dataAccess + .countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses, + workflowEndTimestampThreshold + ) + .result ) - } - override def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold: Timestamp)(implicit ec: ExecutionContext): Future[Int] = { + override def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp( + workflowEndTimestampThreshold: Timestamp + )(implicit ec: ExecutionContext): Future[Int] = runAction( dataAccess.countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold).result ) - } - override def getMetadataTableSizeInformation()(implicit ec: ExecutionContext): Future[Option[InformationSchemaEntry]] = { + override def getMetadataTableSizeInformation()(implicit + ec: ExecutionContext + ): Future[Option[InformationSchemaEntry]] = runAction(dataAccess.metadataTableSizeInformation()) - } - override def getFailedJobsMetadataWithWorkflowId(rootWorkflowId: String)(implicit ec: ExecutionContext): Future[Vector[MetadataEntry]] = { + override def getFailedJobsMetadataWithWorkflowId( + rootWorkflowId: String + )(implicit ec: ExecutionContext): Future[Vector[MetadataEntry]] = { val isPostgres = databaseConfig.getValue("db.driver").toString.toLowerCase().contains("postgres") runLobAction(dataAccess.failedJobsMetadataWithWorkflowId(rootWorkflowId, isPostgres)) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala index 459c705480e..55c408f944f 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala @@ -11,11 +11,12 @@ import slick.basic.DatabaseConfig import slick.jdbc.{JdbcCapabilities, JdbcProfile, PostgresProfile, TransactionIsolation} import java.sql.{Connection, PreparedStatement, Statement} -import java.util.concurrent.{ExecutorService, Executors} +import java.util.concurrent.{Executors, ExecutorService} import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, Future} object SlickDatabase { + /** * Returns either the "url" or "properties.url" */ @@ -23,7 +24,7 @@ object SlickDatabase { 
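Side note on the `getFailedJobsMetadataWithWorkflowId` hunk above: whether the backing database is Postgres is decided at runtime by inspecting the `db.driver` config value. A small self-contained sketch of that check using Typesafe Config (the driver string below is illustrative):

    import com.typesafe.config.ConfigFactory

    object DriverCheckSketch extends App {
      // Illustrative config fragment; a real Cromwell database block has more keys.
      val databaseConfig = ConfigFactory.parseString("db.driver = \"slick.jdbc.PostgresProfile$\"")

      // Same shape as the check in the diff: stringify the config value and
      // search for "postgres" case-insensitively.
      val isPostgres = databaseConfig.getValue("db.driver").toString.toLowerCase().contains("postgres")

      println(isPostgres) // true
    }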
lazy val log: Logger = LoggerFactory.getLogger("cromwell.database.slick") - def createSchema(slickDatabase: SlickDatabase): Unit = { + def createSchema(slickDatabase: SlickDatabase): Unit = // NOTE: Slick 3.0.0 schema creation, Clobs, and MySQL don't mix: https://github.com/slick/slick/issues/637 // // Not really an issue, since externally run liquibase is standard way of installing / upgrading MySQL. @@ -43,7 +44,6 @@ object SlickDatabase { import slickDatabase.dataAccess.driver.api._ Await.result(slickDatabase.database.run(slickDatabase.dataAccess.schema.create), Duration.Inf) } - } def getDatabaseConfig(name: String, parentConfig: Config): Config = { val rootDatabaseConfig = parentConfig.getConfig("database") @@ -130,7 +130,8 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend } private val actionExecutionContext: ExecutionContext = ExecutionContext.fromExecutor( - actionThreadPool, database.executor.executionContext.reportFailure + actionThreadPool, + database.executor.executionContext.reportFailure ) protected[this] lazy val insertBatchSize: Int = databaseConfig.getOrElse("insert-batch-size", 2000) @@ -138,23 +139,21 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend protected[this] lazy val useSlickUpserts: Boolean = dataAccess.driver.capabilities.contains(JdbcCapabilities.insertOrUpdate) - //noinspection SameParameterValue - protected[this] def assertUpdateCount(description: String, updates: Int, expected: Int): DBIO[Unit] = { + // noinspection SameParameterValue + protected[this] def assertUpdateCount(description: String, updates: Int, expected: Int): DBIO[Unit] = if (updates == expected) { DBIO.successful(()) } else { DBIO.failed(new RuntimeException(s"$description expected update count $expected, got $updates")) } - } - override def withConnection[A](block: Connection => A): A = { + override def withConnection[A](block: Connection => A): A = /* TODO: Should this withConnection() method have a (implicit?) timeout parameter, that it passes on to Await.result? If we run completely asynchronously, nest calls to withConnection, and then call flatMap, the outer connection may already be closed before an inner block finishes running. */ Await.result(database.run(SimpleDBIO(context => block(context.connection))), Duration.Inf) - } override def close(): Unit = { actionThreadPool.shutdown() @@ -166,40 +165,41 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend protected[this] def runTransaction[R](action: DBIO[R], isolationLevel: TransactionIsolation = TransactionIsolation.RepeatableRead, - timeout: Duration = Duration.Inf): Future[R] = { + timeout: Duration = Duration.Inf + ): Future[R] = runActionInternal(action.transactionally.withTransactionIsolation(isolationLevel), timeout = timeout) - } /* Note that this is only appropriate for actions that do not involve Blob * or Clob fields in Postgres, since large object support requires running * transactionally. Use runLobAction instead, which will still run in * auto-commit mode when using other database engines. 
*/ - protected[this] def runAction[R](action: DBIO[R]): Future[R] = { + protected[this] def runAction[R](action: DBIO[R]): Future[R] = runActionInternal(action.withPinnedSession) - } /* Wrapper for queries where Clob/Blob types are used * https://stackoverflow.com/questions/3164072/large-objects-may-not-be-used-in-auto-commit-mode#answer-3164352 */ - protected[this] def runLobAction[R](action: DBIO[R]): Future[R] = { + protected[this] def runLobAction[R](action: DBIO[R]): Future[R] = dataAccess.driver match { case PostgresProfile => runTransaction(action) case _ => runAction(action) } - } - private def runActionInternal[R](action: DBIO[R], timeout: Duration = Duration.Inf): Future[R] = { - //database.run(action) <-- See comment above private val actionThreadPool + private def runActionInternal[R](action: DBIO[R], timeout: Duration = Duration.Inf): Future[R] = + // database.run(action) <-- See comment above private val actionThreadPool Future { - try { + try if (timeout.isFinite) { // https://stackoverflow.com/a/52569275/818054 - Await.result(database.run(action.withStatementParameters(statementInit = _.setQueryTimeout(timeout.toSeconds.toInt))), Duration.Inf) + Await.result( + database.run(action.withStatementParameters(statementInit = _.setQueryTimeout(timeout.toSeconds.toInt))), + Duration.Inf + ) } else { Await.result(database.run(action), Duration.Inf) } - } catch { + catch { case rollbackException: MySQLTransactionRollbackException => debugExitStatusCodeOption match { case Some(status) => @@ -229,17 +229,16 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend } } }(actionExecutionContext) - } /* - * Upsert the provided values in batch. - * Fails the query if one or more upsert failed. - * Adapted from https://github.com/slick/slick/issues/1781 + * Upsert the provided values in batch. + * Fails the query if one or more upsert failed. 
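The `runLobAction` change above preserves an important Postgres rule: large objects (Clobs/Blobs) may not be touched in auto-commit mode, so LOB work must run transactionally there, while other engines can keep the cheaper pinned-session path. A minimal sketch of that dispatch-on-profile pattern, with hypothetical runner functions standing in for `runTransaction` and `runAction`:

    import slick.jdbc.{JdbcProfile, MySQLProfile, PostgresProfile}

    object LobDispatchSketch extends App {
      // Hypothetical runners standing in for runTransaction / runAction.
      def inTransaction(label: String): String = s"$label: transactionally"
      def autoCommit(label: String): String = s"$label: pinned session (auto-commit)"

      // Postgres large objects require a transaction; other engines keep
      // auto-commit, exactly the match in the diff.
      def runLob(profile: JdbcProfile, label: String): String = profile match {
        case PostgresProfile => inTransaction(label)
        case _ => autoCommit(label)
      }

      println(runLob(PostgresProfile, "read clob")) // read clob: transactionally
      println(runLob(MySQLProfile, "read clob"))    // read clob: pinned session (auto-commit)
    }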
+ * Adapted from https://github.com/slick/slick/issues/1781 */ protected[this] def createBatchUpsert[T](description: String, compiled: dataAccess.driver.JdbcCompiledInsert, values: Iterable[T] - )(implicit ec: ExecutionContext): DBIO[Unit] = { + )(implicit ec: ExecutionContext): DBIO[Unit] = SimpleDBIO { context => context.session.withPreparedStatement[Array[Int]](compiled.upsert.sql) { st: PreparedStatement => values.foreach { update => @@ -255,10 +254,11 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend else { val valueList = values.toList val failedRequests = failures.toList.map(valueList(_)) - DBIO.failed(new RuntimeException( - s"$description failed to upsert the following rows: ${failedRequests.mkString(", ")}" - )) + DBIO.failed( + new RuntimeException( + s"$description failed to upsert the following rows: ${failedRequests.mkString(", ")}" + ) + ) } } - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/SubWorkflowStoreSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SubWorkflowStoreSlickDatabase.scala index 21dd3ece7a0..973f18540fc 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/SubWorkflowStoreSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/SubWorkflowStoreSlickDatabase.scala @@ -12,14 +12,18 @@ trait SubWorkflowStoreSlickDatabase extends SubWorkflowStoreSqlDatabase { import dataAccess.driver.api._ - def addSubWorkflowStoreEntry(rootWorkflowExecutionUuid: String, + def addSubWorkflowStoreEntry(rootWorkflowExecutionUuid: String, parentWorkflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Int, jobAttempt: Int, - subWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Unit] = { + subWorkflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Unit] = { val action = for { - workflowStoreEntry <- dataAccess.workflowStoreEntriesForWorkflowExecutionUuid(rootWorkflowExecutionUuid).result.headOption + workflowStoreEntry <- dataAccess + .workflowStoreEntriesForWorkflowExecutionUuid(rootWorkflowExecutionUuid) + .result + .headOption _ <- workflowStoreEntry match { case Some(rootWorkflow) => dataAccess.subWorkflowStoreEntryIdsAutoInc += @@ -31,28 +35,41 @@ trait SubWorkflowStoreSlickDatabase extends SubWorkflowStoreSqlDatabase { jobAttempt, subWorkflowExecutionUuid ) - case None => DBIO.failed(new IllegalArgumentException(s"Could not find root workflow with UUID $rootWorkflowExecutionUuid")) + case None => + DBIO.failed( + new IllegalArgumentException(s"Could not find root workflow with UUID $rootWorkflowExecutionUuid") + ) } } yield () - + runTransaction(action) void } - override def querySubWorkflowStore(parentWorkflowExecutionUuid: String, callFqn: String, jobIndex: Int, jobAttempt: Int) - (implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]] = { + override def querySubWorkflowStore(parentWorkflowExecutionUuid: String, + callFqn: String, + jobIndex: Int, + jobAttempt: Int + )(implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]] = { val action = for { - subWorkflowStoreEntryOption <- dataAccess.subWorkflowStoreEntriesForJobKey( - (parentWorkflowExecutionUuid, callFqn, jobIndex, jobAttempt) - ).result.headOption + subWorkflowStoreEntryOption <- dataAccess + .subWorkflowStoreEntriesForJobKey( + (parentWorkflowExecutionUuid, callFqn, jobIndex, jobAttempt) + ) + .result + .headOption } yield subWorkflowStoreEntryOption runTransaction(action) } - override def 
removeSubWorkflowStoreEntries(rootWorkflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Int] = { + override def removeSubWorkflowStoreEntries( + rootWorkflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Int] = { val action = for { - workflowStoreEntry <- dataAccess.workflowStoreEntriesForWorkflowExecutionUuid(rootWorkflowExecutionUuid).result.headOption + workflowStoreEntry <- dataAccess + .workflowStoreEntriesForWorkflowExecutionUuid(rootWorkflowExecutionUuid) + .result + .headOption deleted <- workflowStoreEntry match { case Some(rootWorkflow) => dataAccess.subWorkflowStoreEntriesForRootWorkflowId(rootWorkflow.workflowStoreEntryId.get).delete @@ -60,7 +77,7 @@ trait SubWorkflowStoreSlickDatabase extends SubWorkflowStoreSqlDatabase { DBIO.successful(0) } } yield deleted - + runTransaction(action) } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/SummaryQueueSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SummaryQueueSlickDatabase.scala index af04939b99b..7f6365493d2 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/SummaryQueueSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/SummaryQueueSlickDatabase.scala @@ -7,16 +7,13 @@ trait SummaryQueueSlickDatabase { import dataAccess.driver.api._ - private[slick] def writeSummaryQueueEntries(metadataJournalIds: Seq[Long]) = { + private[slick] def writeSummaryQueueEntries(metadataJournalIds: Seq[Long]) = dataAccess.summaryQueueEntries ++= metadataJournalIds.map(id => SummaryQueueEntry(id)) - } - private[slick] def deleteSummaryQueueEntriesByMetadataJournalIds(metadataJournalIds: Seq[Long]) = { + private[slick] def deleteSummaryQueueEntriesByMetadataJournalIds(metadataJournalIds: Seq[Long]) = dataAccess.summaryQueueEntries.filter(_.metadataJournalId.inSet(metadataJournalIds)).delete - } - private[slick] def countSummaryQueueEntries() = { + private[slick] def countSummaryQueueEntries() = dataAccess.summaryQueueEntries.length.result - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala index 43a3aff9273..a7fa1a57a62 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/SummaryStatusSlickDatabase.scala @@ -9,17 +9,15 @@ trait SummaryStatusSlickDatabase { import dataAccess.driver.api._ - private[slick] def getSummaryStatusEntrySummaryPosition(summaryName: String): DBIO[Option[Long]] = { + private[slick] def getSummaryStatusEntrySummaryPosition(summaryName: String): DBIO[Option[Long]] = dataAccess.summaryPositionForSummaryName(summaryName).result.headOption - } - private[slick] def upsertSummaryStatusEntrySummaryPosition(summaryName: String, - summaryPosition: Long) - (implicit ec: ExecutionContext): DBIO[Unit] = { + private[slick] def upsertSummaryStatusEntrySummaryPosition(summaryName: String, summaryPosition: Long)(implicit + ec: ExecutionContext + ): DBIO[Unit] = if (useSlickUpserts) { for { - _ <- dataAccess.summaryStatusEntryIdsAutoInc. 
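The upsert hunks here (custom labels earlier, summary status above) all share one shape: when the profile supports native upserts, use `insertOrUpdate`; otherwise issue an UPDATE and fall back to an INSERT when zero rows matched. A database-free sketch of that fallback over a plain mutable map (all names hypothetical):

    import scala.collection.mutable

    object UpsertFallbackSketch extends App {
      // Hypothetical stand-in for a table keyed by summary name.
      val table = mutable.Map.empty[String, Long]

      // Try the UPDATE first; if it matched zero rows, INSERT instead,
      // mirroring the non-upsert branch in the diff.
      def upsert(name: String, position: Long): Unit = {
        val updateCount = if (table.contains(name)) { table(name) = position; 1 } else 0
        updateCount match {
          case 0 => table(name) = position // INSERT path
          case _ => () // UPDATE matched; nothing more to do
        }
      }

      upsert("metadata", 42L)
      upsert("metadata", 43L)
      println(table) // HashMap(metadata -> 43)
    }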
- insertOrUpdate(SummaryStatusEntry(summaryName, summaryPosition)) + _ <- dataAccess.summaryStatusEntryIdsAutoInc.insertOrUpdate(SummaryStatusEntry(summaryName, summaryPosition)) } yield () } else { for { @@ -32,5 +30,4 @@ trait SummaryStatusSlickDatabase { } } yield () } - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala index a39dcbf4195..d7d3e7bb6db 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/WorkflowStoreSlickDatabase.scala @@ -14,8 +14,9 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { import dataAccess.driver.api._ - override def setStateToState(fromWorkflowState: String, toWorkflowState: String) - (implicit ec: ExecutionContext): Future[Unit] = { + override def setStateToState(fromWorkflowState: String, toWorkflowState: String)(implicit + ec: ExecutionContext + ): Future[Unit] = { val action = dataAccess .workflowStateForWorkflowState(fromWorkflowState) .update(toWorkflowState) @@ -26,8 +27,8 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { override def deleteOrUpdateWorkflowToState(workflowExecutionUuid: String, workflowStateToDelete1: String, workflowStateToDelete2: String, - workflowStateForUpdate: String) - (implicit ec: ExecutionContext): Future[Option[Boolean]] = { + workflowStateForUpdate: String + )(implicit ec: ExecutionContext): Future[Option[Boolean]] = { val action = for { // First, delete all rows in either of our states to be deleted. deleted <- dataAccess @@ -44,11 +45,14 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { } } yield (deleted, updated) - runTransaction(action) map { case (deleted, updated) => if (deleted == 0 && updated == 0) None else Option(deleted > 0) } + runTransaction(action) map { case (deleted, updated) => + if (deleted == 0 && updated == 0) None else Option(deleted > 0) + } } - override def addWorkflowStoreEntries(workflowStoreEntries: Iterable[WorkflowStoreEntry]) - (implicit ec: ExecutionContext): Future[Unit] = { + override def addWorkflowStoreEntries( + workflowStoreEntries: Iterable[WorkflowStoreEntry] + )(implicit ec: ExecutionContext): Future[Unit] = { val action = dataAccess.workflowStoreEntryIdsAutoInc ++= workflowStoreEntries runTransaction(action).void } @@ -60,15 +64,14 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { workflowStateFrom: String, workflowStateTo: String, workflowStateExcluded: String, - excludedGroups: Set[String]) - (implicit ec: ExecutionContext): Future[Seq[WorkflowStoreEntry]] = { + excludedGroups: Set[String] + )(implicit ec: ExecutionContext): Future[Seq[WorkflowStoreEntry]] = { def updateForFetched(cromwellId: String, heartbeatTimestampTo: Timestamp, workflowStateFrom: String, - workflowStateTo: String) - (workflowStoreEntry: WorkflowStoreEntry) - (implicit ec: ExecutionContext): DBIO[Unit] = { + workflowStateTo: String + )(workflowStoreEntry: WorkflowStoreEntry)(implicit ec: ExecutionContext): DBIO[Unit] = { val workflowExecutionUuid = workflowStoreEntry.workflowExecutionUuid val updateState = workflowStoreEntry.workflowState match { case matched if matched == workflowStateFrom => workflowStateTo @@ -88,20 +91,28 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { } yield () } - def fetchAndUpdateStartableWfs(hogGroup: Option[String]) = { + def 
fetchAndUpdateStartableWfs(hogGroup: Option[String]) = for { - workflowStoreEntries <- dataAccess.fetchStartableWfsForHogGroup( - (limit.toLong, heartbeatTimestampTimedOut, workflowStateExcluded, hogGroup) - ).result + workflowStoreEntries <- dataAccess + .fetchStartableWfsForHogGroup( + (limit.toLong, heartbeatTimestampTimedOut, workflowStateExcluded, hogGroup) + ) + .result _ <- DBIO.sequence( - workflowStoreEntries map updateForFetched(cromwellId, heartbeatTimestampTo, workflowStateFrom, workflowStateTo) + workflowStoreEntries map updateForFetched(cromwellId, + heartbeatTimestampTo, + workflowStateFrom, + workflowStateTo + ) ) } yield workflowStoreEntries - } val action = for { // find hog group with lowest count of actively running workflows - hogGroupOption <- dataAccess.getHogGroupWithLowestRunningWfs(heartbeatTimestampTimedOut, workflowStateExcluded, excludedGroups).result.headOption + hogGroupOption <- dataAccess + .getHogGroupWithLowestRunningWfs(heartbeatTimestampTimedOut, workflowStateExcluded, excludedGroups) + .result + .headOption workflowStoreEntries <- hogGroupOption match { // if no such hog group was found, all hog groups have workflows that are either actively running or in "OnHold" status case None => DBIO.successful(Seq.empty[WorkflowStoreEntry]) @@ -118,9 +129,9 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { runTransaction(action, TransactionIsolation.ReadCommitted) } - override def writeWorkflowHeartbeats(workflowExecutionUuids: Seq[String], - heartbeatTimestamp: Timestamp) - (implicit ec: ExecutionContext): Future[Int] = { + override def writeWorkflowHeartbeats(workflowExecutionUuids: Seq[String], heartbeatTimestamp: Timestamp)(implicit + ec: ExecutionContext + ): Future[Int] = { // Return the count of heartbeats written. This could legitimately be less than the size of the `workflowExecutionUuids` // List if any of those workflows completed and their workflow store entries were removed. 
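The hog-group scheduling flow reformatted above reads as: find the hog group with the fewest actively running workflows, then fetch and claim startable workflows for that group; if no group qualifies, nothing is started. A small sketch of that control flow with hypothetical in-memory data:

    object HogGroupSketch extends App {
      // Hypothetical running-workflow counts per hog group.
      val runningByGroup = Map("groupA" -> 3, "groupB" -> 1, "groupC" -> 7)

      // Pick the least-loaded hog group, if any exist.
      val hogGroupOption: Option[String] =
        if (runningByGroup.isEmpty) None
        else Some(runningByGroup.minBy(_._2)._1)

      // Mirror of the match in the diff: no group means nothing to start.
      val startable: Seq[String] = hogGroupOption match {
        case None => Seq.empty // every group saturated or on hold
        case Some(group) => Seq(s"next workflow for $group") // stand-in for fetchAndUpdateStartableWfs
      }

      println(startable) // List(next workflow for groupB)
    }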
val action = for { @@ -147,10 +158,9 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { runTransaction(action) map { _.toMap } } - override def updateWorkflowState(workflowExecutionUuid: String, - fromWorkflowState: String, - toWorkflowState: String) - (implicit ec: ExecutionContext): Future[Int] = { + override def updateWorkflowState(workflowExecutionUuid: String, fromWorkflowState: String, toWorkflowState: String)( + implicit ec: ExecutionContext + ): Future[Int] = { val action = for { updated <- dataAccess .workflowStateForWorkflowExecutionUUidAndWorkflowState((workflowExecutionUuid, fromWorkflowState)) @@ -160,15 +170,14 @@ trait WorkflowStoreSlickDatabase extends WorkflowStoreSqlDatabase { runTransaction(action) } - override def findWorkflowsWithAbortRequested(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[String]] = { + override def findWorkflowsWithAbortRequested(cromwellId: String)(implicit + ec: ExecutionContext + ): Future[Iterable[String]] = runTransaction(dataAccess.findWorkflowsWithAbortRequested(cromwellId).result) - } - override def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[String]] = { + override def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[String]] = runTransaction(dataAccess.findWorkflows(cromwellId).result) - } - override def checkWhetherWorkflowExists(workflowId: String)(implicit ec: ExecutionContext): Future[Boolean] = { + override def checkWhetherWorkflowExists(workflowId: String)(implicit ec: ExecutionContext): Future[Boolean] = runTransaction(dataAccess.checkExists(workflowId).result.map(_.nonEmpty)) - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala index 139558aa7b8..1cbc705e019 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala @@ -2,14 +2,14 @@ package cromwell.database.slick.tables import cromwell.database.sql.tables.CallCachingAggregationEntry - trait CallCachingAggregationEntryComponent { this: DriverComponent with CallCachingEntryComponent with CallCachingDetritusEntryComponent => import driver.api._ - class CallCachingAggregationEntries(tag: Tag) extends Table[CallCachingAggregationEntry](tag, "CALL_CACHING_AGGREGATION_ENTRY") { + class CallCachingAggregationEntries(tag: Tag) + extends Table[CallCachingAggregationEntry](tag, "CALL_CACHING_AGGREGATION_ENTRY") { def callCachingAggregationEntryId = column[Long]("CALL_CACHING_AGGREGATION_ENTRY_ID", O.PrimaryKey, O.AutoInc) def baseAggregation = column[String]("BASE_AGGREGATION", O.Length(255)) @@ -21,8 +21,10 @@ trait CallCachingAggregationEntryComponent { override def * = (baseAggregation, inputFilesAggregation, callCachingEntryId.?, callCachingAggregationEntryId.?) 
<> (CallCachingAggregationEntry.tupled, CallCachingAggregationEntry.unapply) - def fkCallCachingAggregationEntryCallCachingEntryId = foreignKey("FK_CALL_CACHING_AGGREGATION_ENTRY_CALL_CACHING_ENTRY_ID", - callCachingEntryId, callCachingEntries)(_.callCachingEntryId) + def fkCallCachingAggregationEntryCallCachingEntryId = + foreignKey("FK_CALL_CACHING_AGGREGATION_ENTRY_CALL_CACHING_ENTRY_ID", callCachingEntryId, callCachingEntries)( + _.callCachingEntryId + ) def ixCallCachingAggregationEntryBaIfa = index("IX_CALL_CACHING_AGGREGATION_ENTRY_BA_IFA", (baseAggregation, inputFilesAggregation), unique = false) @@ -32,16 +34,16 @@ trait CallCachingAggregationEntryComponent { val callCachingAggregationEntryIdsAutoInc = callCachingAggregationEntries returning callCachingAggregationEntries.map(_.callCachingAggregationEntryId) - - val callCachingAggregationForCacheEntryId = Compiled( - (callCachingEntryId: Rep[Long]) => for { - callCachingAggregationEntry <- callCachingAggregationEntries + + val callCachingAggregationForCacheEntryId = Compiled((callCachingEntryId: Rep[Long]) => + for { + callCachingAggregationEntry <- callCachingAggregationEntries if callCachingAggregationEntry.callCachingEntryId === callCachingEntryId } yield callCachingAggregationEntry ) - val existsCallCachingEntriesForBaseAggregationHash = Compiled( - (baseAggregation: Rep[String]) => (for { + val existsCallCachingEntriesForBaseAggregationHash = Compiled((baseAggregation: Rep[String]) => + (for { callCachingEntry <- callCachingEntries if callCachingEntry.allowResultReuse callCachingAggregationEntry <- callCachingAggregationEntries @@ -52,24 +54,32 @@ trait CallCachingAggregationEntryComponent { val existsCallCachingEntriesForBaseAggregationHashWithCallCachePrefix = Compiled( (baseAggregation: Rep[String], - prefix1: Rep[String], prefix1Length: Rep[Int], - prefix2: Rep[String], prefix2Length: Rep[Int], - prefix3: Rep[String], prefix3Length: Rep[Int] - ) => (for { - callCachingEntry <- callCachingEntries - if callCachingEntry.allowResultReuse - callCachingAggregationEntry <- callCachingAggregationEntries - if callCachingEntry.callCachingEntryId === callCachingAggregationEntry.callCachingEntryId - if callCachingAggregationEntry.baseAggregation === baseAggregation - detritus <- callCachingDetritusEntries - if detritus.callCachingEntryId === callCachingEntry.callCachingEntryId - detritusPath = detritus.detritusValue.map(clobToString) - if (detritusPath.substring(0, prefix1Length) === prefix1) || - (detritusPath.substring(0, prefix2Length) === prefix2) || - (detritusPath.substring(0, prefix3Length) === prefix3)} yield ()).exists + prefix1: Rep[String], + prefix1Length: Rep[Int], + prefix2: Rep[String], + prefix2Length: Rep[Int], + prefix3: Rep[String], + prefix3Length: Rep[Int] + ) => + (for { + callCachingEntry <- callCachingEntries + if callCachingEntry.allowResultReuse + callCachingAggregationEntry <- callCachingAggregationEntries + if callCachingEntry.callCachingEntryId === callCachingAggregationEntry.callCachingEntryId + if callCachingAggregationEntry.baseAggregation === baseAggregation + detritus <- callCachingDetritusEntries + if detritus.callCachingEntryId === callCachingEntry.callCachingEntryId + detritusPath = detritus.detritusValue.map(clobToString) + if (detritusPath.substring(0, prefix1Length) === prefix1) || + (detritusPath.substring(0, prefix2Length) === prefix2) || + (detritusPath.substring(0, prefix3Length) === prefix3) + } yield ()).exists ) - def callCachingEntriesForAggregatedHashes(baseAggregation: Rep[String], 
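Most of the lookup queries being reformatted in these table components use Slick's `Compiled` wrapper, which pre-parameterizes a query so the generated SQL is cached rather than recompiled on every call. A minimal self-contained example of the pattern against a hypothetical two-column table:

    import slick.jdbc.H2Profile.api._

    object CompiledQuerySketch {
      // Hypothetical table, just enough to show the pattern.
      final class Items(tag: Tag) extends Table[(Long, String)](tag, "ITEMS") {
        def id = column[Long]("ID", O.PrimaryKey)
        def name = column[String]("NAME")
        override def * = (id, name)
      }
      val items = TableQuery[Items]

      // Compiled fixes the parameter shape up front; apply it like a function
      // and run the result with db.run(itemById(42L).result).
      val itemById = Compiled((id: Rep[Long]) =>
        for {
          item <- items
          if item.id === id
        } yield item.name
      )

      def main(args: Array[String]): Unit =
        println(itemById(42L).result.statements.mkString) // the cached SELECT
    }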
inputFilesAggregation: Rep[Option[String]], excludedIds: Set[Long]) = { + def callCachingEntriesForAggregatedHashes(baseAggregation: Rep[String], + inputFilesAggregation: Rep[Option[String]], + excludedIds: Set[Long] + ) = (for { callCachingEntry <- callCachingEntries if callCachingEntry.allowResultReuse && !(callCachingEntry.callCachingEntryId inSet excludedIds) @@ -79,13 +89,17 @@ trait CallCachingAggregationEntryComponent { if (callCachingAggregationEntry.inputFilesAggregation.isEmpty && inputFilesAggregation.isEmpty) || (callCachingAggregationEntry.inputFilesAggregation === inputFilesAggregation) } yield callCachingAggregationEntry.callCachingEntryId).take(1) - } - def callCachingEntriesForAggregatedHashesWithPrefixes(baseAggregation: Rep[String], inputFilesAggregation: Rep[Option[String]], - prefix1: Rep[String], prefix1Length: Rep[Int], - prefix2: Rep[String], prefix2Length: Rep[Int], - prefix3: Rep[String], prefix3Length: Rep[Int], - excludedIds: Set[Long]) = { + def callCachingEntriesForAggregatedHashesWithPrefixes(baseAggregation: Rep[String], + inputFilesAggregation: Rep[Option[String]], + prefix1: Rep[String], + prefix1Length: Rep[Int], + prefix2: Rep[String], + prefix2Length: Rep[Int], + prefix3: Rep[String], + prefix3Length: Rep[Int], + excludedIds: Set[Long] + ) = (for { callCachingEntry <- callCachingEntries if callCachingEntry.allowResultReuse && !(callCachingEntry.callCachingEntryId inSet excludedIds) @@ -104,5 +118,4 @@ trait CallCachingAggregationEntryComponent { (detritusPath.substring(0, prefix2Length) === prefix2) || (detritusPath.substring(0, prefix3Length) === prefix3) } yield callCachingAggregationEntry.callCachingEntryId).take(1) - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala index b668adb4940..cafaa91d9b0 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala @@ -11,7 +11,7 @@ trait CallCachingDetritusEntryComponent { import driver.api._ class CallCachingDetritusEntries(tag: Tag) - extends Table[CallCachingDetritusEntry](tag, "CALL_CACHING_DETRITUS_ENTRY") { + extends Table[CallCachingDetritusEntry](tag, "CALL_CACHING_DETRITUS_ENTRY") { def callCachingDetritusEntryId = column[Long]("CALL_CACHING_DETRITUS_ENTRY_ID", O.PrimaryKey, O.AutoInc) def detritusKey = column[String]("DETRITUS_KEY", O.Length(255)) @@ -23,9 +23,10 @@ trait CallCachingDetritusEntryComponent { override def * = (detritusKey, detritusValue, callCachingEntryId.?, callCachingDetritusEntryId.?) 
<> (CallCachingDetritusEntry.tupled, CallCachingDetritusEntry.unapply) - def fkCallCachingDetritusEntryCallCachingEntryId = foreignKey( - "FK_CALL_CACHING_DETRITUS_ENTRY_CALL_CACHING_ENTRY_ID", - callCachingEntryId, callCachingEntries)(_.callCachingEntryId) + def fkCallCachingDetritusEntryCallCachingEntryId = + foreignKey("FK_CALL_CACHING_DETRITUS_ENTRY_CALL_CACHING_ENTRY_ID", callCachingEntryId, callCachingEntries)( + _.callCachingEntryId + ) def ucCallCachingDetritusEntryCceiDk = index("UC_CALL_CACHING_DETRITUS_ENTRY_CCEI_DK", (callCachingEntryId, detritusKey), unique = true) @@ -36,8 +37,8 @@ trait CallCachingDetritusEntryComponent { val callCachingDetritusEntryIdsAutoInc = callCachingDetritusEntries returning callCachingDetritusEntries.map(_.callCachingDetritusEntryId) - val callCachingDetritusEntriesForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Long]) => for { + val callCachingDetritusEntriesForCallCachingEntryId = Compiled((callCachingEntryId: Rep[Long]) => + for { callCachingDetritusEntry <- callCachingDetritusEntries if callCachingDetritusEntry.callCachingEntryId === callCachingEntryId } yield callCachingDetritusEntry diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala index db9c5dc1654..b9a26585bec 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala @@ -22,51 +22,60 @@ trait CallCachingEntryComponent { def returnCode = column[Option[Int]]("RETURN_CODE") def allowResultReuse = column[Boolean]("ALLOW_RESULT_REUSE", O.Default(true)) - - override def * = (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, returnCode, allowResultReuse, - callCachingEntryId.?) <> (CallCachingEntry.tupled, CallCachingEntry.unapply) + + override def * = (workflowExecutionUuid, + callFullyQualifiedName, + jobIndex, + jobAttempt, + returnCode, + allowResultReuse, + callCachingEntryId.? 
+ ) <> (CallCachingEntry.tupled, CallCachingEntry.unapply) def ucCallCachingEntryWeuCfqnJi = - index("UC_CALL_CACHING_ENTRY_WEU_CFQN_JI", (workflowExecutionUuid, callFullyQualifiedName, jobIndex), - unique = true) + index("UC_CALL_CACHING_ENTRY_WEU_CFQN_JI", + (workflowExecutionUuid, callFullyQualifiedName, jobIndex), + unique = true + ) } protected val callCachingEntries = TableQuery[CallCachingEntries] val callCachingEntryIdsAutoInc = callCachingEntries returning callCachingEntries.map(_.callCachingEntryId) - val callCachingEntriesForId = Compiled( - (callCachingEntryId: Rep[Long]) => for { + val callCachingEntriesForId = Compiled((callCachingEntryId: Rep[Long]) => + for { callCachingEntry <- callCachingEntries if callCachingEntry.callCachingEntryId === callCachingEntryId } yield callCachingEntry ) - val allowResultReuseForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Long]) => for { + val allowResultReuseForCallCachingEntryId = Compiled((callCachingEntryId: Rep[Long]) => + for { callCachingEntry <- callCachingEntries if callCachingEntry.callCachingEntryId === callCachingEntryId } yield callCachingEntry.allowResultReuse ) - val callCachingEntriesForWorkflowFqnIndex = Compiled( - (workflowId: Rep[String], callFqn: Rep[String], jobIndex: Rep[Int]) => for { - callCachingEntry <- callCachingEntries - if callCachingEntry.workflowExecutionUuid === workflowId - if callCachingEntry.callFullyQualifiedName === callFqn - if callCachingEntry.jobIndex === jobIndex - } yield callCachingEntry - ) - - val callCachingEntryIdsForWorkflowId = Compiled( - (workflowId: Rep[String]) => for { + val callCachingEntriesForWorkflowFqnIndex = + Compiled((workflowId: Rep[String], callFqn: Rep[String], jobIndex: Rep[Int]) => + for { + callCachingEntry <- callCachingEntries + if callCachingEntry.workflowExecutionUuid === workflowId + if callCachingEntry.callFullyQualifiedName === callFqn + if callCachingEntry.jobIndex === jobIndex + } yield callCachingEntry + ) + + val callCachingEntryIdsForWorkflowId = Compiled((workflowId: Rep[String]) => + for { callCachingEntry <- callCachingEntries if callCachingEntry.workflowExecutionUuid === workflowId } yield callCachingEntry.callCachingEntryId ) - val allowResultReuseForWorkflowId = Compiled( - (workflowId: Rep[String]) => for { + val allowResultReuseForWorkflowId = Compiled((workflowId: Rep[String]) => + for { callCachingEntry <- callCachingEntries if callCachingEntry.workflowExecutionUuid === workflowId } yield callCachingEntry.allowResultReuse diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala index b556b3f22de..b89fe7f5cc8 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala @@ -20,8 +20,10 @@ trait CallCachingHashEntryComponent { override def * = (hashKey, hashValue, callCachingEntryId.?, callCachingHashEntryId.?) 
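Every `override def *` touched in these hunks uses Slick's `<>` operator to map a tuple-shaped projection onto a case class via `tupled`/`unapply`. A compact standalone example of the same mapping, with a hypothetical `Entry` row type:

    import slick.jdbc.H2Profile.api._

    // Hypothetical row type mirroring how these components map tuples to case classes.
    final case class Entry(key: String, value: String, id: Option[Long])

    final class Entries(tag: Tag) extends Table[Entry](tag, "ENTRY") {
      def id = column[Long]("ENTRY_ID", O.PrimaryKey, O.AutoInc)
      def key = column[String]("ENTRY_KEY", O.Length(255))
      def value = column[String]("ENTRY_VALUE")

      // <> wires the tuple projection to the case class: tupled builds an
      // Entry from (key, value, id), unapply deconstructs it for writes.
      override def * = (key, value, id.?) <> (Entry.tupled, Entry.unapply)
    }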
<> (CallCachingHashEntry.tupled, CallCachingHashEntry.unapply) - def fkCallCachingHashEntryCallCachingEntryId = foreignKey("FK_CALL_CACHING_HASH_ENTRY_CALL_CACHING_ENTRY_ID", - callCachingEntryId, callCachingEntries)(_.callCachingEntryId) + def fkCallCachingHashEntryCallCachingEntryId = + foreignKey("FK_CALL_CACHING_HASH_ENTRY_CALL_CACHING_ENTRY_ID", callCachingEntryId, callCachingEntries)( + _.callCachingEntryId + ) def ucCallCachingHashEntryCceiHk = index("UC_CALL_CACHING_HASH_ENTRY_CCEI_HK", (callCachingEntryId, hashKey), unique = true) @@ -31,12 +33,12 @@ trait CallCachingHashEntryComponent { val callCachingHashEntryIdsAutoInc = callCachingHashEntries returning callCachingHashEntries.map(_.callCachingHashEntryId) - + /** * Find all hashes for a CALL_CACHING_ENTRY_ID */ - val callCachingHashEntriesForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Long]) => for { + val callCachingHashEntriesForCallCachingEntryId = Compiled((callCachingEntryId: Rep[Long]) => + for { callCachingHashEntry <- callCachingHashEntries if callCachingHashEntry.callCachingEntryId === callCachingEntryId } yield callCachingHashEntry diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala index b2e6f02b5b9..8c624798ce4 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala @@ -11,7 +11,7 @@ trait CallCachingSimpletonEntryComponent { import driver.api._ class CallCachingSimpletonEntries(tag: Tag) - extends Table[CallCachingSimpletonEntry](tag, "CALL_CACHING_SIMPLETON_ENTRY") { + extends Table[CallCachingSimpletonEntry](tag, "CALL_CACHING_SIMPLETON_ENTRY") { def callCachingSimpletonEntryId = column[Long]("CALL_CACHING_SIMPLETON_ENTRY_ID", O.PrimaryKey, O.AutoInc) def simpletonKey = column[String]("SIMPLETON_KEY", O.Length(255)) @@ -25,8 +25,10 @@ trait CallCachingSimpletonEntryComponent { override def * = (simpletonKey, simpletonValue, wdlType, callCachingEntryId.?, callCachingSimpletonEntryId.?) 
<> (CallCachingSimpletonEntry.tupled, CallCachingSimpletonEntry.unapply) - def fkCallCachingSimpletonEntryCallCachingEntryId = foreignKey( - "FK_CALL_CACHING_SIMPLETON_ENTRY_CALL_CACHING_ENTRY_ID", callCachingEntryId, callCachingEntries)(_.callCachingEntryId) + def fkCallCachingSimpletonEntryCallCachingEntryId = + foreignKey("FK_CALL_CACHING_SIMPLETON_ENTRY_CALL_CACHING_ENTRY_ID", callCachingEntryId, callCachingEntries)( + _.callCachingEntryId + ) def ucCallCachingSimpletonEntryCceiSk = index("UC_CALL_CACHING_SIMPLETON_ENTRY_CCEI_SK", (callCachingEntryId, simpletonKey), unique = true) @@ -40,8 +42,8 @@ trait CallCachingSimpletonEntryComponent { /** * Find all result simpletons which match a given CALL_CACHING_ENTRY_ID */ - val callCachingSimpletonEntriesForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Long]) => for { + val callCachingSimpletonEntriesForCallCachingEntryId = Compiled((callCachingEntryId: Rep[Long]) => + for { callCachingSimpletonEntry <- callCachingSimpletonEntries if callCachingSimpletonEntry.callCachingEntryId === callCachingEntryId } yield callCachingSimpletonEntry diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala index 36789edb696..de988ff2bb1 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala @@ -11,8 +11,7 @@ trait CustomLabelEntryComponent { import driver.api.TupleMethods._ import driver.api._ - class CustomLabelEntries(tag: Tag) - extends Table[CustomLabelEntry](tag, "CUSTOM_LABEL_ENTRY") { + class CustomLabelEntries(tag: Tag) extends Table[CustomLabelEntry](tag, "CUSTOM_LABEL_ENTRY") { def customLabelEntryId = column[Long]("CUSTOM_LABEL_ENTRY_ID", O.PrimaryKey, O.AutoInc) def customLabelKey = column[String]("CUSTOM_LABEL_KEY", O.Length(255)) @@ -31,45 +30,52 @@ trait CustomLabelEntryComponent { ) def fkCustomLabelEntryWorkflowExecutionUuid = foreignKey("FK_CUSTOM_LABEL_ENTRY_WORKFLOW_EXECUTION_UUID", - workflowExecutionUuid, workflowMetadataSummaryEntries)(_.workflowExecutionUuid, onDelete = Cascade) + workflowExecutionUuid, + workflowMetadataSummaryEntries + )(_.workflowExecutionUuid, onDelete = Cascade) - def ucCustomLabelEntryClkWeu = index("UC_CUSTOM_LABEL_ENTRY_CLK_WEU", - (customLabelKey, workflowExecutionUuid), unique = true) + def ucCustomLabelEntryClkWeu = + index("UC_CUSTOM_LABEL_ENTRY_CLK_WEU", (customLabelKey, workflowExecutionUuid), unique = true) - def ixCustomLabelEntryClkClv = index("IX_CUSTOM_LABEL_ENTRY_CLK_CLV", (customLabelKey, customLabelValue), unique = false) -} + def ixCustomLabelEntryClkClv = + index("IX_CUSTOM_LABEL_ENTRY_CLK_CLV", (customLabelKey, customLabelValue), unique = false) + } val customLabelEntries = TableQuery[CustomLabelEntries] val customLabelEntryIdsAutoInc = customLabelEntries returning customLabelEntries.map(_.customLabelEntryId) - val customLabelEntriesForWorkflowExecutionUuidAndLabelKey = Compiled( - (workflowExecutionUuid: Rep[String], labelKey: Rep[String]) => for { - customLabelEntry <- customLabelEntries - if customLabelEntry.workflowExecutionUuid === workflowExecutionUuid && - customLabelEntry.customLabelKey === labelKey - } yield customLabelEntry.forUpdate) + val customLabelEntriesForWorkflowExecutionUuidAndLabelKey = + Compiled((workflowExecutionUuid: Rep[String], labelKey: Rep[String]) => + for { + 
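The foreign keys being re-indented here, like `fkCustomLabelEntryWorkflowExecutionUuid` just above, pass the constraint name, the local column, and the target table, plus an `onDelete` action; `Cascade` is what lets label rows disappear along with their summary row. A minimal sketch of the same construct on hypothetical parent/child tables:

    import slick.jdbc.H2Profile.api._
    import slick.model.ForeignKeyAction

    final class Parents(tag: Tag) extends Table[(Long, String)](tag, "PARENT") {
      def id = column[Long]("PARENT_ID", O.PrimaryKey, O.AutoInc)
      def name = column[String]("NAME")
      override def * = (id, name)
    }

    final class Children(tag: Tag) extends Table[(Long, Long)](tag, "CHILD") {
      def id = column[Long]("CHILD_ID", O.PrimaryKey, O.AutoInc)
      def parentId = column[Long]("PARENT_ID")
      override def * = (id, parentId)

      // Deleting a parent row cascades to its children, the same behavior the
      // custom label fk declares with onDelete = Cascade.
      def parentFk = foreignKey("FK_CHILD_PARENT_ID", parentId, TableQuery[Parents])(
        _.id,
        onDelete = ForeignKeyAction.Cascade
      )
    }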
customLabelEntry <- customLabelEntries + if customLabelEntry.workflowExecutionUuid === workflowExecutionUuid && + customLabelEntry.customLabelKey === labelKey + } yield customLabelEntry.forUpdate + ) def existsWorkflowIdLabelKeyAndValue(workflowId: Rep[String], labelKey: Rep[String], - labelValue: Rep[String]): Rep[Boolean] = { - customLabelEntries.filter(customLabelEntry => - customLabelEntry.workflowExecutionUuid === workflowId && - customLabelEntry.customLabelKey === labelKey && - customLabelEntry.customLabelValue === labelValue - ).exists - } - - val labelsForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + labelValue: Rep[String] + ): Rep[Boolean] = + customLabelEntries + .filter(customLabelEntry => + customLabelEntry.workflowExecutionUuid === workflowId && + customLabelEntry.customLabelKey === labelKey && + customLabelEntry.customLabelValue === labelValue + ) + .exists + + val labelsForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + for { customLabelEntry <- customLabelEntries if customLabelEntry.workflowExecutionUuid === workflowExecutionUuid } yield (customLabelEntry.customLabelKey, customLabelEntry.customLabelValue) ) - val labelsForWorkflowAndSubworkflows = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val labelsForWorkflowAndSubworkflows = Compiled((workflowExecutionUuid: Rep[String]) => + for { summary <- workflowMetadataSummaryEntries if summary.rootWorkflowExecutionUuid === workflowExecutionUuid || summary.workflowExecutionUuid === workflowExecutionUuid customLabelEntry <- customLabelEntries diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/DockerHashStoreEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/DockerHashStoreEntryComponent.scala index 5929b2e5a6a..be8bda6863b 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/DockerHashStoreEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/DockerHashStoreEntryComponent.scala @@ -19,20 +19,27 @@ trait DockerHashStoreEntryComponent { def dockerSize = column[Option[Long]]("DOCKER_SIZE", O.Default(None)) - override def * = (workflowExecutionUuid, dockerTag, dockerHash, dockerSize, dockerHashStoreEntryId.?) <> (DockerHashStoreEntry.tupled, DockerHashStoreEntry.unapply) - - def ucDockerHashStoreEntryWeuDt = index("UC_DOCKER_HASH_STORE_ENTRY_WEU_DT", (workflowExecutionUuid, dockerTag), unique = true) + override def * = (workflowExecutionUuid, + dockerTag, + dockerHash, + dockerSize, + dockerHashStoreEntryId.? 
+ ) <> (DockerHashStoreEntry.tupled, DockerHashStoreEntry.unapply) + + def ucDockerHashStoreEntryWeuDt = + index("UC_DOCKER_HASH_STORE_ENTRY_WEU_DT", (workflowExecutionUuid, dockerTag), unique = true) } val dockerHashStoreEntries = TableQuery[DockerHashStoreEntries] - val dockerHashStoreEntryIdsAutoInc = dockerHashStoreEntries returning dockerHashStoreEntries.map(_.dockerHashStoreEntryId) + val dockerHashStoreEntryIdsAutoInc = + dockerHashStoreEntries returning dockerHashStoreEntries.map(_.dockerHashStoreEntryId) /** * Useful for finding the docker hash store for a given workflow execution UUID */ - val dockerHashStoreEntriesForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val dockerHashStoreEntriesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + for { dockerHashStoreEntry <- dockerHashStoreEntries if dockerHashStoreEntry.workflowExecutionUuid === workflowExecutionUuid } yield dockerHashStoreEntry diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala index 7822e9896ff..fb5160ba69d 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala @@ -53,16 +53,15 @@ trait DriverComponent { /** Adds quotes around the string if required by the DBMS. */ def quoted(string: String) = if (shouldQuote) s""""$string"""" else string - val clobToString: Rep[SerialClob] => Rep[String] = { + val clobToString: Rep[SerialClob] => Rep[String] = this.driver match { - /* + /* Workaround https://jira.mariadb.org/browse/CONJ-717 Bypass Slick `asColumnOf[String]` calling the JDBC `{fn convert(Column, VARCHAR)}`. Instead directly call `concat(Column)` supported by both the MariaDB driver and the MySQL driver. 
- */ + */ case MySQLProfile => SimpleFunction.unary("concat") case _ => _.asColumnOf[String] } - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/EngineDataAccessComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/EngineDataAccessComponent.scala index 6b1bda3d12d..cd2c16379ab 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/EngineDataAccessComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/EngineDataAccessComponent.scala @@ -2,18 +2,19 @@ package cromwell.database.slick.tables import slick.jdbc.JdbcProfile -class EngineDataAccessComponent(val driver: JdbcProfile) extends DataAccessComponent - with CallCachingDetritusEntryComponent - with CallCachingEntryComponent - with CallCachingHashEntryComponent - with CallCachingAggregationEntryComponent - with CallCachingSimpletonEntryComponent - with DockerHashStoreEntryComponent - with JobKeyValueEntryComponent - with JobStoreEntryComponent - with JobStoreSimpletonEntryComponent - with SubWorkflowStoreEntryComponent - with WorkflowStoreEntryComponent { +class EngineDataAccessComponent(val driver: JdbcProfile) + extends DataAccessComponent + with CallCachingDetritusEntryComponent + with CallCachingEntryComponent + with CallCachingHashEntryComponent + with CallCachingAggregationEntryComponent + with CallCachingSimpletonEntryComponent + with DockerHashStoreEntryComponent + with JobKeyValueEntryComponent + with JobStoreEntryComponent + with JobStoreSimpletonEntryComponent + with SubWorkflowStoreEntryComponent + with WorkflowStoreEntryComponent { import driver.api._ diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/JobKeyValueEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/JobKeyValueEntryComponent.scala index ee75cda27c0..cd730a5c154 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/JobKeyValueEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/JobKeyValueEntryComponent.scala @@ -23,11 +23,20 @@ trait JobKeyValueEntryComponent { def storeValue = column[String]("STORE_VALUE", O.Length(255)) - override def * = (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, storeKey, storeValue, - jobKeyValueEntryId.?) <> (JobKeyValueEntry.tupled, JobKeyValueEntry.unapply) - - def ucJobKeyValueEntryWeuCfqnJiJaSk = index("UC_JOB_KEY_VALUE_ENTRY_WEU_CFQN_JI_JA_SK", - (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, storeKey), unique = true) + override def * = (workflowExecutionUuid, + callFullyQualifiedName, + jobIndex, + jobAttempt, + storeKey, + storeValue, + jobKeyValueEntryId.? 
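The `clobToString` hunk above is worth a second look: to dodge MariaDB's broken handling of `{fn convert(Column, VARCHAR)}` (CONJ-717), the MySQL branch declares a one-argument SQL function so Slick emits `concat(col)` instead. A sketch of just that declaration, with the typing assumed from the surrounding code:

    import javax.sql.rowset.serial.SerialClob
    import slick.jdbc.MySQLProfile.api._

    object ClobToStringSketch {
      // SimpleFunction.unary declares an arbitrary one-argument SQL function;
      // here it makes Slick render concat(column) rather than the JDBC
      // convert() escape that MariaDB's driver mishandles.
      val clobToString: Rep[SerialClob] => Rep[String] =
        SimpleFunction.unary[SerialClob, String]("concat")
    }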
+ ) <> (JobKeyValueEntry.tupled, JobKeyValueEntry.unapply) + + def ucJobKeyValueEntryWeuCfqnJiJaSk = index( + "UC_JOB_KEY_VALUE_ENTRY_WEU_CFQN_JI_JA_SK", + (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, storeKey), + unique = true + ) } protected val jobKeyValueEntries = TableQuery[JobKeyValueEntries] @@ -37,21 +46,27 @@ trait JobKeyValueEntryComponent { val jobKeyValueEntriesExists = Compiled(jobKeyValueEntries.take(1).exists) - val jobKeyValueEntriesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => for { + val jobKeyValueEntriesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + for { jobKeyValueEntry <- jobKeyValueEntries if jobKeyValueEntry.workflowExecutionUuid === workflowExecutionUuid } yield jobKeyValueEntry ) val storeValuesForJobKeyAndStoreKey = Compiled( - (workflowExecutionUuid: Rep[String], callFullyQualifiedName: Rep[String], jobIndex: Rep[Int], jobAttempt: Rep[Int], - storeKey: Rep[String]) => for { - jobKeyValueEntry <- jobKeyValueEntries - if jobKeyValueEntry.workflowExecutionUuid === workflowExecutionUuid - if jobKeyValueEntry.callFullyQualifiedName === callFullyQualifiedName - if jobKeyValueEntry.jobIndex === jobIndex - if jobKeyValueEntry.jobAttempt === jobAttempt - if jobKeyValueEntry.storeKey === storeKey - } yield jobKeyValueEntry.storeValue + (workflowExecutionUuid: Rep[String], + callFullyQualifiedName: Rep[String], + jobIndex: Rep[Int], + jobAttempt: Rep[Int], + storeKey: Rep[String] + ) => + for { + jobKeyValueEntry <- jobKeyValueEntries + if jobKeyValueEntry.workflowExecutionUuid === workflowExecutionUuid + if jobKeyValueEntry.callFullyQualifiedName === callFullyQualifiedName + if jobKeyValueEntry.jobIndex === jobIndex + if jobKeyValueEntry.jobAttempt === jobAttempt + if jobKeyValueEntry.storeKey === storeKey + } yield jobKeyValueEntry.storeValue ) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala index 04c01ead0cb..acc9c6f12d3 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala @@ -30,11 +30,21 @@ trait JobStoreEntryComponent { def retryableFailure = column[Option[Boolean]]("RETRYABLE_FAILURE") - override def * = (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, jobSuccessful, returnCode, - exceptionMessage, retryableFailure, jobStoreEntryId.?) <> (JobStoreEntry.tupled, JobStoreEntry.unapply) + override def * = (workflowExecutionUuid, + callFullyQualifiedName, + jobIndex, + jobAttempt, + jobSuccessful, + returnCode, + exceptionMessage, + retryableFailure, + jobStoreEntryId.? + ) <> (JobStoreEntry.tupled, JobStoreEntry.unapply) def ucJobStoreEntryWeuCfqnJiJa = index("UC_JOB_STORE_ENTRY_WEU_CFQN_JI_JA", - (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt), unique = true) + (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt), + unique = true + ) def ixJobStoreEntryWeu = index("IX_JOB_STORE_ENTRY_WEU", workflowExecutionUuid, unique = false) } @@ -46,8 +56,8 @@ trait JobStoreEntryComponent { /** * Useful for finding all job stores for a given workflow execution UUID (e.g. so you can delete them! 
Bwahaha) */ - val jobStoreEntriesForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val jobStoreEntriesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + for { jobStoreEntry <- jobStoreEntries if jobStoreEntry.workflowExecutionUuid === workflowExecutionUuid } yield jobStoreEntry @@ -57,8 +67,11 @@ trait JobStoreEntryComponent { * Useful for finding the unique job store for a given job key */ val jobStoreEntriesForJobKey = Compiled( - (workflowExecutionUuid: Rep[String], callFullyQualifiedName: Rep[String], jobIndex: Rep[Int], - jobAttempt: Rep[Int]) => + (workflowExecutionUuid: Rep[String], + callFullyQualifiedName: Rep[String], + jobIndex: Rep[Int], + jobAttempt: Rep[Int] + ) => for { jobStoreEntry <- jobStoreEntries if jobStoreEntry.workflowExecutionUuid === workflowExecutionUuid && diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala index 285ccc93b82..da14cede6e8 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala @@ -26,7 +26,9 @@ trait JobStoreSimpletonEntryComponent { (JobStoreSimpletonEntry.tupled, JobStoreSimpletonEntry.unapply) def fkJobStoreSimpletonEntryJobStoreEntryId = foreignKey("FK_JOB_STORE_SIMPLETON_ENTRY_JOB_STORE_ENTRY_ID", - jobStoreEntryId, jobStoreEntries)(_.jobStoreEntryId, onDelete = Cascade) + jobStoreEntryId, + jobStoreEntries + )(_.jobStoreEntryId, onDelete = Cascade) def ucJobStoreSimpletonEntryJseiSk = index("UC_JOB_STORE_SIMPLETON_ENTRY_JSEI_SK", (jobStoreEntryId, simpletonKey), unique = true) @@ -40,8 +42,8 @@ trait JobStoreSimpletonEntryComponent { /** * Find all result simpletons which match a given JOB_STORE_ENTRY_ID */ - val jobStoreSimpletonEntriesForJobStoreEntryId = Compiled( - (jobStoreEntryId: Rep[Long]) => for { + val jobStoreSimpletonEntriesForJobStoreEntryId = Compiled((jobStoreEntryId: Rep[Long]) => + for { jobStoreSimpletonEntry <- jobStoreSimpletonEntries if jobStoreSimpletonEntry.jobStoreEntryId === jobStoreEntryId } yield jobStoreSimpletonEntry ) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataDataAccessComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataDataAccessComponent.scala index f3acdd6736d..a8eb1e2c22e 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataDataAccessComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataDataAccessComponent.scala @@ -2,17 +2,18 @@ package cromwell.database.slick.tables import slick.jdbc.JdbcProfile -class MetadataDataAccessComponent(val driver: JdbcProfile) extends DataAccessComponent - with CustomLabelEntryComponent - with MetadataEntryComponent - with SummaryStatusEntryComponent - with SummaryQueueEntryComponent - with WorkflowMetadataSummaryEntryComponent { +class MetadataDataAccessComponent(val driver: JdbcProfile) + extends DataAccessComponent + with CustomLabelEntryComponent + with MetadataEntryComponent + with SummaryStatusEntryComponent + with SummaryQueueEntryComponent + with WorkflowMetadataSummaryEntryComponent { import driver.api._ override lazy val schema: driver.SchemaDescription = - customLabelEntries.schema ++ + customLabelEntries.schema ++ metadataEntries.schema ++ 
      summaryStatusEntries.schema ++
      workflowMetadataSummaryEntries.schema ++
@@ -20,12 +21,11 @@ class MetadataDataAccessComponent(val driver: JdbcProfile) extends DataAccessCom
 
   // Looks like here is the most appropriate place for this val since it fits neither in
   // SummaryQueueEntryComponent nor in MetadataEntryComponent
-  val metadataEntriesToSummarizeQuery = {
-    Compiled(
-      (limit: ConstColumn[Long]) => (for {
+  val metadataEntriesToSummarizeQuery =
+    Compiled((limit: ConstColumn[Long]) =>
+      (for {
         summaryEntry <- summaryQueueEntries.take(limit)
         metadataEntry <- metadataEntries if metadataEntry.metadataEntryId === summaryEntry.metadataJournalId
       } yield metadataEntry).sortBy(_.metadataEntryId)
     )
-  }
 }
diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala
index 8c3c0aa27c9..3ba54ee2760 100644
--- a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala
+++ b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala
@@ -27,7 +27,8 @@ trait MetadataEntryComponent {
 
   def metadataEntryId = column[Long]("METADATA_JOURNAL_ID", O.PrimaryKey, O.AutoInc)
 
-  def workflowExecutionUuid = column[String]("WORKFLOW_EXECUTION_UUID", O.Length(255)) // TODO: rename column via liquibase
+  def workflowExecutionUuid =
+    column[String]("WORKFLOW_EXECUTION_UUID", O.Length(255)) // TODO: rename column via liquibase
 
   def callFullyQualifiedName = column[Option[String]]("CALL_FQN", O.Length(255)) // TODO: rename column via liquibase
 
@@ -43,8 +44,16 @@ trait MetadataEntryComponent {
 
   def metadataTimestamp = column[Timestamp]("METADATA_TIMESTAMP")
 
-  override def * = (workflowExecutionUuid, callFullyQualifiedName, jobIndex, jobAttempt, metadataKey, metadataValue,
-    metadataValueType, metadataTimestamp, metadataEntryId.?) <> (MetadataEntry.tupled, MetadataEntry.unapply)
+  override def * = (workflowExecutionUuid,
+                    callFullyQualifiedName,
+                    jobIndex,
+                    jobAttempt,
+                    metadataKey,
+                    metadataValue,
+                    metadataValueType,
+                    metadataTimestamp,
+                    metadataEntryId.?
+ ) <> (MetadataEntry.tupled, MetadataEntry.unapply) // TODO: rename index via liquibase def ixMetadataEntryWeu = index("METADATA_WORKFLOW_IDX", workflowExecutionUuid, unique = false) @@ -56,151 +65,173 @@ trait MetadataEntryComponent { val metadataEntriesExists = Compiled(metadataEntries.take(1).exists) - val metadataEntriesForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => (for { + val metadataEntriesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + (for { metadataEntry <- metadataEntries if metadataEntry.workflowExecutionUuid === workflowExecutionUuid } yield metadataEntry).sortBy(_.metadataTimestamp) ) - val metadataEntriesForWorkflowSortedById = Compiled( - (workflowExecutionUuid: Rep[String]) => (for { + val metadataEntriesForWorkflowSortedById = Compiled((workflowExecutionUuid: Rep[String]) => + (for { metadataEntry <- metadataEntries if metadataEntry.workflowExecutionUuid === workflowExecutionUuid } yield metadataEntry).sortBy(_.metadataEntryId) ) - val countMetadataEntriesForWorkflowExecutionUuid = Compiled( - (rootWorkflowId: Rep[String], expandSubWorkflows: Rep[Boolean]) => { - val targetWorkflowIds = for { - summary <- workflowMetadataSummaryEntries - // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` - if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) - } yield summary.workflowExecutionUuid - - for { - metadata <- metadataEntries - if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` - } yield metadata - }.size - ) - - val metadataEntryExistsForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => (for { + val countMetadataEntriesForWorkflowExecutionUuid = + Compiled((rootWorkflowId: Rep[String], expandSubWorkflows: Rep[Boolean]) => + { + val targetWorkflowIds = for { + summary <- workflowMetadataSummaryEntries + // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` + if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) + } yield summary.workflowExecutionUuid + + for { + metadata <- metadataEntries + if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` + } yield metadata + }.size + ) + + val metadataEntryExistsForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + (for { metadataEntry <- metadataEntries if metadataEntry.workflowExecutionUuid === workflowExecutionUuid } yield metadataEntry).exists ) - def metadataEntryExistsForWorkflowExecutionUuid(workflowId: Rep[String], key: Rep[String]): Rep[Boolean] = { - metadataEntries.filter( metadataEntry => - metadataEntry.workflowExecutionUuid === workflowId && - metadataEntry.metadataKey === key && - metadataEntry.metadataValue.isDefined - ).exists - } - - val metadataEntriesForWorkflowExecutionUuidAndMetadataKey = Compiled( - (workflowExecutionUuid: Rep[String], metadataKey: Rep[String]) => (for { - metadataEntry <- metadataEntries - if metadataEntry.workflowExecutionUuid === workflowExecutionUuid - if metadataEntry.metadataKey === metadataKey - if metadataEntry.callFullyQualifiedName.isEmpty - if metadataEntry.jobIndex.isEmpty - if metadataEntry.jobAttempt.isEmpty - } yield metadataEntry).sortBy(_.metadataTimestamp) - ) + def metadataEntryExistsForWorkflowExecutionUuid(workflowId: Rep[String], key: Rep[String]): Rep[Boolean] 
= + metadataEntries + .filter(metadataEntry => + metadataEntry.workflowExecutionUuid === workflowId && + metadataEntry.metadataKey === key && + metadataEntry.metadataValue.isDefined + ) + .exists - val countMetadataEntriesForWorkflowExecutionUuidAndMetadataKey = Compiled( - (rootWorkflowId: Rep[String], metadataKey: Rep[String], expandSubWorkflows: Rep[Boolean]) => { - val targetWorkflowIds = for { - summary <- workflowMetadataSummaryEntries - // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` - if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) - } yield summary.workflowExecutionUuid - - for { - metadata <- metadataEntries - if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` - if metadata.metadataKey === metadataKey - if metadata.callFullyQualifiedName.isEmpty - if metadata.jobIndex.isEmpty - if metadata.jobAttempt.isEmpty - } yield metadata - }.size - ) + val metadataEntriesForWorkflowExecutionUuidAndMetadataKey = + Compiled((workflowExecutionUuid: Rep[String], metadataKey: Rep[String]) => + (for { + metadataEntry <- metadataEntries + if metadataEntry.workflowExecutionUuid === workflowExecutionUuid + if metadataEntry.metadataKey === metadataKey + if metadataEntry.callFullyQualifiedName.isEmpty + if metadataEntry.jobIndex.isEmpty + if metadataEntry.jobAttempt.isEmpty + } yield metadataEntry).sortBy(_.metadataTimestamp) + ) + + val countMetadataEntriesForWorkflowExecutionUuidAndMetadataKey = + Compiled((rootWorkflowId: Rep[String], metadataKey: Rep[String], expandSubWorkflows: Rep[Boolean]) => + { + val targetWorkflowIds = for { + summary <- workflowMetadataSummaryEntries + // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` + if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) + } yield summary.workflowExecutionUuid + + for { + metadata <- metadataEntries + if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` + if metadata.metadataKey === metadataKey + if metadata.callFullyQualifiedName.isEmpty + if metadata.jobIndex.isEmpty + if metadata.jobAttempt.isEmpty + } yield metadata + }.size + ) val metadataEntriesForJobKey = Compiled( - (workflowExecutionUuid: Rep[String], callFullyQualifiedName: Rep[String], jobIndex: Rep[Option[Int]], - jobAttempt: Rep[Option[Int]]) => (for { - metadataEntry <- metadataEntries - if metadataEntry.workflowExecutionUuid === workflowExecutionUuid - if metadataEntry.callFullyQualifiedName === callFullyQualifiedName - if hasSameIndex(metadataEntry, jobIndex) - if hasSameAttempt(metadataEntry, jobAttempt) - } yield metadataEntry).sortBy(_.metadataTimestamp) + (workflowExecutionUuid: Rep[String], + callFullyQualifiedName: Rep[String], + jobIndex: Rep[Option[Int]], + jobAttempt: Rep[Option[Int]] + ) => + (for { + metadataEntry <- metadataEntries + if metadataEntry.workflowExecutionUuid === workflowExecutionUuid + if metadataEntry.callFullyQualifiedName === callFullyQualifiedName + if hasSameIndex(metadataEntry, jobIndex) + if hasSameAttempt(metadataEntry, jobAttempt) + } yield metadataEntry).sortBy(_.metadataTimestamp) ) val countMetadataEntriesForJobKey = Compiled( - (rootWorkflowId: Rep[String], callFullyQualifiedName: Rep[String], jobIndex: Rep[Option[Int]], - jobAttempt: Rep[Option[Int]], expandSubWorkflows: Rep[Boolean]) => { - val targetWorkflowIds = 
for { - summary <- workflowMetadataSummaryEntries - // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` - if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) - } yield summary.workflowExecutionUuid - - for { - metadata <- metadataEntries - if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` - if metadata.callFullyQualifiedName === callFullyQualifiedName - if hasSameIndex(metadata, jobIndex) - if hasSameAttempt(metadata, jobAttempt) - } yield metadata - }.size + (rootWorkflowId: Rep[String], + callFullyQualifiedName: Rep[String], + jobIndex: Rep[Option[Int]], + jobAttempt: Rep[Option[Int]], + expandSubWorkflows: Rep[Boolean] + ) => + { + val targetWorkflowIds = for { + summary <- workflowMetadataSummaryEntries + // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` + if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) + } yield summary.workflowExecutionUuid + + for { + metadata <- metadataEntries + if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` + if metadata.callFullyQualifiedName === callFullyQualifiedName + if hasSameIndex(metadata, jobIndex) + if hasSameAttempt(metadata, jobAttempt) + } yield metadata + }.size ) val metadataEntriesForJobKeyAndMetadataKey = Compiled( - (workflowExecutionUuid: Rep[String], metadataKey: Rep[String], callFullyQualifiedName: Rep[String], - jobIndex: Rep[Option[Int]], jobAttempt: Rep[Option[Int]]) => (for { - metadataEntry <- metadataEntries - if metadataEntry.workflowExecutionUuid === workflowExecutionUuid - if metadataEntry.metadataKey === metadataKey - if metadataEntry.callFullyQualifiedName === callFullyQualifiedName - if hasSameIndex(metadataEntry, jobIndex) - if hasSameAttempt(metadataEntry, jobAttempt) - } yield metadataEntry).sortBy(_.metadataTimestamp) + (workflowExecutionUuid: Rep[String], + metadataKey: Rep[String], + callFullyQualifiedName: Rep[String], + jobIndex: Rep[Option[Int]], + jobAttempt: Rep[Option[Int]] + ) => + (for { + metadataEntry <- metadataEntries + if metadataEntry.workflowExecutionUuid === workflowExecutionUuid + if metadataEntry.metadataKey === metadataKey + if metadataEntry.callFullyQualifiedName === callFullyQualifiedName + if hasSameIndex(metadataEntry, jobIndex) + if hasSameAttempt(metadataEntry, jobAttempt) + } yield metadataEntry).sortBy(_.metadataTimestamp) ) val countMetadataEntriesForJobKeyAndMetadataKey = Compiled( - (rootWorkflowId: Rep[String], metadataKey: Rep[String], callFullyQualifiedName: Rep[String], - jobIndex: Rep[Option[Int]], jobAttempt: Rep[Option[Int]], expandSubWorkflows: Rep[Boolean]) => { - val targetWorkflowIds = for { - summary <- workflowMetadataSummaryEntries - // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` - if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) - } yield summary.workflowExecutionUuid - - for { - metadata <- metadataEntries - if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` - if metadata.metadataKey === metadataKey - if metadata.callFullyQualifiedName === callFullyQualifiedName - if hasSameIndex(metadata, jobIndex) - if hasSameAttempt(metadata, jobAttempt) - } yield metadata - }.size + (rootWorkflowId: 
Rep[String], + metadataKey: Rep[String], + callFullyQualifiedName: Rep[String], + jobIndex: Rep[Option[Int]], + jobAttempt: Rep[Option[Int]], + expandSubWorkflows: Rep[Boolean] + ) => + { + val targetWorkflowIds = for { + summary <- workflowMetadataSummaryEntries + // Uses `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU`, `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU` + if summary.workflowExecutionUuid === rootWorkflowId || ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows) + } yield summary.workflowExecutionUuid + + for { + metadata <- metadataEntries + if metadata.workflowExecutionUuid in targetWorkflowIds // Uses `METADATA_WORKFLOW_IDX` + if metadata.metadataKey === metadataKey + if metadata.callFullyQualifiedName === callFullyQualifiedName + if hasSameIndex(metadata, jobIndex) + if hasSameAttempt(metadata, jobAttempt) + } yield metadata + }.size ) - val metadataEntriesForIdRange = Compiled( - (minMetadataEntryId: Rep[Long], maxMetadataEntryId: Rep[Long]) => { - for { - metadataEntry <- metadataEntries - if metadataEntry.metadataEntryId >= minMetadataEntryId - if metadataEntry.metadataEntryId <= maxMetadataEntryId - } yield metadataEntry - } - ) + val metadataEntriesForIdRange = Compiled { (minMetadataEntryId: Rep[Long], maxMetadataEntryId: Rep[Long]) => + for { + metadataEntry <- metadataEntries + if metadataEntry.metadataEntryId >= minMetadataEntryId + if metadataEntry.metadataEntryId <= maxMetadataEntryId + } yield metadataEntry + } /** * Returns metadata entries that are "like" metadataKeys for the specified workflow. @@ -210,14 +241,14 @@ trait MetadataEntryComponent { def metadataEntriesWithKeyConstraints(workflowExecutionUuid: String, metadataKeysToFilterFor: List[String], metadataKeysToFilterOut: List[String], - requireEmptyJobKey: Boolean) = { + requireEmptyJobKey: Boolean + ) = (for { metadataEntry <- metadataEntries if metadataEntry.workflowExecutionUuid === workflowExecutionUuid if metadataEntryHasMetadataKeysLike(metadataEntry, metadataKeysToFilterFor, metadataKeysToFilterOut) if metadataEntryHasEmptyJobKey(metadataEntry, requireEmptyJobKey) } yield metadataEntry).sortBy(_.metadataTimestamp) - } /** * Counts metadata entries that are "like" metadataKeys for the specified workflow. @@ -228,7 +259,8 @@ trait MetadataEntryComponent { metadataKeysToFilterFor: List[String], metadataKeysToFilterOut: List[String], requireEmptyJobKey: Boolean, - expandSubWorkflows: Boolean) = { + expandSubWorkflows: Boolean + ) = { val targetWorkflowIds = for { summary <- workflowMetadataSummaryEntries @@ -253,7 +285,8 @@ trait MetadataEntryComponent { metadataKeysToFilterOut: List[String], callFqn: String, jobIndex: Option[Int], - jobAttempt: Option[Int]) = { + jobAttempt: Option[Int] + ) = (for { metadataEntry <- metadataEntries if metadataEntry.workflowExecutionUuid === workflowExecutionUuid @@ -265,7 +298,6 @@ trait MetadataEntryComponent { // regardless of the attempt if (metadataEntry.jobAttempt === jobAttempt) || jobAttempt.isEmpty } yield metadataEntry).sortBy(_.metadataTimestamp) - } /** * Counts metadata entries that are "like" metadataKeys for the specified call. 
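
The counting and fetching query variants reworked above all share a single shape: first
resolve the set of target workflow UUIDs through WORKFLOW_METADATA_SUMMARY_ENTRY (the root
workflow plus, when requested, its subworkflows), then filter METADATA_ENTRY against that
set. A minimal sketch of the shared shape, written against the workflowMetadataSummaryEntries
and metadataEntries table queries defined in this trait (the method name is illustrative):

    def countEntriesForRoot(rootWorkflowId: Rep[String], expandSubWorkflows: Rep[Boolean]) = {
      // Resolved via `IX_WORKFLOW_METADATA_SUMMARY_ENTRY_RWEU` / `UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU`
      val targetWorkflowIds = for {
        summary <- workflowMetadataSummaryEntries
        if summary.workflowExecutionUuid === rootWorkflowId ||
          ((summary.rootWorkflowExecutionUuid === rootWorkflowId) && expandSubWorkflows)
      } yield summary.workflowExecutionUuid

      // Filtered via `METADATA_WORKFLOW_IDX`
      (for {
        metadata <- metadataEntries
        if metadata.workflowExecutionUuid in targetWorkflowIds
      } yield metadata).size
    }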
@@ -277,7 +309,8 @@ trait MetadataEntryComponent { callFqn: String, jobIndex: Option[Int], jobAttempt: Option[Int], - expandSubWorkflows: Boolean) = { + expandSubWorkflows: Boolean + ) = { val targetWorkflowIds = for { summary <- workflowMetadataSummaryEntries @@ -301,31 +334,43 @@ trait MetadataEntryComponent { def metadataTableSizeInformation() = { val query = sql""" - |SELECT DATA_LENGTH, INDEX_LENGTH, DATA_FREE - |FROM information_schema.tables - |WHERE TABLE_NAME = 'METADATA_ENTRY' + |SELECT DATA_LENGTH, INDEX_LENGTH, DATA_FREE + |FROM information_schema.tables + |WHERE TABLE_NAME = 'METADATA_ENTRY' """.stripMargin - query.as[InformationSchemaEntry](rconv = GetResult { r => - InformationSchemaEntry(r.<<, r.<<, r.<<) - }).headOption + query + .as[InformationSchemaEntry](rconv = GetResult { r => + InformationSchemaEntry(r.<<, r.<<, r.<<) + }) + .headOption } def failedJobsMetadataWithWorkflowId(rootWorkflowId: String, isPostgres: Boolean) = { - val getMetadataEntryResult = GetResult(r => { - MetadataEntry(r.<<, r.<<, r.<<, r.<<, r.<<, r.nextClobOption().map(clob => new SerialClob(clob)), r.<<, r.<<, r.<<) - }) - - def dbIdentifierWrapper(identifier: String, isPostgres: Boolean) = { - if(isPostgres) s"${'"'}$identifier${'"'}" else identifier + val getMetadataEntryResult = GetResult { r => + MetadataEntry(r.<<, + r.<<, + r.<<, + r.<<, + r.<<, + r.nextClobOption().map(clob => new SerialClob(clob)), + r.<<, + r.<<, + r.<< + ) } - def evaluateMetadataValue(isPostgres: Boolean, colName: String): String = { - if(isPostgres) s"convert_from(lo_get(${colName}::oid), 'UTF8')" else colName - } + def dbIdentifierWrapper(identifier: String, isPostgres: Boolean) = + if (isPostgres) s"${'"'}$identifier${'"'}" else identifier + + def evaluateMetadataValue(isPostgres: Boolean, colName: String): String = + if (isPostgres) s"convert_from(lo_get(${colName}::oid), 'UTF8')" else colName - def attemptAndIndexSelectStatement(callFqn: String, scatterIndex: String, retryAttempt: String, variablePrefix: String): String = { + def attemptAndIndexSelectStatement(callFqn: String, + scatterIndex: String, + retryAttempt: String, + variablePrefix: String + ): String = s"SELECT ${callFqn}, MAX(COALESCE(${scatterIndex}, 0)) as ${variablePrefix}Scatter, MAX(COALESCE(${retryAttempt}, 0)) AS ${variablePrefix}Retry" - } val workflowUuid = dbIdentifierWrapper("WORKFLOW_EXECUTION_UUID", isPostgres) val callFqn = dbIdentifierWrapper("CALL_FQN", isPostgres) @@ -339,7 +384,8 @@ trait MetadataEntryComponent { val metadataValue = dbIdentifierWrapper("METADATA_VALUE", isPostgres) val metadataEntry = dbIdentifierWrapper("METADATA_ENTRY", isPostgres) val wmse = dbIdentifierWrapper("WORKFLOW_METADATA_SUMMARY_ENTRY", isPostgres) - val resultSetColumnNames = s"me.${workflowUuid}, me.${callFqn}, me.${scatterIndex}, me.${retryAttempt}, me.${metadataKey}, me.${metadataValue}, me.${metadataValueType}, me.${metadataTimestamp}, me.${metadataJournalId}" + val resultSetColumnNames = + s"me.${workflowUuid}, me.${callFqn}, me.${scatterIndex}, me.${retryAttempt}, me.${metadataKey}, me.${metadataValue}, me.${metadataValueType}, me.${metadataTimestamp}, me.${metadataJournalId}" val query = sql""" @@ -351,7 +397,9 @@ trait MetadataEntryComponent { INNER JOIN #${wmse} wmse ON wmse.#${workflowUuid} = me.#${workflowUuid} WHERE (wmse.#${rootUuid} = $rootWorkflowId OR wmse.#${workflowUuid} = $rootWorkflowId) - AND (me.#${metadataKey} in ('executionStatus', 'backendStatus') AND #${evaluateMetadataValue(isPostgres, metadataValue)} = 'Failed') + AND 
(me.#${metadataKey} in ('executionStatus', 'backendStatus') AND #${evaluateMetadataValue(isPostgres, + metadataValue + )} = 'Failed') GROUP BY #${callFqn}, #${metadataValue} HAVING #${evaluateMetadataValue(isPostgres, metadataValue)} = 'Failed' ) AS failedCalls @@ -396,7 +444,8 @@ trait MetadataEntryComponent { private[this] def metadataEntryHasMetadataKeysLike(metadataEntry: MetadataEntries, metadataKeysToFilterFor: List[String], - metadataKeysToFilterOut: List[String]): Rep[Boolean] = { + metadataKeysToFilterOut: List[String] + ): Rep[Boolean] = { def containsKey(key: String): Rep[Boolean] = metadataEntry.metadataKey like key @@ -414,19 +463,17 @@ trait MetadataEntryComponent { } } - private[this] def hasSameIndex(metadataEntry: MetadataEntries, jobIndex: Rep[Option[Int]]) = { + private[this] def hasSameIndex(metadataEntry: MetadataEntries, jobIndex: Rep[Option[Int]]) = (metadataEntry.jobIndex.isEmpty && jobIndex.isEmpty) || (metadataEntry.jobIndex === jobIndex) - } - private[this] def hasSameAttempt(metadataEntry: MetadataEntries, jobAttempt: Rep[Option[Int]]) = { + private[this] def hasSameAttempt(metadataEntry: MetadataEntries, jobAttempt: Rep[Option[Int]]) = (metadataEntry.jobAttempt.isEmpty && jobAttempt.isEmpty) || (metadataEntry.jobAttempt === jobAttempt) - } private[this] def metadataEntryHasEmptyJobKey(metadataEntry: MetadataEntries, - requireEmptyJobKey: Rep[Boolean]): Rep[Boolean] = { + requireEmptyJobKey: Rep[Boolean] + ): Rep[Boolean] = !requireEmptyJobKey || (metadataEntry.callFullyQualifiedName.isEmpty && metadataEntry.jobIndex.isEmpty && metadataEntry.jobAttempt.isEmpty) - } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/SubWorkflowStoreEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/SubWorkflowStoreEntryComponent.scala index 11fa2191cb2..77e39b43816 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/SubWorkflowStoreEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/SubWorkflowStoreEntryComponent.scala @@ -13,7 +13,7 @@ trait SubWorkflowStoreEntryComponent { def subWorkflowStoreEntryId = column[Long]("SUB_WORKFLOW_STORE_ENTRY_ID", O.PrimaryKey, O.AutoInc) def rootWorkflowId = column[Long]("ROOT_WORKFLOW_ID") - + def parentWorkflowExecutionUuid = column[String]("PARENT_WORKFLOW_EXECUTION_UUID", O.Length(255)) def callFullyQualifiedName = column[String]("CALL_FULLY_QUALIFIED_NAME", O.Length(255)) @@ -24,23 +24,37 @@ trait SubWorkflowStoreEntryComponent { def subWorkflowExecutionUuid = column[String]("SUB_WORKFLOW_EXECUTION_UUID", O.Length(255)) - override def * = (rootWorkflowId.?, parentWorkflowExecutionUuid, callFullyQualifiedName, callIndex, callAttempt, subWorkflowExecutionUuid, subWorkflowStoreEntryId.?) <> (SubWorkflowStoreEntry.tupled, SubWorkflowStoreEntry.unapply) - - def ucSubWorkflowStoreEntryPweuCfqnCiCa = index("UC_SUB_WORKFLOW_STORE_ENTRY_PWEU_CFQN_CI_CA", - (parentWorkflowExecutionUuid, callFullyQualifiedName, callIndex, callAttempt), unique = true) + override def * = (rootWorkflowId.?, + parentWorkflowExecutionUuid, + callFullyQualifiedName, + callIndex, + callAttempt, + subWorkflowExecutionUuid, + subWorkflowStoreEntryId.? 
+ ) <> (SubWorkflowStoreEntry.tupled, SubWorkflowStoreEntry.unapply) + + def ucSubWorkflowStoreEntryPweuCfqnCiCa = index( + "UC_SUB_WORKFLOW_STORE_ENTRY_PWEU_CFQN_CI_CA", + (parentWorkflowExecutionUuid, callFullyQualifiedName, callIndex, callAttempt), + unique = true + ) def fkSubWorkflowStoreEntryRootWorkflowId = foreignKey("FK_SUB_WORKFLOW_STORE_ENTRY_ROOT_WORKFLOW_ID", - rootWorkflowId, workflowStoreEntries)(_.workflowStoreEntryId, onDelete = Cascade) + rootWorkflowId, + workflowStoreEntries + )(_.workflowStoreEntryId, onDelete = Cascade) - def ixSubWorkflowStoreEntryPweu = index("IX_SUB_WORKFLOW_STORE_ENTRY_PWEU", parentWorkflowExecutionUuid, unique = false) + def ixSubWorkflowStoreEntryPweu = + index("IX_SUB_WORKFLOW_STORE_ENTRY_PWEU", parentWorkflowExecutionUuid, unique = false) } protected val subWorkflowStoreEntries = TableQuery[SubWorkflowStoreEntries] - val subWorkflowStoreEntryIdsAutoInc = subWorkflowStoreEntries returning subWorkflowStoreEntries.map(_.subWorkflowStoreEntryId) + val subWorkflowStoreEntryIdsAutoInc = + subWorkflowStoreEntries returning subWorkflowStoreEntries.map(_.subWorkflowStoreEntryId) - val subWorkflowStoreEntriesForRootWorkflowId = Compiled( - (rootWorkflowId: Rep[Long]) => for { + val subWorkflowStoreEntriesForRootWorkflowId = Compiled((rootWorkflowId: Rep[Long]) => + for { subWorkflowStoreEntry <- subWorkflowStoreEntries if subWorkflowStoreEntry.rootWorkflowId === rootWorkflowId } yield subWorkflowStoreEntry @@ -50,8 +64,11 @@ trait SubWorkflowStoreEntryComponent { * Useful for finding the unique sub workflow entry for a given job key */ val subWorkflowStoreEntriesForJobKey = Compiled( - (parentWorkflowExecutionUuid: Rep[String], callFullyQualifiedName: Rep[String], jobIndex: Rep[Int], - jobAttempt: Rep[Int]) => + (parentWorkflowExecutionUuid: Rep[String], + callFullyQualifiedName: Rep[String], + jobIndex: Rep[Int], + jobAttempt: Rep[Int] + ) => for { subWorkflowStoreEntry <- subWorkflowStoreEntries if subWorkflowStoreEntry.parentWorkflowExecutionUuid === parentWorkflowExecutionUuid && diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/SummaryStatusEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/SummaryStatusEntryComponent.scala index 807cb0802f7..72d1ece4bb0 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/SummaryStatusEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/SummaryStatusEntryComponent.scala @@ -24,8 +24,8 @@ trait SummaryStatusEntryComponent { val summaryStatusEntryIdsAutoInc = summaryStatusEntries returning summaryStatusEntries.map(_.summaryStatusEntryId) - val summaryPositionForSummaryName = Compiled( - (summaryName: Rep[String]) => for { + val summaryPositionForSummaryName = Compiled((summaryName: Rep[String]) => + for { summaryStatusEntry <- summaryStatusEntries if summaryStatusEntry.summaryName === summaryName } yield summaryStatusEntry.summaryPosition diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala index c3020790be3..82489d4ae28 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala @@ -17,7 +17,7 @@ trait WorkflowMetadataSummaryEntryComponent { import driver.api._ class 
WorkflowMetadataSummaryEntries(tag: Tag) - extends Table[WorkflowMetadataSummaryEntry](tag, "WORKFLOW_METADATA_SUMMARY_ENTRY") { + extends Table[WorkflowMetadataSummaryEntry](tag, "WORKFLOW_METADATA_SUMMARY_ENTRY") { def workflowMetadataSummaryEntryId = column[Long]("WORKFLOW_METADATA_SUMMARY_ENTRY_ID", O.PrimaryKey, O.AutoInc) def workflowExecutionUuid = column[String]("WORKFLOW_EXECUTION_UUID", O.Length(100)) @@ -38,10 +38,19 @@ trait WorkflowMetadataSummaryEntryComponent { def metadataArchiveStatus: Rep[Option[String]] = column[Option[String]]("METADATA_ARCHIVE_STATUS", O.Length(30)) - def baseProjection = (workflowExecutionUuid, workflowName, workflowStatus, startTimestamp, endTimestamp, - submissionTimestamp, parentWorkflowExecutionUuid, rootWorkflowExecutionUuid, metadataArchiveStatus) + def baseProjection = (workflowExecutionUuid, + workflowName, + workflowStatus, + startTimestamp, + endTimestamp, + submissionTimestamp, + parentWorkflowExecutionUuid, + rootWorkflowExecutionUuid, + metadataArchiveStatus + ) - override def * = baseProjection ~ workflowMetadataSummaryEntryId.? <> (WorkflowMetadataSummaryEntry.tupled, WorkflowMetadataSummaryEntry.unapply) + override def * = + baseProjection ~ workflowMetadataSummaryEntryId.? <> (WorkflowMetadataSummaryEntry.tupled, WorkflowMetadataSummaryEntry.unapply) def forUpdate = baseProjection.shaped <> ( tuple => WorkflowMetadataSummaryEntry.tupled(tuple :+ None), @@ -71,112 +80,123 @@ trait WorkflowMetadataSummaryEntryComponent { val workflowMetadataSummaryEntryIdsAutoInc = workflowMetadataSummaryEntries returning workflowMetadataSummaryEntries.map(_.workflowMetadataSummaryEntryId) - val workflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp = Compiled( - (metadataArchiveStatus: Rep[Option[String]], workflowEndTimestampThreshold: Rep[Timestamp], batchSize: ConstColumn[Long]) => { + val workflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp = Compiled { + (metadataArchiveStatus: Rep[Option[String]], + workflowEndTimestampThreshold: Rep[Timestamp], + batchSize: ConstColumn[Long] + ) => (for { summary <- workflowMetadataSummaryEntries if summary.metadataArchiveStatus === metadataArchiveStatus if summary.endTimestamp <= workflowEndTimestampThreshold } yield summary.workflowExecutionUuid).take(batchSize) - }) + } - val workflowMetadataSummaryEntriesForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val workflowMetadataSummaryEntriesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + for { workflowMetadataSummaryEntry <- workflowMetadataSummaryEntries if workflowMetadataSummaryEntry.workflowExecutionUuid === workflowExecutionUuid - } yield workflowMetadataSummaryEntry.forUpdate) + } yield workflowMetadataSummaryEntry.forUpdate + ) - val workflowMetadataSummaryEntryExistsForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => (for { + val workflowMetadataSummaryEntryExistsForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + (for { summaryEntry <- workflowMetadataSummaryEntries if summaryEntry.workflowExecutionUuid === workflowExecutionUuid } yield summaryEntry).exists ) - val workflowStatusesForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val workflowStatusesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + for { workflowMetadataSummaryEntry <- workflowMetadataSummaryEntries if workflowMetadataSummaryEntry.workflowExecutionUuid === workflowExecutionUuid } yield 
workflowMetadataSummaryEntry.workflowStatus ) - val rootWorkflowId = Compiled( - (workflowId: Rep[String]) => for { + val rootWorkflowId = Compiled((workflowId: Rep[String]) => + for { summary <- workflowMetadataSummaryEntries if summary.workflowExecutionUuid === workflowId - } yield { - summary.rootWorkflowExecutionUuid.getOrElse(summary.workflowExecutionUuid) - } + } yield summary.rootWorkflowExecutionUuid.getOrElse(summary.workflowExecutionUuid) ) - val metadataArchiveStatusByWorkflowId = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val metadataArchiveStatusByWorkflowId = Compiled((workflowExecutionUuid: Rep[String]) => + for { workflowMetadataSummaryEntry <- workflowMetadataSummaryEntries if workflowMetadataSummaryEntry.workflowExecutionUuid === workflowExecutionUuid - } yield workflowMetadataSummaryEntry.metadataArchiveStatus) + } yield workflowMetadataSummaryEntry.metadataArchiveStatus + ) - val metadataArchiveStatusAndEndTimeByWorkflowId = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val metadataArchiveStatusAndEndTimeByWorkflowId = Compiled((workflowExecutionUuid: Rep[String]) => + for { workflowMetadataSummaryEntry <- workflowMetadataSummaryEntries if workflowMetadataSummaryEntry.workflowExecutionUuid === workflowExecutionUuid - } yield (workflowMetadataSummaryEntry.metadataArchiveStatus, workflowMetadataSummaryEntry.endTimestamp)) + } yield (workflowMetadataSummaryEntry.metadataArchiveStatus, workflowMetadataSummaryEntry.endTimestamp) + ) private def fetchAllWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], - workflowEndTimestampThreshold: Timestamp): Query[WorkflowMetadataSummaryEntries, WorkflowMetadataSummaryEntry, Seq] = { + workflowEndTimestampThreshold: Timestamp + ): Query[WorkflowMetadataSummaryEntries, WorkflowMetadataSummaryEntry, Seq] = for { summaryEntry <- workflowMetadataSummaryEntries if summaryEntry.workflowStatus.inSet(workflowStatuses) if summaryEntry.metadataArchiveStatus.isEmpty // get Unarchived workflows only if summaryEntry.endTimestamp <= workflowEndTimestampThreshold } yield summaryEntry - } - private def fetchAllWorkflowsToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold: Timestamp): Query[WorkflowMetadataSummaryEntries, WorkflowMetadataSummaryEntry, Seq] = { + private def fetchAllWorkflowsToDeleteThatEndedOnOrBeforeThresholdTimestamp( + workflowEndTimestampThreshold: Timestamp + ): Query[WorkflowMetadataSummaryEntries, WorkflowMetadataSummaryEntry, Seq] = for { summaryEntry <- workflowMetadataSummaryEntries if summaryEntry.metadataArchiveStatus === Option("Archived") // get archived but not deleted workflows only if summaryEntry.endTimestamp <= workflowEndTimestampThreshold } yield summaryEntry - } def workflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], workflowEndTimestampThreshold: Timestamp, - batchSize: Long): Query[WorkflowMetadataSummaryEntries, WorkflowMetadataSummaryEntry, Seq] = { + batchSize: Long + ): Query[WorkflowMetadataSummaryEntries, WorkflowMetadataSummaryEntry, Seq] = fetchAllWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp( workflowStatuses, workflowEndTimestampThreshold ).sortBy(_.endTimestamp).take(batchSize) - } def countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], - workflowEndTimestampThreshold: Timestamp): Rep[Int] = { + workflowEndTimestampThreshold: Timestamp + ): Rep[Int] = fetchAllWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp( 
workflowStatuses, workflowEndTimestampThreshold ).length - } - def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold: Timestamp): Rep[Int] = { + def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp( + workflowEndTimestampThreshold: Timestamp + ): Rep[Int] = fetchAllWorkflowsToDeleteThatEndedOnOrBeforeThresholdTimestamp( workflowEndTimestampThreshold ).length - } - def concat(a: SQLActionBuilder, b: SQLActionBuilder): SQLActionBuilder = { - SQLActionBuilder(a.queryParts ++ b.queryParts, (p: Unit, pp: PositionedParameters) => { - a.unitPConv.apply(p, pp) - b.unitPConv.apply(p, pp) - }) - } + def concat(a: SQLActionBuilder, b: SQLActionBuilder): SQLActionBuilder = + SQLActionBuilder(a.queryParts ++ b.queryParts, + (p: Unit, pp: PositionedParameters) => { + a.unitPConv.apply(p, pp) + b.unitPConv.apply(p, pp) + } + ) - def concatNel(nel: NonEmptyList[SQLActionBuilder]): SQLActionBuilder = nel.tail.foldLeft(nel.head) { (acc, next) => concat(acc, next) } + def concatNel(nel: NonEmptyList[SQLActionBuilder]): SQLActionBuilder = nel.tail.foldLeft(nel.head) { (acc, next) => + concat(acc, next) + } - def and(list: NonEmptyList[SQLActionBuilder]): SQLActionBuilder = if (list.size == 1) list.head else { + def and(list: NonEmptyList[SQLActionBuilder]): SQLActionBuilder = if (list.size == 1) list.head + else { val fullList = data.NonEmptyList.of(sql"(") ++ list.init.flatMap(x => List(x, sql" AND ")) :+ list.last :+ sql")" concatNel(fullList) } - def or(list: NonEmptyList[SQLActionBuilder]): SQLActionBuilder = if (list.size == 1) list.head else { + def or(list: NonEmptyList[SQLActionBuilder]): SQLActionBuilder = if (list.size == 1) list.head + else { val fullList = data.NonEmptyList.of(sql"(") ++ list.init.flatMap(x => List(x, sql" OR ")) :+ list.last :+ sql")" concatNel(fullList) } @@ -192,15 +212,16 @@ trait WorkflowMetadataSummaryEntryComponent { workflowStatuses: Set[String], workflowNames: Set[String], workflowExecutionUuids: Set[String], - labelAndKeyLabelValues: Set[(String,String)], - labelOrKeyLabelValues: Set[(String,String)], - excludeLabelAndValues: Set[(String,String)], - excludeLabelOrValues: Set[(String,String)], + labelAndKeyLabelValues: Set[(String, String)], + labelOrKeyLabelValues: Set[(String, String)], + excludeLabelAndValues: Set[(String, String)], + excludeLabelOrValues: Set[(String, String)], submissionTimestampOption: Option[Timestamp], startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], metadataArchiveStatus: Set[Option[String]], - includeSubworkflows: Boolean): SQLActionBuilder = { + includeSubworkflows: Boolean + ): SQLActionBuilder = { val customLabelEntryTable = quoted("CUSTOM_LABEL_ENTRY") val workflowMetadataSummaryEntryTable = quoted("WORKFLOW_METADATA_SUMMARY_ENTRY") @@ -212,8 +233,8 @@ trait WorkflowMetadataSummaryEntryComponent { val summaryTableAlias = quoted("summaryTable") val labelsOrTableAlias = quoted("labelsOrMixin") - val labelsAndTableAliases = labelAndKeyLabelValues.zipWithIndex.map { - case (labelPair, i) => quoted(s"labelAndTable$i") -> labelPair + val labelsAndTableAliases = labelAndKeyLabelValues.zipWithIndex.map { case (labelPair, i) => + quoted(s"labelAndTable$i") -> labelPair }.toMap val selectColumns = List( @@ -226,7 +247,7 @@ trait WorkflowMetadataSummaryEntryComponent { "PARENT_WORKFLOW_EXECUTION_UUID", "ROOT_WORKFLOW_EXECUTION_UUID", "METADATA_ARCHIVE_STATUS", - "WORKFLOW_METADATA_SUMMARY_ENTRY_ID", + "WORKFLOW_METADATA_SUMMARY_ENTRY_ID" ) .map(quoted) 
.mkString(s"$summaryTableAlias.", ", ", "") @@ -241,11 +262,10 @@ trait WorkflowMetadataSummaryEntryComponent { } val labelOrJoin = if (labelOrKeyLabelValues.nonEmpty) { - Option( - sql"""| JOIN #$customLabelEntryTable #$labelsOrTableAlias - | ON #$summaryTableAlias.#$workflowExecutionUuidColumn - | = #$labelsOrTableAlias.#$workflowExecutionUuidColumn - |""".stripMargin) + Option(sql"""| JOIN #$customLabelEntryTable #$labelsOrTableAlias + | ON #$summaryTableAlias.#$workflowExecutionUuidColumn + | = #$labelsOrTableAlias.#$workflowExecutionUuidColumn + |""".stripMargin) } else None val labelAndJoins = labelsAndTableAliases.toList.map { case (labelAndTableAlias, _) => @@ -255,9 +275,8 @@ trait WorkflowMetadataSummaryEntryComponent { |""".stripMargin } - val from = concatNel(NonEmptyList.of( - sql"""|FROM #$workflowMetadataSummaryEntryTable #$summaryTableAlias - |""".stripMargin) ++ labelOrJoin.toList ++ labelAndJoins) + val from = concatNel(NonEmptyList.of(sql"""|FROM #$workflowMetadataSummaryEntryTable #$summaryTableAlias + |""".stripMargin) ++ labelOrJoin.toList ++ labelAndJoins) def makeSetConstraint(column: String, elements: Set[String]) = { val list = elements.toList.map(element => sql"""#$summaryTableAlias.#${quoted(column)} = $element""") @@ -272,9 +291,8 @@ trait WorkflowMetadataSummaryEntryComponent { NonEmptyList.fromList(list).map(or).toList } - def makeTimeConstraint(column: String, comparison: String, elementOption: Option[Timestamp]) = { + def makeTimeConstraint(column: String, comparison: String, elementOption: Option[Timestamp]) = elementOption.map(element => sql"""#$summaryTableAlias.#${quoted(column)} #$comparison $element""").toList - } val statusConstraint = makeSetConstraint("WORKFLOW_STATUS", workflowStatuses) val nameConstraint = makeSetConstraint("WORKFLOW_NAME", workflowNames) @@ -287,23 +305,30 @@ trait WorkflowMetadataSummaryEntryComponent { val metadataArchiveStatusConstraint = makeSetConstraintWithNulls("METADATA_ARCHIVE_STATUS", metadataArchiveStatus) // *ALL* of the labelAnd list of KV pairs must exist: - val labelsAndConstraint = NonEmptyList.fromList(labelsAndTableAliases.toList.map { - case (labelsAndTableAlias, (labelKey, labelValue)) => - and(NonEmptyList.of( - sql"""#$labelsAndTableAlias.#$customLabelKeyColumn = $labelKey""", - sql"""#$labelsAndTableAlias.#$customLabelValueColumn = $labelValue""", - )) - }).map(and).toList + val labelsAndConstraint = NonEmptyList + .fromList(labelsAndTableAliases.toList.map { case (labelsAndTableAlias, (labelKey, labelValue)) => + and( + NonEmptyList.of( + sql"""#$labelsAndTableAlias.#$customLabelKeyColumn = $labelKey""", + sql"""#$labelsAndTableAlias.#$customLabelValueColumn = $labelValue""" + ) + ) + }) + .map(and) + .toList // At least one of the labelOr list of KV pairs must exist: - val labelOrConstraint = NonEmptyList.fromList(labelOrKeyLabelValues.toList.map { - case (labelKey, labelValue) => - and(NonEmptyList.of( - sql"""#$labelsOrTableAlias.#$customLabelKeyColumn = $labelKey""", - sql"""#$labelsOrTableAlias.#$customLabelValueColumn = $labelValue""", - )) - }).map(or).toList - + val labelOrConstraint = NonEmptyList + .fromList(labelOrKeyLabelValues.toList.map { case (labelKey, labelValue) => + and( + NonEmptyList.of( + sql"""#$labelsOrTableAlias.#$customLabelKeyColumn = $labelKey""", + sql"""#$labelsOrTableAlias.#$customLabelValueColumn = $labelValue""" + ) + ) + }) + .map(or) + .toList var mixinTableCounter = 0 @@ -322,18 +347,26 @@ trait WorkflowMetadataSummaryEntryComponent { } // *ALL* of the excludeLabelOr 
list of KV pairs must *NOT* exist: - val excludeLabelsOrConstraint = NonEmptyList.fromList(excludeLabelOrValues.toList map { - case (labelKey, labelValue) => not(labelExists(labelKey, labelValue)) - }).map(and).toList + val excludeLabelsOrConstraint = NonEmptyList + .fromList(excludeLabelOrValues.toList map { case (labelKey, labelValue) => + not(labelExists(labelKey, labelValue)) + }) + .map(and) + .toList // At least one of the excludeLabelAnd list of KV pairs must *NOT* exist: - val excludeLabelsAndConstraint = NonEmptyList.fromList(excludeLabelAndValues.toList.map { - case (labelKey, labelValue) => not(labelExists(labelKey, labelValue)) - }).map(or).toList - - val includeSubworkflowsConstraint = if (includeSubworkflows) List.empty else { - List(sql"""#$summaryTableAlias.#$parentWorkflowExecutionUuidColumn IS NULL""".stripMargin) - } + val excludeLabelsAndConstraint = NonEmptyList + .fromList(excludeLabelAndValues.toList.map { case (labelKey, labelValue) => + not(labelExists(labelKey, labelValue)) + }) + .map(or) + .toList + + val includeSubworkflowsConstraint = + if (includeSubworkflows) List.empty + else { + List(sql"""#$summaryTableAlias.#$parentWorkflowExecutionUuidColumn IS NULL""".stripMargin) + } val constraintList = statusConstraint ++ @@ -359,19 +392,20 @@ trait WorkflowMetadataSummaryEntryComponent { concatNel((NonEmptyList.of(select) :+ from) ++ where) } - def countWorkflowMetadataSummaryEntries(parentIdWorkflowMetadataKey: String, - workflowStatuses: Set[String], workflowNames: Set[String], + workflowStatuses: Set[String], + workflowNames: Set[String], workflowExecutionUuids: Set[String], - labelAndKeyLabelValues: Set[(String,String)], - labelOrKeyLabelValues: Set[(String,String)], - excludeLabelAndValues: Set[(String,String)], - excludeLabelOrValues: Set[(String,String)], + labelAndKeyLabelValues: Set[(String, String)], + labelOrKeyLabelValues: Set[(String, String)], + excludeLabelAndValues: Set[(String, String)], + excludeLabelOrValues: Set[(String, String)], submissionTimestampOption: Option[Timestamp], startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], metadataArchiveStatus: Set[Option[String]], - includeSubworkflows: Boolean) = { + includeSubworkflows: Boolean + ) = buildQueryAction( selectOrCount = Count, parentIdWorkflowMetadataKey, @@ -388,18 +422,18 @@ trait WorkflowMetadataSummaryEntryComponent { metadataArchiveStatus, includeSubworkflows = includeSubworkflows ).as[Int].head - } /** * Query workflow execution using the filter criteria encapsulated by the `WorkflowExecutionQueryParameters`. 
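   *
   * A usage sketch with illustrative values (not canonical defaults): the first page of
   * 50 running top-level workflows, newest first:
   * {{{
   * queryWorkflowMetadataSummaryEntries(
   *   parentIdWorkflowMetadataKey = "parentWorkflowId", // illustrative key name
   *   workflowStatuses = Set("Running"),
   *   workflowNames = Set.empty,
   *   workflowExecutionUuids = Set.empty,
   *   labelAndKeyLabelValues = Set.empty,
   *   labelOrKeyLabelValues = Set.empty,
   *   excludeLabelAndValues = Set.empty,
   *   excludeLabelOrValues = Set.empty,
   *   submissionTimestampOption = None,
   *   startTimestampOption = None,
   *   endTimestampOption = None,
   *   metadataArchiveStatus = Set(None), // unarchived only
   *   includeSubworkflows = false,
   *   page = Option(1),
   *   pageSize = Option(50),
   *   newestFirst = true
   * )
   * }}}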
*/ def queryWorkflowMetadataSummaryEntries(parentIdWorkflowMetadataKey: String, - workflowStatuses: Set[String], workflowNames: Set[String], + workflowStatuses: Set[String], + workflowNames: Set[String], workflowExecutionUuids: Set[String], - labelAndKeyLabelValues: Set[(String,String)], - labelOrKeyLabelValues: Set[(String,String)], - excludeLabelAndValues: Set[(String,String)], - excludeLabelOrValues: Set[(String,String)], + labelAndKeyLabelValues: Set[(String, String)], + labelOrKeyLabelValues: Set[(String, String)], + excludeLabelAndValues: Set[(String, String)], + excludeLabelOrValues: Set[(String, String)], submissionTimestampOption: Option[Timestamp], startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], @@ -407,7 +441,8 @@ trait WorkflowMetadataSummaryEntryComponent { includeSubworkflows: Boolean, page: Option[Int], pageSize: Option[Int], - newestFirst: Boolean) = { + newestFirst: Boolean + ) = { val mainQuery = buildQueryAction( selectOrCount = Select, parentIdWorkflowMetadataKey, @@ -433,13 +468,13 @@ trait WorkflowMetadataSummaryEntryComponent { // `true` for queries, newest workflows are the most relevant // `false` for archiving, going oldest-to-newest - val orderByAddendum = if (newestFirst) - sql"""| ORDER BY #${quoted("WORKFLOW_METADATA_SUMMARY_ENTRY_ID")} DESC - |""".stripMargin - else - sql"""| ORDER BY #${quoted("WORKFLOW_METADATA_SUMMARY_ENTRY_ID")} ASC - |""".stripMargin - + val orderByAddendum = + if (newestFirst) + sql"""| ORDER BY #${quoted("WORKFLOW_METADATA_SUMMARY_ENTRY_ID")} DESC + |""".stripMargin + else + sql"""| ORDER BY #${quoted("WORKFLOW_METADATA_SUMMARY_ENTRY_ID")} ASC + |""".stripMargin // NB you can preview the prepared statement created here by using, for example: println(result.statements.head) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala index 847fa862405..874fcbd0c0c 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala @@ -43,8 +43,23 @@ trait WorkflowStoreEntryComponent { def hogGroup = column[Option[String]]("HOG_GROUP", O.Length(100)) - override def * = (workflowExecutionUuid, workflowDefinition, workflowUrl, workflowRoot, workflowType, workflowTypeVersion, workflowInputs, workflowOptions, workflowState, - submissionTime, importsZip, customLabels, cromwellId, heartbeatTimestamp, hogGroup, workflowStoreEntryId.?) <> ((WorkflowStoreEntry.apply _).tupled, WorkflowStoreEntry.unapply) + override def * = (workflowExecutionUuid, + workflowDefinition, + workflowUrl, + workflowRoot, + workflowType, + workflowTypeVersion, + workflowInputs, + workflowOptions, + workflowState, + submissionTime, + importsZip, + customLabels, + cromwellId, + heartbeatTimestamp, + hogGroup, + workflowStoreEntryId.? 
+ ) <> ((WorkflowStoreEntry.apply _).tupled, WorkflowStoreEntry.unapply) def ucWorkflowStoreEntryWeu = index("UC_WORKFLOW_STORE_ENTRY_WEU", workflowExecutionUuid, unique = true) @@ -58,15 +73,15 @@ trait WorkflowStoreEntryComponent { /** * Useful for finding the workflow store for a given workflow execution UUID */ - val workflowStoreEntriesForWorkflowExecutionUuid = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val workflowStoreEntriesForWorkflowExecutionUuid = Compiled((workflowExecutionUuid: Rep[String]) => + for { workflowStoreEntry <- workflowStoreEntries if workflowStoreEntry.workflowExecutionUuid === workflowExecutionUuid } yield workflowStoreEntry ) - val heartbeatForWorkflowStoreEntry = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val heartbeatForWorkflowStoreEntry = Compiled((workflowExecutionUuid: Rep[String]) => + for { workflowStoreEntry <- workflowStoreEntries if workflowStoreEntry.workflowExecutionUuid === workflowExecutionUuid } yield workflowStoreEntry.heartbeatTimestamp @@ -77,7 +92,8 @@ trait WorkflowStoreEntryComponent { */ def getHogGroupWithLowestRunningWfs(heartbeatTimestampTimedOut: Timestamp, excludeWorkflowState: String, - excludedGroups: Set[String]): Query[Rep[Option[String]], Option[String], Seq] = { + excludedGroups: Set[String] + ): Query[Rep[Option[String]], Option[String], Seq] = { val startableWorkflows = for { row <- workflowStoreEntries /* @@ -86,7 +102,7 @@ trait WorkflowStoreEntryComponent { 2) Workflows with old heartbeats, presumably abandoned by a defunct Cromwell. 3) Workflows not in "OnHold" state 4) Workflows that don't belong to hog groups in excludedGroups - */ + */ if (row.heartbeatTimestamp.isEmpty || row.heartbeatTimestamp < heartbeatTimestampTimedOut) && (row.workflowState =!= excludeWorkflowState) && !(row.hogGroup inSet excludedGroups) @@ -109,7 +125,7 @@ trait WorkflowStoreEntryComponent { This looks for: 1) Workflows not in "OnHold" state 2) Workflows that don't belong to hog groups in excludedGroups - */ + */ if row.workflowState =!= excludeWorkflowState && !(row.hogGroup inSet excludedGroups) } yield row @@ -129,27 +145,31 @@ trait WorkflowStoreEntryComponent { Seq ] = for { (hog_group, workflows_ct) <- totalWorkflowsByHogGroup - (startable_hog_group, startable_workflows_ct, oldest_submission_time) <- numOfStartableWfsByHogGroup if hog_group === startable_hog_group + (startable_hog_group, startable_workflows_ct, oldest_submission_time) <- numOfStartableWfsByHogGroup + if hog_group === startable_hog_group } yield (hog_group, workflows_ct - startable_workflows_ct, oldest_submission_time) // sort the above calculated result set first by the count of actively running workflows, then by hog group with // oldest submission timestamp and then sort it alphabetically by hog group name. Then take the first row of // the result and return the hog group name. - wfsRunningPerHogGroup.sortBy { - case (hogGroupName, running_wf_ct, oldest_submission_time) => (running_wf_ct.asc, oldest_submission_time, hogGroupName) - }.take(1).map(_._1) + wfsRunningPerHogGroup + .sortBy { case (hogGroupName, running_wf_ct, oldest_submission_time) => + (running_wf_ct.asc, oldest_submission_time, hogGroupName) + } + .take(1) + .map(_._1) } /** * Returns up to "limit" startable workflows, sorted by submission time, that belong to * given hog group and are not in "OnHold" status. 
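   *
   * Usage sketch (values illustrative): up to 10 startable workflows for hog group "groupA",
   * treating heartbeats older than `cutoff` (a java.sql.Timestamp chosen by the caller) as
   * abandoned by a defunct Cromwell:
   * {{{
   * fetchStartableWfsForHogGroup((10L, cutoff, "OnHold", Option("groupA"))).result
   * }}}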
*/ - val fetchStartableWfsForHogGroup = Compiled( + val fetchStartableWfsForHogGroup = Compiled { (limit: ConstColumn[Long], heartbeatTimestampTimedOut: ConstColumn[Timestamp], excludeWorkflowState: Rep[String], - hogGroup: Rep[Option[String]]) => { - + hogGroup: Rep[Option[String]] + ) => val workflowsToStart = for { row <- workflowStoreEntries /* @@ -158,7 +178,7 @@ trait WorkflowStoreEntryComponent { 2) Workflows with old heartbeats, presumably abandoned by a defunct Cromwell. 3) Workflows not in "OnHold" state 4) Workflows that belong to included hog group - */ + */ if (row.heartbeatTimestamp.isEmpty || row.heartbeatTimestamp < heartbeatTimestampTimedOut) && (row.workflowState =!= excludeWorkflowState) && (row.hogGroup === hogGroup) @@ -169,8 +189,7 @@ trait WorkflowStoreEntryComponent { do an update subsequent to this select in the same transaction that we know will impact those readers. */ workflowsToStart.forUpdate.sortBy(_.submissionTime.asc).take(limit) - } - ) + } /** * Useful for counting workflows in a given state. @@ -184,8 +203,8 @@ trait WorkflowStoreEntryComponent { /** * Useful for updating the relevant fields of a workflow store entry when a workflow is picked up for processing. */ - val workflowStoreFieldsForPickup = Compiled( - (workflowExecutionUuid: Rep[String]) => for { + val workflowStoreFieldsForPickup = Compiled((workflowExecutionUuid: Rep[String]) => + for { row <- workflowStoreEntries if row.workflowExecutionUuid === workflowExecutionUuid } yield (row.workflowState, row.cromwellId, row.heartbeatTimestamp) @@ -194,8 +213,8 @@ trait WorkflowStoreEntryComponent { /** * Useful for clearing out cromwellId and heartbeatTimestamp on an orderly Cromwell shutdown. */ - val releaseWorkflowStoreEntries = Compiled( - (cromwellId: Rep[String]) => for { + val releaseWorkflowStoreEntries = Compiled((cromwellId: Rep[String]) => + for { row <- workflowStoreEntries if row.cromwellId === cromwellId } yield (row.cromwellId, row.heartbeatTimestamp) @@ -204,8 +223,8 @@ trait WorkflowStoreEntryComponent { /** * Useful for updating state for all entries matching a given state */ - val workflowStateForWorkflowState = Compiled( - (workflowState: Rep[String]) => for { + val workflowStateForWorkflowState = Compiled((workflowState: Rep[String]) => + for { workflowStoreEntry <- workflowStoreEntries if workflowStoreEntry.workflowState === workflowState } yield workflowStoreEntry.workflowState @@ -214,8 +233,8 @@ trait WorkflowStoreEntryComponent { /** * Useful for updating a given workflow to a new state */ - val workflowStateForWorkflowExecutionUUid = Compiled( - (workflowId: Rep[String]) => for { + val workflowStateForWorkflowExecutionUUid = Compiled((workflowId: Rep[String]) => + for { workflowStoreEntry <- workflowStoreEntries if workflowStoreEntry.workflowExecutionUuid === workflowId } yield workflowStoreEntry.workflowState @@ -224,53 +243,48 @@ trait WorkflowStoreEntryComponent { /** * Useful for updating a given workflow to a 'Submitted' state when it's currently 'On Hold' */ - val workflowStateForWorkflowExecutionUUidAndWorkflowState = Compiled( - (workflowId: Rep[String], workflowState: Rep[String]) => { + val workflowStateForWorkflowExecutionUUidAndWorkflowState = Compiled { + (workflowId: Rep[String], workflowState: Rep[String]) => for { workflowStoreEntry <- workflowStoreEntries if workflowStoreEntry.workflowExecutionUuid === workflowId if workflowStoreEntry.workflowState === workflowState } yield workflowStoreEntry.workflowState - } - ) + } /** * Useful for deleting a given 
workflow when it's currently 'On Hold' or 'Submitted'
    */
-  val workflowStoreEntryForWorkflowExecutionUUidAndWorkflowStates = Compiled(
-    (workflowId: Rep[String],
-     workflowStateOr1: Rep[String],
-     workflowStateOr2: Rep[String]
-    ) => {
+  val workflowStoreEntryForWorkflowExecutionUUidAndWorkflowStates = Compiled {
+    (workflowId: Rep[String], workflowStateOr1: Rep[String], workflowStateOr2: Rep[String]) =>
       for {
         workflowStoreEntry <- workflowStoreEntries
         if workflowStoreEntry.workflowExecutionUuid === workflowId
         if workflowStoreEntry.workflowState === workflowStateOr1 || workflowStoreEntry.workflowState === workflowStateOr2
       } yield workflowStoreEntry
-    }
-  )
+  }

   // Find workflows running on a given Cromwell instance with abort requested:
-  val findWorkflowsWithAbortRequested = Compiled(
-    (cromwellId: Rep[String]) => for {
+  val findWorkflowsWithAbortRequested = Compiled((cromwellId: Rep[String]) =>
+    for {
       workflowStoreEntry <- workflowStoreEntries
       if workflowStoreEntry.workflowState === "Aborting" && workflowStoreEntry.cromwellId === cromwellId
     } yield workflowStoreEntry.workflowExecutionUuid
   )

   // Find workflows running on a given Cromwell instance:
-  val findWorkflows = Compiled(
-    (cromwellId: Rep[String]) => for {
+  val findWorkflows = Compiled((cromwellId: Rep[String]) =>
+    for {
       workflowStoreEntry <- workflowStoreEntries
       if workflowStoreEntry.cromwellId === cromwellId
     } yield workflowStoreEntry.workflowExecutionUuid
   )

-  val checkExists = Compiled(
-    (workflowId: Rep[String]) => (for {
+  val checkExists = Compiled((workflowId: Rep[String]) =>
+    for {
       workflowStoreEntry <- workflowStoreEntries
       if workflowStoreEntry.workflowExecutionUuid === workflowId
-    } yield 1)
+    } yield 1
   )
 }
diff --git a/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala
index 25380dc41b5..ef51b15b2ff 100644
--- a/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala
+++ b/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala
@@ -8,23 +8,27 @@ import scala.concurrent.{ExecutionContext, Future}
 trait CallCachingSqlDatabase {
   def addCallCaching(joins: Seq[CallCachingJoin], batchSize: Int)(implicit ec: ExecutionContext): Future[Unit]

-  def hasMatchingCallCachingEntriesForBaseAggregation(baseAggregationHash: String, callCachePathPrefixes: Option[List[String]])
-                                                     (implicit ec: ExecutionContext): Future[Boolean]
+  def hasMatchingCallCachingEntriesForBaseAggregation(baseAggregationHash: String,
+                                                      callCachePathPrefixes: Option[List[String]]
+  )(implicit ec: ExecutionContext): Future[Boolean]

-  def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Long])
-                                (implicit ec: ExecutionContext): Future[Option[Long]]
+  def findCacheHitForAggregation(baseAggregationHash: String,
+                                 inputFilesAggregationHash: Option[String],
+                                 callCachePathPrefixes: Option[List[String]],
+                                 excludedIds: Set[Long]
+  )(implicit ec: ExecutionContext): Future[Option[Long]]

-  def queryResultsForCacheId(callCachingEntryId: Long)
-                            (implicit ec: ExecutionContext): Future[Option[CallCachingJoin]]
-
-  def callCacheJoinForCall(workflowExecutionUuid: String, callFqn: String, index: Int)
-                          (implicit ec: ExecutionContext): Future[Option[CallCachingJoin]]
+  def queryResultsForCacheId(callCachingEntryId: Long)(implicit ec: ExecutionContext): Future[Option[CallCachingJoin]]

-  def
invalidateCall(callCachingEntryId: Long) - (implicit ec: ExecutionContext): Future[Option[CallCachingEntry]] + def callCacheJoinForCall(workflowExecutionUuid: String, callFqn: String, index: Int)(implicit + ec: ExecutionContext + ): Future[Option[CallCachingJoin]] - def invalidateCallCacheEntryIdsForWorkflowId(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Unit] + def invalidateCall(callCachingEntryId: Long)(implicit ec: ExecutionContext): Future[Option[CallCachingEntry]] + + def invalidateCallCacheEntryIdsForWorkflowId(workflowExecutionUuid: String)(implicit + ec: ExecutionContext + ): Future[Unit] def callCacheEntryIdsForWorkflowId(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Seq[Long]] } diff --git a/database/sql/src/main/scala/cromwell/database/sql/DockerHashStoreSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/DockerHashStoreSqlDatabase.scala index 8a719185c98..11c09154364 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/DockerHashStoreSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/DockerHashStoreSqlDatabase.scala @@ -11,15 +11,15 @@ trait DockerHashStoreSqlDatabase { * Adds a docker hash entry to the store. * */ - def addDockerHashStoreEntry(dockerHashStoreEntry: DockerHashStoreEntry) - (implicit ec: ExecutionContext): Future[Unit] + def addDockerHashStoreEntry(dockerHashStoreEntry: DockerHashStoreEntry)(implicit ec: ExecutionContext): Future[Unit] /** * Retrieves docker hash entries for a workflow. * */ - def queryDockerHashStoreEntries(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Seq[DockerHashStoreEntry]] + def queryDockerHashStoreEntries(workflowExecutionUuid: String)(implicit + ec: ExecutionContext + ): Future[Seq[DockerHashStoreEntry]] /** * Deletes docker hash entries related to a workflow, returning the number of rows affected. 
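The signature rewrites in the database traits above and below all apply one scalafmt convention: parameters stack one per line, the closing parenthesis of the last explicit parameter list is dedented onto its own line, and the (implicit ec: ExecutionContext) list attaches directly to that parenthesis instead of being whitespace-aligned under the first list. A minimal sketch of the before/after shape, assuming an invented trait and method (ExampleSqlDatabase and queryEntries are illustrations, not Cromwell names):

    import scala.concurrent.{ExecutionContext, Future}

    trait ExampleSqlDatabase {
      // Old style, kept as a comment for contrast: the implicit list was
      // continuation-aligned under the opening parenthesis.
      //   def queryEntries(workflowExecutionUuid: String, limit: Int)
      //                   (implicit ec: ExecutionContext): Future[Seq[String]]

      // New style: one parameter per line, dedented closing parenthesis,
      // implicit list attached directly to it.
      def queryEntries(workflowExecutionUuid: String,
                       limit: Int
      )(implicit ec: ExecutionContext): Future[Seq[String]]
    }

One practical upside of the dedented parenthesis is that adding or removing a trailing parameter touches only its own line in later diffs.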
diff --git a/database/sql/src/main/scala/cromwell/database/sql/EngineSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/EngineSqlDatabase.scala index 1062e18ed25..3e5e16f881c 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/EngineSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/EngineSqlDatabase.scala @@ -1,9 +1,10 @@ package cromwell.database.sql -trait EngineSqlDatabase extends SqlDatabase - with JobKeyValueSqlDatabase - with CallCachingSqlDatabase - with JobStoreSqlDatabase - with WorkflowStoreSqlDatabase - with SubWorkflowStoreSqlDatabase - with DockerHashStoreSqlDatabase +trait EngineSqlDatabase + extends SqlDatabase + with JobKeyValueSqlDatabase + with CallCachingSqlDatabase + with JobStoreSqlDatabase + with WorkflowStoreSqlDatabase + with SubWorkflowStoreSqlDatabase + with DockerHashStoreSqlDatabase diff --git a/database/sql/src/main/scala/cromwell/database/sql/JobKeyValueSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/JobKeyValueSqlDatabase.scala index 372c4907df0..efda8010eb0 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/JobKeyValueSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/JobKeyValueSqlDatabase.scala @@ -9,16 +9,18 @@ trait JobKeyValueSqlDatabase { def existsJobKeyValueEntries()(implicit ec: ExecutionContext): Future[Boolean] - def addJobKeyValueEntry(jobKeyValueEntry: JobKeyValueEntry) - (implicit ec: ExecutionContext): Future[Unit] + def addJobKeyValueEntry(jobKeyValueEntry: JobKeyValueEntry)(implicit ec: ExecutionContext): Future[Unit] - def addJobKeyValueEntries(jobKeyValueEntries: Iterable[JobKeyValueEntry]) - (implicit ec: ExecutionContext): Future[Unit] + def addJobKeyValueEntries(jobKeyValueEntries: Iterable[JobKeyValueEntry])(implicit ec: ExecutionContext): Future[Unit] - def queryJobKeyValueEntries(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Future[Seq[JobKeyValueEntry]] + def queryJobKeyValueEntries(workflowExecutionUuid: String)(implicit + ec: ExecutionContext + ): Future[Seq[JobKeyValueEntry]] - def queryStoreValue(workflowExecutionUuid: String, callFqn: String, jobScatterIndex: Int, - jobRetryAttempt: Int, storeKey: String) - (implicit ec: ExecutionContext): Future[Option[String]] + def queryStoreValue(workflowExecutionUuid: String, + callFqn: String, + jobScatterIndex: Int, + jobRetryAttempt: Int, + storeKey: String + )(implicit ec: ExecutionContext): Future[Option[String]] } diff --git a/database/sql/src/main/scala/cromwell/database/sql/JobStoreSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/JobStoreSqlDatabase.scala index a5497ae49c2..769eac0a3b2 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/JobStoreSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/JobStoreSqlDatabase.scala @@ -9,8 +9,9 @@ trait JobStoreSqlDatabase { def addJobStores(jobStoreJoins: Seq[JobStoreJoin], batchSize: Int)(implicit ec: ExecutionContext): Future[Unit] - def queryJobStores(workflowExecutionUuid: String, callFqn: String, jobScatterIndex: Int, jobScatterAttempt: Int) - (implicit ec: ExecutionContext): Future[Option[JobStoreJoin]] + def queryJobStores(workflowExecutionUuid: String, callFqn: String, jobScatterIndex: Int, jobScatterAttempt: Int)( + implicit ec: ExecutionContext + ): Future[Option[JobStoreJoin]] def removeJobStores(workflowExecutionUuids: Seq[String])(implicit ec: ExecutionContext): Future[Seq[Int]] } diff --git 
a/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala index 9139c819999..5746af4f501 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala @@ -34,56 +34,55 @@ trait MetadataSqlDatabase extends SqlDatabase { submissionMetadataKey: String, parentWorkflowIdKey: String, rootWorkflowIdKey: String, - labelMetadataKey: String)(implicit ec: ExecutionContext): Future[Unit] + labelMetadataKey: String + )(implicit ec: ExecutionContext): Future[Unit] def metadataEntryExists(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Boolean] def metadataSummaryEntryExists(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Boolean] - def queryMetadataEntries(workflowExecutionUuid: String, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] + def queryMetadataEntries(workflowExecutionUuid: String, timeout: Duration)(implicit + ec: ExecutionContext + ): Future[Seq[MetadataEntry]] def streamMetadataEntries(workflowExecutionUuid: String): DatabasePublisher[MetadataEntry] - def countMetadataEntries(workflowExecutionUuid: String, - expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] + def countMetadataEntries(workflowExecutionUuid: String, expandSubWorkflows: Boolean, timeout: Duration)(implicit + ec: ExecutionContext + ): Future[Int] - def queryMetadataEntries(workflowExecutionUuid: String, - metadataKey: String, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] + def queryMetadataEntries(workflowExecutionUuid: String, metadataKey: String, timeout: Duration)(implicit + ec: ExecutionContext + ): Future[Seq[MetadataEntry]] def countMetadataEntries(workflowExecutionUuid: String, metadataKey: String, expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] def queryMetadataEntries(workflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] + timeout: Duration + )(implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] def countMetadataEntries(workflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] def queryMetadataEntries(workflowUuid: String, metadataKey: String, callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] + timeout: Duration + )(implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] def countMetadataEntries(workflowUuid: String, metadataKey: String, @@ -91,23 +90,23 @@ trait MetadataSqlDatabase extends SqlDatabase { jobIndex: Option[Int], jobAttempt: Option[Int], expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] def queryMetadataEntryWithKeyConstraints(workflowExecutionUuid: String, metadataKeysToFilterFor: List[String], metadataKeysToFilterAgainst: List[String], 
metadataJobQueryValue: MetadataJobQueryValue, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] + timeout: Duration + )(implicit ec: ExecutionContext): Future[Seq[MetadataEntry]] def countMetadataEntryWithKeyConstraints(workflowExecutionUuid: String, metadataKeysToFilterFor: List[String], metadataKeysToFilterAgainst: List[String], metadataJobQueryValue: MetadataJobQueryValue, expandSubWorkflows: Boolean, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Int] + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] /** * Retrieves next summarizable block of metadata satisfying the specified criteria. @@ -115,12 +114,11 @@ trait MetadataSqlDatabase extends SqlDatabase { * @param buildUpdatedSummary Takes in the optional existing summary and the metadata, returns the new summary. * @return A `Future` with the number of rows summarized by the invocation, and the number of rows still to summarize. */ - def summarizeIncreasing(labelMetadataKey: String, - limit: Int, - buildUpdatedSummary: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) - => WorkflowMetadataSummaryEntry) - (implicit ec: ExecutionContext): Future[Long] + def summarizeIncreasing( + labelMetadataKey: String, + limit: Int, + buildUpdatedSummary: (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry + )(implicit ec: ExecutionContext): Future[Long] /** * Retrieves a window of summarizable metadata satisfying the specified criteria. @@ -128,14 +126,13 @@ trait MetadataSqlDatabase extends SqlDatabase { * @param buildUpdatedSummary Takes in the optional existing summary and the metadata, returns the new summary. * @return A `Future` with the number of rows summarized by this invocation, and the number of rows still to summarize. 
*/ - def summarizeDecreasing(summaryNameDecreasing: String, - summaryNameIncreasing: String, - labelMetadataKey: String, - limit: Int, - buildUpdatedSummary: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) - => WorkflowMetadataSummaryEntry) - (implicit ec: ExecutionContext): Future[(Long, Long)] + def summarizeDecreasing( + summaryNameDecreasing: String, + summaryNameIncreasing: String, + labelMetadataKey: String, + limit: Int, + buildUpdatedSummary: (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry + )(implicit ec: ExecutionContext): Future[(Long, Long)] def updateMetadataArchiveStatus(workflowExecutionUuid: String, newArchiveStatus: Option[String]): Future[Int] @@ -143,16 +140,18 @@ trait MetadataSqlDatabase extends SqlDatabase { def getWorkflowLabels(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Map[String, String]] - def getRootAndSubworkflowLabels(rootWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Map[String, Map[String, String]]] + def getRootAndSubworkflowLabels(rootWorkflowExecutionUuid: String)(implicit + ec: ExecutionContext + ): Future[Map[String, Map[String, String]]] def queryWorkflowSummaries(parentIdWorkflowMetadataKey: String, workflowStatuses: Set[String], workflowNames: Set[String], workflowExecutionUuids: Set[String], - labelAndKeyLabelValues: Set[(String,String)], - labelOrKeyLabelValues: Set[(String,String)], - excludeLabelAndValues: Set[(String,String)], - excludeLabelOrValues: Set[(String,String)], + labelAndKeyLabelValues: Set[(String, String)], + labelOrKeyLabelValues: Set[(String, String)], + excludeLabelAndValues: Set[(String, String)], + excludeLabelOrValues: Set[(String, String)], submissionTimestamp: Option[Timestamp], startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], @@ -160,43 +159,57 @@ trait MetadataSqlDatabase extends SqlDatabase { includeSubworkflows: Boolean, page: Option[Int], pageSize: Option[Int], - newestFirst: Boolean) - (implicit ec: ExecutionContext): Future[Iterable[WorkflowMetadataSummaryEntry]] + newestFirst: Boolean + )(implicit ec: ExecutionContext): Future[Iterable[WorkflowMetadataSummaryEntry]] def countWorkflowSummaries(parentIdWorkflowMetadataKey: String, - workflowStatuses: Set[String], workflowNames: Set[String], + workflowStatuses: Set[String], + workflowNames: Set[String], workflowExecutionUuids: Set[String], - labelAndKeyLabelValues: Set[(String,String)], - labelOrKeyLabelValues: Set[(String,String)], - excludeLabelAndValues: Set[(String,String)], - excludeLabelOrValues: Set[(String,String)], + labelAndKeyLabelValues: Set[(String, String)], + labelOrKeyLabelValues: Set[(String, String)], + excludeLabelAndValues: Set[(String, String)], + excludeLabelOrValues: Set[(String, String)], submissionTimestamp: Option[Timestamp], startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], metadataArchiveStatus: Set[Option[String]], - includeSubworkflows: Boolean) - (implicit ec: ExecutionContext): Future[Int] + includeSubworkflows: Boolean + )(implicit ec: ExecutionContext): Future[Int] - def deleteAllMetadataForWorkflowAndUpdateArchiveStatus(rootWorkflowId: String, newArchiveStatus: Option[String])(implicit ec: ExecutionContext): Future[Int] + def deleteAllMetadataForWorkflowAndUpdateArchiveStatus(rootWorkflowId: String, newArchiveStatus: Option[String])( + implicit ec: ExecutionContext + ): Future[Int] def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): 
Future[Option[String]] - def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], thresholdTimestamp: Timestamp, batchSizeOpt: Long)(implicit ec: ExecutionContext): Future[Seq[String]] + def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], + thresholdTimestamp: Timestamp, + batchSizeOpt: Long + )(implicit ec: ExecutionContext): Future[Seq[String]] def getSummaryQueueSize()(implicit ec: ExecutionContext): Future[Int] - def getMetadataArchiveStatusAndEndTime(workflowId: String)(implicit ec: ExecutionContext): Future[(Option[String], Option[Timestamp])] + def getMetadataArchiveStatusAndEndTime(workflowId: String)(implicit + ec: ExecutionContext + ): Future[(Option[String], Option[Timestamp])] def queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], workflowEndTimestampThreshold: Timestamp, - batchSize: Long)(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] + batchSize: Long + )(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] def countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], - workflowEndTimestampThreshold: Timestamp)(implicit ec: ExecutionContext): Future[Int] + workflowEndTimestampThreshold: Timestamp + )(implicit ec: ExecutionContext): Future[Int] - def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold: Timestamp)(implicit ec: ExecutionContext): Future[Int] + def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold: Timestamp)(implicit + ec: ExecutionContext + ): Future[Int] def getMetadataTableSizeInformation()(implicit ec: ExecutionContext): Future[Option[InformationSchemaEntry]] - def getFailedJobsMetadataWithWorkflowId(rootWorkflowId: String)(implicit ec: ExecutionContext): Future[Vector[MetadataEntry]] + def getFailedJobsMetadataWithWorkflowId(rootWorkflowId: String)(implicit + ec: ExecutionContext + ): Future[Vector[MetadataEntry]] } diff --git a/database/sql/src/main/scala/cromwell/database/sql/SqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/SqlDatabase.scala index 842dcd775d4..64a4f2dd2a3 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/SqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/SqlDatabase.scala @@ -16,6 +16,7 @@ trait SqlDatabase extends AutoCloseable { } object SqlDatabase { + /** * Modifies config.getString("url") to return a unique schema, if the original url contains the text * "\${uniqueSchema}". 
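Both summarizeIncreasing and summarizeDecreasing above delegate the folding of raw MetadataEntry rows into a summary row to the buildUpdatedSummary callback they receive. A rough sketch of a callback with that shape, using simplified stand-in case classes (Entry and Summary are invented here; the real MetadataEntry and WorkflowMetadataSummaryEntry carry many more columns, and the real summarizer handles more than a status key):

    // Simplified stand-ins, for illustration only.
    final case class Entry(workflowExecutionUuid: String, key: String, value: Option[String])
    final case class Summary(workflowExecutionUuid: String, workflowStatus: Option[String])

    // Shape of a buildUpdatedSummary callback: start from the existing summary
    // row if one exists, then fold each metadata entry into it, letting later
    // entries overwrite earlier values.
    def buildUpdatedSummary(existing: Option[Summary], entries: Seq[Entry]): Summary = {
      val base = existing
        .orElse(entries.headOption.map(e => Summary(e.workflowExecutionUuid, None)))
        .getOrElse(sys.error("nothing to summarize"))
      entries.foldLeft(base) {
        case (acc, Entry(_, "status", value @ Some(_))) => acc.copy(workflowStatus = value)
        case (acc, _) => acc
      }
    }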
diff --git a/database/sql/src/main/scala/cromwell/database/sql/SubWorkflowStoreSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/SubWorkflowStoreSqlDatabase.scala
index 10707dc9031..e6430730497 100644
--- a/database/sql/src/main/scala/cromwell/database/sql/SubWorkflowStoreSqlDatabase.scala
+++ b/database/sql/src/main/scala/cromwell/database/sql/SubWorkflowStoreSqlDatabase.scala
@@ -12,10 +12,12 @@ trait SubWorkflowStoreSqlDatabase {
                             callFullyQualifiedName: String,
                             jobIndex: Int,
                             jobAttempt: Int,
-                            subWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Unit]
+                            subWorkflowExecutionUuid: String
+  )(implicit ec: ExecutionContext): Future[Unit]

-  def querySubWorkflowStore(parentWorkflowExecutionUuid: String, callFqn: String, jobIndex: Int, jobAttempt: Int)
-                           (implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]]
+  def querySubWorkflowStore(parentWorkflowExecutionUuid: String, callFqn: String, jobIndex: Int, jobAttempt: Int)(
+    implicit ec: ExecutionContext
+  ): Future[Option[SubWorkflowStoreEntry]]

   def removeSubWorkflowStoreEntries(parentWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Int]
 }
diff --git a/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala
index 82dc51c875b..66dc97be3da 100644
--- a/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala
+++ b/database/sql/src/main/scala/cromwell/database/sql/WorkflowStoreSqlDatabase.scala
@@ -29,8 +29,7 @@ ____ __ ____ ______ .______ __ ___ _______ __ ______
   /**
     * Set all running workflows to aborting state.
     */
-  def setStateToState(fromWorkflowState: String, toWorkflowState: String)
-                     (implicit ec: ExecutionContext): Future[Unit]
+  def setStateToState(fromWorkflowState: String, toWorkflowState: String)(implicit ec: ExecutionContext): Future[Unit]

   /**
     * Set the workflow with the given Id from one state to another.
@@ -45,14 +44,15 @@
   def deleteOrUpdateWorkflowToState(workflowExecutionUuid: String,
                                     workflowStateToDelete1: String,
                                     workflowStateToDelete2: String,
-                                    workflowStateForUpdate: String)
-                                   (implicit ec: ExecutionContext): Future[Option[Boolean]]
+                                    workflowStateForUpdate: String
+  )(implicit ec: ExecutionContext): Future[Option[Boolean]]

   /**
     * Adds the requested WorkflowSourceFiles to the store.
*/ - def addWorkflowStoreEntries(workflowStoreEntries: Iterable[WorkflowStoreEntry]) - (implicit ec: ExecutionContext): Future[Unit] + def addWorkflowStoreEntries(workflowStoreEntries: Iterable[WorkflowStoreEntry])(implicit + ec: ExecutionContext + ): Future[Unit] /** * Retrieves a limited number of workflows which have not already been pulled into the engine and updates their @@ -75,12 +75,12 @@ ____ __ ____ ______ .______ __ ___ _______ __ ______ workflowStateFrom: String, workflowStateTo: String, workflowStateExcluded: String, - excludedGroups: Set[String]) - (implicit ec: ExecutionContext): Future[Seq[WorkflowStoreEntry]] + excludedGroups: Set[String] + )(implicit ec: ExecutionContext): Future[Seq[WorkflowStoreEntry]] - def writeWorkflowHeartbeats(workflowExecutionUuids: Seq[String], - heartbeatTimestamp: Timestamp) - (implicit ec: ExecutionContext): Future[Int] + def writeWorkflowHeartbeats(workflowExecutionUuids: Seq[String], heartbeatTimestamp: Timestamp)(implicit + ec: ExecutionContext + ): Future[Int] /** * Clears out cromwellId and heartbeatTimestamp for all workflow store entries currently assigned @@ -98,8 +98,9 @@ ____ __ ____ ______ .______ __ ___ _______ __ ______ /** * Returns the number of rows updated from one state to another. */ - def updateWorkflowState(workflowExecutionUuid: String, fromWorkflowState: String, toWorkflowState: String) - (implicit ec: ExecutionContext): Future[Int] + def updateWorkflowState(workflowExecutionUuid: String, fromWorkflowState: String, toWorkflowState: String)(implicit + ec: ExecutionContext + ): Future[Int] def findWorkflowsWithAbortRequested(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[String]] diff --git a/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingDiffJoin.scala b/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingDiffJoin.scala index 2b56e9f88d5..d45e7a72cf3 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingDiffJoin.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingDiffJoin.scala @@ -2,4 +2,7 @@ package cromwell.database.sql.joins import cromwell.database.sql.tables.CallCachingEntry -case class CallCachingDiffJoin(cacheEntryA: CallCachingEntry, cacheEntryB: CallCachingEntry, diff: Seq[(Option[(String, String)], Option[(String, String)])]) +case class CallCachingDiffJoin(cacheEntryA: CallCachingEntry, + cacheEntryB: CallCachingEntry, + diff: Seq[(Option[(String, String)], Option[(String, String)])] +) diff --git a/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingJoin.scala b/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingJoin.scala index 07313980bc1..a9d32ee9115 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingJoin.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/joins/CallCachingJoin.scala @@ -2,8 +2,7 @@ package cromwell.database.sql.joins import cromwell.database.sql.tables._ -case class CallCachingJoin -( +case class CallCachingJoin( callCachingEntry: CallCachingEntry, callCachingHashEntries: Seq[CallCachingHashEntry], callCachingAggregationEntry: Option[CallCachingAggregationEntry], diff --git a/database/sql/src/main/scala/cromwell/database/sql/joins/JobStoreJoin.scala b/database/sql/src/main/scala/cromwell/database/sql/joins/JobStoreJoin.scala index 3a2c19a0b81..d47ffd8f615 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/joins/JobStoreJoin.scala +++ 
b/database/sql/src/main/scala/cromwell/database/sql/joins/JobStoreJoin.scala @@ -2,8 +2,7 @@ package cromwell.database.sql.joins import cromwell.database.sql.tables.{JobStoreEntry, JobStoreSimpletonEntry} -case class JobStoreJoin -( +case class JobStoreJoin( jobStoreEntry: JobStoreEntry, jobStoreSimpletonEntries: Seq[JobStoreSimpletonEntry] ) diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala index 088c07bf3c7..c1f87bd1b02 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class CallCachingAggregationEntry -( +case class CallCachingAggregationEntry( baseAggregation: String, inputFilesAggregation: Option[String], callCachingEntryId: Option[Long] = None, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala index 36afadd4c8e..3462ad527cc 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala @@ -2,8 +2,7 @@ package cromwell.database.sql.tables import javax.sql.rowset.serial.SerialClob -case class CallCachingDetritusEntry -( +case class CallCachingDetritusEntry( detritusKey: String, detritusValue: Option[SerialClob], callCachingEntryId: Option[Long] = None, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala index 24263b0cb80..ba748261e82 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class CallCachingEntry -( +case class CallCachingEntry( workflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Int, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala index 5f2aff9fb14..580c5e5bb3f 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class CallCachingHashEntry -( +case class CallCachingHashEntry( hashKey: String, hashValue: String, callCachingEntryId: Option[Long] = None, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala index c4e6628ee81..36a39f7fd5f 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala @@ -2,8 +2,7 @@ package cromwell.database.sql.tables import javax.sql.rowset.serial.SerialClob -case class CallCachingSimpletonEntry -( +case class CallCachingSimpletonEntry( simpletonKey: String, simpletonValue: Option[SerialClob], wdlType: String, 
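The join and table case classes in this stretch all get the companion declaration-site convention: the opening parenthesis of the constructor parameter list moves from its own line up onto the case class line. A before/after sketch with an invented ExampleEntry class (the SerialClob import simply mirrors the entries above):

    import javax.sql.rowset.serial.SerialClob

    // Old style, kept as a comment for contrast: the parameter list opened on
    // the line below the class name.
    //   case class ExampleEntry
    //   (
    //     entryKey: String,
    //     entryValue: Option[SerialClob]
    //   )

    // New style: the parenthesis sits on the declaration line.
    case class ExampleEntry(
      entryKey: String,
      entryValue: Option[SerialClob]
    )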
diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CustomLabelEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CustomLabelEntry.scala index 030a5e0bb25..19c959be929 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CustomLabelEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CustomLabelEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class CustomLabelEntry -( +case class CustomLabelEntry( customLabelKey: String, customLabelValue: String, workflowExecutionUuid: String, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/DockerHashStoreEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/DockerHashStoreEntry.scala index 542558324f5..05fc6c4e7db 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/DockerHashStoreEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/DockerHashStoreEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class DockerHashStoreEntry -( +case class DockerHashStoreEntry( workflowExecutionUuid: String, dockerTag: String, dockerHash: String, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/InformationSchemaEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/InformationSchemaEntry.scala index 0cf05e0b5b5..b29bba61205 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/InformationSchemaEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/InformationSchemaEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -final case class InformationSchemaEntry -( +final case class InformationSchemaEntry( dataLength: Long, indexLength: Long, dataFree: Long diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/JobKeyValueEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/JobKeyValueEntry.scala index 5f91bee2091..8b768ff66de 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/JobKeyValueEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/JobKeyValueEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class JobKeyValueEntry -( +case class JobKeyValueEntry( workflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Int, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala index 6f59a0c738c..a7be44561c4 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala @@ -2,8 +2,7 @@ package cromwell.database.sql.tables import javax.sql.rowset.serial.SerialClob -case class JobStoreEntry -( +case class JobStoreEntry( workflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Int, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala index 4a6c19864dd..b0c3463625f 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala @@ -2,8 +2,7 @@ package cromwell.database.sql.tables import javax.sql.rowset.serial.SerialClob -case class JobStoreSimpletonEntry -( +case class JobStoreSimpletonEntry( simpletonKey: String, 
simpletonValue: Option[SerialClob], wdlType: String, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala index c273c3e47e3..97c4cc9acf2 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala @@ -4,8 +4,7 @@ import java.sql.Timestamp import javax.sql.rowset.serial.SerialClob -case class MetadataEntry -( +case class MetadataEntry( workflowExecutionUuid: String, callFullyQualifiedName: Option[String], jobIndex: Option[Int], diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/SubWorkflowStoreEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/SubWorkflowStoreEntry.scala index 4cf89381ad0..6d081845a84 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/SubWorkflowStoreEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/SubWorkflowStoreEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class SubWorkflowStoreEntry -( +case class SubWorkflowStoreEntry( rootWorkflowId: Option[Long], parentWorkflowExecutionUuid: String, callFullyQualifiedName: String, diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/SummaryStatusEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/SummaryStatusEntry.scala index 6315295d478..8dfdeea1626 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/SummaryStatusEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/SummaryStatusEntry.scala @@ -1,7 +1,6 @@ package cromwell.database.sql.tables -case class SummaryStatusEntry -( +case class SummaryStatusEntry( summaryName: String, summaryPosition: Long, summaryStatusEntryId: Option[Int] = None diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowMetadataSummaryEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowMetadataSummaryEntry.scala index 19e553630e6..bd98d398d75 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowMetadataSummaryEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowMetadataSummaryEntry.scala @@ -2,8 +2,7 @@ package cromwell.database.sql.tables import java.sql.Timestamp -case class WorkflowMetadataSummaryEntry -( +case class WorkflowMetadataSummaryEntry( workflowExecutionUuid: String, workflowName: Option[String], workflowStatus: Option[String], diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala index efb594444d4..6017bb5d539 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala @@ -4,8 +4,7 @@ import java.sql.Timestamp import javax.sql.rowset.serial.{SerialBlob, SerialClob} -case class WorkflowStoreEntry -( +case class WorkflowStoreEntry( workflowExecutionUuid: String, workflowDefinition: Option[SerialClob], workflowUrl: Option[String], diff --git a/dockerHashing/src/main/scala/cromwell/docker/DockerClientHelper.scala b/dockerHashing/src/main/scala/cromwell/docker/DockerClientHelper.scala index 706cbfef3d8..e034319033a 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/DockerClientHelper.scala +++ 
b/dockerHashing/src/main/scala/cromwell/docker/DockerClientHelper.scala @@ -7,10 +7,10 @@ import cromwell.docker.DockerInfoActor.DockerInfoResponse import scala.concurrent.duration.FiniteDuration trait DockerClientHelper extends RobustClientHelper { this: Actor with ActorLogging => - + protected def dockerHashingActor: ActorRef - - private [docker] def dockerResponseReceive: Receive = { + + private[docker] def dockerResponseReceive: Receive = { case dockerResponse: DockerInfoResponse if hasTimeout(dockerResponse.request) => cancelTimeout(dockerResponse.request) receive.apply(dockerResponse) @@ -19,9 +19,10 @@ trait DockerClientHelper extends RobustClientHelper { this: Actor with ActorLogg receive.apply(context -> dockerResponse) } - def sendDockerCommand(dockerHashRequest: DockerInfoRequest, timeout: FiniteDuration = RobustClientHelper.DefaultRequestLostTimeout) = { + def sendDockerCommand(dockerHashRequest: DockerInfoRequest, + timeout: FiniteDuration = RobustClientHelper.DefaultRequestLostTimeout + ) = robustSend(dockerHashRequest, dockerHashingActor, timeout) - } def dockerReceive = robustReceive orElse dockerResponseReceive } diff --git a/dockerHashing/src/main/scala/cromwell/docker/DockerHashResult.scala b/dockerHashing/src/main/scala/cromwell/docker/DockerHashResult.scala index e3120516a9a..771db6d6576 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/DockerHashResult.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/DockerHashResult.scala @@ -5,13 +5,13 @@ import scala.util.{Failure, Success, Try} object DockerHashResult { // See https://docs.docker.com/registry/spec/api/#/content-digests val DigestRegex = """([a-zA-Z0-9_+.-]+):([a-zA-Z0-9]+)""".r - - def fromString(str: String): Try[DockerHashResult] = { + + def fromString(str: String): Try[DockerHashResult] = str match { case DigestRegex(alg, hash) => Success(DockerHashResult(alg, hash)) - case _ => Failure(new IllegalArgumentException(s"Hash value $str does not have the expected 'algorithm:hash' syntax")) + case _ => + Failure(new IllegalArgumentException(s"Hash value $str does not have the expected 'algorithm:hash' syntax")) } - } } case class DockerHashResult(hashAlgorithm: String, hashValue: String) { diff --git a/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala b/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala index a798f351f17..f34702509db 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala @@ -16,25 +16,33 @@ sealed trait DockerImageIdentifier { lazy val name = repository map { r => s"$r/$image" } getOrElse image // The name of the image with a repository prefix if a repository was specified, or with a default repository prefix of // "library" if no repository was specified. - lazy val nameWithDefaultRepository = { + lazy val nameWithDefaultRepository = // In ACR, the repository is part of the registry domain instead of the path // e.g. `terrabatchdev.azurecr.io` if (host.exists(_.contains(AzureContainerRegistry.domain))) image else repository.getOrElse("library") + s"/$image" - } lazy val hostAsString = host map { h => s"$h/" } getOrElse "" // The full name of this image, including a repository prefix only if a repository was explicitly specified. 
lazy val fullName = s"$hostAsString$name:$reference" } -case class DockerImageIdentifierWithoutHash(host: Option[String], repository: Option[String], image: String, reference: String) extends DockerImageIdentifier { +case class DockerImageIdentifierWithoutHash(host: Option[String], + repository: Option[String], + image: String, + reference: String +) extends DockerImageIdentifier { def withHash(hash: DockerHashResult) = DockerImageIdentifierWithHash(host, repository, image, reference, hash) override def swapReference(newReference: String) = this.copy(reference = newReference) } -case class DockerImageIdentifierWithHash(host: Option[String], repository: Option[String], image: String, reference: String, hash: DockerHashResult) extends DockerImageIdentifier { +case class DockerImageIdentifierWithHash(host: Option[String], + repository: Option[String], + image: String, + reference: String, + hash: DockerHashResult +) extends DockerImageIdentifier { override lazy val fullName: String = s"$hostAsString$name@${hash.algorithmAndHash}" override def swapReference(newReference: String) = this.copy(reference = newReference) } @@ -68,12 +76,11 @@ object DockerImageIdentifier { )? """.trim.r - def fromString(dockerString: String): Try[DockerImageIdentifier] = { + def fromString(dockerString: String): Try[DockerImageIdentifier] = dockerString.trim match { case DockerStringRegex(name, tag, hash) => buildId(name, Option(tag), Option(hash)) case _ => Failure(new IllegalArgumentException(s"Docker image $dockerString has an invalid syntax.")) } - } private def isRegistryHostName(str: String) = str.contains('.') || str.startsWith("localhost") @@ -97,10 +104,17 @@ object DockerImageIdentifier { } (tag, hash) match { - case (None, None) => Success(DockerImageIdentifierWithoutHash(dockerHost, dockerRepo, dockerImage, DefaultDockerTag)) + case (None, None) => + Success(DockerImageIdentifierWithoutHash(dockerHost, dockerRepo, dockerImage, DefaultDockerTag)) case (Some(t), None) => Success(DockerImageIdentifierWithoutHash(dockerHost, dockerRepo, dockerImage, t)) - case (None, Some(h)) => DockerHashResult.fromString(h) map { hash => DockerImageIdentifierWithHash(dockerHost, dockerRepo, dockerImage, h, hash) } - case (Some(t), Some(h)) => DockerHashResult.fromString(h) map { hash => DockerImageIdentifierWithHash(dockerHost, dockerRepo, dockerImage, s"$t@$h", hash) } + case (None, Some(h)) => + DockerHashResult.fromString(h) map { hash => + DockerImageIdentifierWithHash(dockerHost, dockerRepo, dockerImage, h, hash) + } + case (Some(t), Some(h)) => + DockerHashResult.fromString(h) map { hash => + DockerImageIdentifierWithHash(dockerHost, dockerRepo, dockerImage, s"$t@$h", hash) + } } } } diff --git a/dockerHashing/src/main/scala/cromwell/docker/DockerInfoActor.scala b/dockerHashing/src/main/scala/cromwell/docker/DockerInfoActor.scala index 3ebb8d98f39..3b3c0e5d7c5 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/DockerInfoActor.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/DockerInfoActor.scala @@ -30,16 +30,21 @@ import scala.concurrent.ExecutionContextExecutor import scala.concurrent.duration._ final class DockerInfoActor( - dockerRegistryFlows: Seq[DockerRegistry], - queueBufferSize: Int, - cacheEntryTTL: FiniteDuration, - cacheSize: Long - ) extends Actor with ActorLogging { + dockerRegistryFlows: Seq[DockerRegistry], + queueBufferSize: Int, + cacheEntryTTL: FiniteDuration, + cacheSize: Long +) extends Actor + with ActorLogging { implicit val system: ActorSystem = context.system implicit val ec: 
ExecutionContextExecutor = context.dispatcher
   implicit val cs: ContextShift[IO] = IO.contextShift(ec)

-  val retryPolicy: RetryPolicy[IO] = RetryPolicy[IO](RetryPolicy.exponentialBackoff(DockerConfiguration.instance.maxTimeBetweenRetries, DockerConfiguration.instance.maxRetries))
+  val retryPolicy: RetryPolicy[IO] = RetryPolicy[IO](
+    RetryPolicy.exponentialBackoff(DockerConfiguration.instance.maxTimeBetweenRetries,
+                                   DockerConfiguration.instance.maxRetries
+    )
+  )

   /* Use the guava CacheBuilder class that implements a thread safe map with built in cache features.
    * https://google.github.io/guava/releases/20.0/api/docs/com/google/common/cache/CacheBuilder.html
@@ -47,29 +52,29 @@ final class DockerInfoActor(
    * - Added to the cache by the streamSink, running on a thread from the stream thread pool
    * - Accessed by this actor on its receive method thread, to check if an element is in the cache and use it
    * - Automatically removed from the cache by the cache itself
-   * 
+   *
    * + Concurrency level is the number of expected threads to modify the cache.
    *   Set to "2" because the stream will add elements, and the cache itself remove them.
-   *   This value has not a critical impact: 
+   *   This value does not have a critical impact:
    *   https://google.github.io/guava/releases/20.0/api/docs/com/google/common/cache/CacheBuilder.html#concurrencyLevel-int-
-   * 
-   * + expireAfterWrite sets the time after which cache entries must expire. 
+   *
+   * + expireAfterWrite sets the time after which cache entries must expire.
    *   We use expireAfterWrite (as opposed to expireAfterAccess) because we want the entry to expire
-   *   even if it's accessed. The goal here is to force the actor to ask again for the hash after a certain 
+   *   even if it's accessed. The goal here is to force the actor to ask again for the hash after a certain
    *   amount of time to guarantee its relative accuracy.
-   * 
-   * + maximumSize sets the maximum amount of entries the cache can contain. 
+   *
+   * + maximumSize sets the maximum amount of entries the cache can contain.
    *   If/when this size is reached, least used entries will be expired
    */
-  private val cache = CacheBuilder.newBuilder()
+  private val cache = CacheBuilder
+    .newBuilder()
     .concurrencyLevel(2)
     .expireAfterWrite(cacheEntryTTL._1, cacheEntryTTL._2)
     .maximumSize(cacheSize)
     .build[DockerImageIdentifier, DockerInformation]()

-  private def checkCache(dockerHashRequest: DockerInfoRequest) = {
+  private def checkCache(dockerHashRequest: DockerInfoRequest) =
     Option(cache.getIfPresent(dockerHashRequest.dockerImageID))
-  }

   override def receive: Receive = receiveBehavior(Map.empty)

@@ -83,26 +88,24 @@ final class DockerInfoActor(
     }
     case ShutdownCommand =>
       // Shutdown all streams by sending None to the queue
-      registries
-        .values.toList
+      registries.values.toList
         .parTraverse[IO, Unit](_.enqueue1(None))
         .unsafeRunSync()
       context stop self
   }

-  def sendToStream(registries: Map[DockerRegistry, StreamQueue], context: DockerInfoActor.DockerInfoContext): Unit = {
+  def sendToStream(registries: Map[DockerRegistry, StreamQueue], context: DockerInfoActor.DockerInfoContext): Unit =
     registries collectFirst {
       case (registry, queue) if registry.accepts(context.dockerImageID) => queue
     } match {
       case Some(queue) => enqueue(context, queue)
       case None => context.replyTo ! DockerHashUnknownRegistry(context.request)
     }
-  }

   def enqueue(dockerInfoContext: DockerInfoContext, queue: StreamQueue): Unit = {
     val enqueueIO = queue.offer1(Option(dockerInfoContext)).runAsync {
-      case Right(true) => IO.unit// Good !
+ case Right(true) => IO.unit // Good ! case _ => backpressure(dockerInfoContext) } @@ -148,13 +151,13 @@ final class DockerInfoActor( // If the registry imposes throttling, debounce the stream to ensure the throttling rate is respected val throttledSource = registry.config.throttle.map(_.delay).map(source.metered[IO]).getOrElse(source) - val stream = clientStream.flatMap({ client => + val stream = clientStream.flatMap { client => throttledSource // Run requests in parallel - allow nbThreads max concurrent requests - order doesn't matter - .parEvalMapUnordered(registry.config.nbThreads)({ request => registry.run(request)(client) }) + .parEvalMapUnordered(registry.config.nbThreads)(request => registry.run(request)(client)) // Send to the sink for finalization of the result .through(streamSink) - }) + } // Start the stream now asynchronously. It will keep running until we terminate the queue by sending None to it stream.compile.drain.unsafeRunAsyncAndForget() @@ -166,7 +169,7 @@ final class DockerInfoActor( override def preStart(): Unit = { // Force initialization of the header constants to make sure they're valid locally(DockerRegistryV2Abstract) - + val registries = dockerRegistryFlows.toList .parTraverse(startAndRegisterStream) @@ -194,12 +197,14 @@ object DockerInfoActor { } case class DockerInformation(dockerHash: DockerHashResult, dockerCompressedSize: Option[DockerSize]) - case class DockerInfoSuccessResponse(dockerInformation: DockerInformation, request: DockerInfoRequest) extends DockerInfoResponse + case class DockerInfoSuccessResponse(dockerInformation: DockerInformation, request: DockerInfoRequest) + extends DockerInfoResponse sealed trait DockerHashFailureResponse extends DockerInfoResponse { def reason: String } - case class DockerInfoFailedResponse(failure: Throwable, request: DockerInfoRequest) extends DockerHashFailureResponse { + case class DockerInfoFailedResponse(failure: Throwable, request: DockerInfoRequest) + extends DockerHashFailureResponse { override val reason = s"Failed to get docker hash for ${request.dockerImageID.fullName} ${failure.getMessage}" } case class DockerHashUnknownRegistry(request: DockerInfoRequest) extends DockerHashFailureResponse { @@ -224,9 +229,10 @@ object DockerInfoActor { def props(dockerRegistryFlows: Seq[DockerRegistry], queueBufferSize: Int = 100, cacheEntryTTL: FiniteDuration, - cacheSize: Long): Props = { - Props(new DockerInfoActor(dockerRegistryFlows, queueBufferSize, cacheEntryTTL, cacheSize)).withDispatcher(Dispatcher.IoDispatcher) - } + cacheSize: Long + ): Props = + Props(new DockerInfoActor(dockerRegistryFlows, queueBufferSize, cacheEntryTTL, cacheSize)) + .withDispatcher(Dispatcher.IoDispatcher) def remoteRegistriesFromConfig(config: Config): List[DockerRegistry] = { import cats.syntax.traverse._ @@ -237,8 +243,8 @@ object DockerInfoActor { ("dockerhub", { c: DockerRegistryConfig => new DockerHubRegistry(c) }), ("google", { c: DockerRegistryConfig => new GoogleRegistry(c) }), ("quay", { c: DockerRegistryConfig => new QuayRegistry(c) }) - ).traverse[ErrorOr, DockerRegistry]({ - case (configPath, constructor) => DockerRegistryConfig.fromConfig(config.as[Config](configPath)).map(constructor) - }).unsafe("Docker registry configuration") + ).traverse[ErrorOr, DockerRegistry] { case (configPath, constructor) => + DockerRegistryConfig.fromConfig(config.as[Config](configPath)).map(constructor) + }.unsafe("Docker registry configuration") } } diff --git a/dockerHashing/src/main/scala/cromwell/docker/DockerRegistry.scala 
b/dockerHashing/src/main/scala/cromwell/docker/DockerRegistry.scala index d35e6e0c91b..fac486d114d 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/DockerRegistry.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/DockerRegistry.scala @@ -8,7 +8,9 @@ import org.http4s.client.Client * Interface used by the docker hash actor to build a flow and validate whether or not it can accept an image. */ trait DockerRegistry { - def run(dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]): IO[(DockerInfoResponse, DockerInfoContext)] + def run(dockerInfoContext: DockerInfoContext)(implicit + client: Client[IO] + ): IO[(DockerInfoResponse, DockerInfoContext)] def accepts(dockerImageIdentifier: DockerImageIdentifier): Boolean def config: DockerRegistryConfig } diff --git a/dockerHashing/src/main/scala/cromwell/docker/DockerRegistryConfig.scala b/dockerHashing/src/main/scala/cromwell/docker/DockerRegistryConfig.scala index a6e15846e5e..327d9e8038c 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/DockerRegistryConfig.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/DockerRegistryConfig.scala @@ -19,9 +19,9 @@ object DockerRegistryConfig { lazy val default = DockerRegistryConfig(None, 5) def fromConfig(config: Config): ErrorOr[DockerRegistryConfig] = { - val throttle = validate { config.getAs[Throttle]("throttle") } - val threads = validate { config.as[Int]("num-threads") } - + val throttle = validate(config.getAs[Throttle]("throttle")) + val threads = validate(config.as[Int]("num-threads")) + (throttle, threads) mapN DockerRegistryConfig.apply } } diff --git a/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliClient.scala b/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliClient.scala index e0e2a383d92..4f2e2b7e891 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliClient.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliClient.scala @@ -11,13 +11,14 @@ import scala.util.Try * https://docs.docker.com/engine/api/v1.27/ */ trait DockerCliClient { + /** * Looks up a docker hash. * * @param dockerCliKey The docker hash to lookup. * @return The hash if found, None if not found, and Failure if an error occurs. */ - def lookupHash(dockerCliKey: DockerCliKey): Try[Option[String]] = { + def lookupHash(dockerCliKey: DockerCliKey): Try[Option[String]] = /* The stdout contains the tab separated repository/tag/digest for __all__ local images. Would be great to just get a single hash using the key... unfortunately @@ -26,16 +27,14 @@ trait DockerCliClient { forRun("docker", "images", "--digests", "--format", """{{printf "%s\t%s\t%s" .Repository .Tag .Digest}}""") { _.flatMap(parseHashLine).find(_.key == dockerCliKey).map(_.digest) } - } /** * Pulls a docker image. * @param dockerCliKey The docker hash to lookup. * @return Failure if an error occurs. */ - def pull(dockerCliKey: DockerCliKey): Try[Unit] = { - forRun("docker", "pull", dockerCliKey.fullName) { const(()) } - } + def pull(dockerCliKey: DockerCliKey): Try[Unit] = + forRun("docker", "pull", dockerCliKey.fullName)(const(())) /** * Tries to run the command, then feeds the stdout to `f`. If the exit code is non-zero, returns a `Failure` with @@ -46,20 +45,18 @@ trait DockerCliClient { * @tparam A Return type. * @return An attempt to run A. 
*/ - private def forRun[A](cmd: String*)(f: Seq[String] => A): Try[A] = { + private def forRun[A](cmd: String*)(f: Seq[String] => A): Try[A] = Try { val dockerCliResult = run(cmd) if (dockerCliResult.exitCode == 0) { f(dockerCliResult.stdout) } else { - throw new RuntimeException( - s"""|Error running: ${cmd.mkString(" ")} - |Exit code: ${dockerCliResult.exitCode} - |${dockerCliResult.stderr.mkString("\n")} - |""".stripMargin) + throw new RuntimeException(s"""|Error running: ${cmd.mkString(" ")} + |Exit code: ${dockerCliResult.exitCode} + |${dockerCliResult.stderr.mkString("\n")} + |""".stripMargin) } } - } /** * Run a command and return the result. Overridable for testing. diff --git a/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala b/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala index f6bf6e96d4c..513d27bc001 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/local/DockerCliFlow.scala @@ -28,20 +28,22 @@ class DockerCliFlow(implicit ec: ExecutionContext) extends DockerRegistry { override def run(dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]) = { implicit val timer = IO.timer(ec) - DockerCliFlow.lookupHashOrTimeout(firstLookupTimeout)(dockerInfoContext) - .flatMap({ + DockerCliFlow + .lookupHashOrTimeout(firstLookupTimeout)(dockerInfoContext) + .flatMap { // If the image isn't there, pull it and try again case (_: DockerInfoNotFound, _) => DockerCliFlow.pull(dockerInfoContext) DockerCliFlow.lookupHashOrTimeout(firstLookupTimeout)(dockerInfoContext) case other => IO.pure(other) - }) + } } override def config = DockerRegistryConfig.default } object DockerCliFlow { + /** * Lookup the hash for the image referenced in the context. * @@ -53,10 +55,11 @@ object DockerCliFlow { DockerInfoActor.logger.debug("Looking up hash of {}", dockerCliKey.fullName) val result = DockerCliClient.lookupHash(dockerCliKey) match { case Success(None) => DockerInfoNotFound(context.request) - case Success(Some(hash)) => DockerHashResult.fromString(hash) match { - case Success(r) => DockerInfoSuccessResponse(DockerInformation(r, None), context.request) - case Failure(t) => DockerInfoFailedResponse(t, context.request) - } + case Success(Some(hash)) => + DockerHashResult.fromString(hash) match { + case Success(r) => DockerInfoSuccessResponse(DockerInformation(r, None), context.request) + case Failure(t) => DockerInfoFailedResponse(t, context.request) + } case Failure(throwable) => DockerInfoFailedResponse(throwable, context.request) } // give the compiler a hint on the debug() override we're trying to use. @@ -72,22 +75,22 @@ object DockerCliFlow { * @param context The image to lookup. * @return The docker hash response plus the context of our flow. */ - private def lookupHashOrTimeout(timeout: FiniteDuration) - (context: DockerInfoContext) - (implicit cs: ContextShift[IO], timer: Timer[IO]): IO[(DockerInfoResponse, DockerInfoContext)] = { - IO(lookupHash(context)).timeout(timeout) - .handleErrorWith({ - case _: TimeoutException => IO.pure { - val dockerCliKey = cliKeyFromImageId(context) - val exception = new TimeoutException( - s"""|Timeout while looking up hash of ${dockerCliKey.fullName}. - |Ensure that docker is running correctly. 
- |""".stripMargin) - DockerInfoFailedResponse(exception, context.request) -> context - } + private def lookupHashOrTimeout(timeout: FiniteDuration)( + context: DockerInfoContext + )(implicit cs: ContextShift[IO], timer: Timer[IO]): IO[(DockerInfoResponse, DockerInfoContext)] = + IO(lookupHash(context)) + .timeout(timeout) + .handleErrorWith { + case _: TimeoutException => + IO.pure { + val dockerCliKey = cliKeyFromImageId(context) + val exception = new TimeoutException(s"""|Timeout while looking up hash of ${dockerCliKey.fullName}. + |Ensure that docker is running correctly. + |""".stripMargin) + DockerInfoFailedResponse(exception, context.request) -> context + } case other => IO.pure(DockerInfoFailedResponse(other, context.request) -> context) - }) - } + } /** * Pull the docker image referenced in context. diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerManifest.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerManifest.scala index a7990d3cb09..8f9a2743d20 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerManifest.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerManifest.scala @@ -3,7 +3,7 @@ package cromwell.docker.registryv2 // From https://docs.docker.com/registry/spec/manifest-v2-2/ sealed trait DockerManifestResponse -case class DockerManifest(layers: Array[DockerLayer]) extends DockerManifestResponse{ +case class DockerManifest(layers: Array[DockerLayer]) extends DockerManifestResponse { lazy val compressedSize: Long = layers.map(_.size).sum } case class DockerLayer(size: Long) diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerRegistryV2Abstract.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerRegistryV2Abstract.scala index bb25cb4bc3d..234b762429d 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerRegistryV2Abstract.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/DockerRegistryV2Abstract.scala @@ -34,16 +34,21 @@ object DockerRegistryV2Abstract { val OCIIndexV1MediaType = "application/vnd.oci.image.index.v1+json" // If one of those fails it means someone changed one of the strings above to an invalid one. - val DockerManifestV2MediaRange = MediaRange.parse(DockerManifestV2MediaType) + val DockerManifestV2MediaRange = MediaRange + .parse(DockerManifestV2MediaType) .unsafe("Cannot parse invalid manifest v2 content type. Please report this error.") - val DockerManifestListV2MediaRange = MediaRange.parse(DockerManifestListV2MediaType) + val DockerManifestListV2MediaRange = MediaRange + .parse(DockerManifestListV2MediaType) .unsafe("Cannot parse invalid manifest list v2 content type. Please report this error.") - val AcceptDockerManifestV2Header = Accept.parse(DockerManifestV2MediaType) + val AcceptDockerManifestV2Header = Accept + .parse(DockerManifestV2MediaType) .unsafe("Cannot parse invalid manifest v2 Accept header. Please report this error.") - val OCIIndexV1MediaRange = MediaRange.parse(OCIIndexV1MediaType) + val OCIIndexV1MediaRange = MediaRange + .parse(OCIIndexV1MediaType) .unsafe("Cannot parse invalid OCI index v1 content type. Please report this error.") - val AcceptOCIIndexV1Header = Accept.parse(OCIIndexV1MediaType) + val AcceptOCIIndexV1Header = Accept + .parse(OCIIndexV1MediaType) .unsafe("Cannot parse invalid OCI index v1 Accept header. 
Please report this error.") implicit val entityManifestDecoder = jsonEntityDecoder[DockerManifest](DockerManifestV2MediaRange) @@ -55,19 +60,21 @@ object DockerRegistryV2Abstract { * This is necessary because the docker registry API responds with an "application/vnd.docker.distribution.manifest.v2+json" * and not the traditional "application/json". Adapted from CirceInstances.jsonOf */ - private def jsonEntityDecoder[A](mediaRange: MediaRange)(implicit decoder: Decoder[A]): EntityDecoder[IO, A] = EntityDecoder.decodeBy[IO, A](mediaRange) { message => - CirceInstances.builder.build.jsonDecoderByteBuffer[IO] - .decode(message, strict = false) - .flatMap({ json => - decoder.decodeJson(json) - .fold( - failure => - DecodeResult.failureT( - InvalidMessageBodyFailure(s"Could not decode JSON: $json", Some(failure))), - DecodeResult.successT(_) - ) - }) - } + private def jsonEntityDecoder[A](mediaRange: MediaRange)(implicit decoder: Decoder[A]): EntityDecoder[IO, A] = + EntityDecoder.decodeBy[IO, A](mediaRange) { message => + CirceInstances.builder.build + .jsonDecoderByteBuffer[IO] + .decode(message, strict = false) + .flatMap { json => + decoder + .decodeJson(json) + .fold( + failure => + DecodeResult.failureT(InvalidMessageBodyFailure(s"Could not decode JSON: $json", Some(failure))), + DecodeResult.successT(_) + ) + } + } // Placeholder exceptions that can be carried through IO before being converted to a DockerInfoFailedResponse private class Unauthorized(message: String) extends Exception(message) @@ -88,7 +95,9 @@ abstract class DockerRegistryV2Abstract(override val config: DockerRegistryConfi /** * This is the main function. Given a docker context and an http client, retrieve information about the docker image. */ - def run(dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]): IO[(DockerInfoResponse, DockerInfoContext)] = { + def run( + dockerInfoContext: DockerInfoContext + )(implicit client: Client[IO]): IO[(DockerInfoResponse, DockerInfoContext)] = { val dockerResponse = for { token <- getToken(dockerInfoContext) dockerSuccessResponse <- getDockerResponse(token, dockerInfoContext) @@ -97,19 +106,20 @@ abstract class DockerRegistryV2Abstract(override val config: DockerRegistryConfi // Always map failures to a DockerHashFailedResponse instead of letting the IO fail, this is important so that the stream // that is calling this function will not fail dockerResponse.attempt - .map({ + .map { case Left(_: Unauthorized) => DockerInfoUnauthorized(dockerInfoContext.request) case Left(_: NotFound) => DockerInfoNotFound(dockerInfoContext.request) case Left(failure) => DockerInfoFailedResponse(failure, dockerInfoContext.request) case Right(value) => value - }) + } .map(_ -> dockerInfoContext) } // Execute a request. No retries because they're expected to already be handled by the client - protected def executeRequest[A](request: IO[Request[IO]], handler: Response[IO] => IO[A])(implicit client: Client[IO]): IO[A] = { + protected def executeRequest[A](request: IO[Request[IO]], handler: Response[IO] => IO[A])(implicit + client: Client[IO] + ): IO[A] = request.flatMap(client.run(_).use[IO, A](handler)) - } /** * Obtain an authorization token for the subsequent manifest request. 
Return IO.pure(None) if no token is needed
@@ -129,11 +139,16 @@ abstract class DockerRegistryV2Abstract(override val config: DockerRegistryConfi
    * @param client http client
    * @return docker info response
    */
-  protected def getDockerResponse(token: Option[String], dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]): IO[DockerInfoSuccessResponse] = {
+  protected def getDockerResponse(token: Option[String], dockerInfoContext: DockerInfoContext)(implicit
+    client: Client[IO]
+  ): IO[DockerInfoSuccessResponse] = {
     val requestDockerManifest = manifestRequest(token, dockerInfoContext.dockerImageID, AcceptDockerManifestV2Header)
     lazy val requestOCIManifest = manifestRequest(token, dockerInfoContext.dockerImageID, AcceptOCIIndexV1Header)
     def tryOCIManifest(err: Throwable) = {
-      logger.info(s"Manifest request failed for docker manifest V2, falling back to OCI manifest. Image: ${dockerInfoContext.dockerImageID}", err)
+      logger.info(
+        s"Manifest request failed for docker manifest V2, falling back to OCI manifest. Image: ${dockerInfoContext.dockerImageID}",
+        err
+      )
       executeRequest(requestOCIManifest, handleManifestResponse(dockerInfoContext, token))
     }
     // Try to execute a request using the Docker Manifest format, and if that fails, try using the newer OCI manifest format
@@ -145,7 +160,8 @@ abstract class DockerRegistryV2Abstract(override val config: DockerRegistryConfi
    * Returns true if this flow is able to process this docker image,
    * false otherwise
    */
-  def accepts(dockerImageIdentifier: DockerImageIdentifier) = dockerImageIdentifier.host.contains(registryHostName(dockerImageIdentifier))
+  def accepts(dockerImageIdentifier: DockerImageIdentifier) =
+    dockerImageIdentifier.host.contains(registryHostName(dockerImageIdentifier))
 
   /* Methods that must be implemented by a subclass */
@@ -201,26 +217,30 @@ abstract class DockerRegistryV2Abstract(override val config: DockerRegistryConfi
    */
   private def tokenResponseHandler(response: Response[IO]): IO[String] = response match {
     case Status.Successful(r) => r.as[DockerAccessToken].map(_.token)
-    case r => r.as[String]
-      .flatMap(b => IO.raiseError(new Exception(s"Request failed with status ${r.status.code} and body $b")))
+    case r =>
+      r.as[String]
+        .flatMap(b => IO.raiseError(new Exception(s"Request failed with status ${r.status.code} and body $b")))
   }
 
   /**
    * Builds the manifest URI to be queried based on a DockerImageID
    */
-  private def buildManifestUri(dockerImageID: DockerImageIdentifier): Uri = {
+  private def buildManifestUri(dockerImageID: DockerImageIdentifier): Uri =
     Uri.apply(
       scheme = Option(Scheme.https),
       authority = Option(Authority(host = Uri.RegName(registryHostName(dockerImageID)))),
       path = s"/v2/${dockerImageID.nameWithDefaultRepository}/manifests/${dockerImageID.reference}"
     )
-  }
 
   /**
    * Request to get the manifest, using the auth token if provided
    */
-  private def manifestRequest(token: Option[String], imageId: DockerImageIdentifier, manifestHeader: Accept): IO[Request[IO]] = {
-    val authorizationHeader: Option[Authorization] = token.map(t => Authorization(Credentials.Token(AuthScheme.Bearer, t)))
+  private def manifestRequest(token: Option[String],
+                              imageId: DockerImageIdentifier,
+                              manifestHeader: Accept
+  ): IO[Request[IO]] = {
+    val authorizationHeader: Option[Authorization] =
+      token.map(t => Authorization(Credentials.Token(AuthScheme.Bearer, t)))
     val request = Method.GET(
       buildManifestUri(imageId),
       List(
@@ -231,19 +251,26 @@ abstract class DockerRegistryV2Abstract(override val config: DockerRegistryConfi
     request
   }
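A minimal sketch of how the helpers above compose, assuming the http4s client API already used in this file (`exampleDigestLookup` is a hypothetical name, not part of this patch): the registry v2 lookup is a GET of `/v2/<name>/manifests/<reference>` with the manifest media type in the `Accept` header, and the digest comes back in a response header rather than in the body.

    private def exampleDigestLookup(token: Option[String], imageId: DockerImageIdentifier)(implicit
      client: Client[IO]
    ): IO[String] =
      manifestRequest(token, imageId, AcceptDockerManifestV2Header).flatMap { request =>
        client.run(request).use { response =>
          // The registry reports the manifest digest in the header named by the DigestHeaderName constant used elsewhere in this file
          response.headers.find(_.toRaw.name.equals(DigestHeaderName)) match {
            case Some(digest) => IO.pure(digest.value)
            case None => IO.raiseError(new Exception("Manifest response did not have a digest header"))
          }
        }
      }

The production path instead goes through `executeRequest` and `handleManifestResponse`, which additionally parse the manifest body to recover the compressed image size.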
- private def handleManifestResponse(dockerInfoContext: DockerInfoContext, token: Option[String])(response: Response[IO])(implicit client: Client[IO]): IO[DockerInfoSuccessResponse] = { + private def handleManifestResponse(dockerInfoContext: DockerInfoContext, token: Option[String])( + response: Response[IO] + )(implicit client: Client[IO]): IO[DockerInfoSuccessResponse] = { // Getting the manifest content is not strictly necessary but just a bonus to get the size. If it fails, log the error and return None - def handleManifestAttempt(attempt: Either[Throwable, Option[DockerManifest]]): Option[DockerManifest] = attempt match { - case Left(failure) => - logger.warn(s"Could not get manifest for ${dockerInfoContext.dockerImageID.fullName}", failure) - None - case Right(manifest) => manifest - } + def handleManifestAttempt(attempt: Either[Throwable, Option[DockerManifest]]): Option[DockerManifest] = + attempt match { + case Left(failure) => + logger.warn(s"Could not get manifest for ${dockerInfoContext.dockerImageID.fullName}", failure) + None + case Right(manifest) => manifest + } for { hashResult <- getDigestFromResponse(response) - maybeManifest <- parseManifest(dockerInfoContext.dockerImageID, token)(response).attempt.map(handleManifestAttempt) - } yield DockerInfoSuccessResponse(DockerInformation(hashResult, maybeManifest.map(_.compressedSize).map(DockerSize.apply)), dockerInfoContext.request) + maybeManifest <- parseManifest(dockerInfoContext.dockerImageID, token)(response).attempt + .map(handleManifestAttempt) + } yield DockerInfoSuccessResponse( + DockerInformation(hashResult, maybeManifest.map(_.compressedSize).map(DockerSize.apply)), + dockerInfoContext.request + ) } /** @@ -259,19 +286,22 @@ abstract class DockerRegistryV2Abstract(override val config: DockerRegistryConfi * between platforms. * If that assumption turns out to be incorrect, a smarter decision may need to be made to choose the manifest to lookup. 
*/
-  private def parseManifest(dockerImageIdentifier: DockerImageIdentifier, token: Option[String])(response: Response[IO])(implicit client: Client[IO]): IO[Option[DockerManifest]] = response match {
+  private def parseManifest(dockerImageIdentifier: DockerImageIdentifier, token: Option[String])(
+    response: Response[IO]
+  )(implicit client: Client[IO]): IO[Option[DockerManifest]] = response match {
     case Status.Successful(r) if r.headers.exists(_.value.equalsIgnoreCase(DockerManifestV2MediaType)) =>
       r.as[DockerManifest].map(Option.apply)
     case Status.Successful(r) if r.headers.exists(_.value.equalsIgnoreCase(DockerManifestListV2MediaType)) =>
-      r.as[DockerManifestList].flatMap({ dockerManifestList =>
+      r.as[DockerManifestList].flatMap { dockerManifestList =>
         obtainManifestFromList(dockerManifestList, dockerImageIdentifier, token)
-      })
+      }
     case _ => IO.pure(None)
   }
 
   private def obtainManifestFromList(dockerManifestList: DockerManifestList,
                                      dockerImageIdentifier: DockerImageIdentifier,
-                                     token: Option[String])(implicit client: Client[IO]): IO[Option[DockerManifest]] = {
+                                     token: Option[String]
+  )(implicit client: Client[IO]): IO[Option[DockerManifest]] =
     dockerManifestList.manifests.headOption
       .map(_.digest)
       .map(dockerImageIdentifier.swapReference) match {
       case Some(identifierWithNewHash) =>
         val request = manifestRequest(token, identifierWithNewHash, AcceptDockerManifestV2Header)
         executeRequest(request, parseManifest(dockerImageIdentifier, token))
       case None =>
-        logger.error(s"The manifest list for ${dockerImageIdentifier.fullName} was empty. Cannot proceed to obtain the size of image")
+        logger.error(
+          s"The manifest list for ${dockerImageIdentifier.fullName} was empty. Cannot proceed to obtain the size of the image"
+        )
         IO.pure(None)
     }
-  }
 
   private def getDigestFromResponse(response: Response[IO]): IO[DockerHashResult] = response match {
     case Status.Successful(r) => extractDigestFromHeaders(r.headers)
-    case Status.Unauthorized(r) => r.as[String].flatMap(body => IO.raiseError(new Unauthorized(r.status.toString + " " + body)))
+    case Status.Unauthorized(r) =>
+      r.as[String].flatMap(body => IO.raiseError(new Unauthorized(r.status.toString + " " + body)))
     case Status.NotFound(r) => r.as[String].flatMap(body => IO.raiseError(new NotFound(r.status.toString + " " + body)))
-    case failed => failed.as[String].flatMap(body => IO.raiseError(new UnknownError(failed.status.toString + " " + body))
-    )
+    case failed =>
+      failed.as[String].flatMap(body => IO.raiseError(new UnknownError(failed.status.toString + " " + body)))
   }
 
-  private def extractDigestFromHeaders(headers: Headers) = {
+  private def extractDigestFromHeaders(headers: Headers) =
     headers.find(a => a.toRaw.name.equals(DigestHeaderName)) match {
       case Some(digest) => IO.fromEither(DockerHashResult.fromString(digest.value).toEither)
       case None => IO.raiseError(new Exception(s"Manifest response did not have a digest header"))
     }
-  }
 }
diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/azure/AzureContainerRegistry.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/azure/AzureContainerRegistry.scala
index 46dfd116bc6..ff251839ba9 100644
--- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/azure/AzureContainerRegistry.scala
+++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/azure/AzureContainerRegistry.scala
@@ -15,7 +15,6 @@ import org.http4s.client.Client
 import io.circe.generic.auto._
 import org.http4s._
 
-
 class 
AzureContainerRegistry(config: DockerRegistryConfig) extends DockerRegistryV2Abstract(config) with LazyLogging { /** @@ -25,7 +24,7 @@ class AzureContainerRegistry(config: DockerRegistryConfig) extends DockerRegistr dockerImageIdentifier.host.getOrElse("") override def accepts(dockerImageIdentifier: DockerImageIdentifier): Boolean = - dockerImageIdentifier.hostAsString.contains(domain) + dockerImageIdentifier.hostAsString.contains(domain) override protected def authorizationServerHostName(dockerImageIdentifier: DockerImageIdentifier): String = dockerImageIdentifier.host.getOrElse("") @@ -35,13 +34,12 @@ class AzureContainerRegistry(config: DockerRegistryConfig) extends DockerRegistr */ override def serviceName: Option[String] = throw new Exception("ACR service name is host of user-defined registry, must derive from `DockerImageIdentifier`") - + /** * Builds the list of headers for the token request */ - override protected def buildTokenRequestHeaders(dockerInfoContext: DockerInfoContext): List[Header] = { + override protected def buildTokenRequestHeaders(dockerInfoContext: DockerInfoContext): List[Header] = List(contentTypeHeader) - } private val contentTypeHeader: Header = { import org.http4s.headers.`Content-Type` @@ -49,7 +47,7 @@ class AzureContainerRegistry(config: DockerRegistryConfig) extends DockerRegistr `Content-Type`(MediaType.application.`x-www-form-urlencoded`) } - + private def getRefreshToken(authServerHostname: String, defaultAccessToken: String): IO[Request[IO]] = { import org.http4s.Uri.{Authority, Scheme} import org.http4s.client.dsl.io._ @@ -69,16 +67,16 @@ class AzureContainerRegistry(config: DockerRegistryConfig) extends DockerRegistr "grant_type" -> "access_token" ), uri, - List(contentTypeHeader): _* + List(contentTypeHeader): _* ) } /* Unlike other repositories, Azure reserves `GET /oauth2/token` for Basic Authentication [0] In order to use Oauth we must `POST /oauth2/token` [1] - + [0] https://github.com/Azure/acr/blob/main/docs/Token-BasicAuth.md#using-the-token-api - [1] https://github.com/Azure/acr/blob/main/docs/AAD-OAuth.md#calling-post-oauth2token-to-get-an-acr-access-token + [1] https://github.com/Azure/acr/blob/main/docs/AAD-OAuth.md#calling-post-oauth2token-to-get-an-acr-access-token */ private def getDockerAccessToken(hostname: String, repository: String, refreshToken: String): IO[Request[IO]] = { import org.http4s.Uri.{Authority, Scheme} @@ -102,14 +100,17 @@ class AzureContainerRegistry(config: DockerRegistryConfig) extends DockerRegistr "grant_type" -> "refresh_token" ), uri, - List(contentTypeHeader): _* + List(contentTypeHeader): _* ) } - override protected def getToken(dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]): IO[Option[String]] = { + override protected def getToken( + dockerInfoContext: DockerInfoContext + )(implicit client: Client[IO]): IO[Option[String]] = { val hostname = authorizationServerHostName(dockerInfoContext.dockerImageID) - val maybeAadAccessToken: ErrorOr[String] = AzureCredentials.getAccessToken(None) // AAD token suitable for get-refresh-token request - val repository = dockerInfoContext.dockerImageID.image // ACR uses what we think of image name, as the repository + val maybeAadAccessToken: ErrorOr[String] = + AzureCredentials.getAccessToken(None) // AAD token suitable for get-refresh-token request + val repository = dockerInfoContext.dockerImageID.image // ACR uses what we think of image name, as the repository // Top-level flow: AAD access token -> refresh token -> ACR access token maybeAadAccessToken 
match { @@ -131,19 +132,21 @@ class AzureContainerRegistry(config: DockerRegistryConfig) extends DockerRegistr private def parseRefreshToken(response: Response[IO]): IO[String] = response match { case Status.Successful(r) => r.as[AcrRefreshToken].map(_.refresh_token) case r => - r.as[String].flatMap(b => IO.raiseError(new Exception(s"Request failed with status ${r.status.code} and body $b"))) + r.as[String] + .flatMap(b => IO.raiseError(new Exception(s"Request failed with status ${r.status.code} and body $b"))) } private def parseAccessToken(response: Response[IO]): IO[String] = response match { case Status.Successful(r) => r.as[AcrAccessToken].map(_.access_token) case r => - r.as[String].flatMap(b => IO.raiseError(new Exception(s"Request failed with status ${r.status.code} and body $b"))) + r.as[String] + .flatMap(b => IO.raiseError(new Exception(s"Request failed with status ${r.status.code} and body $b"))) } } object AzureContainerRegistry { - + def domain: String = "azurecr.io" - + } diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/dockerhub/DockerHubRegistry.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/dockerhub/DockerHubRegistry.scala index 33fb2dabfa9..c523fe71169 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/dockerhub/DockerHubRegistry.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/dockerhub/DockerHubRegistry.scala @@ -17,12 +17,10 @@ class DockerHubRegistry(config: DockerRegistryConfig) extends DockerRegistryV2Ab /** * Builds the list of headers for the token request */ - def buildTokenRequestHeaders(dockerInfoContext: DockerInfoContext) = { - dockerInfoContext.credentials collect { - case DockerCredentials(token) => - Authorization(org.http4s.BasicCredentials(token)) + def buildTokenRequestHeaders(dockerInfoContext: DockerInfoContext) = + dockerInfoContext.credentials collect { case DockerCredentials(token) => + Authorization(org.http4s.BasicCredentials(token)) } - } override def accepts(dockerImageIdentifier: DockerImageIdentifier): Boolean = dockerImageIdentifier.host |> isValidDockerHubHost diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/google/GoogleRegistry.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/google/GoogleRegistry.scala index 4db74317e07..81bcb4dd4ec 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/google/GoogleRegistry.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/google/GoogleRegistry.scala @@ -11,12 +11,17 @@ import scala.concurrent.duration._ class GoogleRegistry(config: DockerRegistryConfig) extends DockerRegistryV2Abstract(config) { private val AccessTokenAcceptableTTL = 1.minute - - def googleRegion(dockerImageIdentifier: DockerImageIdentifier): String = dockerImageIdentifier.host.flatMap(_.split("/").headOption).getOrElse("") - override def registryHostName(dockerImageIdentifier: DockerImageIdentifier): String = googleRegion(dockerImageIdentifier) - override def authorizationServerHostName(dockerImageIdentifier: DockerImageIdentifier): String = googleRegion(dockerImageIdentifier) - + def googleRegion(dockerImageIdentifier: DockerImageIdentifier): String = + dockerImageIdentifier.host.flatMap(_.split("/").headOption).getOrElse("") + + override def registryHostName(dockerImageIdentifier: DockerImageIdentifier): String = googleRegion( + dockerImageIdentifier + ) + override def authorizationServerHostName(dockerImageIdentifier: DockerImageIdentifier): 
String = googleRegion( + dockerImageIdentifier + ) + override protected def buildTokenRequestUri(dockerImageID: DockerImageIdentifier): Uri = { val uri = super.buildTokenRequestUri(dockerImageID) uri.withPath(s"/v2${uri.path}") @@ -25,17 +30,15 @@ class GoogleRegistry(config: DockerRegistryConfig) extends DockerRegistryV2Abstr /** * Builds the list of headers for the token request */ - def buildTokenRequestHeaders(dockerInfoContext: DockerInfoContext): List[Authorization] = { - dockerInfoContext.credentials collect { - case credentials: OAuth2Credentials => Authorization(org.http4s.Credentials.Token(AuthScheme.Bearer, freshAccessToken(credentials))) + def buildTokenRequestHeaders(dockerInfoContext: DockerInfoContext): List[Authorization] = + dockerInfoContext.credentials collect { case credentials: OAuth2Credentials => + Authorization(org.http4s.Credentials.Token(AuthScheme.Bearer, freshAccessToken(credentials))) } - } - + private def freshAccessToken(credential: OAuth2Credentials) = { - def accessTokenTTLIsAcceptable(accessToken: AccessToken) = { + def accessTokenTTLIsAcceptable(accessToken: AccessToken) = (accessToken.getExpirationTime.getTime - System.currentTimeMillis()).millis.gteq(AccessTokenAcceptableTTL) - } - + Option(credential.getAccessToken) match { case Some(accessToken) if accessTokenTTLIsAcceptable(accessToken) => accessToken.getTokenValue case _ => @@ -44,8 +47,9 @@ class GoogleRegistry(config: DockerRegistryConfig) extends DockerRegistryV2Abstr } } - override def accepts(dockerImageIdentifier: DockerImageIdentifier): Boolean = { + override def accepts(dockerImageIdentifier: DockerImageIdentifier): Boolean = // Supports both GCR (Google Container Registry) and GAR (Google Artifact Registry). - dockerImageIdentifier.hostAsString.contains("gcr.io") || dockerImageIdentifier.hostAsString.contains("-docker.pkg.dev") - } + dockerImageIdentifier.hostAsString.contains("gcr.io") || dockerImageIdentifier.hostAsString.contains( + "-docker.pkg.dev" + ) } diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/quay/QuayRegistry.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/quay/QuayRegistry.scala index d739eba2651..00ca2836636 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/quay/QuayRegistry.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/quay/QuayRegistry.scala @@ -9,12 +9,13 @@ import org.http4s.client.Client class QuayRegistry(config: DockerRegistryConfig) extends DockerRegistryV2Abstract(config) { override protected def registryHostName(dockerImageIdentifier: DockerImageIdentifier): String = "quay.io" - // Not used for now because we bypass the token part as quay doesn't require one for public images + // Not used for now because we bypass the token part as quay doesn't require one for public images override protected def authorizationServerHostName(dockerImageIdentifier: DockerImageIdentifier): String = "quay.io" // Not used for now, same reason as above override protected def buildTokenRequestHeaders(dockerInfoContext: DockerInfoContext): List[Header] = List.empty - override protected def getToken(dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]): IO[Option[String]] = { + override protected def getToken(dockerInfoContext: DockerInfoContext)(implicit + client: Client[IO] + ): IO[Option[String]] = IO.pure(None) - } } diff --git a/dockerHashing/src/test/scala/cromwell/docker/DockerHashMocks.scala b/dockerHashing/src/test/scala/cromwell/docker/DockerHashMocks.scala index 
4c664912aeb..d39cd538d17 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/DockerHashMocks.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/DockerHashMocks.scala @@ -11,26 +11,27 @@ class DockerRegistryMock(responses: MockHashResponse*) extends DockerRegistry { // Counts the number of elements going through this "flow" private var _count: Int = 0 - private def nextResponse(context: DockerInfoContext): (DockerInfoResponse, DockerInfoContext) = - responsesLeft.headOption match { - case Some(mockResponse) => - _count += 1 - if (mockResponse.nb > 1) - responsesLeft.update(0, mockResponse.copy(nb = mockResponse.nb - 1)) - else - responsesLeft.remove(0) - (mockResponse.hashResponse, context) - // When we hit the end, loop - case None => - responsesLeft = responses.toBuffer - nextResponse(context) - } - + private def nextResponse(context: DockerInfoContext): (DockerInfoResponse, DockerInfoContext) = + responsesLeft.headOption match { + case Some(mockResponse) => + _count += 1 + if (mockResponse.nb > 1) + responsesLeft.update(0, mockResponse.copy(nb = mockResponse.nb - 1)) + else + responsesLeft.remove(0) + (mockResponse.hashResponse, context) + // When we hit the end, loop + case None => + responsesLeft = responses.toBuffer + nextResponse(context) + } + def count() = _count override def accepts(dockerImageIdentifier: DockerImageIdentifier): Boolean = true - override def run(dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]) = IO.pure(nextResponse(dockerInfoContext)) + override def run(dockerInfoContext: DockerInfoContext)(implicit client: Client[IO]) = + IO.pure(nextResponse(dockerInfoContext)) override def config = DockerRegistryConfig.default } diff --git a/dockerHashing/src/test/scala/cromwell/docker/DockerImageIdentifierSpec.scala b/dockerHashing/src/test/scala/cromwell/docker/DockerImageIdentifierSpec.scala index 41353934fc6..35fe341737b 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/DockerImageIdentifierSpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/DockerImageIdentifierSpec.scala @@ -5,30 +5,49 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -class DockerImageIdentifierSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { +class DockerImageIdentifierSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { behavior of "DockerImageID" it should "parse valid docker images" in { val valid = Table( - ("sourceString", "host", "repo", "image", "reference"), + ("sourceString", "host", "repo", "image", "reference"), // Without tags -> latest - ("ubuntu", None, None, "ubuntu", "latest"), - ("broad/cromwell", None, Option("broad"), "cromwell", "latest"), - ("index.docker.io/ubuntu", Option("index.docker.io"), None, "ubuntu", "latest"), - ("broad/cromwell/submarine", None, Option("broad/cromwell"), "submarine", "latest"), - ("gcr.io/google/slim", Option("gcr.io"), Option("google"), "slim", "latest"), - ("us-central1-docker.pkg.dev/google/slim", Option("us-central1-docker.pkg.dev"), Option("google"), "slim", "latest"), - ("terrabatchdev.azurecr.io/postgres", Option("terrabatchdev.azurecr.io"), None, "postgres", "latest"), + ("ubuntu", None, None, "ubuntu", "latest"), + ("broad/cromwell", None, Option("broad"), "cromwell", "latest"), + ("index.docker.io/ubuntu", Option("index.docker.io"), None, "ubuntu", "latest"), + ("broad/cromwell/submarine", None, 
Option("broad/cromwell"), "submarine", "latest"), + ("gcr.io/google/slim", Option("gcr.io"), Option("google"), "slim", "latest"), + ("us-central1-docker.pkg.dev/google/slim", + Option("us-central1-docker.pkg.dev"), + Option("google"), + "slim", + "latest" + ), + ("terrabatchdev.azurecr.io/postgres", Option("terrabatchdev.azurecr.io"), None, "postgres", "latest"), // With tags - ("ubuntu:latest", None, None, "ubuntu", "latest"), - ("ubuntu:1235-SNAP", None, None, "ubuntu", "1235-SNAP"), - ("ubuntu:V3.8-5_1", None, None, "ubuntu", "V3.8-5_1"), - ("index.docker.io:9999/ubuntu:170904", Option("index.docker.io:9999"), None, "ubuntu", "170904"), - ("localhost:5000/capture/transwf:170904", Option("localhost:5000"), Option("capture"), "transwf", "170904"), - ("quay.io/biocontainers/platypus-variant:0.8.1.1--htslib1.5_0", Option("quay.io"), Option("biocontainers"), "platypus-variant", "0.8.1.1--htslib1.5_0"), + ("ubuntu:latest", None, None, "ubuntu", "latest"), + ("ubuntu:1235-SNAP", None, None, "ubuntu", "1235-SNAP"), + ("ubuntu:V3.8-5_1", None, None, "ubuntu", "V3.8-5_1"), + ("index.docker.io:9999/ubuntu:170904", Option("index.docker.io:9999"), None, "ubuntu", "170904"), + ("localhost:5000/capture/transwf:170904", Option("localhost:5000"), Option("capture"), "transwf", "170904"), + ("quay.io/biocontainers/platypus-variant:0.8.1.1--htslib1.5_0", + Option("quay.io"), + Option("biocontainers"), + "platypus-variant", + "0.8.1.1--htslib1.5_0" + ), ("terrabatchdev.azurecr.io/postgres:latest", Option("terrabatchdev.azurecr.io"), None, "postgres", "latest"), // Very long tags with trailing spaces cause problems for the re engine - ("someuser/someimage:supercalifragilisticexpialidociouseventhoughthesoundofitissomethingquiteatrociousifyousayitloudenoughyoullalwayssoundprecocious ", None, Some("someuser"), "someimage", "supercalifragilisticexpialidociouseventhoughthesoundofitissomethingquiteatrociousifyousayitloudenoughyoullalwayssoundprecocious") + ("someuser/someimage:supercalifragilisticexpialidociouseventhoughthesoundofitissomethingquiteatrociousifyousayitloudenoughyoullalwayssoundprecocious ", + None, + Some("someuser"), + "someimage", + "supercalifragilisticexpialidociouseventhoughthesoundofitissomethingquiteatrociousifyousayitloudenoughyoullalwayssoundprecocious" + ) ) forAll(valid) { (dockerString, host, repo, image, reference) => diff --git a/dockerHashing/src/test/scala/cromwell/docker/DockerInfoActorSpec.scala b/dockerHashing/src/test/scala/cromwell/docker/DockerInfoActorSpec.scala index 72baec70825..ed48762c2cb 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/DockerInfoActorSpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/DockerInfoActorSpec.scala @@ -25,44 +25,40 @@ class DockerInfoActorSpec extends DockerRegistrySpec with AnyFlatSpecLike with M it should "retrieve a public docker hash" taggedAs IntegrationTest in { dockerActor ! makeRequest("ubuntu:latest") - - expectMsgPF(5 second) { - case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => - alg shouldBe "sha256" - hash should not be empty + + expectMsgPF(5 second) { case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => + alg shouldBe "sha256" + hash should not be empty } } it should "retrieve a public docker hash on gcr" taggedAs IntegrationTest in { dockerActor ! 
makeRequest("gcr.io/google-containers/alpine-with-bash:1.0") - expectMsgPF(5 second) { - case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => - alg shouldBe "sha256" - hash should not be empty + expectMsgPF(5 second) { case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => + alg shouldBe "sha256" + hash should not be empty } } it should "retrieve a public docker hash on gar" taggedAs IntegrationTest in { dockerActor ! makeRequest("us-central1-docker.pkg.dev/broad-dsde-cromwell-dev/bt-335/ubuntu:bt-335") - expectMsgPF(5 second) { - case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => - alg shouldBe "sha256" - hash should not be empty + expectMsgPF(5 second) { case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => + alg shouldBe "sha256" + hash should not be empty } } it should "retrieve a private docker hash on acr" taggedAs IntegrationTest in { dockerActor ! makeRequest("terrabatchdev.azurecr.io/postgres:latest") - expectMsgPF(15 second) { - case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => - alg shouldBe "sha256" - hash should not be empty + expectMsgPF(15 second) { case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => + alg shouldBe "sha256" + hash should not be empty } } - + it should "send image not found message back if the image does not exist" taggedAs IntegrationTest in { val notFound = makeRequest("ubuntu:nonexistingtag") dockerActor ! notFound @@ -76,38 +72,38 @@ class DockerInfoActorSpec extends DockerRegistrySpec with AnyFlatSpecLike with M expectMsgClass(5 seconds, classOf[DockerInfoUnauthorized]) } - + it should "send an unrecognized host message if no flow can process the docker string" taggedAs IntegrationTest in { val unauthorized = makeRequest("unknown.io/image:v1") dockerActor ! unauthorized expectMsgClass(5 seconds, classOf[DockerHashUnknownRegistry]) } - + it should "cache results" in { - + val image1 = dockerImage("ubuntu:latest") val request = DockerInfoRequest(image1) - + val hashSuccess = DockerHashResult("sha256", "hashvalue") val responseSuccess = DockerInfoSuccessResponse(DockerInformation(hashSuccess, None), request) val mockResponseSuccess = MockHashResponse(responseSuccess, 1) - + val responseFailure = DockerInfoFailedResponse(new Exception("Docker hash failed - part of test flow"), request) val mockResponseFailure = MockHashResponse(responseFailure, 1) - + // Send back success, failure, success, failure, ... val mockHttpFlow = new DockerRegistryMock(mockResponseSuccess, mockResponseFailure) val dockerActorWithCache = system.actorOf( props = DockerInfoActor.props(Seq(mockHttpFlow), 1000, 3 seconds, 10), - name = "dockerActorWithCache", + name = "dockerActorWithCache" ) - + dockerActorWithCache ! request expectMsg(DockerInfoSuccessResponse(DockerInformation(hashSuccess, None), request)) // Necessary to give some time to the cache to be updated - as it's decoupled from sending back the response Thread.sleep(1000) - + dockerActorWithCache ! 
request // Without caching, the second request would have yielded a Failure since the mock flow alternates between a success and a failure // Getting a success here means the request didn't make it to the stream @@ -122,11 +118,10 @@ class DockerInfoActorSpec extends DockerRegistrySpec with AnyFlatSpecLike with M mockHttpFlow.count() shouldBe 2 } - it should "not deadlock" taggedAs IntegrationTest in { lazy val dockerActorScale = system.actorOf( props = DockerInfoActor.props(registryFlows, 1000, 20.minutes, 0), - name = "dockerActorScale", + name = "dockerActorScale" ) 0 until 400 foreach { _ => dockerActorScale ! makeRequest("gcr.io/google-containers/alpine-with-bash:1.0") diff --git a/dockerHashing/src/test/scala/cromwell/docker/DockerRegistrySpec.scala b/dockerHashing/src/test/scala/cromwell/docker/DockerRegistrySpec.scala index ab35c216b51..5742f44bc2c 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/DockerRegistrySpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/DockerRegistrySpec.scala @@ -18,14 +18,13 @@ abstract class DockerRegistrySpec extends TestKitSuite with ImplicitSender { // Disable cache by setting a cache size of 0 - A separate test tests the cache lazy val dockerActor: ActorRef = system.actorOf( props = DockerInfoActor.props(registryFlows, 1000, 20.minutes, 0), - name = "dockerActor", + name = "dockerActor" ) def dockerImage(string: String): DockerImageIdentifier = DockerImageIdentifier.fromString(string).get - def makeRequest(string: String): DockerInfoRequest = { + def makeRequest(string: String): DockerInfoRequest = DockerInfoRequest(dockerImage(string)) - } override protected def afterAll(): Unit = { system.stop(dockerActor) diff --git a/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliClientSpec.scala b/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliClientSpec.scala index 6553979f9ff..14b9e4f3ba3 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliClientSpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliClientSpec.scala @@ -7,18 +7,24 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.util.{Failure, Success} -class DockerCliClientSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { +class DockerCliClientSpec + extends AnyFlatSpecLike + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { behavior of "DockerCliClient" private val lookupSuccessStdout = Seq( "\t\t", "fauxbuntu\tlatest\tsha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", - "fauxbuntu\tmytag\tsha256:00001111222233334444555566667777888899990000aaaabbbbccccddddeeee") + "fauxbuntu\tmytag\tsha256:00001111222233334444555566667777888899990000aaaabbbbccccddddeeee" + ) private val lookupSuccessHashes = Table( ("dockerCliKey", "hashValue"), (DockerCliKey("fauxbuntu", "latest"), "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"), - (DockerCliKey("fauxbuntu", "mytag"), "00001111222233334444555566667777888899990000aaaabbbbccccddddeeee")) + (DockerCliKey("fauxbuntu", "mytag"), "00001111222233334444555566667777888899990000aaaabbbbccccddddeeee") + ) forAll(lookupSuccessHashes) { (dockerCliKey, hashValue) => it should s"successfully lookup simulated hash for ${dockerCliKey.fullName}" in { @@ -47,7 +53,8 @@ class DockerCliClientSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with """|Error running: docker images --digests --format {{printf "%s\t%s\t%s" .Repository .Tag .Digest}} |Exit code: 1 |Error 
response from daemon: Bad response from Docker engine - |""".stripMargin) + |""".stripMargin + ) } } diff --git a/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliSpec.scala b/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliSpec.scala index 3a8d88cdb01..7565195f1d2 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliSpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliSpec.scala @@ -1,7 +1,7 @@ package cromwell.docker.local import cromwell.core.Tags.IntegrationTest -import cromwell.docker.DockerInfoActor.{DockerInfoNotFound, DockerInfoSuccessResponse, DockerInformation} +import cromwell.docker.DockerInfoActor.{DockerInfoNotFound, DockerInformation, DockerInfoSuccessResponse} import cromwell.docker.{DockerHashResult, DockerRegistry, DockerRegistrySpec} import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers @@ -16,30 +16,27 @@ class DockerCliSpec extends DockerRegistrySpec with AnyFlatSpecLike with Matcher it should "retrieve a public docker hash" taggedAs IntegrationTest in { dockerActor ! makeRequest("ubuntu:latest") - expectMsgPF(30.seconds) { - case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => - alg shouldBe "sha256" - hash should not be empty + expectMsgPF(30.seconds) { case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => + alg shouldBe "sha256" + hash should not be empty } } it should "retrieve a public docker hash on gcr" taggedAs IntegrationTest in { dockerActor ! makeRequest("gcr.io/google-containers/alpine-with-bash:1.0") - expectMsgPF(30.seconds) { - case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => - alg shouldBe "sha256" - hash should not be empty + expectMsgPF(30.seconds) { case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => + alg shouldBe "sha256" + hash should not be empty } } it should "retrieve a public docker hash on gar" taggedAs IntegrationTest in { dockerActor ! makeRequest("us-central1-docker.pkg.dev/broad-dsde-cromwell-dev/bt-335/ubuntu:bt-335") - expectMsgPF(30.seconds) { - case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => - alg shouldBe "sha256" - hash should not be empty + expectMsgPF(30.seconds) { case DockerInfoSuccessResponse(DockerInformation(DockerHashResult(alg, hash), _), _) => + alg shouldBe "sha256" + hash should not be empty } } diff --git a/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliTimeoutSpec.scala b/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliTimeoutSpec.scala index 90b3a2fa0ca..445c4378dd1 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliTimeoutSpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/local/DockerCliTimeoutSpec.scala @@ -19,24 +19,20 @@ class DockerCliTimeoutSpec extends DockerRegistrySpec with AnyFlatSpecLike with it should "timeout retrieving a public docker hash" taggedAs IntegrationTest in { dockerActor ! makeRequest("ubuntu:latest") - expectMsgPF(5.seconds) { - case DockerInfoFailedResponse(exception: TimeoutException, _) => - exception.getMessage should be( - """|Timeout while looking up hash of ubuntu:latest. - |Ensure that docker is running correctly. - |""".stripMargin) + expectMsgPF(5.seconds) { case DockerInfoFailedResponse(exception: TimeoutException, _) => + exception.getMessage should be("""|Timeout while looking up hash of ubuntu:latest. 
+ |Ensure that docker is running correctly. + |""".stripMargin) } } it should "timeout retrieving a public docker hash on gcr" taggedAs IntegrationTest in { dockerActor ! makeRequest("gcr.io/google-containers/alpine-with-bash:1.0") - expectMsgPF(5.seconds) { - case DockerInfoFailedResponse(exception: TimeoutException, _) => - exception.getMessage should be( - """|Timeout while looking up hash of gcr.io/google-containers/alpine-with-bash:1.0. - |Ensure that docker is running correctly. - |""".stripMargin) + expectMsgPF(5.seconds) { case DockerInfoFailedResponse(exception: TimeoutException, _) => + exception.getMessage should be("""|Timeout while looking up hash of gcr.io/google-containers/alpine-with-bash:1.0. + |Ensure that docker is running correctly. + |""".stripMargin) } } @@ -44,12 +40,12 @@ class DockerCliTimeoutSpec extends DockerRegistrySpec with AnyFlatSpecLike with it should "timeout retrieving a public docker hash on gar" taggedAs IntegrationTest in { dockerActor ! makeRequest("us-central1-docker.pkg.dev/broad-dsde-cromwell-dev/bt-335/ubuntu:bt-335") - expectMsgPF(5.seconds) { - case DockerInfoFailedResponse(exception: TimeoutException, _) => - exception.getMessage should be( - """|Timeout while looking up hash of us-central1-docker.pkg.dev/broad-dsde-cromwell-dev/bt-335/ubuntu:bt-335. - |Ensure that docker is running correctly. - |""".stripMargin) + expectMsgPF(5.seconds) { case DockerInfoFailedResponse(exception: TimeoutException, _) => + exception.getMessage should be( + """|Timeout while looking up hash of us-central1-docker.pkg.dev/broad-dsde-cromwell-dev/bt-335/ubuntu:bt-335. + |Ensure that docker is running correctly. + |""".stripMargin + ) } } @@ -58,12 +54,10 @@ class DockerCliTimeoutSpec extends DockerRegistrySpec with AnyFlatSpecLike with val notFound = makeRequest("ubuntu:nonexistingtag") dockerActor ! notFound - expectMsgPF(5.seconds) { - case DockerInfoFailedResponse(exception: TimeoutException, _) => - exception.getMessage should be( - """|Timeout while looking up hash of ubuntu:nonexistingtag. - |Ensure that docker is running correctly. - |""".stripMargin) + expectMsgPF(5.seconds) { case DockerInfoFailedResponse(exception: TimeoutException, _) => + exception.getMessage should be("""|Timeout while looking up hash of ubuntu:nonexistingtag. + |Ensure that docker is running correctly. + |""".stripMargin) } } @@ -71,12 +65,10 @@ class DockerCliTimeoutSpec extends DockerRegistrySpec with AnyFlatSpecLike with val unauthorized = makeRequest("tjeandet/sinatra:v1") dockerActor ! unauthorized - expectMsgPF(5.seconds) { - case DockerInfoFailedResponse(exception: TimeoutException, _) => - exception.getMessage should be( - """|Timeout while looking up hash of tjeandet/sinatra:v1. - |Ensure that docker is running correctly. - |""".stripMargin) + expectMsgPF(5.seconds) { case DockerInfoFailedResponse(exception: TimeoutException, _) => + exception.getMessage should be("""|Timeout while looking up hash of tjeandet/sinatra:v1. + |Ensure that docker is running correctly. + |""".stripMargin) } } @@ -84,12 +76,10 @@ class DockerCliTimeoutSpec extends DockerRegistrySpec with AnyFlatSpecLike with val unauthorized = makeRequest("unknown.io/image:v1") dockerActor ! unauthorized - expectMsgPF(5.seconds) { - case DockerInfoFailedResponse(exception: TimeoutException, _) => - exception.getMessage should be( - """|Timeout while looking up hash of unknown.io/library/image:v1. - |Ensure that docker is running correctly. 
- |""".stripMargin) + expectMsgPF(5.seconds) { case DockerInfoFailedResponse(exception: TimeoutException, _) => + exception.getMessage should be("""|Timeout while looking up hash of unknown.io/library/image:v1. + |Ensure that docker is running correctly. + |""".stripMargin) } } } diff --git a/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala b/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala index e99383ac131..f084a85b035 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala @@ -23,15 +23,18 @@ class DockerRegistryV2AbstractSpec extends AnyFlatSpec with CromwellTimeoutSpec val mediaType = MediaType.parse(DockerRegistryV2Abstract.DockerManifestV2MediaType).toOption.get val contentType: Header = `Content-Type`(mediaType) - val mockClient = Client({ _: Request[IO] => + val mockClient = Client { _: Request[IO] => // This response will have an empty body, so we need to be explicit about the typing: - Resource.pure[IO, Response[IO]](Response(headers = Headers.of(contentType))) : Resource[IO, Response[IO]] - }) + Resource.pure[IO, Response[IO]](Response(headers = Headers.of(contentType))): Resource[IO, Response[IO]] + } val dockerImageIdentifier = DockerImageIdentifier.fromString("ubuntu").get val dockerInfoRequest = DockerInfoRequest(dockerImageIdentifier) val context = DockerInfoContext(dockerInfoRequest, null) val result = registry.run(context)(mockClient).unsafeRunSync() - result.asInstanceOf[(DockerInfoFailedResponse, DockerInfoContext)]._1.reason shouldBe "Failed to get docker hash for ubuntu:latest Malformed message body: Invalid JSON: empty body" + result + .asInstanceOf[(DockerInfoFailedResponse, DockerInfoContext)] + ._1 + .reason shouldBe "Failed to get docker hash for ubuntu:latest Malformed message body: Invalid JSON: empty body" } } diff --git a/engine/src/main/scala/cromwell/Simpletons.scala b/engine/src/main/scala/cromwell/Simpletons.scala index 259e57a62e8..6ef40548438 100644 --- a/engine/src/main/scala/cromwell/Simpletons.scala +++ b/engine/src/main/scala/cromwell/Simpletons.scala @@ -12,13 +12,11 @@ import scala.util.Try * to `WdlSingleFile` instances. 
*/ object Simpletons { - def toSimpleton(entry: CallCachingSimpletonEntry): WomValueSimpleton = { + def toSimpleton(entry: CallCachingSimpletonEntry): WomValueSimpleton = toSimpleton(entry.wdlType, entry.simpletonKey, entry.simpletonValue.toRawString) - } - def toSimpleton(entry: JobStoreSimpletonEntry): WomValueSimpleton = { + def toSimpleton(entry: JobStoreSimpletonEntry): WomValueSimpleton = toSimpleton(entry.wdlType, entry.simpletonKey, entry.simpletonValue.toRawString) - } private def toSimpleton(womType: String, simpletonKey: String, simpletonValue: String): WomValueSimpleton = { val womValue: String => Try[WomValue] = womType match { diff --git a/engine/src/main/scala/cromwell/engine/EngineFilesystems.scala b/engine/src/main/scala/cromwell/engine/EngineFilesystems.scala index c648261a7f4..332664f1d56 100644 --- a/engine/src/main/scala/cromwell/engine/EngineFilesystems.scala +++ b/engine/src/main/scala/cromwell/engine/EngineFilesystems.scala @@ -19,17 +19,18 @@ object EngineFilesystems { .filter(_ => config.as[Boolean]("engine.filesystems.local.enabled")) .to(collection.immutable.SortedMap) - private val pathBuilderFactories: SortedMap[String, PathBuilderFactory] = { + private val pathBuilderFactories: SortedMap[String, PathBuilderFactory] = // Unordered maps are a classical source of randomness injection into a system ( - CromwellFileSystems.instance.factoriesFromConfig(config.as[Config]("engine")) + CromwellFileSystems.instance + .factoriesFromConfig(config.as[Config]("engine")) .unsafe("Failed to instantiate engine filesystem") ++ defaultFileSystemFactory ).to(collection.immutable.SortedMap) - } def configuredPathBuilderFactories: List[PathBuilderFactory] = pathBuilderFactories.values.toList - def pathBuildersForWorkflow(workflowOptions: WorkflowOptions, factories: List[PathBuilderFactory])(implicit as: ActorSystem): Future[List[PathBuilder]] = { + def pathBuildersForWorkflow(workflowOptions: WorkflowOptions, factories: List[PathBuilderFactory])(implicit + as: ActorSystem + ): Future[List[PathBuilder]] = PathBuilderFactory.instantiatePathBuilders(factories, workflowOptions) - } } diff --git a/engine/src/main/scala/cromwell/engine/EngineIoFunctions.scala b/engine/src/main/scala/cromwell/engine/EngineIoFunctions.scala index 6dcc487b8d6..d752c3b4d42 100644 --- a/engine/src/main/scala/cromwell/engine/EngineIoFunctions.scala +++ b/engine/src/main/scala/cromwell/engine/EngineIoFunctions.scala @@ -8,16 +8,23 @@ import better.files.File._ import scala.concurrent.{ExecutionContext, Future} -class EngineIoFunctions(val pathBuilders: List[PathBuilder], override val asyncIo: AsyncIo, override val ec: ExecutionContext) extends ReadLikeFunctions with WorkflowCorePathFunctions { - override def glob(pattern: String): Future[Seq[String]] = throw new UnsupportedOperationException(s"glob(path, pattern) not implemented yet") +class EngineIoFunctions(val pathBuilders: List[PathBuilder], + override val asyncIo: AsyncIo, + override val ec: ExecutionContext +) extends ReadLikeFunctions + with WorkflowCorePathFunctions { + override def glob(pattern: String): Future[Seq[String]] = throw new UnsupportedOperationException( + s"glob(path, pattern) not implemented yet" + ) // TODO: This is not suited for multi backend / multi filesystem use. 
Keep local for now to not break local CWL conf tests override def writeFile(path: String, content: String): Future[WomSingleFile] = Future.successful { val cromwellPath = buildPath(path) - val string = if (cromwellPath.isAbsolute) - cromwellPath.write(content).pathAsString - else - (newTemporaryDirectory() / path).write(content).pathAsString + val string = + if (cromwellPath.isAbsolute) + cromwellPath.write(content).pathAsString + else + (newTemporaryDirectory() / path).write(content).pathAsString WomSingleFile(string) } @@ -27,7 +34,9 @@ class EngineIoFunctions(val pathBuilders: List[PathBuilder], override val asyncI override def listAllFilesUnderDirectory(dirPath: String): Nothing = throw new UnsupportedOperationException(s"listAllFilesUnderDirectory not implemented yet") - override def listDirectory(path: String)(visited: Vector[String]) = throw new UnsupportedOperationException(s"listDirectory not implemented yet") + override def listDirectory(path: String)(visited: Vector[String]) = throw new UnsupportedOperationException( + s"listDirectory not implemented yet" + ) override def isDirectory(path: String) = Future.successful(buildPath(path).isDirectory) diff --git a/engine/src/main/scala/cromwell/engine/EngineWorkflowDescriptor.scala b/engine/src/main/scala/cromwell/engine/EngineWorkflowDescriptor.scala index ab062853a2b..99c93ba330b 100644 --- a/engine/src/main/scala/cromwell/engine/EngineWorkflowDescriptor.scala +++ b/engine/src/main/scala/cromwell/engine/EngineWorkflowDescriptor.scala @@ -13,7 +13,8 @@ case class EngineWorkflowDescriptor(topLevelCallable: Callable, failureMode: WorkflowFailureMode, pathBuilders: List[PathBuilder], callCachingMode: CallCachingMode, - parentWorkflow: Option[EngineWorkflowDescriptor] = None) { + parentWorkflow: Option[EngineWorkflowDescriptor] = None +) { val rootWorkflow: EngineWorkflowDescriptor = parentWorkflow match { case Some(parent) => parent.rootWorkflow diff --git a/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala b/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala index 410a6261b5c..74d652b741d 100644 --- a/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala +++ b/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala @@ -8,10 +8,11 @@ import scala.util.{Failure, Success, Try} case class BackendConfigurationEntry(name: String, lifecycleActorFactoryClass: String, config: Config) { def asBackendLifecycleActorFactory: Try[BackendLifecycleActorFactory] = Try { - Class.forName(lifecycleActorFactoryClass) - .getConstructor(classOf[String], classOf[BackendConfigurationDescriptor]) - .newInstance(name, asBackendConfigurationDescriptor) - .asInstanceOf[BackendLifecycleActorFactory] + Class + .forName(lifecycleActorFactoryClass) + .getConstructor(classOf[String], classOf[BackendConfigurationDescriptor]) + .newInstance(name, asBackendConfigurationDescriptor) + .asInstanceOf[BackendLifecycleActorFactory] } def asBackendConfigurationDescriptor = BackendConfigurationDescriptor(config, ConfigFactory.load) @@ -21,7 +22,8 @@ object BackendConfiguration { private val BackendConfig = ConfigFactory.load.getConfig("backend") private val DefaultBackendName = BackendConfig.getString("default") private val BackendProviders = BackendConfig.getConfig("providers") - private val BackendNames: Set[String] = BackendProviders.entrySet().asScala.map(_.getKey.split("\\.").toSeq.head).toSet + private val BackendNames: Set[String] = + 
BackendProviders.entrySet().asScala.map(_.getKey.split("\\.").toSeq.head).toSet val AllBackendEntries: List[BackendConfigurationEntry] = BackendNames.toList map { backendName => val entry = BackendProviders.getConfig(backendName) @@ -33,14 +35,17 @@ object BackendConfiguration { } val DefaultBackendEntry: BackendConfigurationEntry = AllBackendEntries.find(_.name == DefaultBackendName) getOrElse { - throw new IllegalArgumentException(s"Could not find specified default backend name '$DefaultBackendName' " + - s"in '${BackendNames.mkString("', '")}'.") + throw new IllegalArgumentException( + s"Could not find specified default backend name '$DefaultBackendName' " + + s"in '${BackendNames.mkString("', '")}'." + ) } - def backendConfigurationDescriptor(backendName: String): Try[BackendConfigurationDescriptor] = { - AllBackendEntries.collect({case entry if entry.name.equalsIgnoreCase(backendName) => entry.asBackendConfigurationDescriptor}).headOption match { + def backendConfigurationDescriptor(backendName: String): Try[BackendConfigurationDescriptor] = + AllBackendEntries.collect { + case entry if entry.name.equalsIgnoreCase(backendName) => entry.asBackendConfigurationDescriptor + }.headOption match { case Some(descriptor) => Success(descriptor) case None => Failure(new Exception(s"invalid backend: $backendName")) } - } } diff --git a/engine/src/main/scala/cromwell/engine/backend/CromwellBackends.scala b/engine/src/main/scala/cromwell/engine/backend/CromwellBackends.scala index 24eb59d2d95..09acd74476a 100644 --- a/engine/src/main/scala/cromwell/engine/backend/CromwellBackends.scala +++ b/engine/src/main/scala/cromwell/engine/backend/CromwellBackends.scala @@ -11,11 +11,11 @@ import cromwell.backend.BackendLifecycleActorFactory case class CromwellBackends(backendEntries: List[BackendConfigurationEntry]) { // Raise the exception here if some backend factories failed to instantiate - val backendLifecycleActorFactories = TryUtil.sequenceMap(backendEntries.map(e => e.name -> e.asBackendLifecycleActorFactory).toMap).get + val backendLifecycleActorFactories = + TryUtil.sequenceMap(backendEntries.map(e => e.name -> e.asBackendLifecycleActorFactory).toMap).get - def backendLifecycleActorFactoryByName(backendName: String): ErrorOr[BackendLifecycleActorFactory] = { + def backendLifecycleActorFactoryByName(backendName: String): ErrorOr[BackendLifecycleActorFactory] = backendLifecycleActorFactories.get(backendName).toValidNel(s"Backend $backendName was not found") - } def isValidBackendName(name: String): Boolean = backendLifecycleActorFactories.contains(name) } @@ -24,21 +24,17 @@ object CromwellBackends { var instance: Option[CromwellBackends] = None - def isValidBackendName(name: String): Boolean = evaluateIfInitialized(_.isValidBackendName(name)) - def backendLifecycleFactoryActorByName(backendName: String): ErrorOr[BackendLifecycleActorFactory] = { + def backendLifecycleFactoryActorByName(backendName: String): ErrorOr[BackendLifecycleActorFactory] = evaluateIfInitialized(_.backendLifecycleActorFactoryByName(backendName)) - } - private def evaluateIfInitialized[A](func: CromwellBackends => A): A = { + private def evaluateIfInitialized[A](func: CromwellBackends => A): A = instance match { case Some(cromwellBackend) => func(cromwellBackend) case None => throw new Exception("Cannot use CromwellBackend until initBackends is called") } - } - def initBackends(backendEntries: List[BackendConfigurationEntry]): Unit = { + def initBackends(backendEntries: List[BackendConfigurationEntry]): Unit = instance = 
Option(CromwellBackends(backendEntries)) - } } diff --git a/engine/src/main/scala/cromwell/engine/engine.scala b/engine/src/main/scala/cromwell/engine/engine.scala index 1c282aab703..e5971622beb 100644 --- a/engine/src/main/scala/cromwell/engine/engine.scala +++ b/engine/src/main/scala/cromwell/engine/engine.scala @@ -13,7 +13,9 @@ final case class CallAttempt(fqn: FullyQualifiedName, attempt: Int) object WorkflowFailureMode { def tryParse(mode: String): Try[WorkflowFailureMode] = { val modes = Seq(ContinueWhilePossible, NoNewCalls) - modes find { _.toString.equalsIgnoreCase(mode) } map { Success(_) } getOrElse Failure(new Exception(s"Invalid workflow failure mode: $mode")) + modes find { _.toString.equalsIgnoreCase(mode) } map { Success(_) } getOrElse Failure( + new Exception(s"Invalid workflow failure mode: $mode") + ) } } sealed trait WorkflowFailureMode { diff --git a/engine/src/main/scala/cromwell/engine/instrumentation/HttpInstrumentation.scala b/engine/src/main/scala/cromwell/engine/instrumentation/HttpInstrumentation.scala index 2c909e1f776..aa65d6506be 100644 --- a/engine/src/main/scala/cromwell/engine/instrumentation/HttpInstrumentation.scala +++ b/engine/src/main/scala/cromwell/engine/instrumentation/HttpInstrumentation.scala @@ -18,29 +18,31 @@ object HttpInstrumentation { } trait HttpInstrumentation extends CromwellInstrumentation { - - private def makeRequestPath(httpRequest: HttpRequest, httpResponse: HttpResponse): InstrumentationPath = NonEmptyList.of( - // Returns the path of the URI only, without query parameters (e.g: api/engine/workflows/metadata) - httpRequest.uri.path.toString().stripPrefix("/") - // Replace UUIDs with [id] to keep paths same regardless of the workflow - .replaceAll(HttpInstrumentation.UUIDRegex, "[id]"), - // Name of the method (e.g: GET) - httpRequest.method.value, - // Status code of the Response (e.g: 200) - httpResponse.status.intValue.toString - ) - private def sendTimingApi(statsDPath: InstrumentationPath, timing: FiniteDuration) = { + private def makeRequestPath(httpRequest: HttpRequest, httpResponse: HttpResponse): InstrumentationPath = + NonEmptyList.of( + // Returns the path of the URI only, without query parameters (e.g: api/engine/workflows/metadata) + httpRequest.uri.path + .toString() + .stripPrefix("/") + // Replace UUIDs with [id] to keep paths same regardless of the workflow + .replaceAll(HttpInstrumentation.UUIDRegex, "[id]"), + // Name of the method (e.g: GET) + httpRequest.method.value, + // Status code of the Response (e.g: 200) + httpResponse.status.intValue.toString + ) + + private def sendTimingApi(statsDPath: InstrumentationPath, timing: FiniteDuration) = sendTiming(statsDPath, timing, ApiPrefix) - } def instrumentRequest: Directive0 = extractRequest flatMap { request => val timeStamp = System.currentTimeMillis mapResponse { response => /* - * Send a metric corresponding to the request response time. - * Note: The current StatsD implementation always pairs a timing metric with a counter metric - * So no need to explicitly send one + * Send a metric corresponding to the request response time. 
+ * Note: The current StatsD implementation always pairs a timing metric with a counter metric + * So no need to explicitly send one */ sendTimingApi(makeRequestPath(request, response), (System.currentTimeMillis - timeStamp).millis) response diff --git a/engine/src/main/scala/cromwell/engine/instrumentation/IoInstrumentation.scala b/engine/src/main/scala/cromwell/engine/instrumentation/IoInstrumentation.scala index 15d799d222b..ba0c4591c91 100644 --- a/engine/src/main/scala/cromwell/engine/instrumentation/IoInstrumentation.scala +++ b/engine/src/main/scala/cromwell/engine/instrumentation/IoInstrumentation.scala @@ -27,6 +27,7 @@ private object IoInstrumentationImplicits { * Augments IoResult to provide instrumentation conversion methods */ implicit class InstrumentedIoResult(val ioResult: IoResult) extends AnyVal { + /** * Returns the instrumentation path of this IoResult */ @@ -34,6 +35,7 @@ private object IoInstrumentationImplicits { case (_: IoSuccess[_], ioCommandContext) => ioCommandContext.request.successPath case (f: IoFailAck[_], ioCommandContext) => ioCommandContext.request.failedPath(f.failure) } + /** * Returns the instrumentation path of this IoResult */ @@ -44,19 +46,22 @@ private object IoInstrumentationImplicits { * Augments IoCommand to provide instrumentation conversion methods */ implicit class InstrumentedIoCommand(val ioCommand: IoCommand[_]) extends AnyVal { + /** * Returns the instrumentation path of this IoCommand */ def toPath: InstrumentationPath = { val path = ioCommand match { - case copy: IoCopyCommand => (copy.source, copy.destination) match { - case (_: GcsPath, _) | (_, _: GcsPath) => GcsPath - case _ => LocalPath - } - case singleFileCommand: SingleFileIoCommand[_] => singleFileCommand.file match { - case _: GcsPath => GcsPath - case _ => LocalPath - } + case copy: IoCopyCommand => + (copy.source, copy.destination) match { + case (_: GcsPath, _) | (_, _: GcsPath) => GcsPath + case _ => LocalPath + } + case singleFileCommand: SingleFileIoCommand[_] => + singleFileCommand.file match { + case _: GcsPath => GcsPath + case _ => LocalPath + } case _ => UnknownFileSystemPath } @@ -71,16 +76,14 @@ private object IoInstrumentationImplicits { /** * Returns a failed instrumentation path for this IoCommand provided a throwable */ - def failedPath(failure: Throwable): InstrumentationPath = { + def failedPath(failure: Throwable): InstrumentationPath = ioCommand.toPath.concatNel(FailureKey).withStatusCodeFailure(GoogleUtil.extractStatusCode(failure)) - } /** * Returns a retried instrumentation path for this IoCommand provided a throwable */ - def retriedPath(failure: Throwable): InstrumentationPath = { + def retriedPath(failure: Throwable): InstrumentationPath = ioCommand.toPath.concatNel(RetryKey).withStatusCodeFailure(GoogleUtil.extractStatusCode(failure)) - } } } @@ -100,7 +103,9 @@ trait IoInstrumentation extends CromwellInstrumentationActor { this: Actor => */ final def instrumentIoResult(ioResult: IoResult): Unit = { incrementIo(ioResult.toCounterPath) - sendTiming(ioResult.toDurationPath, (OffsetDateTime.now.toEpochSecond - ioResult._2.creationTime.toEpochSecond).seconds) + sendTiming(ioResult.toDurationPath, + (OffsetDateTime.now.toEpochSecond - ioResult._2.creationTime.toEpochSecond).seconds + ) } final def incrementBackpressure(): Unit = incrementIo(backpressure) @@ -108,5 +113,7 @@ trait IoInstrumentation extends CromwellInstrumentationActor { this: Actor => /** * Increment an IoCommand to the proper bucket depending on the request type. 
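   * A minimal illustration (hypothetical command and failure, not from this codebase):
   * {{{
   *   // bumps the command's retry bucket, with the failure's status code appended
   *   // when one can be extracted (see InstrumentedIoCommand.retriedPath above)
   *   incrementIoRetry(someGcsCopyCommand, new java.net.SocketTimeoutException("read timed out"))
   * }}}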
*/ - final def incrementIoRetry(ioCommand: IoCommand[_], failure: Throwable): Unit = incrementIo(ioCommand.retriedPath(failure)) + final def incrementIoRetry(ioCommand: IoCommand[_], failure: Throwable): Unit = incrementIo( + ioCommand.retriedPath(failure) + ) } diff --git a/engine/src/main/scala/cromwell/engine/instrumentation/JobInstrumentation.scala b/engine/src/main/scala/cromwell/engine/instrumentation/JobInstrumentation.scala index 91613a7c2cb..49e479e7b3d 100644 --- a/engine/src/main/scala/cromwell/engine/instrumentation/JobInstrumentation.scala +++ b/engine/src/main/scala/cromwell/engine/instrumentation/JobInstrumentation.scala @@ -35,25 +35,21 @@ trait JobInstrumentation extends CromwellInstrumentationActor { this: Actor => /** * Generic method to add a workflow related timing metric value */ - def setTimingJob(statsDPath: InstrumentationPath, duration: FiniteDuration): Unit = { + def setTimingJob(statsDPath: InstrumentationPath, duration: FiniteDuration): Unit = sendTiming(statsDPath, duration, JobPrefix) - } /** * Generic method to update a job related gauge metric value */ - def sendGaugeJob(statsDPath: InstrumentationPath, value: Long): Unit = { + def sendGaugeJob(statsDPath: InstrumentationPath, value: Long): Unit = sendGauge(statsDPath, value, JobPrefix) - } /** * Add a timing value for the run time of a job in a given state */ - def setJobTimePerState(response: BackendJobExecutionResponse, duration: FiniteDuration): Unit = { + def setJobTimePerState(response: BackendJobExecutionResponse, duration: FiniteDuration): Unit = setTimingJob(backendJobExecutionResponsePaths(response), duration) - } - - def recordExecutionStepTiming(state: String, duration: FiniteDuration): Unit = { + + def recordExecutionStepTiming(state: String, duration: FiniteDuration): Unit = sendTiming(jobTimingKey.concatNel("state").concatNel(state), duration, JobPrefix) - } } diff --git a/engine/src/main/scala/cromwell/engine/instrumentation/WorkflowInstrumentation.scala b/engine/src/main/scala/cromwell/engine/instrumentation/WorkflowInstrumentation.scala index ac9d28a9eb6..a9272bcf7d3 100644 --- a/engine/src/main/scala/cromwell/engine/instrumentation/WorkflowInstrumentation.scala +++ b/engine/src/main/scala/cromwell/engine/instrumentation/WorkflowInstrumentation.scala @@ -13,8 +13,9 @@ import scala.concurrent.duration.FiniteDuration import scala.language.postfixOps object WorkflowInstrumentation { - private val WorkflowStatePaths: Map[WorkflowState, InstrumentationPath] = WorkflowState.WorkflowStateValues map { state => - state -> NonEmptyList.of(state.toString) + private val WorkflowStatePaths: Map[WorkflowState, InstrumentationPath] = WorkflowState.WorkflowStateValues map { + state => + state -> NonEmptyList.of(state.toString) } toMap // Use "Queued" instead of "Submitted" as it seems to reflect better the actual state @@ -28,43 +29,39 @@ object WorkflowInstrumentation { * Provides helper methods for workflow instrumentation */ trait WorkflowInstrumentation extends CromwellInstrumentationActor { this: Actor => - private def workflowStatePath(workflowState: WorkflowState): InstrumentationPath = WorkflowInstrumentation.WorkflowStatePaths(workflowState) + private def workflowStatePath(workflowState: WorkflowState): InstrumentationPath = + WorkflowInstrumentation.WorkflowStatePaths(workflowState) /** * Generic method to increment a workflow related counter metric value */ - def incrementWorkflow(statsDPath: InstrumentationPath): Unit = { + def incrementWorkflow(statsDPath: InstrumentationPath): Unit = 
increment(statsDPath, WorkflowPrefix) - } /** * Generic method to add a workflow related timing metric value */ - def setTimingWorkflow(statsDPath: InstrumentationPath, duration: FiniteDuration): Unit = { + def setTimingWorkflow(statsDPath: InstrumentationPath, duration: FiniteDuration): Unit = sendTiming(statsDPath, duration, WorkflowPrefix) - } /** * Generic method to update a workflow related gauge metric value */ - def sendGaugeWorkflow(statsDPath: InstrumentationPath, value: Long): Unit = { + def sendGaugeWorkflow(statsDPath: InstrumentationPath, value: Long): Unit = sendGauge(statsDPath, value, WorkflowPrefix) - } /** * Counts every time a workflow enters a given state */ - def incrementWorkflowState(workflowState: WorkflowState): Unit = { + def incrementWorkflowState(workflowState: WorkflowState): Unit = incrementWorkflow(workflowStatePath(workflowState)) - } /** * Add a timing value for the run time of a workflow in a given state */ - //* TODO: enforce a terminal state ? - def setWorkflowTimePerState(workflowState: WorkflowState, duration: FiniteDuration): Unit = { + // * TODO: enforce a terminal state ? + def setWorkflowTimePerState(workflowState: WorkflowState, duration: FiniteDuration): Unit = setTimingWorkflow(workflowStatePath(workflowState), duration) - } /** * Set the current number of submitted workflows (queued but not running) diff --git a/engine/src/main/scala/cromwell/engine/io/IoActor.scala b/engine/src/main/scala/cromwell/engine/io/IoActor.scala index 80a362a7b78..5d69ba844a0 100644 --- a/engine/src/main/scala/cromwell/engine/io/IoActor.scala +++ b/engine/src/main/scala/cromwell/engine/io/IoActor.scala @@ -27,7 +27,6 @@ import java.time.temporal.ChronoUnit import scala.concurrent.ExecutionContext import scala.concurrent.duration._ - /** * Actor that performs IO operations asynchronously using akka streams * @@ -35,26 +34,28 @@ import scala.concurrent.duration._ * @param materializer actor materializer to run the stream * @param serviceRegistryActor actorRef for the serviceRegistryActor */ -final class IoActor(ioConfig: IoConfig, - override val serviceRegistryActor: ActorRef, - applicationName: String)(implicit val materializer: ActorMaterializer) - extends Actor with ActorLogging with StreamActorHelper[IoCommandContext[_]] with IoInstrumentation with Timers { +final class IoActor(ioConfig: IoConfig, override val serviceRegistryActor: ActorRef, applicationName: String)(implicit + val materializer: ActorMaterializer +) extends Actor + with ActorLogging + with StreamActorHelper[IoCommandContext[_]] + with IoInstrumentation + with Timers { implicit val ec: ExecutionContext = context.dispatcher implicit val system: ActorSystem = context.system // IntelliJ disapproves of mutable state in Actors, but this should be safe as long as access occurs only in // the `receive` method. Alternatively IntelliJ does suggest a `become` workaround we might try in the future. - //noinspection ActorMutableStateInspection + // noinspection ActorMutableStateInspection private var backpressureExpiration: Option[OffsetDateTime] = None /** * Method for instrumentation to be executed when a IoCommand failed and is being retried. * Can be passed to flows so they can invoke it when necessary. 
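   * For instance, the flows built below receive it as `onRetryCallback = onRetry` /
   * `onRetry = onRetry`, so a flow holding a `commandContext` can invoke
   * `onRetry(commandContext)(failure)` when it schedules a retry (a sketch based on
   * the wiring visible in this file, not an additional API).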
*/ - private def onRetry(commandContext: IoCommandContext[_])(throwable: Throwable): Unit = { + private def onRetry(commandContext: IoCommandContext[_])(throwable: Throwable): Unit = incrementIoRetry(commandContext.request, throwable) - } override def preStart(): Unit = { // On start up, let the controller know that the load is normal @@ -62,25 +63,25 @@ final class IoActor(ioConfig: IoConfig, super.preStart() } - private [io] lazy val defaultFlow = + private[io] lazy val defaultFlow = new NioFlow( parallelism = ioConfig.nio.parallelism, onRetryCallback = onRetry, onBackpressure = onBackpressure, numberOfAttempts = ioConfig.numberOfAttempts, - commandBackpressureStaleness = ioConfig.commandBackpressureStaleness) - .flow + commandBackpressureStaleness = ioConfig.commandBackpressureStaleness + ).flow .withAttributes(ActorAttributes.dispatcher(Dispatcher.IoDispatcher)) - private [io] lazy val gcsBatchFlow = + private[io] lazy val gcsBatchFlow = new ParallelGcsBatchFlow( config = ioConfig.gcsBatch, scheduler = context.system.scheduler, onRetry = onRetry, onBackpressure = onBackpressure, applicationName = applicationName, - commandBackpressureStaleness = ioConfig.commandBackpressureStaleness) - .flow + commandBackpressureStaleness = ioConfig.commandBackpressureStaleness + ).flow .withAttributes(ActorAttributes.dispatcher(Dispatcher.IoDispatcher)) private val source = Source.queue[IoCommandContext[_]](ioConfig.queueSize, OverflowStrategy.dropNew) @@ -91,10 +92,14 @@ final class IoActor(ioConfig: IoConfig, val input = builder.add(Flow[IoCommandContext[_]]) // Partitions requests between gcs batch, and single nio requests - val batchPartitioner = builder.add(Partition[IoCommandContext[_]](2, { - case _: GcsBatchCommandContext[_, _] => 0 - case _ => 1 - })) + val batchPartitioner = builder.add( + Partition[IoCommandContext[_]](2, + { + case _: GcsBatchCommandContext[_, _] => 0 + case _ => 1 + } + ) + ) // Sub flow for batched gcs requests val batches = batchPartitioner.out(0) collect { case batch: GcsBatchCommandContext[_, _] => batch } @@ -112,8 +117,8 @@ final class IoActor(ioConfig: IoConfig, val batchFlowPorts = builder.add(gcsBatchFlow) input ~> batchPartitioner - defaults.outlet ~> defaultFlowPorts ~> merger - batches.outlet ~> batchFlowPorts ~> merger + defaults.outlet ~> defaultFlowPorts ~> merger + batches.outlet ~> batchFlowPorts ~> merger FlowShape[IoCommandContext[_], IoResult](input.in, merger.out) } @@ -149,11 +154,11 @@ final class IoActor(ioConfig: IoConfig, /* GCS Batch command with context */ case (clientContext: Any, gcsBatchCommand: GcsBatchIoCommand[_, _]) => val replyTo = sender() - val commandContext = GcsBatchCommandContext( - request = gcsBatchCommand, - maxAttemptsNumber = ioConfig.numberOfAttempts, - replyTo = replyTo, - clientContext = Option(clientContext)) + val commandContext = GcsBatchCommandContext(request = gcsBatchCommand, + maxAttemptsNumber = ioConfig.numberOfAttempts, + replyTo = replyTo, + clientContext = Option(clientContext) + ) sendToStream(commandContext) /* GCS Batch command without context */ @@ -202,7 +207,10 @@ final class IoActor(ioConfig: IoConfig, } val newExpiration = OffsetDateTime.now().until(proposedExpiry, ChronoUnit.MILLIS) - timers.startSingleTimer(BackPressureTimerResetKey, BackPressureTimerResetAction, FiniteDuration(newExpiration, MILLISECONDS)) + timers.startSingleTimer(BackPressureTimerResetKey, + BackPressureTimerResetAction, + FiniteDuration(newExpiration, MILLISECONDS) + ) backpressureExpiration = Option(proposedExpiry) case _ => // 
Ignore proposed expiries that would be before the current expiry @@ -220,7 +228,8 @@ trait IoCommandContext[T] extends StreamContext { def request: IoCommand[T] def replyTo: ActorRef def fail(failure: Throwable): IoResult = (request.fail(failure), this) - def failReadForbidden(failure: Throwable, forbiddenPath: String): IoResult = (request.failReadForbidden(failure, forbiddenPath), this) + def failReadForbidden(failure: Throwable, forbiddenPath: String): IoResult = + (request.failReadForbidden(failure, forbiddenPath), this) def success(value: T): IoResult = (request.success(value), this) } @@ -234,7 +243,10 @@ object IoActor { /** Result type of an IoFlow, contains the original command context and the final IoAck response. */ type IoResult = (IoAck[_], IoCommandContext[_]) - case class DefaultCommandContext[T](request: IoCommand[T], replyTo: ActorRef, override val clientContext: Option[Any] = None) extends IoCommandContext[T] + case class DefaultCommandContext[T](request: IoCommand[T], + replyTo: ActorRef, + override val clientContext: Option[Any] = None + ) extends IoCommandContext[T] case object BackPressureTimerResetKey @@ -242,13 +254,10 @@ object IoActor { case class BackPressure(duration: FiniteDuration) extends ControlMessage - def props(ioConfig: IoConfig, - serviceRegistryActor: ActorRef, - applicationName: String, - ) - (implicit materializer: ActorMaterializer): Props = { + def props(ioConfig: IoConfig, serviceRegistryActor: ActorRef, applicationName: String)(implicit + materializer: ActorMaterializer + ): Props = Props(new IoActor(ioConfig, serviceRegistryActor, applicationName)).withDispatcher(IoDispatcher) - } case class IoConfig(queueSize: Int, numberOfAttempts: Int, @@ -258,7 +267,8 @@ object IoActor { ioNormalWindowMaximum: FiniteDuration, nio: NioFlowConfig, gcsBatch: GcsBatchFlowConfig, - throttle: Option[Throttle]) + throttle: Option[Throttle] + ) implicit val ioConfigReader: ValueReader[IoConfig] = (config: Config, _: String) => { diff --git a/engine/src/main/scala/cromwell/engine/io/IoActorProxy.scala b/engine/src/main/scala/cromwell/engine/io/IoActorProxy.scala index 5491440a979..c8ed10ca86b 100644 --- a/engine/src/main/scala/cromwell/engine/io/IoActorProxy.scala +++ b/engine/src/main/scala/cromwell/engine/io/IoActorProxy.scala @@ -14,7 +14,8 @@ object IoActorProxy { } class IoActorProxy(ioActor: ActorRef) extends Actor with ActorLogging with GracefulShutdownHelper { - private val cache = CacheBuilder.newBuilder() + private val cache = CacheBuilder + .newBuilder() .build[IoCommandWithPromise[_], IoResult]() private val ioPromiseProxyActor: ActorRef = context.actorOf(IoPromiseProxyActor.props(ioActor), "IoPromiseProxyActor") @@ -31,7 +32,7 @@ class IoActorProxy(ioActor: ActorRef) extends Actor with ActorLogging with Grace case ioCommand: IoCommand[_] => ioActor forward ioCommand case withContext: (Any, IoCommand[_]) @unchecked => ioActor forward withContext - case ShutdownCommand => + case ShutdownCommand => context stop ioPromiseProxyActor waitForActorsAndShutdown(NonEmptyList.one(ioActor)) } diff --git a/engine/src/main/scala/cromwell/engine/io/IoAttempts.scala b/engine/src/main/scala/cromwell/engine/io/IoAttempts.scala index d43e003ecb1..a51112029e9 100644 --- a/engine/src/main/scala/cromwell/engine/io/IoAttempts.scala +++ b/engine/src/main/scala/cromwell/engine/io/IoAttempts.scala @@ -7,18 +7,18 @@ import org.apache.commons.lang3.exception.ExceptionUtils object IoAttempts { object EnhancedCromwellIoException { - def apply[S](state: S, cause: Throwable)(implicit 
showState: Show[S]): EnhancedCromwellIoException = { + def apply[S](state: S, cause: Throwable)(implicit showState: Show[S]): EnhancedCromwellIoException = EnhancedCromwellIoException(s"[${showState.show(state)}] - ${ExceptionUtils.getMessage(cause)}", cause) - } } - - case class EnhancedCromwellIoException(message: String, cause: Throwable) - extends Throwable(message, cause, true, false) with CromwellFatalExceptionMarker - + + case class EnhancedCromwellIoException(message: String, cause: Throwable) + extends Throwable(message, cause, true, false) + with CromwellFatalExceptionMarker + implicit val showState = new Show[IoAttempts] { override def show(t: IoAttempts) = s"Attempted ${t.attempts} time(s)" } - + implicit val stateToThrowable = new StatefulIoError[IoAttempts] { override def toThrowable(state: IoAttempts, throwable: Throwable) = { state.throwables.foreach(throwable.addSuppressed) @@ -26,9 +26,8 @@ object IoAttempts { } } - val updateState: (Throwable, IoAttempts) => IoAttempts = (throwable, state) => { + val updateState: (Throwable, IoAttempts) => IoAttempts = (throwable, state) => state.copy(attempts = state.attempts + 1, throwables = state.throwables :+ throwable) - } } case class IoAttempts(attempts: Int, throwables: List[Throwable] = List.empty) diff --git a/engine/src/main/scala/cromwell/engine/io/IoCommandStalenessBackpressuring.scala b/engine/src/main/scala/cromwell/engine/io/IoCommandStalenessBackpressuring.scala index e8ceda43656..47591516d7b 100644 --- a/engine/src/main/scala/cromwell/engine/io/IoCommandStalenessBackpressuring.scala +++ b/engine/src/main/scala/cromwell/engine/io/IoCommandStalenessBackpressuring.scala @@ -19,17 +19,18 @@ trait IoCommandStalenessBackpressuring extends StrictLogging { val seconds = millis / 1000.0 logger.info("I/O command {} seconds stale, applying I/O subsystem backpressure with scale {}", - f"$seconds%,.3f", f"$scale%.2f") + f"$seconds%,.3f", + f"$scale%.2f" + ) onBackpressure(Option(scale)) } /** Invokes `onBackpressure` if `ioCommand` is older than the staleness limit returned by `maxStaleness`. */ - def backpressureIfStale(ioCommand: IoCommand[_], onBackpressure: Option[Double] => Unit): Unit = { + def backpressureIfStale(ioCommand: IoCommand[_], onBackpressure: Option[Double] => Unit): Unit = if (ioCommand.creation.isBefore(commandStalenessThreshold)) { logAndBackpressure(ioCommand, onBackpressure) } - } /** Invokes `onBackpressure` if at least one IoCommandContext in `contexts` is older than the * staleness limit returned by `maxStaleness`. */ diff --git a/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala b/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala index d2aa427a567..1a75bdd4c15 100644 --- a/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala +++ b/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala @@ -15,17 +15,19 @@ object RetryableRequestSupport { * The default count is `5` and may be customized with `system.io.number-of-attempts`. 
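   * For example, to allow ten attempts (HOCON sketch, assuming the standard
   * `system.io` configuration block):
   * {{{
   *   system.io {
   *     number-of-attempts = 10
   *   }
   * }}}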
*/ def isRetryable(failure: Throwable): Boolean = failure match { - case gcs: StorageException => gcs.isRetryable || + case gcs: StorageException => + gcs.isRetryable || isRetryable(gcs.getCause) || AdditionalRetryableHttpCodes.contains(gcs.getCode) || - Option(gcs.getMessage).exists(msg => - AdditionalRetryableErrorMessages.contains(msg.toLowerCase)) + Option(gcs.getMessage).exists(msg => AdditionalRetryableErrorMessages.contains(msg.toLowerCase)) case _: SSLException => true case _: BatchFailedException => true case _: ChecksumFailedException => true case _: SocketException => true case _: SocketTimeoutException => true - case ioE: IOException if Option(ioE.getMessage).exists(_.contains("Error getting access token for service account")) => true + case ioE: IOException + if Option(ioE.getMessage).exists(_.contains("Error getting access token for service account")) => + true case ioE: IOException => isGcs500(ioE) || isGcs503(ioE) || isGcs504(ioE) case other => // Infinitely retryable is a subset of retryable @@ -68,24 +70,21 @@ object RetryableRequestSupport { isGcsRateLimitException(failure) } - def isGcs500(failure: Throwable): Boolean = { + def isGcs500(failure: Throwable): Boolean = Option(failure.getMessage).exists(msg => msg.contains("Could not read from gs") && - msg.contains("500 Internal Server Error") + msg.contains("500 Internal Server Error") ) - } - def isGcs503(failure: Throwable): Boolean = { + def isGcs503(failure: Throwable): Boolean = Option(failure.getMessage).exists(msg => msg.contains("Could not read from gs") && - msg.contains("503 Service Unavailable") + msg.contains("503 Service Unavailable") ) - } - def isGcs504(failure: Throwable): Boolean = { + def isGcs504(failure: Throwable): Boolean = Option(failure.getMessage).exists(msg => msg.contains("Could not read from gs") && - msg.contains("504 Gateway Timeout") + msg.contains("504 Gateway Timeout") ) - } } diff --git a/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala b/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala index d41b0539dc6..27f6afe4299 100644 --- a/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala +++ b/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala @@ -34,7 +34,7 @@ object GcsBatchCommandContext { .setInitialIntervalMillis(1.second.toMillis.toInt) .setMultiplier(4) .setMaxIntervalMillis(30.seconds.toMillis.toInt) - .setRandomizationFactor(0.2D) + .setRandomizationFactor(0.2d) .setMaxElapsedTimeMillis(30.minutes.toMillis.toInt) .build() ) @@ -48,14 +48,14 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], backoff: Backoff = GcsBatchCommandContext.defaultBackoff, currentAttempt: Int = 1, promise: Promise[BatchResponse] = Promise[BatchResponse]() - ) - extends IoCommandContext[T] - with StrictLogging { +) extends IoCommandContext[T] + with StrictLogging { /** * None if no retry should be attempted, Some(timeToWaitBeforeNextAttempt) otherwise */ - lazy val retryIn: Option[FiniteDuration] = if (currentAttempt >= maxAttemptsNumber) None else Option(backoff.backoffMillis milliseconds) + lazy val retryIn: Option[FiniteDuration] = + if (currentAttempt >= maxAttemptsNumber) None else Option(backoff.backoffMillis milliseconds) /** * Json batch call back for a batched request @@ -63,19 +63,20 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], lazy val callback: JsonBatchCallback[U] = new JsonBatchCallback[U]() { // These callbacks are only called once, 
therefore it's imperative that they set the promise value before exiting. // This tryCallbackOrFail ensures that if the callback function itself errors, we get _some_ result back on the future. - def onSuccess(response: U, httpHeaders: HttpHeaders): Unit = tryCallbackOrFail("onSuccessCallback", onSuccessCallback(response, httpHeaders)) - def onFailure(googleJsonError: GoogleJsonError, httpHeaders: HttpHeaders): Unit = tryCallbackOrFail(callbackName = "onFailureCallback", onFailureCallback(googleJsonError, httpHeaders)) + def onSuccess(response: U, httpHeaders: HttpHeaders): Unit = + tryCallbackOrFail("onSuccessCallback", onSuccessCallback(response, httpHeaders)) + def onFailure(googleJsonError: GoogleJsonError, httpHeaders: HttpHeaders): Unit = + tryCallbackOrFail(callbackName = "onFailureCallback", onFailureCallback(googleJsonError, httpHeaders)) } def tryCallbackOrFail(callbackName: String, callback: () => Unit): Unit = { Try { callback.apply() - }.recover { - case t => - // Ideally we would catch and handle the cases which might lead us here before they actually get this far: - logger.error(s"Programmer Error: Error processing IO response in $callbackName", t) - promise.tryFailure(new Exception(s"Error processing IO response in $callbackName: ${t.getMessage}")) - () + }.recover { case t => + // Ideally we would catch and handle the cases which might lead us here before they actually get this far: + logger.error(s"Programmer Error: Error processing IO response in $callbackName", t) + promise.tryFailure(new Exception(s"Error processing IO response in $callbackName: ${t.getMessage}")) + () } () } @@ -83,16 +84,14 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], /** * Increment backoff time and attempt count */ - lazy val next: GcsBatchCommandContext[T, U] = { + lazy val next: GcsBatchCommandContext[T, U] = this.copy(backoff = backoff.next, currentAttempt = currentAttempt + 1, promise = Promise[BatchResponse]()) - } /** * Only increment backoff. To be used for failures that should be retried infinitely */ - lazy val nextTransient: GcsBatchCommandContext[T, U] = { + lazy val nextTransient: GcsBatchCommandContext[T, U] = this.copy(backoff = backoff.next, promise = Promise[BatchResponse]()) - } /** * Queue the request for batching @@ -106,7 +105,9 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], * On success callback. Transform the request response to a stream-ready response that can complete the promise */ private def onSuccessCallback(response: U, httpHeaders: HttpHeaders)(): Unit = { - request.logIOMsgOverLimit(s"GcsBatchCommandContext.onSuccessCallback '${response.toPrettyElidedString(limit = 1000)}'") + request.logIOMsgOverLimit( + s"GcsBatchCommandContext.onSuccessCallback '${response.toPrettyElidedString(limit = 1000)}'" + ) handleSuccessOrNextRequest(request.onSuccess(response, httpHeaders)) } @@ -131,7 +132,9 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], * On failure callback. 
Fail the promise with a StorageException */ private def onFailureCallback(googleJsonError: GoogleJsonError, httpHeaders: HttpHeaders)(): Unit = { - request.logIOMsgOverLimit(s"GcsBatchCommandContext.onFailureCallback '${googleJsonError.toPrettyElidedString(limit = 1000)}'") + request.logIOMsgOverLimit( + s"GcsBatchCommandContext.onFailureCallback '${googleJsonError.toPrettyElidedString(limit = 1000)}'" + ) if (isProjectNotProvidedError(googleJsonError)) { // Returning an Either.Right here means that the operation is not complete and that we need to do another request handleSuccessOrNextRequest(Right(request.withUserProject).validNel) diff --git a/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchFlow.scala b/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchFlow.scala index 0007b77a42f..972a52530de 100644 --- a/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchFlow.scala +++ b/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchFlow.scala @@ -35,7 +35,8 @@ object GcsBatchFlow { */ case class BatchFailedException(failure: Throwable) extends IOException(failure) - private val ReadForbiddenPattern = ".*does not have storage\\.objects\\.(?:get|list|copy) access to ([^/]+).*".r.pattern + private val ReadForbiddenPattern = + ".*does not have storage\\.objects\\.(?:get|list|copy) access to ([^/]+).*".r.pattern /* Returns `Some(bucket)` if the specified argument represents a forbidden attempt to read from `bucket`. */ private[gcs] def getReadForbiddenBucket(errorMsg: String): Option[String] = { @@ -60,8 +61,9 @@ class GcsBatchFlow(batchSize: Int, onRetry: IoCommandContext[_] => Throwable => Unit, onBackpressure: Option[Double] => Unit, applicationName: String, - backpressureStaleness: FiniteDuration) - (implicit ec: ExecutionContext) extends IoCommandStalenessBackpressuring { + backpressureStaleness: FiniteDuration +)(implicit ec: ExecutionContext) + extends IoCommandStalenessBackpressuring { // Does not carry any authentication, assumes all underlying requests are properly authenticated private val httpRequestInitializer = new HttpRequestInitializer { @@ -113,9 +115,9 @@ class GcsBatchFlow(batchSize: Int, val flow: Graph[ FlowShape[ GcsBatchCommandContext[_, _], - (IoAck[_], IoCommandContext[_]), + (IoAck[_], IoCommandContext[_]) ], - NotUsed, + NotUsed ] = GraphDSL.create() { implicit builder => import GraphDSL.Implicits._ @@ -129,21 +131,22 @@ class GcsBatchFlow(batchSize: Int, val batchProcessor = builder.add( Flow[GcsBatchCommandContext[_, _]] // Group commands together in batches so they can be processed as such - .groupedWithin(batchSize, batchTimespan) + .groupedWithin(batchSize, batchTimespan) // execute the batch and outputs each sub-response individually, as a Future - .mapConcat[Future[GcsBatchResponse[_]]](executeBatch) + .mapConcat[Future[GcsBatchResponse[_]]](executeBatch) // Wait for each Future to complete - .mapAsyncUnordered[GcsBatchResponse[_]](batchSize) { identity } + .mapAsyncUnordered[GcsBatchResponse[_]](batchSize)(identity) ) // Partitions the responses: Terminal responses exit the flow, others go back to the sourceMerger val responseHandler = builder.add(responseHandlerFlow) // Buffer commands to be retried to avoid backpressuring too rapidly - val nextRequestBuffer = builder.add(Flow[GcsBatchCommandContext[_, _]].buffer(batchSize, OverflowStrategy.backpressure)) + val nextRequestBuffer = + builder.add(Flow[GcsBatchCommandContext[_, _]].buffer(batchSize, OverflowStrategy.backpressure)) source ~> sourceMerger ~> batchProcessor ~> responseHandler.in - 
sourceMerger.preferred <~ nextRequestBuffer <~ responseHandler.out1 + sourceMerger.preferred <~ nextRequestBuffer <~ responseHandler.out1 FlowShape[GcsBatchCommandContext[_, _], IoResult](source.in, responseHandler.out0) } @@ -156,10 +159,14 @@ class GcsBatchFlow(batchSize: Int, private lazy val responseHandlerFlow = GraphDSL.create() { implicit builder => import GraphDSL.Implicits._ - val source = builder.add(Partition[GcsBatchResponse[_]](2, { - case _: GcsBatchTerminal[_] => 0 - case _ => 1 - })) + val source = builder.add( + Partition[GcsBatchResponse[_]](2, + { + case _: GcsBatchTerminal[_] => 0 + case _ => 1 + } + ) + ) // Terminal responses: output of this flow val terminals = source.out(0) collect { case terminal: GcsBatchTerminal[_] => terminal.ioResult } @@ -170,7 +177,10 @@ class GcsBatchFlow(batchSize: Int, case nextRequest: GcsBatchNextRequest[_] => nextRequest.context } - new FanOutShape2[GcsBatchResponse[_], IoResult, GcsBatchCommandContext[_, _]](source.in, terminals.outlet, nextRequest.outlet) + new FanOutShape2[GcsBatchResponse[_], IoResult, GcsBatchCommandContext[_, _]](source.in, + terminals.outlet, + nextRequest.outlet + ) } private def executeBatch(contexts: Seq[GcsBatchCommandContext[_, _]]): List[Future[GcsBatchResponse[_]]] = { @@ -193,12 +203,16 @@ class GcsBatchFlow(batchSize: Int, // Otherwise fail with the original exception Try(batchRequest.execute()) match { case Failure(failure: IOException) => - logger.info(s"Failed to execute GCS Batch request. Failed request belonged to batch of size ${batchRequest.size()} containing commands: " + - s"${batchCommandNamesList.mkString("\n")}.\n${failure.toPrettyElidedString(limit = 1000)}") + logger.info( + s"Failed to execute GCS Batch request. Failed request belonged to batch of size ${batchRequest.size()} containing commands: " + + s"${batchCommandNamesList.mkString("\n")}.\n${failure.toPrettyElidedString(limit = 1000)}" + ) failAllPromisesWith(BatchFailedException(failure)) case Failure(failure) => - logger.info(s"Failed to execute GCS Batch request. Failed request belonged to batch of size ${batchRequest.size()} containing commands: " + - s"${batchCommandNamesList.mkString("\n")}.\n${failure.toPrettyElidedString(limit = 1000)}") + logger.info( + s"Failed to execute GCS Batch request. Failed request belonged to batch of size ${batchRequest.size()} containing commands: " + + s"${batchCommandNamesList.mkString("\n")}.\n${failure.toPrettyElidedString(limit = 1000)}" + ) failAllPromisesWith(failure) case _ => } @@ -219,7 +233,9 @@ class GcsBatchFlow(batchSize: Int, * Otherwise create a GcsBatchTerminal response with the IoFailure * In both cases, returns a successful Future to avoid failing the stream or dropping elements */ - private def recoverCommand(context: GcsBatchCommandContext[_, _]): PartialFunction[Throwable, Future[GcsBatchResponse[_]]] = { + private def recoverCommand( + context: GcsBatchCommandContext[_, _] + ): PartialFunction[Throwable, Future[GcsBatchResponse[_]]] = { // If the failure is retryable - recover with a GcsBatchRetry so it can be retried in the next batch case failure if isRetryable(failure) => context.retryIn match { @@ -246,22 +262,22 @@ class GcsBatchFlow(batchSize: Int, /** * Fail a command context with a failure. 
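   * The failure is wrapped in an `EnhancedCromwellIoException` carrying the attempt
   * count, so the surfaced message reads roughly
   * `[Attempted 3 time(s)] - <cause summary>` (illustrative rendering; see
   * `IoAttempts.showState`).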
*/ - private def fail(context: GcsBatchCommandContext[_, _], failure: Throwable) = { + private def fail(context: GcsBatchCommandContext[_, _], failure: Throwable) = Future.successful( GcsBatchTerminal( context.fail(EnhancedCromwellIoException(IoAttempts(context.currentAttempt), failure)) ) ) - } /** * Fail a command context with a forbidden failure. */ - private def failReadForbidden(context: GcsBatchCommandContext[_, _], failure: Throwable, forbiddenPath: String) = { + private def failReadForbidden(context: GcsBatchCommandContext[_, _], failure: Throwable, forbiddenPath: String) = Future.successful( GcsBatchTerminal( - context.failReadForbidden(EnhancedCromwellIoException(IoAttempts(context.currentAttempt), failure), forbiddenPath) + context.failReadForbidden(EnhancedCromwellIoException(IoAttempts(context.currentAttempt), failure), + forbiddenPath + ) ) ) - } } diff --git a/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala b/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala index 937e2b66063..b8a4cb4a873 100644 --- a/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala +++ b/engine/src/main/scala/cromwell/engine/io/gcs/GcsResponse.scala @@ -6,7 +6,8 @@ import cromwell.engine.io.IoActor._ * ADT used only inside the batch stream * @tparam T final type of the result of the Command */ -private [gcs] sealed trait GcsBatchResponse[T] -private [gcs] case class GcsBatchTerminal[T](ioResult: IoResult) extends GcsBatchResponse[T] -private [gcs] case class GcsBatchRetry[T](context: GcsBatchCommandContext[T, _], failure: Throwable) extends GcsBatchResponse[T] -private [gcs] case class GcsBatchNextRequest[T](context: GcsBatchCommandContext[T, _]) extends GcsBatchResponse[T] +sealed private[gcs] trait GcsBatchResponse[T] +private[gcs] case class GcsBatchTerminal[T](ioResult: IoResult) extends GcsBatchResponse[T] +private[gcs] case class GcsBatchRetry[T](context: GcsBatchCommandContext[T, _], failure: Throwable) + extends GcsBatchResponse[T] +private[gcs] case class GcsBatchNextRequest[T](context: GcsBatchCommandContext[T, _]) extends GcsBatchResponse[T] diff --git a/engine/src/main/scala/cromwell/engine/io/gcs/ParallelGcsBatchFlow.scala b/engine/src/main/scala/cromwell/engine/io/gcs/ParallelGcsBatchFlow.scala index 3ef3548bdec..9fc5c5fc7ea 100644 --- a/engine/src/main/scala/cromwell/engine/io/gcs/ParallelGcsBatchFlow.scala +++ b/engine/src/main/scala/cromwell/engine/io/gcs/ParallelGcsBatchFlow.scala @@ -18,10 +18,10 @@ class ParallelGcsBatchFlow(config: GcsBatchFlowConfig, onRetry: IoCommandContext[_] => Throwable => Unit, onBackpressure: Option[Double] => Unit, applicationName: String, - commandBackpressureStaleness: FiniteDuration) - (implicit ec: ExecutionContext) { + commandBackpressureStaleness: FiniteDuration +)(implicit ec: ExecutionContext) { - //noinspection TypeAnnotation + // noinspection TypeAnnotation val flow = GraphDSL.create() { implicit builder => import GraphDSL.Implicits._ val balancer = builder.add(Balance[GcsBatchCommandContext[_, _]](config.parallelism, waitForAllDownstreams = false)) @@ -35,7 +35,8 @@ class ParallelGcsBatchFlow(config: GcsBatchFlowConfig, onRetry = onRetry, onBackpressure = onBackpressure, applicationName = applicationName, - backpressureStaleness = commandBackpressureStaleness).flow + backpressureStaleness = commandBackpressureStaleness + ).flow // for each worker, add an edge from the balancer to the worker, then wire // it to the merge element balancer ~> workerFlow.async ~> merge diff --git 
a/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala b/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala index 69e5551b8a9..b1fe8b05462 100644 --- a/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala +++ b/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala @@ -27,7 +27,6 @@ import java.nio.charset.StandardCharsets import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration - /** * Flow that executes IO operations by calling java.nio.Path methods */ @@ -36,7 +35,8 @@ class NioFlow(parallelism: Int, onBackpressure: Option[Double] => Unit, numberOfAttempts: Int, commandBackpressureStaleness: FiniteDuration - )(implicit system: ActorSystem) extends IoCommandStalenessBackpressuring { +)(implicit system: ActorSystem) + extends IoCommandStalenessBackpressuring { implicit private val ec: ExecutionContext = system.dispatcher implicit private val timer: Timer[IO] = IO.timer(ec) @@ -66,18 +66,19 @@ class NioFlow(parallelism: Int, result <- operationResult } yield (result, commandContext) - io handleErrorWith { - failure => IO.pure(commandContext.fail(failure)) + io handleErrorWith { failure => + IO.pure(commandContext.fail(failure)) } } - private [nio] def handleSingleCommand(ioSingleCommand: IoCommand[_]): IO[IoSuccess[_]] = { + private[nio] def handleSingleCommand(ioSingleCommand: IoCommand[_]): IO[IoSuccess[_]] = { val ret = ioSingleCommand match { case copyCommand: IoCopyCommand => copy(copyCommand) map copyCommand.success case writeCommand: IoWriteCommand => write(writeCommand) map writeCommand.success case deleteCommand: IoDeleteCommand => delete(deleteCommand) map deleteCommand.success case sizeCommand: IoSizeCommand => size(sizeCommand) map sizeCommand.success - case readAsStringCommand: IoContentAsStringCommand => readAsString(readAsStringCommand) map readAsStringCommand.success + case readAsStringCommand: IoContentAsStringCommand => + readAsString(readAsStringCommand) map readAsStringCommand.success case hashCommand: IoHashCommand => hash(hashCommand) map hashCommand.success case touchCommand: IoTouchCommand => touch(touchCommand) map touchCommand.success case existsCommand: IoExistsCommand => exists(existsCommand) map existsCommand.success @@ -131,7 +132,7 @@ class NioFlow(parallelism: Int, ) } - def readFileAndChecksum: IO[String] = { + def readFileAndChecksum: IO[String] = for { fileHash <- getStoredHash(command.file) uncheckedValue <- readFile @@ -145,23 +146,25 @@ class NioFlow(parallelism: Int, verifiedValue <- checksumResult match { case _: ChecksumSkipped => IO.pure(uncheckedValue) case _: ChecksumSuccess => IO.pure(uncheckedValue) - case failure: ChecksumFailure => IO.raiseError( - ChecksumFailedException( - fileHash match { - case Some(hash) => s"Failed checksum for '${command.file}'. Expected '${hash.hashType}' hash of '${hash.hash}'. Calculated hash '${failure.calculatedHash}'" - case None => s"Failed checksum for '${command.file}'. Couldn't find stored file hash." // This should never happen - } + case failure: ChecksumFailure => + IO.raiseError( + ChecksumFailedException( + fileHash match { + case Some(hash) => + s"Failed checksum for '${command.file}'. Expected '${hash.hashType}' hash of '${hash.hash}'. Calculated hash '${failure.calculatedHash}'" + case None => + s"Failed checksum for '${command.file}'. Couldn't find stored file hash." 
// This should never happen + } + ) ) - ) } } yield verifiedValue - } val fileContentIo = command.file match { - case _: DrsPath => readFileAndChecksum + case _: DrsPath => readFileAndChecksum // Temporarily disable since our hashing algorithm doesn't match the stored hash // https://broadworkbench.atlassian.net/browse/WX-1257 - case _: BlobPath => readFile//readFileAndChecksum + case _: BlobPath => readFile // readFileAndChecksum case _ => readFile } fileContentIo.map(_.replaceAll("\\r\\n", "\\\n")) @@ -173,30 +176,32 @@ class NioFlow(parallelism: Int, case nioPath => IO(nioPath.size) } - private def hash(hash: IoHashCommand): IO[String] = { + private def hash(hash: IoHashCommand): IO[String] = // If there is no hash accessible from the file storage system, // we'll read the file and generate the hash ourselves. - getStoredHash(hash.file).flatMap { - case Some(storedHash) => IO.pure(storedHash) - case None => generateMd5FileHashForPath(hash.file) - }.map(_.hash) - } + getStoredHash(hash.file) + .flatMap { + case Some(storedHash) => IO.pure(storedHash) + case None => generateMd5FileHashForPath(hash.file) + } + .map(_.hash) - private def getStoredHash(file: Path): IO[Option[FileHash]] = { + private def getStoredHash(file: Path): IO[Option[FileHash]] = file match { case gcsPath: GcsPath => getFileHashForGcsPath(gcsPath).map(Option(_)) case blobPath: BlobPath => getFileHashForBlobPath(blobPath) - case drsPath: DrsPath => IO { - // We assume all DRS files have a stored hash; this will throw - // if the file does not. - drsPath.getFileHash - }.map(Option(_)) - case s3Path: S3Path => IO { - Option(FileHash(HashType.S3Etag, s3Path.eTag)) - } + case drsPath: DrsPath => + IO { + // We assume all DRS files have a stored hash; this will throw + // if the file does not. + drsPath.getFileHash + }.map(Option(_)) + case s3Path: S3Path => + IO { + Option(FileHash(HashType.S3Etag, s3Path.eTag)) + } case _ => IO.pure(None) } - } private def touch(touch: IoTouchCommand) = IO { touch.file.touch() @@ -224,7 +229,7 @@ class NioFlow(parallelism: Int, * IMPORTANT: Use this instead of IO.fromTry to make sure the Try will be reevaluated if the * IoCommand is retried. 
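   * A small sketch of the difference, assuming the argument re-computes on each call:
   * {{{
   *   val eager  = IO.fromTry(gcsPath.objectBlobId)        // Try evaluated once, outcome frozen
   *   val lazier = delayedIoFromTry(gcsPath.objectBlobId)  // Try re-evaluated on every retry
   * }}}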
*/ - private def delayedIoFromTry[A](t: => Try[A]): IO[A] = IO[A] { t.get } + private def delayedIoFromTry[A](t: => Try[A]): IO[A] = IO[A](t.get) private def getFileHashForGcsPath(gcsPath: GcsPath): IO[FileHash] = delayedIoFromTry { gcsPath.objectBlobId.map(id => FileHash(HashType.GcsCrc32c, gcsPath.cloudStorage.get(id).getCrc32c)) diff --git a/engine/src/main/scala/cromwell/engine/package.scala b/engine/src/main/scala/cromwell/engine/package.scala index f4083286612..1bd7a13a244 100644 --- a/engine/src/main/scala/cromwell/engine/package.scala +++ b/engine/src/main/scala/cromwell/engine/package.scala @@ -14,8 +14,8 @@ package object engine { } implicit class EnhancedCallOutputMap[A](val m: Map[A, JobOutput]) extends AnyVal { - def mapToValues: Map[A, WomValue] = m map { - case (k, JobOutput(womValue)) => (k, womValue) + def mapToValues: Map[A, WomValue] = m map { case (k, JobOutput(womValue)) => + (k, womValue) } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala b/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala index 91be71fbf35..8b72d7eaaba 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/SingleWorkflowRunnerActor.scala @@ -20,8 +20,13 @@ import cromwell.engine.workflow.workflowstore.WorkflowStoreActor.SubmitWorkflow import cromwell.engine.workflow.workflowstore.{InMemoryWorkflowStore, WorkflowStoreSubmitActor} import cromwell.jobstore.EmptyJobStoreActor import cromwell.server.CromwellRootActor -import cromwell.services.{SuccessfulMetadataJsonResponse, FailedMetadataJsonResponse} -import cromwell.services.metadata.MetadataService.{GetSingleWorkflowMetadataAction, GetStatus, ListenToMetadataWriteActor, WorkflowOutputs} +import cromwell.services.{FailedMetadataJsonResponse, SuccessfulMetadataJsonResponse} +import cromwell.services.metadata.MetadataService.{ + GetSingleWorkflowMetadataAction, + GetStatus, + ListenToMetadataWriteActor, + WorkflowOutputs +} import cromwell.subworkflowstore.EmptySubWorkflowStoreActor import spray.json._ @@ -40,8 +45,8 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean, config: Config - )(implicit materializer: ActorMaterializer) - extends CromwellRootActor(terminator, gracefulShutdown, abortJobsOnTerminate, false, config) +)(implicit materializer: ActorMaterializer) + extends CromwellRootActor(terminator, gracefulShutdown, abortJobsOnTerminate, false, config) with LoggingFSM[RunnerState, SwraData] { import SingleWorkflowRunnerActor._ @@ -54,14 +59,13 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, log.info("{}: Version {}", Tag, VersionUtil.getVersion("cromwell-engine")) startWith(NotStarted, EmptySwraData) - when (NotStarted) { - case Event(RunWorkflow, EmptySwraData) => - log.info(s"$Tag: Submitting workflow") - workflowStoreActor ! SubmitWorkflow(source) - goto(SubmittedWorkflow) using SubmittedSwraData(sender()) + when(NotStarted) { case Event(RunWorkflow, EmptySwraData) => + log.info(s"$Tag: Submitting workflow") + workflowStoreActor ! 
SubmitWorkflow(source) + goto(SubmittedWorkflow) using SubmittedSwraData(sender()) } - when (SubmittedWorkflow) { + when(SubmittedWorkflow) { case Event(WorkflowStoreSubmitActor.WorkflowSubmittedToStore(id, WorkflowSubmitted), SubmittedSwraData(replyTo)) => log.info(s"$Tag: Workflow submitted UUID($id)") // Since we only have a single workflow, force the WorkflowManagerActor's hand in case the polling rate is long @@ -72,51 +76,57 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, goto(RunningWorkflow) using RunningSwraData(replyTo, id) } - when (RunningWorkflow) { + when(RunningWorkflow) { case Event(IssuePollRequest, RunningSwraData(_, id)) => requestStatus(id) stay() - case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(_, _)) if !jsObject.state.isTerminal => + case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(_, _)) + if !jsObject.state.isTerminal => schedulePollRequest() stay() - case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowSucceeded => + case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(replyTo, id)) + if jsObject.state == WorkflowSucceeded => log.info(s"$Tag workflow finished with status '$WorkflowSucceeded'.") serviceRegistryActor ! ListenToMetadataWriteActor goto(WaitingForFlushedMetadata) using SucceededSwraData(replyTo, id) - case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowFailed => + case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(replyTo, id)) + if jsObject.state == WorkflowFailed => log.info(s"$Tag workflow finished with status '$WorkflowFailed'.") serviceRegistryActor ! ListenToMetadataWriteActor - goto(WaitingForFlushedMetadata) using FailedSwraData(replyTo, id, new RuntimeException(s"Workflow $id transitioned to state $WorkflowFailed")) - case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(replyTo, id)) if jsObject.state == WorkflowAborted => + goto(WaitingForFlushedMetadata) using FailedSwraData( + replyTo, + id, + new RuntimeException(s"Workflow $id transitioned to state $WorkflowFailed") + ) + case Event(SuccessfulMetadataJsonResponse(_, jsObject: JsObject), RunningSwraData(replyTo, id)) + if jsObject.state == WorkflowAborted => log.info(s"$Tag workflow finished with status '$WorkflowAborted'.") serviceRegistryActor ! ListenToMetadataWriteActor goto(WaitingForFlushedMetadata) using AbortedSwraData(replyTo, id) } - - when (WaitingForFlushedMetadata) { + + when(WaitingForFlushedMetadata) { case Event(QueueWeight(weight), _) if weight > 0 => stay() case Event(QueueWeight(_), data: SucceededSwraData) => - serviceRegistryActor ! 
WorkflowOutputs(data.id) goto(RequestingOutputs) - case Event(QueueWeight(_), data : TerminalSwraData) => + case Event(QueueWeight(_), data: TerminalSwraData) => requestMetadataOrIssueReply(data) } - when (RequestingOutputs) { - case Event(SuccessfulMetadataJsonResponse(_, outputs: JsObject), data: TerminalSwraData) => - outputOutputs(outputs) - requestMetadataOrIssueReply(data) + when(RequestingOutputs) { case Event(SuccessfulMetadataJsonResponse(_, outputs: JsObject), data: TerminalSwraData) => + outputOutputs(outputs) + requestMetadataOrIssueReply(data) } - when (RequestingMetadata) { + when(RequestingMetadata) { case Event(SuccessfulMetadataJsonResponse(_, metadata: JsObject), data: TerminalSwraData) => outputMetadata(metadata) issueReply(data) } - onTransition { - case NotStarted -> RunningWorkflow => schedulePollRequest() + onTransition { case NotStarted -> RunningWorkflow => + schedulePollRequest() } whenUnhandled { @@ -137,11 +147,12 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, stay() } - private def requestMetadataOrIssueReply(newData: TerminalSwraData) = if (metadataOutputPath.isDefined) requestMetadata(newData) else issueReply(newData) - + private def requestMetadataOrIssueReply(newData: TerminalSwraData) = + if (metadataOutputPath.isDefined) requestMetadata(newData) else issueReply(newData) + private def requestMetadata(newData: TerminalSwraData): State = { serviceRegistryActor ! GetSingleWorkflowMetadataAction(newData.id, None, None, expandSubWorkflows = true) - goto (RequestingMetadata) using newData + goto(RequestingMetadata) using newData } private def schedulePollRequest(): Unit = { @@ -150,12 +161,11 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, () } - private def requestStatus(id: WorkflowId): Unit = { + private def requestStatus(id: WorkflowId): Unit = // This requests status via the metadata service rather than instituting an FSM watch on the underlying workflow actor. // Cromwell's eventual consistency means it isn't safe to use an FSM transition to a terminal state as the signal for // when outputs or metadata have stabilized. serviceRegistryActor ! GetStatus(id) - } private def issueSuccessReply(replyTo: ActorRef): State = { replyTo.tell(msg = (), sender = self) // Because replyTo ! () is the parameterless call replyTo.!() @@ -168,17 +178,16 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, done() stay() } - - private [workflow] def done() = {} - private def issueReply(data: TerminalSwraData) = { + private[workflow] def done() = {} + + private def issueReply(data: TerminalSwraData) = data match { case s: SucceededSwraData => issueSuccessReply(s.replyTo) case f: FailedSwraData => issueFailureReply(f.replyTo, f.failure) case a: AbortedSwraData => issueSuccessReply(a.replyTo) } - } private def failAndFinish(e: Throwable, data: SwraData): State = { log.error(e, s"$Tag received Failure message: ${e.getMessage}") @@ -199,11 +208,10 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, /** * Outputs the outputs to stdout, and then requests the metadata. 
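   * (The metadata request itself is issued by the caller via
   * `requestMetadataOrIssueReply`; this method only pretty-prints the outputs JSON.)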
*/ - private def outputOutputs(outputs: JsObject): Unit = { + private def outputOutputs(outputs: JsObject): Unit = println(outputs.prettyPrint) - } - private def outputMetadata(metadata: JsObject): Try[Unit] = { + private def outputMetadata(metadata: JsObject): Try[Unit] = Try { val path = metadataOutputPath.get if (path.isDirectory) { @@ -213,7 +221,6 @@ class SingleWorkflowRunnerActor(source: WorkflowSourceFilesCollection, path.createIfNotExists(createParents = true).write(metadata.prettyPrint) } } void - } } object SingleWorkflowRunnerActor { @@ -222,8 +229,8 @@ object SingleWorkflowRunnerActor { terminator: CromwellTerminator, gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean, - config: Config) - (implicit materializer: ActorMaterializer): Props = { + config: Config + )(implicit materializer: ActorMaterializer): Props = Props( new SingleWorkflowRunnerActor( source = source, @@ -234,7 +241,6 @@ object SingleWorkflowRunnerActor { config = config ) ).withDispatcher(EngineDispatcher) - } sealed trait RunnerMessage // The message to actually run the workflow is made explicit so the non-actor Main can `ask` this actor to do the @@ -255,16 +261,20 @@ object SingleWorkflowRunnerActor { final case class SubmittedSwraData(replyTo: ActorRef) extends SwraData final case class RunningSwraData(replyTo: ActorRef, id: WorkflowId) extends SwraData - sealed trait TerminalSwraData extends SwraData { def replyTo: ActorRef; def terminalState: WorkflowState; def id: WorkflowId } - final case class SucceededSwraData(replyTo: ActorRef, - id: WorkflowId) extends TerminalSwraData { override val terminalState = WorkflowSucceeded } + sealed trait TerminalSwraData extends SwraData { + def replyTo: ActorRef; def terminalState: WorkflowState; def id: WorkflowId + } + final case class SucceededSwraData(replyTo: ActorRef, id: WorkflowId) extends TerminalSwraData { + override val terminalState = WorkflowSucceeded + } - final case class FailedSwraData(replyTo: ActorRef, - id: WorkflowId, - failure: Throwable) extends TerminalSwraData { override val terminalState = WorkflowFailed } + final case class FailedSwraData(replyTo: ActorRef, id: WorkflowId, failure: Throwable) extends TerminalSwraData { + override val terminalState = WorkflowFailed + } - final case class AbortedSwraData(replyTo: ActorRef, - id: WorkflowId) extends TerminalSwraData { override val terminalState = WorkflowAborted } + final case class AbortedSwraData(replyTo: ActorRef, id: WorkflowId) extends TerminalSwraData { + override val terminalState = WorkflowAborted + } implicit class EnhancedJsObject(val jsObject: JsObject) extends AnyVal { def state: WorkflowState = WorkflowState.withName(jsObject.fields("status").asInstanceOf[JsString].value) diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala index 3061e2e8a74..8bf936fa88b 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala @@ -19,19 +19,49 @@ import cromwell.engine.workflow.WorkflowActor._ import cromwell.engine.workflow.WorkflowManagerActor.WorkflowActorWorkComplete import cromwell.engine.workflow.lifecycle._ import cromwell.engine.workflow.lifecycle.deletion.DeleteWorkflowFilesActor -import cromwell.engine.workflow.lifecycle.deletion.DeleteWorkflowFilesActor.{DeleteWorkflowFilesFailedResponse, DeleteWorkflowFilesSucceededResponse, StartWorkflowFilesDeletion} +import 
cromwell.engine.workflow.lifecycle.deletion.DeleteWorkflowFilesActor.{ + DeleteWorkflowFilesFailedResponse, + DeleteWorkflowFilesSucceededResponse, + StartWorkflowFilesDeletion +} import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActor import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActor._ import cromwell.engine.workflow.lifecycle.finalization.WorkflowCallbackActor.PerformCallbackCommand -import cromwell.engine.workflow.lifecycle.finalization.WorkflowFinalizationActor.{StartFinalizationCommand, WorkflowFinalizationFailedResponse, WorkflowFinalizationSucceededResponse} -import cromwell.engine.workflow.lifecycle.finalization.{CopyWorkflowLogsActor, CopyWorkflowOutputsActor, WorkflowFinalizationActor} +import cromwell.engine.workflow.lifecycle.finalization.WorkflowFinalizationActor.{ + StartFinalizationCommand, + WorkflowFinalizationFailedResponse, + WorkflowFinalizationSucceededResponse +} +import cromwell.engine.workflow.lifecycle.finalization.{ + CopyWorkflowLogsActor, + CopyWorkflowOutputsActor, + WorkflowFinalizationActor +} import cromwell.engine.workflow.lifecycle.initialization.WorkflowInitializationActor -import cromwell.engine.workflow.lifecycle.initialization.WorkflowInitializationActor.{StartInitializationCommand, WorkflowInitializationFailedResponse, WorkflowInitializationResponse, WorkflowInitializationSucceededResponse} +import cromwell.engine.workflow.lifecycle.initialization.WorkflowInitializationActor.{ + StartInitializationCommand, + WorkflowInitializationFailedResponse, + WorkflowInitializationResponse, + WorkflowInitializationSucceededResponse +} import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor -import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor.{MaterializeWorkflowDescriptorCommand, MaterializeWorkflowDescriptorFailureResponse, MaterializeWorkflowDescriptorSuccessResponse} +import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor.{ + MaterializeWorkflowDescriptorCommand, + MaterializeWorkflowDescriptorFailureResponse, + MaterializeWorkflowDescriptorSuccessResponse +} import cromwell.engine.workflow.workflowstore.WorkflowStoreActor.WorkflowStoreWriteHeartbeatCommand -import cromwell.engine.workflow.workflowstore.{RestartableAborting, StartableState, WorkflowHeartbeatConfig, WorkflowToStart} -import cromwell.services.metadata.MetadataService.{MetadataWriteFailure, MetadataWriteSuccess, PutMetadataActionAndRespond} +import cromwell.engine.workflow.workflowstore.{ + RestartableAborting, + StartableState, + WorkflowHeartbeatConfig, + WorkflowToStart +} +import cromwell.services.metadata.MetadataService.{ + MetadataWriteFailure, + MetadataWriteSuccess, + PutMetadataActionAndRespond +} import cromwell.subworkflowstore.SubWorkflowStoreActor.WorkflowComplete import cromwell.webservice.EngineStatsActor import org.apache.commons.lang3.exception.ExceptionUtils @@ -152,14 +182,16 @@ object WorkflowActor { workflowFinalOutputs: Option[CallOutputs] = None, workflowAllOutputs: Set[WomValue] = Set.empty, rootAndSubworkflowIds: Set[WorkflowId] = Set.empty, - failedInitializationAttempts: Int = 0) + failedInitializationAttempts: Int = 0 + ) object WorkflowActorData { def apply(startableState: StartableState): WorkflowActorData = WorkflowActorData( currentLifecycleStateActor = None, workflowDescriptor = None, initializationData = AllBackendInitializationData.empty, lastStateReached = StateCheckpoint(WorkflowUnstartedState), - 
effectiveStartableState = startableState) + effectiveStartableState = startableState + ) } /** @@ -190,7 +222,8 @@ object WorkflowActor { workflowHeartbeatConfig: WorkflowHeartbeatConfig, totalJobsByRootWf: AtomicInteger, fileHashCacheActorProps: Option[Props], - blacklistCache: Option[BlacklistCache]): Props = { + blacklistCache: Option[BlacklistCache] + ): Props = Props( new WorkflowActor( workflowToStart = workflowToStart, @@ -214,8 +247,9 @@ object WorkflowActor { workflowHeartbeatConfig = workflowHeartbeatConfig, totalJobsByRootWf = totalJobsByRootWf, fileHashCacheActorProps = fileHashCacheActorProps, - blacklistCache = blacklistCache)).withDispatcher(EngineDispatcher) - } + blacklistCache = blacklistCache + ) + ).withDispatcher(EngineDispatcher) } /** @@ -224,7 +258,7 @@ object WorkflowActor { class WorkflowActor(workflowToStart: WorkflowToStart, conf: Config, callCachingEnabled: Boolean, - invalidateBadCacheResults:Boolean, + invalidateBadCacheResults: Boolean, ioActor: ActorRef, override val serviceRegistryActor: ActorRef, workflowLogCopyRouter: ActorRef, @@ -245,9 +279,12 @@ class WorkflowActor(workflowToStart: WorkflowToStart, // child of this actor. The sbt subproject of `RootWorkflowFileHashCacheActor` is not visible from // the subproject this class belongs to so the `Props` are passed in. fileHashCacheActorProps: Option[Props], - blacklistCache: Option[BlacklistCache]) - extends LoggingFSM[WorkflowActorState, WorkflowActorData] with WorkflowLogging with WorkflowMetadataHelper - with WorkflowInstrumentation with Timers { + blacklistCache: Option[BlacklistCache] +) extends LoggingFSM[WorkflowActorState, WorkflowActorData] + with WorkflowLogging + with WorkflowMetadataHelper + with WorkflowInstrumentation + with Timers { implicit val ec = context.dispatcher private val WorkflowToStart(workflowId, submissionTime, sources, initialStartableState, hogGroup) = workflowToStart @@ -261,7 +298,9 @@ class WorkflowActor(workflowToStart: WorkflowToStart, private val deleteWorkflowFiles = conf.getBoolean("system.delete-workflow-files") private val workflowDockerLookupActor = context.actorOf( - WorkflowDockerLookupActor.props(workflowId, dockerHashActor, initialStartableState.restarted), s"WorkflowDockerLookupActor-$workflowId") + WorkflowDockerLookupActor.props(workflowId, dockerHashActor, initialStartableState.restarted), + s"WorkflowDockerLookupActor-$workflowId" + ) protected val pathBuilderFactories: List[PathBuilderFactory] = EngineFilesystems.configuredPathBuilderFactories @@ -297,8 +336,15 @@ class WorkflowActor(workflowToStart: WorkflowToStart, when(WorkflowUnstartedState) { case Event(StartWorkflowCommand, _) => - val actor = context.actorOf(MaterializeWorkflowDescriptorActor.props(serviceRegistryActor, workflowId, importLocalFilesystem = !serverMode, ioActorProxy = ioActor, hogGroup = hogGroup), - "MaterializeWorkflowDescriptorActor") + val actor = context.actorOf( + MaterializeWorkflowDescriptorActor.props(serviceRegistryActor, + workflowId, + importLocalFilesystem = !serverMode, + ioActorProxy = ioActor, + hogGroup = hogGroup + ), + "MaterializeWorkflowDescriptorActor" + ) pushWorkflowStart(workflowId) actor ! MaterializeWorkflowDescriptorCommand(sources, conf, callCachingEnabled, invalidateBadCacheResults) goto(MaterializingWorkflowDescriptorState) using stateData.copy(currentLifecycleStateActor = Option(actor)) @@ -315,7 +361,9 @@ class WorkflowActor(workflowToStart: WorkflowToStart, self ! 
StartInitializing goto(InitializingWorkflowState) using data.copy(workflowDescriptor = Option(workflowDescriptor)) case Event(MaterializeWorkflowDescriptorFailureResponse(reason: Throwable), data) => - goto(WorkflowFailedState) using data.copy(lastStateReached = StateCheckpoint(MaterializingWorkflowDescriptorState, Option(List(reason)))) + goto(WorkflowFailedState) using data.copy(lastStateReached = + StateCheckpoint(MaterializingWorkflowDescriptorState, Option(List(reason))) + ) // If the workflow is not being restarted then we can abort it immediately as nothing happened yet case Event(AbortWorkflowCommand, _) if !restarting => goto(WorkflowAbortedState) } @@ -323,25 +371,31 @@ class WorkflowActor(workflowToStart: WorkflowToStart, /* ************************** */ /* ****** Initializing ****** */ /* ************************** */ - protected def createInitializationActor(workflowDescriptor: EngineWorkflowDescriptor, name: String): ActorRef = { - context.actorOf( - WorkflowInitializationActor.props( - workflowIdForLogging, - rootWorkflowIdForLogging, - workflowDescriptor, - ioActor, - serviceRegistryActor, - restarting - ), - name) - } + protected def createInitializationActor(workflowDescriptor: EngineWorkflowDescriptor, name: String): ActorRef = + context.actorOf(WorkflowInitializationActor.props( + workflowIdForLogging, + rootWorkflowIdForLogging, + workflowDescriptor, + ioActor, + serviceRegistryActor, + restarting + ), + name + ) when(InitializingWorkflowState) { case Event(StartInitializing, data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => - val initializerActor = createInitializationActor(workflowDescriptor, s"WorkflowInitializationActor-$workflowId-${data.failedInitializationAttempts + 1}") + val initializerActor = + createInitializationActor(workflowDescriptor, + s"WorkflowInitializationActor-$workflowId-${data.failedInitializationAttempts + 1}" + ) initializerActor ! StartInitializationCommand - goto(InitializingWorkflowState) using data.copy(currentLifecycleStateActor = Option(initializerActor), workflowDescriptor = Option(workflowDescriptor)) - case Event(WorkflowInitializationSucceededResponse(initializationData), data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => + goto(InitializingWorkflowState) using data.copy(currentLifecycleStateActor = Option(initializerActor), + workflowDescriptor = Option(workflowDescriptor) + ) + case Event(WorkflowInitializationSucceededResponse(initializationData), + data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _) + ) => val dataWithInitializationData = data.copy(initializationData = initializationData) val executionActor = createWorkflowExecutionActor(workflowDescriptor, dataWithInitializationData) executionActor ! ExecuteWorkflowCommand @@ -350,18 +404,26 @@ class WorkflowActor(workflowToStart: WorkflowToStart, case _ => ExecutingWorkflowState } goto(nextState) using dataWithInitializationData.copy(currentLifecycleStateActor = Option(executionActor)) - case Event(WorkflowInitializationFailedResponse(reason), data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => + case Event(WorkflowInitializationFailedResponse(reason), + data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _) + ) => val failedInitializationAttempts = data.failedInitializationAttempts + 1 if (failedInitializationAttempts < maxInitializationAttempts) { - workflowLogger.info(s"Initialization failed on attempt $failedInitializationAttempts. 
Will retry up to $maxInitializationAttempts times. Next retry is in $initializationRetryInterval", CromwellAggregatedException(reason, "Initialization Failure")) - context.system.scheduler.scheduleOnce(initializationRetryInterval) { self ! StartInitializing} - stay() using data.copy(currentLifecycleStateActor = None, failedInitializationAttempts = failedInitializationAttempts) + workflowLogger.info( + s"Initialization failed on attempt $failedInitializationAttempts. Will retry up to $maxInitializationAttempts times. Next retry is in $initializationRetryInterval", + CromwellAggregatedException(reason, "Initialization Failure") + ) + context.system.scheduler.scheduleOnce(initializationRetryInterval)(self ! StartInitializing) + stay() using data.copy(currentLifecycleStateActor = None, + failedInitializationAttempts = failedInitializationAttempts + ) } else { finalizeWorkflow(data, workflowDescriptor, Map.empty, CallOutputs.empty, Option(reason.toList)) } // If the workflow is not restarting, handle the Abort command normally and send an abort message to the init actor - case Event(AbortWorkflowCommand, data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) if !restarting => + case Event(AbortWorkflowCommand, data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) + if !restarting => handleAbortCommand(data, workflowDescriptor) } @@ -369,43 +431,51 @@ class WorkflowActor(workflowToStart: WorkflowToStart, /* ****** Running ****** */ /* ********************* */ - def createWorkflowExecutionActor(workflowDescriptor: EngineWorkflowDescriptor, data: WorkflowActorData): ActorRef = { - context.actorOf(WorkflowExecutionActor.props( - workflowDescriptor, - ioActor = ioActor, - serviceRegistryActor = serviceRegistryActor, - jobStoreActor = jobStoreActor, - subWorkflowStoreActor = subWorkflowStoreActor, - callCacheReadActor = callCacheReadActor, - callCacheWriteActor = callCacheWriteActor, - workflowDockerLookupActor = workflowDockerLookupActor, - jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, - jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, - backendSingletonCollection, - data.initializationData, - startState = data.effectiveStartableState, - rootConfig = conf, - totalJobsByRootWf = totalJobsByRootWf, - fileHashCacheActor = fileHashCacheActorProps map context.system.actorOf, - blacklistCache = blacklistCache), name = s"WorkflowExecutionActor-$workflowId") - } + def createWorkflowExecutionActor(workflowDescriptor: EngineWorkflowDescriptor, data: WorkflowActorData): ActorRef = + context.actorOf( + WorkflowExecutionActor.props( + workflowDescriptor, + ioActor = ioActor, + serviceRegistryActor = serviceRegistryActor, + jobStoreActor = jobStoreActor, + subWorkflowStoreActor = subWorkflowStoreActor, + callCacheReadActor = callCacheReadActor, + callCacheWriteActor = callCacheWriteActor, + workflowDockerLookupActor = workflowDockerLookupActor, + jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, + jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, + backendSingletonCollection, + data.initializationData, + startState = data.effectiveStartableState, + rootConfig = conf, + totalJobsByRootWf = totalJobsByRootWf, + fileHashCacheActor = fileHashCacheActorProps map context.system.actorOf, + blacklistCache = blacklistCache + ), + name = s"WorkflowExecutionActor-$workflowId" + ) // Handles workflow completion events from the WEA and abort command val executionResponseHandler: StateFunction = { // 
Workflow responses case Event(WorkflowExecutionSucceededResponse(jobKeys, rootAndSubworklowIds, finalOutputs, allOutputs), - data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => + data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _) + ) => finalizeWorkflow(data, workflowDescriptor, jobKeys, finalOutputs, None, allOutputs, rootAndSubworklowIds) case Event(WorkflowExecutionFailedResponse(jobKeys, failures), - data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => + data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _) + ) => finalizeWorkflow(data, workflowDescriptor, jobKeys, CallOutputs.empty, Option(List(failures))) case Event(WorkflowExecutionAbortedResponse(jobKeys), - data @ WorkflowActorData(_, Some(workflowDescriptor), _, StateCheckpoint(_, failures), _, _, _, _, _)) => + data @ WorkflowActorData(_, Some(workflowDescriptor), _, StateCheckpoint(_, failures), _, _, _, _, _) + ) => finalizeWorkflow(data, workflowDescriptor, jobKeys, CallOutputs.empty, failures) // Whether we're running or aborting, restarting or not, pass along the abort command. // Note that aborting a workflow multiple times will result in as many abort commands sent to the execution actor - case Event(AbortWorkflowWithExceptionCommand(ex), data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => + case Event(AbortWorkflowWithExceptionCommand(ex), + data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _) + ) => handleAbortCommand(data, workflowDescriptor, Option(ex)) case Event(AbortWorkflowCommand, data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => handleAbortCommand(data, workflowDescriptor) @@ -420,11 +490,15 @@ class WorkflowActor(workflowToStart: WorkflowToStart, // Handles initialization responses we can get if the abort came in when we were initializing the workflow val abortHandler: StateFunction = { // If the initialization failed, record the failure in the data and finalize the workflow - case Event(WorkflowInitializationFailedResponse(reason), data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => + case Event(WorkflowInitializationFailedResponse(reason), + data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _) + ) => finalizeWorkflow(data, workflowDescriptor, Map.empty, CallOutputs.empty, Option(reason.toList)) // Otherwise (success or abort), finalize the workflow without failures - case Event(_: WorkflowInitializationResponse, data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _)) => + case Event(_: WorkflowInitializationResponse, + data @ WorkflowActorData(_, Some(workflowDescriptor), _, _, _, _, _, _, _) + ) => finalizeWorkflow(data, workflowDescriptor, Map.empty, CallOutputs.empty, failures = None) case Event(StartInitializing, _) => @@ -444,7 +518,9 @@ class WorkflowActor(workflowToStart: WorkflowToStart, case Event(WorkflowFinalizationSucceededResponse, data) => finalizationSucceeded(data) case Event(WorkflowFinalizationFailedResponse(finalizationFailures), data) => val failures = data.lastStateReached.failures.getOrElse(List.empty) ++ finalizationFailures - goto(WorkflowFailedState) using data.copy(lastStateReached = StateCheckpoint(FinalizingWorkflowState, Option(failures))) + goto(WorkflowFailedState) using data.copy(lastStateReached = + StateCheckpoint(FinalizingWorkflowState, Option(failures)) + ) case Event(AbortWorkflowCommand, _) => stay() case Event(StartInitializing, 
_) => // An initialization trigger we no longer need to action. Ignore: @@ -461,12 +537,18 @@ class WorkflowActor(workflowToStart: WorkflowToStart, // better luck. If we continue to be unable to write the completion message to the DB it's better to leave the // workflow in its current state in the DB than to let the WMA delete it // Note: this is an infinite retry right now, but it doesn't consume much in terms of resources and could help us successfully weather maintenance downtime on the DB - workflowLogger.error(reason, "Unable to complete workflow due to inability to write concluding metadata status. Retrying...") + workflowLogger.error( + reason, + "Unable to complete workflow due to inability to write concluding metadata status. Retrying..." + ) PutMetadataActionAndRespond(msgs, self) stay() } - def handleAbortCommand(data: WorkflowActorData, workflowDescriptor: EngineWorkflowDescriptor, exceptionCausedAbortOpt: Option[Throwable] = None) = { + def handleAbortCommand(data: WorkflowActorData, + workflowDescriptor: EngineWorkflowDescriptor, + exceptionCausedAbortOpt: Option[Throwable] = None + ) = { val updatedData = data.copy(lastStateReached = StateCheckpoint(stateName, exceptionCausedAbortOpt.map(List(_)))) data.currentLifecycleStateActor match { case Some(currentActor) => @@ -474,11 +556,21 @@ class WorkflowActor(workflowToStart: WorkflowToStart, goto(WorkflowAbortingState) using updatedData case None => if (stateName == InitializingWorkflowState) { - workflowLogger.info(s"Received an abort command in state $stateName (while awaiting an initialization retry). Finalizing the workflow.") + workflowLogger.info( + s"Received an abort command in state $stateName (while awaiting an initialization retry). Finalizing the workflow." + ) } else { - workflowLogger.warn(s"Received an abort command in state $stateName but there's no lifecycle actor associated. This is an abnormal state, finalizing the workflow anyway.") + workflowLogger.warn( + s"Received an abort command in state $stateName but there's no lifecycle actor associated. This is an abnormal state, finalizing the workflow anyway." + ) } - finalizeWorkflow(updatedData, workflowDescriptor, Map.empty, CallOutputs.empty, failures = None, lastStateOverride = Option(WorkflowAbortingState)) + finalizeWorkflow(updatedData, + workflowDescriptor, + Map.empty, + CallOutputs.empty, + failures = None, + lastStateOverride = Option(WorkflowAbortingState) + ) } } @@ -490,19 +582,23 @@ class WorkflowActor(workflowToStart: WorkflowToStart, // since deletion happens only if the workflow and finalization succeeded we can directly goto Succeeded state when(DeletingFilesState) { case Event(DeleteWorkflowFilesSucceededResponse(filesNotFound, callCacheInvalidationErrors), data) => - workflowLogger.info(s"Successfully deleted intermediate output file(s) for root workflow $rootWorkflowIdForLogging." + - deleteFilesAdditionalError(filesNotFound, callCacheInvalidationErrors)) + workflowLogger.info( + s"Successfully deleted intermediate output file(s) for root workflow $rootWorkflowIdForLogging." + + deleteFilesAdditionalError(filesNotFound, callCacheInvalidationErrors) + ) goto(WorkflowSucceededState) using data.copy(currentLifecycleStateActor = None) case Event(DeleteWorkflowFilesFailedResponse(errors, filesNotFound, callCacheInvalidationErrors), data) => - workflowLogger.info(s"Failed to delete ${errors.size} intermediate output file(s) for root workflow $rootWorkflowIdForLogging." 
+ - deleteFilesAdditionalError(filesNotFound, callCacheInvalidationErrors) + s" Errors: ${errors.map(ExceptionUtils.getMessage)}") + workflowLogger.info( + s"Failed to delete ${errors.size} intermediate output file(s) for root workflow $rootWorkflowIdForLogging." + + deleteFilesAdditionalError(filesNotFound, callCacheInvalidationErrors) + s" Errors: ${errors.map(ExceptionUtils.getMessage)}" + ) goto(WorkflowSucceededState) using data.copy(currentLifecycleStateActor = None) } // Let these messages fall through to the whenUnhandled handler: - when(WorkflowAbortedState) { FSM.NullFunction } - when(WorkflowFailedState) { FSM.NullFunction } - when(WorkflowSucceededState) { FSM.NullFunction } + when(WorkflowAbortedState)(FSM.NullFunction) + when(WorkflowFailedState)(FSM.NullFunction) + when(WorkflowSucceededState)(FSM.NullFunction) whenUnhandled { case Event(SendWorkflowHeartbeatCommand, _) => @@ -514,11 +610,14 @@ class WorkflowActor(workflowToStart: WorkflowToStart, case Event(msg @ EngineStatsActor.JobCountQuery, data) => data.currentLifecycleStateActor match { case Some(a) => a forward msg - case None => sender() ! EngineStatsActor.NoJobs // This should be impossible, but if somehow here it's technically correct + case None => + sender() ! EngineStatsActor.NoJobs // This should be impossible, but if somehow here it's technically correct } stay() case Event(AwaitMetadataIntegrity, data) => - goto(MetadataIntegrityValidationState) using data.copy(lastStateReached = data.lastStateReached.copy(state = stateName)) + goto(MetadataIntegrityValidationState) using data.copy(lastStateReached = + data.lastStateReached.copy(state = stateName) + ) } onTransition { @@ -527,7 +626,11 @@ class WorkflowActor(workflowToStart: WorkflowToStart, setWorkflowTimePerState(terminalState.workflowState, (System.currentTimeMillis() - startTime).millis) workflowLogger.debug(s"transition from {} to {}. Stopping self.", arg1 = oldState, arg2 = terminalState) pushWorkflowEnd(workflowId) - WorkflowProcessingEventPublishing.publish(workflowId, workflowHeartbeatConfig.cromwellId, Finished, serviceRegistryActor) + WorkflowProcessingEventPublishing.publish(workflowId, + workflowHeartbeatConfig.cromwellId, + Finished, + serviceRegistryActor + ) subWorkflowStoreActor ! WorkflowComplete(workflowId) terminalState match { case WorkflowFailedState => @@ -546,10 +649,11 @@ class WorkflowActor(workflowToStart: WorkflowToStart, val system = context.system val ec = context.system.dispatcher - def bruteForcePathBuilders: Future[List[PathBuilder]] = { + def bruteForcePathBuilders: Future[List[PathBuilder]] = // Protect against path builders that may throw an exception instead of returning a failed future - Future(EngineFilesystems.pathBuildersForWorkflow(bruteForceWorkflowOptions, pathBuilderFactories)(system))(ec).flatten - } + Future(EngineFilesystems.pathBuildersForWorkflow(bruteForceWorkflowOptions, pathBuilderFactories)(system))( + ec + ).flatten val (workflowOptions, pathBuilders) = stateData.workflowDescriptor match { case Some(wd) => (wd.backendDescriptor.workflowOptions, Future.successful(wd.pathBuilders)) @@ -561,12 +665,17 @@ class WorkflowActor(workflowToStart: WorkflowToStart, workflowOptions.get(FinalWorkflowLogDir).toOption match { case Some(destinationDir) => pathBuilders - .map(pb => workflowLogCopyRouter ! CopyWorkflowLogsActor.Copy(workflowId, PathFactory.buildPath(destinationDir, pb)))(ec) + .map(pb => + workflowLogCopyRouter ! 
CopyWorkflowLogsActor.Copy(workflowId, + PathFactory.buildPath(destinationDir, pb) + ) + )(ec) .recover { case e => log.error(e, "Failed to copy workflow log") }(ec) - case None => workflowLogger.close(andDelete = WorkflowLogger.isTemporary) match { - case Failure(f) => log.error(f, "Failed to delete workflow log") - case _ => - } + case None => + workflowLogger.close(andDelete = WorkflowLogger.isTemporary) match { + case Failure(f) => log.error(f, "Failed to delete workflow log") + case _ => + } } } @@ -598,7 +707,9 @@ class WorkflowActor(workflowToStart: WorkflowToStart, } } - private def deleteFilesAdditionalError(filesNotFound: List[Path], callCacheInvalidationErrors: List[Throwable]): String = { + private def deleteFilesAdditionalError(filesNotFound: List[Path], + callCacheInvalidationErrors: List[Throwable] + ): String = { val filesNotFoundMsg = if (filesNotFound.nonEmpty) { s" File(s) not found during deletion: ${filesNotFound.mkString(",")}" @@ -634,30 +745,35 @@ class WorkflowActor(workflowToStart: WorkflowToStart, it instantiates the DeleteWorkflowFilesActor and waits for it to respond. Note: We can't start deleting files before finalization succeeds as we don't want to start deleting them as they are being copied to another location. - */ + */ private def deleteFilesOrGotoFinalState(data: WorkflowActorData) = { def deleteFiles() = { val rootWorkflowId = data.workflowDescriptor.get.rootWorkflowId - val deleteActor = context.actorOf(DeleteWorkflowFilesActor.props( - rootWorkflowId = rootWorkflowId, - rootWorkflowRootPaths = data.initializationData.getWorkflowRoots(), - rootAndSubworkflowIds = data.rootAndSubworkflowIds, - workflowFinalOutputs = data.workflowFinalOutputs.map(out => out.outputs.values.toSet).getOrElse(Set.empty), - workflowAllOutputs = data.workflowAllOutputs, - pathBuilders = data.workflowDescriptor.get.pathBuilders, - serviceRegistryActor = serviceRegistryActor, - ioActor = ioActor), - name = s"DeleteWorkflowFilesActor-${rootWorkflowId.id}") + val deleteActor = context.actorOf( + DeleteWorkflowFilesActor.props( + rootWorkflowId = rootWorkflowId, + rootWorkflowRootPaths = data.initializationData.getWorkflowRoots(), + rootAndSubworkflowIds = data.rootAndSubworkflowIds, + workflowFinalOutputs = data.workflowFinalOutputs.map(out => out.outputs.values.toSet).getOrElse(Set.empty), + workflowAllOutputs = data.workflowAllOutputs, + pathBuilders = data.workflowDescriptor.get.pathBuilders, + serviceRegistryActor = serviceRegistryActor, + ioActor = ioActor + ), + name = s"DeleteWorkflowFilesActor-${rootWorkflowId.id}" + ) deleteActor ! StartWorkflowFilesDeletion goto(DeletingFilesState) using data } - val userDeleteFileWfOption = data.workflowDescriptor.flatMap( - _.backendDescriptor.workflowOptions.getBoolean("delete_intermediate_output_files").toOption - ).getOrElse(false) + val userDeleteFileWfOption = data.workflowDescriptor + .flatMap( + _.backendDescriptor.workflowOptions.getBoolean("delete_intermediate_output_files").toOption + ) + .getOrElse(false) (deleteWorkflowFiles, userDeleteFileWfOption, data.workflowAllOutputs.nonEmpty) match { case (true, true, true) => deleteFiles() @@ -668,26 +784,42 @@ class WorkflowActor(workflowToStart: WorkflowToStart, // user has not enabled delete intermediate outputs option, so go to succeeded status goto(WorkflowSucceededState) using data.copy(currentLifecycleStateActor = None) case (false, true, _) => - log.info(s"User wants to delete intermediate files but it is not enabled in Cromwell config. 
To use it system.delete-workflow-files to true.")
+        log.info(
+          s"User wants to delete intermediate files but it is not enabled in Cromwell config. To use it, set system.delete-workflow-files to true."
+        )
         goto(WorkflowSucceededState) using data.copy(currentLifecycleStateActor = None)
       case (false, false, _) =>
         goto(WorkflowSucceededState) using data.copy(currentLifecycleStateActor = None)
     }
   }
 
-  private[workflow] def makeFinalizationActor(workflowDescriptor: EngineWorkflowDescriptor, jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs) = {
+  private[workflow] def makeFinalizationActor(workflowDescriptor: EngineWorkflowDescriptor,
+                                              jobExecutionMap: JobExecutionMap,
+                                              workflowOutputs: CallOutputs
+  ) = {
     val copyWorkflowOutputsActorProps = stateName match {
       case InitializingWorkflowState => None
-      case _ => Option(CopyWorkflowOutputsActor.props(workflowIdForLogging, ioActor, workflowDescriptor, workflowOutputs, stateData.initializationData))
+      case _ =>
+        Option(
+          CopyWorkflowOutputsActor.props(workflowIdForLogging,
+                                         ioActor,
+                                         workflowDescriptor,
+                                         workflowOutputs,
+                                         stateData.initializationData
+          )
+        )
     }
-    context.actorOf(WorkflowFinalizationActor.props(
-      workflowDescriptor = workflowDescriptor,
-      ioActor = ioActor,
-      jobExecutionMap = jobExecutionMap,
-      workflowOutputs = workflowOutputs,
-      initializationData = stateData.initializationData,
-      copyWorkflowOutputsActor = copyWorkflowOutputsActorProps
-    ), name = s"WorkflowFinalizationActor")
+    context.actorOf(
+      WorkflowFinalizationActor.props(
+        workflowDescriptor = workflowDescriptor,
+        ioActor = ioActor,
+        jobExecutionMap = jobExecutionMap,
+        workflowOutputs = workflowOutputs,
+        initializationData = stateData.initializationData,
+        copyWorkflowOutputsActor = copyWorkflowOutputsActorProps
+      ),
+      name = s"WorkflowFinalizationActor"
+    )
   }
 
   /**
@@ -700,17 +832,19 @@ class WorkflowActor(workflowToStart: WorkflowToStart,
                                failures: Option[List[Throwable]],
                                workflowAllOutputs: Set[WomValue] = Set.empty,
                                rootAndSubworkflowIds: Set[WorkflowId] = Set.empty,
-                               lastStateOverride: Option[WorkflowActorState] = None) = {
+                               lastStateOverride: Option[WorkflowActorState] = None
+  ) = {
     val finalizationActor = makeFinalizationActor(workflowDescriptor, jobExecutionMap, workflowFinalOutputs)
     finalizationActor ! StartFinalizationCommand
     goto(FinalizingWorkflowState) using data.copy(
-      lastStateReached = StateCheckpoint (lastStateOverride.getOrElse(stateName), failures),
+      lastStateReached = StateCheckpoint(lastStateOverride.getOrElse(stateName), failures),
      workflowFinalOutputs = Option(workflowFinalOutputs),
       workflowAllOutputs = workflowAllOutputs,
       rootAndSubworkflowIds = rootAndSubworkflowIds
     )
   }
 
-  private def sendHeartbeat(): Unit = workflowStoreActor ! WorkflowStoreWriteHeartbeatCommand(workflowId, submissionTime)
+  private def sendHeartbeat(): Unit =
+    workflowStoreActor ! 
WorkflowStoreWriteHeartbeatCommand(workflowId, submissionTime) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala index 32889f7f849..8c3afe375a1 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala @@ -8,7 +8,12 @@ import cromwell.core.Dispatcher.EngineDispatcher import cromwell.core.{Dispatcher, WorkflowId} import cromwell.database.sql.EngineSqlDatabase import cromwell.database.sql.tables.DockerHashStoreEntry -import cromwell.docker.DockerInfoActor.{DockerHashFailureResponse, DockerInfoSuccessResponse, DockerInformation, DockerSize} +import cromwell.docker.DockerInfoActor.{ + DockerHashFailureResponse, + DockerInformation, + DockerInfoSuccessResponse, + DockerSize +} import cromwell.docker.{DockerClientHelper, DockerHashResult, DockerImageIdentifier, DockerInfoRequest} import cromwell.engine.workflow.WorkflowDockerLookupActor._ import cromwell.services.EngineServicesStore @@ -37,11 +42,12 @@ import scala.util.{Failure, Success} * for this tag will be attempted again. */ -class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, - val dockerHashingActor: ActorRef, - isRestart: Boolean, - databaseInterface: EngineSqlDatabase) - extends LoggingFSM[WorkflowDockerLookupActorState, WorkflowDockerLookupActorData] with DockerClientHelper { +class WorkflowDockerLookupActor private[workflow] (workflowId: WorkflowId, + val dockerHashingActor: ActorRef, + isRestart: Boolean, + databaseInterface: EngineSqlDatabase +) extends LoggingFSM[WorkflowDockerLookupActorState, WorkflowDockerLookupActorData] + with DockerClientHelper { implicit val ec = context.system.dispatchers.lookup(Dispatcher.EngineDispatcher) @@ -55,10 +61,9 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, // `AwaitingFirstRequestOnRestart` is only used in restart scenarios. This state waits until there's at least one hash // request before trying to load the docker hash mappings. This is so we'll have at least one `JobPreparationActor` // reference available to message with a terminal failure in case the reading or parsing of these mappings fails. - when(AwaitingFirstRequestOnRestart) { - case Event(request: DockerInfoRequest, data) => - loadDockerHashStoreEntries() - goto(LoadingCache) using data.addHashRequest(request, sender()) + when(AwaitingFirstRequestOnRestart) { case Event(request: DockerInfoRequest, data) => + loadDockerHashStoreEntries() + goto(LoadingCache) using data.addHashRequest(request, sender()) } // Waiting for a response from the database with the hash mapping for this workflow. @@ -89,7 +94,10 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, case Event(DockerHashStoreSuccess(response), data) => recordMappingAndRespond(response, data) case Event(DockerHashStoreFailure(request, e), data) => - handleStoreFailure(request, new Exception(s"Failure storing docker hash for ${request.dockerImageID.fullName}", e), data) + handleStoreFailure(request, + new Exception(s"Failure storing docker hash for ${request.dockerImageID.fullName}", e), + data + ) } when(Terminal) { @@ -97,7 +105,10 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, // In the Terminal state we reject all requests with the cause set in the state data. sender() ! 
WorkflowDockerLookupFailure(data.failureCause.orNull, request) stay() - case Event(_ @ (_: DockerInfoSuccessResponse | _: DockerHashFailureResponse | _: DockerHashStoreSuccess | _: DockerHashStoreFailure), _) => + case Event(_ @(_: DockerInfoSuccessResponse | _: DockerHashFailureResponse | _: DockerHashStoreSuccess | + _: DockerHashStoreFailure), + _ + ) => // Other expected message types are unsurprising in the Terminal state and can be swallowed. Unexpected message // types will be handled by `whenUnhandled`. stay() @@ -110,7 +121,8 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, whenUnhandled { case Event(DockerHashActorTimeout(request), data) => - val reason = new Exception(s"Timeout looking up hash for Docker image ${request.dockerImageID} in state $stateName") + val reason = + new Exception(s"Timeout looking up hash for Docker image ${request.dockerImageID} in state $stateName") data.hashRequests.get(request.dockerImageID) match { case Some(requestsAndReplyTos) => requestsAndReplyTos foreach { case RequestAndReplyTo(_, replyTo) => @@ -119,9 +131,16 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, val updatedData = data.copy(hashRequests = data.hashRequests - request.dockerImageID) stay() using updatedData case None => - val headline = s"Unable to find requesters for timed out lookup of Docker image '${request.dockerImageID}' in state $stateName" - val pendingImageIdsAndCounts = stateData.hashRequests.toList map { case (imageId, requestAndReplyTos) => s"$imageId -> ${requestAndReplyTos.size}" } - val message = pendingImageIdsAndCounts.mkString(headline + "\n" + "Pending image ID requests with requester counts: ", ", ", "") + val headline = + s"Unable to find requesters for timed out lookup of Docker image '${request.dockerImageID}' in state $stateName" + val pendingImageIdsAndCounts = stateData.hashRequests.toList map { case (imageId, requestAndReplyTos) => + s"$imageId -> ${requestAndReplyTos.size}" + } + val message = + pendingImageIdsAndCounts.mkString(headline + "\n" + "Pending image ID requests with requester counts: ", + ", ", + "" + ) fail(new RuntimeException(message) with NoStackTrace) } case Event(TransitionToFailed(cause), data) => @@ -137,27 +156,37 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, * Load mappings from the database into the state data, reply to queued requests which have mappings, and initiate * hash lookups for requests which don't have mappings. */ - private def loadCacheAndHandleHashRequests(hashEntries: Map[String, DockerHashStoreEntry], data: WorkflowDockerLookupActorData): State = { - val dockerMappingsTry = hashEntries map { - case (dockerTag, entry) => ( + private def loadCacheAndHandleHashRequests(hashEntries: Map[String, DockerHashStoreEntry], + data: WorkflowDockerLookupActorData + ): State = { + val dockerMappingsTry = hashEntries map { case (dockerTag, entry) => + ( DockerImageIdentifier.fromString(dockerTag), - DockerHashResult.fromString(entry.dockerHash) map { hash => DockerInformation(hash, entry.dockerSize.map(DockerSize.apply)) } + DockerHashResult.fromString(entry.dockerHash) map { hash => + DockerInformation(hash, entry.dockerSize.map(DockerSize.apply)) + } ) } TryUtil.sequenceKeyValues(dockerMappingsTry) match { case Success(dockerMappings) => // Figure out which of the queued requests already have established mappings. 
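        // Editorial sketch (assumed example, not part of the patch): with
        //   mappings     = Map(idA -> infoA)
        //   hashRequests = Map(idA -> requestersA, idB -> requestersB)
        // the partition below yields hasMappings = Map(idA -> requestersA) and
        // doesNotHaveMappings = Map(idB -> requestersB); idA's requesters are answered from the
        // loaded cache, while a single lookup is sent to the hashing actor for idB.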
- val (hasMappings, doesNotHaveMappings) = data.hashRequests.partition { case (dockerImageId, _) => dockerMappings.contains(dockerImageId) } + val (hasMappings, doesNotHaveMappings) = data.hashRequests.partition { case (dockerImageId, _) => + dockerMappings.contains(dockerImageId) + } // The requests which have mappings receive success responses. hasMappings foreach { case (dockerImageId, requestAndReplyTos) => val result = dockerMappings(dockerImageId) - requestAndReplyTos foreach { case RequestAndReplyTo(request, replyTo) => replyTo ! DockerInfoSuccessResponse(result, request)} + requestAndReplyTos foreach { case RequestAndReplyTo(request, replyTo) => + replyTo ! DockerInfoSuccessResponse(result, request) + } } // The requests without mappings need to be looked up. - doesNotHaveMappings foreach { case (_, requestAndReplyTos) => sendDockerCommand(requestAndReplyTos.head.request) } + doesNotHaveMappings foreach { case (_, requestAndReplyTos) => + sendDockerCommand(requestAndReplyTos.head.request) + } // Update state data accordingly. val newData = data.copy(hashRequests = doesNotHaveMappings, mappings = dockerMappings, failureCause = None) @@ -171,55 +200,83 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, private def requestDockerHash(request: DockerInfoRequest, data: WorkflowDockerLookupActorData): State = { sendDockerCommand(request) val replyTo = sender() - val updatedData = data.copy(hashRequests = data.hashRequests + (request.dockerImageID -> NonEmptyList.of(RequestAndReplyTo(request, replyTo)))) + val updatedData = data.copy(hashRequests = + data.hashRequests + (request.dockerImageID -> NonEmptyList.of(RequestAndReplyTo(request, replyTo))) + ) stay() using updatedData } - private def recordMappingAndRespond(response: DockerInfoSuccessResponse, data: WorkflowDockerLookupActorData): State = { + private def recordMappingAndRespond(response: DockerInfoSuccessResponse, + data: WorkflowDockerLookupActorData + ): State = { // Add the new label to hash mapping to the current set of mappings. val request = response.request data.hashRequests.get(request.dockerImageID) match { - case Some(actors) => actors foreach { case RequestAndReplyTo(_, replyTo) => replyTo ! DockerInfoSuccessResponse(response.dockerInformation, request) } - case None => fail(new Exception(s"Could not find the actors associated with $request. Available requests are ${data.hashRequests.keys.mkString(", ")}") with NoStackTrace) + case Some(actors) => + actors foreach { case RequestAndReplyTo(_, replyTo) => + replyTo ! DockerInfoSuccessResponse(response.dockerInformation, request) + } + case None => + fail( + new Exception( + s"Could not find the actors associated with $request. 
Available requests are ${data.hashRequests.keys.mkString(", ")}"
+          ) with NoStackTrace
+        )
    }
-    val updatedData = data.copy(hashRequests = data.hashRequests - request.dockerImageID, mappings = data.mappings + (request.dockerImageID -> response.dockerInformation))
+    val updatedData = data.copy(hashRequests = data.hashRequests - request.dockerImageID,
+                                mappings = data.mappings + (request.dockerImageID -> response.dockerInformation)
+    )
     stay() using updatedData
   }
 
   private def respondToAllRequests(reason: Throwable,
                                    data: WorkflowDockerLookupActorData,
-                                   messageBuilder: (Throwable, DockerInfoRequest) => WorkflowDockerLookupResponse): WorkflowDockerLookupActorData = {
+                                   messageBuilder: (Throwable, DockerInfoRequest) => WorkflowDockerLookupResponse
+  ): WorkflowDockerLookupActorData = {
     data.hashRequests foreach { case (_, replyTos) =>
       replyTos foreach { case RequestAndReplyTo(request, replyTo) => replyTo ! messageBuilder(reason, request) }
     }
     data.clearHashRequests
   }
 
-  private def respondToAllRequestsWithTerminalFailure(reason: Throwable, data: WorkflowDockerLookupActorData): WorkflowDockerLookupActorData = {
+  private def respondToAllRequestsWithTerminalFailure(reason: Throwable,
+                                                      data: WorkflowDockerLookupActorData
+  ): WorkflowDockerLookupActorData =
     respondToAllRequests(reason, data, WorkflowDockerTerminalFailure.apply)
-  }
 
   private def persistDockerHash(response: DockerInfoSuccessResponse, data: WorkflowDockerLookupActorData): State = {
     // BA-6495 if there are actors awaiting this data, then proceed; otherwise don't bother to persist
     if (data.hashRequests.contains(response.request.dockerImageID)) {
-      val dockerHashStoreEntry = DockerHashStoreEntry(workflowId.toString, response.request.dockerImageID.fullName, response.dockerInformation.dockerHash.algorithmAndHash, response.dockerInformation.dockerCompressedSize.map(_.compressedSize))
+      val dockerHashStoreEntry = DockerHashStoreEntry(
+        workflowId.toString,
+        response.request.dockerImageID.fullName,
+        response.dockerInformation.dockerHash.algorithmAndHash,
+        response.dockerInformation.dockerCompressedSize.map(_.compressedSize)
+      )
       databaseInterface.addDockerHashStoreEntry(dockerHashStoreEntry) onComplete {
         case Success(_) => self ! DockerHashStoreSuccess(response)
         case Failure(ex) => self ! DockerHashStoreFailure(response.request, ex)
       }
     } else {
-      log.debug(s"Unable to find requesters for succeeded lookup of Docker image " +
-        s"'${response.request.dockerImageID}'. Most likely reason is that requesters have already been cleaned out " +
-        s"earlier by the timeout.")
+      log.debug(
+        s"Unable to find requesters for succeeded lookup of Docker image " +
+          s"'${response.request.dockerImageID}'. Most likely reason is that requesters have already been cleaned out " +
+          s"earlier by the timeout."
+      )
     }
     stay()
   }
 
-  private def handleLookupFailure(dockerResponse: DockerHashFailureResponse, data: WorkflowDockerLookupActorData): State = {
+  private def handleLookupFailure(dockerResponse: DockerHashFailureResponse,
+                                  data: WorkflowDockerLookupActorData
+  ): State = {
     // Fail all pending requests. This logic does not blacklist the tag, which will allow lookups to be attempted
     // again in the future. 
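    // Editorial note: only the requesters pending at failure time receive WorkflowDockerLookupFailure
    // below; the mappings cache is untouched, so a later DockerInfoRequest for the same image id goes
    // back through the normal lookup path (requestDockerHash) and triggers a fresh attempt.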
- val exceptionMessage = s"Failed Docker lookup '${dockerResponse.request.dockerImageID}' '${dockerResponse.request.credentialDetails.mkString("[", ", ", "]")}'" - val failureResponse = WorkflowDockerLookupFailure(new Exception(dockerResponse.reason), dockerResponse.request, exceptionMessage) + val exceptionMessage = + s"Failed Docker lookup '${dockerResponse.request.dockerImageID}' '${dockerResponse.request.credentialDetails + .mkString("[", ", ", "]")}'" + val failureResponse = + WorkflowDockerLookupFailure(new Exception(dockerResponse.reason), dockerResponse.request, exceptionMessage) val request = dockerResponse.request data.hashRequests.get(request.dockerImageID) match { case Some(requestAndReplyTos) => @@ -227,27 +284,35 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, val updatedData = data.copy(hashRequests = data.hashRequests - request.dockerImageID) stay() using updatedData case None => - log.debug(s"Unable to find requesters for failed lookup of Docker image '${request.dockerImageID}'. " + - s"Most likely reason is that requesters have already been cleaned out earlier by the timeout.") + log.debug( + s"Unable to find requesters for failed lookup of Docker image '${request.dockerImageID}'. " + + s"Most likely reason is that requesters have already been cleaned out earlier by the timeout." + ) stay() } } - private def handleStoreFailure(dockerHashRequest: DockerInfoRequest, reason: Throwable, data: WorkflowDockerLookupActorData): State = { + private def handleStoreFailure(dockerHashRequest: DockerInfoRequest, + reason: Throwable, + data: WorkflowDockerLookupActorData + ): State = data.hashRequests.get(dockerHashRequest.dockerImageID) match { case Some(requestAndReplyTos) => - requestAndReplyTos foreach { case RequestAndReplyTo(_, replyTo) => replyTo ! WorkflowDockerLookupFailure(reason, dockerHashRequest) } + requestAndReplyTos foreach { case RequestAndReplyTo(_, replyTo) => + replyTo ! WorkflowDockerLookupFailure(reason, dockerHashRequest) + } // Remove these requesters from the collection of those awaiting hashes. stay() using data.copy(hashRequests = data.hashRequests - dockerHashRequest.dockerImageID) case None => - log.debug(s"Unable to find requesters for failed store of hash for Docker image " + - s"'${dockerHashRequest.dockerImageID}'. Most likely reason is that requesters have already been cleaned " + - s"out earlier by the timeout.") + log.debug( + s"Unable to find requesters for failed store of hash for Docker image " + + s"'${dockerHashRequest.dockerImageID}'. Most likely reason is that requesters have already been cleaned " + + s"out earlier by the timeout." + ) stay() } - } - def loadDockerHashStoreEntries(): Unit = { + def loadDockerHashStoreEntries(): Unit = databaseInterface.queryDockerHashStoreEntries(workflowId.toString) onComplete { case Success(dockerHashEntries) => val dockerMappings = dockerHashEntries.map(entry => entry.dockerTag -> entry).toMap @@ -255,13 +320,11 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, case Failure(ex) => fail(new RuntimeException("Failed to load docker tag -> hash mappings from DB", ex)) } - } - override protected def onTimeout(message: Any, to: ActorRef): Unit = { + override protected def onTimeout(message: Any, to: ActorRef): Unit = message match { case r: DockerInfoRequest => self ! 
DockerHashActorTimeout(r) } - } } object WorkflowDockerLookupActor { @@ -272,41 +335,52 @@ object WorkflowDockerLookupActor { case object Running extends WorkflowDockerLookupActorState case object Terminal extends WorkflowDockerLookupActorState private val FailedException = - new Exception(s"The WorkflowDockerLookupActor has failed. Subsequent docker tags for this workflow will not be resolved.") + new Exception( + s"The WorkflowDockerLookupActor has failed. Subsequent docker tags for this workflow will not be resolved." + ) /* Internal ADTs */ final case class DockerRequestContext(dockerHashRequest: DockerInfoRequest, replyTo: ActorRef) sealed trait DockerHashStoreResponse final case class DockerHashStoreSuccess(successResponse: DockerInfoSuccessResponse) extends DockerHashStoreResponse - final case class DockerHashStoreFailure(dockerHashRequest: DockerInfoRequest, reason: Throwable) extends DockerHashStoreResponse + final case class DockerHashStoreFailure(dockerHashRequest: DockerInfoRequest, reason: Throwable) + extends DockerHashStoreResponse final case class DockerHashStoreLoadingSuccess(dockerMappings: Map[String, DockerHashStoreEntry]) final case class DockerHashActorTimeout(request: DockerInfoRequest) /* Messages */ sealed trait WorkflowDockerLookupActorMessage - private final case class TransitionToFailed(cause: Throwable) extends WorkflowDockerLookupActorMessage + final private case class TransitionToFailed(cause: Throwable) extends WorkflowDockerLookupActorMessage /* Responses */ sealed trait WorkflowDockerLookupResponse - final case class WorkflowDockerLookupFailure(reason: Throwable, request: DockerInfoRequest, additionalLoggingMessage: String = "") extends WorkflowDockerLookupResponse - final case class WorkflowDockerTerminalFailure(reason: Throwable, request: DockerInfoRequest) extends WorkflowDockerLookupResponse + final case class WorkflowDockerLookupFailure(reason: Throwable, + request: DockerInfoRequest, + additionalLoggingMessage: String = "" + ) extends WorkflowDockerLookupResponse + final case class WorkflowDockerTerminalFailure(reason: Throwable, request: DockerInfoRequest) + extends WorkflowDockerLookupResponse case class RequestAndReplyTo(request: DockerInfoRequest, replyTo: ActorRef) def props(workflowId: WorkflowId, dockerHashingActor: ActorRef, isRestart: Boolean, - databaseInterface: EngineSqlDatabase = EngineServicesStore.engineDatabaseInterface): Props = { - Props(new WorkflowDockerLookupActor(workflowId, dockerHashingActor, isRestart, databaseInterface)).withDispatcher(EngineDispatcher) - } + databaseInterface: EngineSqlDatabase = EngineServicesStore.engineDatabaseInterface + ): Props = + Props(new WorkflowDockerLookupActor(workflowId, dockerHashingActor, isRestart, databaseInterface)) + .withDispatcher(EngineDispatcher) object WorkflowDockerLookupActorData { def empty = WorkflowDockerLookupActorData(hashRequests = Map.empty, mappings = Map.empty, failureCause = None) } - final case class WorkflowDockerLookupActorData(hashRequests: Map[DockerImageIdentifier, NonEmptyList[RequestAndReplyTo]], - mappings: Map[DockerImageIdentifier, DockerInformation], - failureCause: Option[Throwable]) { + final case class WorkflowDockerLookupActorData( + hashRequests: Map[DockerImageIdentifier, NonEmptyList[RequestAndReplyTo]], + mappings: Map[DockerImageIdentifier, DockerInformation], + failureCause: Option[Throwable] + ) { + /** * Add the specified request and replyTo to this state data. 
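     * (Editorial note, inferred from the LoadingCache handling above rather than stated in this
     * patch: a request for an image id that is already pending is expected to join that id's
     * NonEmptyList of requesters, so a single lookup can fan its result out to every caller.)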
* diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala index c3b1b998a94..abad4961211 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala @@ -37,6 +37,7 @@ object WorkflowManagerActor { class WorkflowNotFoundException(s: String) extends Exception(s) sealed trait WorkflowManagerActorMessage + /** * Commands */ @@ -48,7 +49,8 @@ object WorkflowManagerActor { final case class SubscribeToWorkflowCommand(id: WorkflowId) extends WorkflowManagerActorCommand case object EngineStatsCommand extends WorkflowManagerActorCommand case class AbortWorkflowsCommand(ids: Set[WorkflowId]) extends WorkflowManagerActorCommand - final case class WorkflowActorWorkComplete(id: WorkflowId, actor: ActorRef, finalState: WorkflowState) extends WorkflowManagerActorCommand + final case class WorkflowActorWorkComplete(id: WorkflowId, actor: ActorRef, finalState: WorkflowState) + extends WorkflowManagerActorCommand def props(config: Config, callCachingEnabled: Boolean, @@ -67,7 +69,8 @@ object WorkflowManagerActor { jobExecutionTokenDispenserActor: ActorRef, backendSingletonCollection: BackendSingletonCollection, serverMode: Boolean, - workflowHeartbeatConfig: WorkflowHeartbeatConfig): Props = { + workflowHeartbeatConfig: WorkflowHeartbeatConfig + ): Props = { val params = WorkflowManagerActorParams( config = config, callCachingEnabled = callCachingEnabled, @@ -86,7 +89,8 @@ object WorkflowManagerActor { jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, backendSingletonCollection = backendSingletonCollection, serverMode = serverMode, - workflowHeartbeatConfig = workflowHeartbeatConfig) + workflowHeartbeatConfig = workflowHeartbeatConfig + ) Props(new WorkflowManagerActor(params)).withDispatcher(EngineDispatcher) } @@ -111,11 +115,10 @@ object WorkflowManagerActor { } def without(id: WorkflowId): WorkflowManagerData = this.copy(workflows = workflows - id) - def without(actor: ActorRef): WorkflowManagerData = { + def without(actor: ActorRef): WorkflowManagerData = // If the ID was found in the lookup return a modified copy of the state data, otherwise just return // the same state data. 
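      // Editorial sketch of the expression below (equivalent desugaring, not part of the patch):
      //   idFromActor(actor) match {
      //     case Some(id) => without(id) // known actor: drop its workflow entry
      //     case None     => this        // unknown actor: state data unchanged
      //   }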
idFromActor(actor) map without getOrElse this - } } } @@ -136,25 +139,30 @@ case class WorkflowManagerActorParams(config: Config, jobExecutionTokenDispenserActor: ActorRef, backendSingletonCollection: BackendSingletonCollection, serverMode: Boolean, - workflowHeartbeatConfig: WorkflowHeartbeatConfig) + workflowHeartbeatConfig: WorkflowHeartbeatConfig +) class WorkflowManagerActor(params: WorkflowManagerActorParams) - extends LoggingFSM[WorkflowManagerState, WorkflowManagerData] with WorkflowMetadataHelper with Timers { + extends LoggingFSM[WorkflowManagerState, WorkflowManagerData] + with WorkflowMetadataHelper + with Timers { private val config = params.config private val callCachingEnabled: Boolean = params.callCachingEnabled private val invalidateBadCacheResults = params.invalidateBadCacheResults override val serviceRegistryActor = params.serviceRegistryActor - private val maxWorkflowsRunning = config.getConfig("system").as[Option[Int]]("max-concurrent-workflows").getOrElse(DefaultMaxWorkflowsToRun) - private val maxWorkflowsToLaunch = config.getConfig("system").as[Option[Int]]("max-workflow-launch-count").getOrElse(DefaultMaxWorkflowsToLaunch) - private val newWorkflowPollRate = config.getConfig("system").as[Option[Int]]("new-workflow-poll-rate").getOrElse(DefaultNewWorkflowPollRate).seconds + private val maxWorkflowsRunning = + config.getConfig("system").as[Option[Int]]("max-concurrent-workflows").getOrElse(DefaultMaxWorkflowsToRun) + private val maxWorkflowsToLaunch = + config.getConfig("system").as[Option[Int]]("max-workflow-launch-count").getOrElse(DefaultMaxWorkflowsToLaunch) + private val newWorkflowPollRate = + config.getConfig("system").as[Option[Int]]("new-workflow-poll-rate").getOrElse(DefaultNewWorkflowPollRate).seconds private val fileHashCacheEnabled = config.as[Option[Boolean]]("system.file-hash-cache").getOrElse(false) private val logger = Logging(context.system, this) private val tag = self.path.name - override def preStart(): Unit = { // Starts the workflow polling cycle timers.startSingleTimer(RetrieveNewWorkflowsKey, RetrieveNewWorkflows, Duration.Zero) @@ -182,7 +190,8 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) val wfCount = stateData.workflows.size val swfCount = stateData.subWorkflows.size val maxNewWorkflows = maxWorkflowsToLaunch min (maxWorkflowsRunning - wfCount - swfCount) - val fetchCountLog = s"Fetching $maxNewWorkflows new workflows ($wfCount workflows and $swfCount subworkflows in flight)" + val fetchCountLog = + s"Fetching $maxNewWorkflows new workflows ($wfCount workflows and $swfCount subworkflows in flight)" if (groups.nonEmpty) log.info(s"${fetchCountLog}, excluding groups: ${groups.mkString(", ")}") else if (maxNewWorkflows < 1) @@ -199,13 +208,13 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) log.info("Retrieved {} workflows from the WorkflowStoreActor", newSubmissions.toList.size) stay() using stateData.withAddition(newSubmissions) case Event(SubscribeToWorkflowCommand(id), data) => - data.workflows.get(id) foreach {_ ! SubscribeTransitionCallBack(sender())} + data.workflows.get(id) foreach { _ ! SubscribeTransitionCallBack(sender()) } stay() case Event(AbortAllWorkflowsCommand, data) if data.workflows.isEmpty => goto(Done) case Event(AbortAllWorkflowsCommand, data) => log.info(s"$tag: Aborting all workflows") - data.workflows.values.foreach { _ ! WorkflowActor.AbortWorkflowCommand } + data.workflows.values.foreach(_ ! 
WorkflowActor.AbortWorkflowCommand) goto(Aborting) /* Responses from services @@ -214,7 +223,9 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) log.info(s"$tag: Workflow $workflowId failed (during $inState): ${expandFailureReasons(reasons)}") stay() case Event(WorkflowActorWorkComplete(id: WorkflowId, workflowActor: ActorRef, finalState: WorkflowState), data) => - log.info(s"$tag: Workflow actor for $id completed with status '$finalState'. The workflow will be removed from the workflow store.") + log.info( + s"$tag: Workflow actor for $id completed with status '$finalState'. The workflow will be removed from the workflow store." + ) // This silently fails if idFromActor is None, but data.without call right below will as well data.idFromActor(workflowActor) foreach { workflowId => params.jobStoreActor ! RegisterWorkflowCompleted(workflowId) @@ -223,16 +234,18 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) } val scheduleNextNewWorkflowPollStateFunction: StateFunction = { - case event @ Event(WorkflowStoreEngineActor.NoNewWorkflowsToStart | _: WorkflowStoreEngineActor.NewWorkflowsToStart, _) => + case event @ Event(WorkflowStoreEngineActor.NoNewWorkflowsToStart | _: WorkflowStoreEngineActor.NewWorkflowsToStart, + _ + ) => scheduleNextNewWorkflowPoll() runningAndNotStartingNewWorkflowsStateFunction(event) } - when (Running) (scheduleNextNewWorkflowPollStateFunction.orElse(runningAndNotStartingNewWorkflowsStateFunction)) + when(Running)(scheduleNextNewWorkflowPollStateFunction.orElse(runningAndNotStartingNewWorkflowsStateFunction)) - when (RunningAndNotStartingNewWorkflows) (runningAndNotStartingNewWorkflowsStateFunction) + when(RunningAndNotStartingNewWorkflows)(runningAndNotStartingNewWorkflowsStateFunction) - when (Aborting) { + when(Aborting) { case Event(Transition(workflowActor, _, _: WorkflowActorTerminalState), data) => // Remove this terminal actor from the workflowStore and log a progress message. 
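      // Editorial note: data.without(workflowActor) resolves the ActorRef back to its WorkflowId via
      // idFromActor before removing it, so an unrecognized actor leaves the state data unchanged
      // rather than throwing.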
val updatedData = data.without(workflowActor) @@ -249,7 +262,7 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) case Event(_, _) => stay() } - when (Done) { FSM.NullFunction } + when(Done)(FSM.NullFunction) whenUnhandled { case Event(AbortWorkflowsCommand(ids), stateData) => @@ -280,7 +293,9 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) stay() case Event(EngineStatsCommand, data) => val sndr = sender() - context.actorOf(EngineStatsActor.props(data.workflows.values.toList, sndr), s"EngineStatsActor-${sndr.hashCode()}") + context.actorOf(EngineStatsActor.props(data.workflows.values.toList, sndr), + s"EngineStatsActor-${sndr.hashCode()}" + ) stay() // Anything else certainly IS interesting: case Event(unhandled, _) => @@ -311,12 +326,13 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) logger.info(s"$tag: Starting workflow UUID($workflowId)") } - val fileHashCacheActorProps: Option[Props] = fileHashCacheEnabled.option(RootWorkflowFileHashCacheActor.props(params.ioActor, workflowId)) + val fileHashCacheActorProps: Option[Props] = + fileHashCacheEnabled.option(RootWorkflowFileHashCacheActor.props(params.ioActor, workflowId)) val wfProps = WorkflowActor.props( workflowToStart = workflow, conf = config, - callCachingEnabled = callCachingEnabled , + callCachingEnabled = callCachingEnabled, invalidateBadCacheResults = invalidateBadCacheResults, ioActor = params.ioActor, serviceRegistryActor = params.serviceRegistryActor, @@ -335,7 +351,8 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) workflowHeartbeatConfig = params.workflowHeartbeatConfig, totalJobsByRootWf = new AtomicInteger(), fileHashCacheActorProps = fileHashCacheActorProps, - blacklistCache = callCachingBlacklistManager.blacklistCacheFor(workflow)) + blacklistCache = callCachingBlacklistManager.blacklistCacheFor(workflow) + ) val wfActor = context.actorOf(wfProps, name = s"WorkflowActor-$workflowId") wfActor ! 
SubscribeTransitionCallBack(self) @@ -344,9 +361,8 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) WorkflowIdToActorRef(workflowId, wfActor) } - private def scheduleNextNewWorkflowPoll() = { + private def scheduleNextNewWorkflowPoll() = timers.startSingleTimer(RetrieveNewWorkflowsKey, RetrieveNewWorkflows, newWorkflowPollRate) - } private def expandFailureReasons(reasons: Seq[Throwable]): String = { @@ -356,9 +372,9 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) case reason: ThrowableAggregation => expandFailureReasons(reason.throwables.toSeq) case reason: KnownJobFailureException => val stderrMessage = reason.stderrPath map { path => - val content = Try(path.annotatedContentAsStringWithLimit(3000)).recover({ - case e => s"Could not retrieve content: ${e.getMessage}" - }).get + val content = Try(path.annotatedContentAsStringWithLimit(3000)).recover { case e => + s"Could not retrieve content: ${e.getMessage}" + }.get s"\nCheck the content of stderr for potential additional information: ${path.pathAsString}.\n $content" } getOrElse "" reason.getMessage + stderrMessage diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowMetadataHelper.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowMetadataHelper.scala index e26dfaf4eea..784604be614 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowMetadataHelper.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowMetadataHelper.scala @@ -10,7 +10,7 @@ import cromwell.services.metadata.MetadataService._ trait WorkflowMetadataHelper { def serviceRegistryActor: ActorRef - + def pushWorkflowStart(workflowId: WorkflowId) = { val startEvent = MetadataEvent( MetadataKey(workflowId, None, WorkflowMetadataKeys.StartTime), @@ -18,7 +18,7 @@ trait WorkflowMetadataHelper { ) serviceRegistryActor ! PutMetadataAction(startEvent) } - + def pushWorkflowEnd(workflowId: WorkflowId) = { val metadataEventMsg = MetadataEvent( MetadataKey(workflowId, None, WorkflowMetadataKeys.EndTime), @@ -26,19 +26,25 @@ trait WorkflowMetadataHelper { ) serviceRegistryActor ! PutMetadataAction(metadataEventMsg) } - + def pushWorkflowFailures(workflowId: WorkflowId, failures: List[Throwable]) = { - val failureEvents = failures flatMap { r => throwableToMetadataEvents(MetadataKey(workflowId, None, s"${WorkflowMetadataKeys.Failures}"), r) } + val failureEvents = failures flatMap { r => + throwableToMetadataEvents(MetadataKey(workflowId, None, s"${WorkflowMetadataKeys.Failures}"), r) + } serviceRegistryActor ! PutMetadataAction(failureEvents) } - - def pushCurrentStateToMetadataService(workflowId: WorkflowId, workflowState: WorkflowState, confirmTo: Option[ActorRef] = None): Unit = { - val metadataEventMsg = MetadataEvent(MetadataKey(workflowId, None, WorkflowMetadataKeys.Status), MetadataValue(workflowState)) + + def pushCurrentStateToMetadataService(workflowId: WorkflowId, + workflowState: WorkflowState, + confirmTo: Option[ActorRef] = None + ): Unit = { + val metadataEventMsg = + MetadataEvent(MetadataKey(workflowId, None, WorkflowMetadataKeys.Status), MetadataValue(workflowState)) confirmTo match { case None => serviceRegistryActor ! PutMetadataAction(metadataEventMsg) case Some(actorRef) => serviceRegistryActor ! 
PutMetadataActionAndRespond(List(metadataEventMsg), actorRef) } } - + } diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowProcessingEventPublishing.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowProcessingEventPublishing.scala index 67b5626f018..83bac473d26 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowProcessingEventPublishing.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowProcessingEventPublishing.scala @@ -17,7 +17,11 @@ import scala.util.Random object WorkflowProcessingEventPublishing { private lazy val cromwellVersion = VersionUtil.getVersion("cromwell") - def publish(workflowId: WorkflowId, cromwellId: String, descriptionValue: DescriptionEventValue.Value, serviceRegistry: ActorRef): Unit = { + def publish(workflowId: WorkflowId, + cromwellId: String, + descriptionValue: DescriptionEventValue.Value, + serviceRegistry: ActorRef + ): Unit = { def randomNumberString: String = Random.nextInt(Int.MaxValue).toString def metadataKey(workflowId: WorkflowId, randomNumberString: String, key: String) = @@ -41,18 +45,16 @@ object WorkflowProcessingEventPublishing { def publishLabelsToMetadata(workflowId: WorkflowId, labels: Map[String, String], - serviceRegistry: ActorRef): IOChecked[Unit] = { + serviceRegistry: ActorRef + ): IOChecked[Unit] = { val defaultLabel = "cromwell-workflow-id" -> s"cromwell-$workflowId" Monad[IOChecked].pure(labelsToMetadata(workflowId, labels + defaultLabel, serviceRegistry)) } - private def labelsToMetadata(workflowId: WorkflowId, - labels: Map[String, String], - serviceRegistry: ActorRef): Unit = { + private def labelsToMetadata(workflowId: WorkflowId, labels: Map[String, String], serviceRegistry: ActorRef): Unit = labels foreach { case (labelKey, labelValue) => val metadataKey = MetadataKey(workflowId, None, s"${WorkflowMetadataKeys.Labels}:$labelKey") val metadataValue = MetadataValue(labelValue) serviceRegistry ! 
PutMetadataAction(MetadataEvent(metadataKey, metadataValue)) } - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/TimedFSM.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/TimedFSM.scala index 8b29fe561c5..9367ba6e3cc 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/TimedFSM.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/TimedFSM.scala @@ -11,10 +11,9 @@ trait TimedFSM[S] { this: FSM[S, _] => def currentStateDuration: FiniteDuration = (System.currentTimeMillis() - lastTransitionTime).milliseconds - onTransition { - case from -> to => - val now = System.currentTimeMillis() - onTimedTransition(from, to, (now - lastTransitionTime).milliseconds) - lastTransitionTime = now + onTransition { case from -> to => + val now = System.currentTimeMillis() + onTimedTransition(from, to, (now - lastTransitionTime).milliseconds) + lastTransitionTime = now } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala index c5657e9a60a..0e76c4fb900 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala @@ -27,17 +27,18 @@ object WorkflowLifecycleActor { case class WorkflowLifecycleActorData(actors: Set[ActorRef], successes: Seq[BackendActorAndInitializationData], failures: Map[ActorRef, Throwable], - aborted: Seq[ActorRef]) { + aborted: Seq[ActorRef] + ) { def withActors(actors: Set[ActorRef]) = this.copy( actors = this.actors ++ actors ) def withSuccess(successfulActor: ActorRef, data: Option[BackendInitializationData] = None) = this.copy( actors = this.actors - successfulActor, - successes = successes :+ BackendActorAndInitializationData(successfulActor, data)) - def withFailure(failedActor: ActorRef, reason: Throwable) = this.copy( - actors = this.actors - failedActor, - failures = failures + (failedActor -> reason)) + successes = successes :+ BackendActorAndInitializationData(successfulActor, data) + ) + def withFailure(failedActor: ActorRef, reason: Throwable) = + this.copy(actors = this.actors - failedActor, failures = failures + (failedActor -> reason)) def withAborted(abortedActor: ActorRef) = this.copy( actors = this.actors - abortedActor, aborted = aborted :+ abortedActor @@ -51,7 +52,7 @@ trait AbortableWorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends def abortedResponse: EngineLifecycleActorAbortedResponse - override protected def checkForDoneAndTransition(newData: WorkflowLifecycleActorData): State = { + override protected def checkForDoneAndTransition(newData: WorkflowLifecycleActorData): State = if (checkForDone(newData)) { if (stateName == abortingState) { context.parent ! 
abortedResponse @@ -60,10 +61,11 @@ trait AbortableWorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends } else { stay() using newData } - } } -trait WorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends LoggingFSM[S, WorkflowLifecycleActorData] with WorkflowLogging { +trait WorkflowLifecycleActor[S <: WorkflowLifecycleActorState] + extends LoggingFSM[S, WorkflowLifecycleActorData] + with WorkflowLogging { val successState: S val failureState: S @@ -78,10 +80,9 @@ trait WorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends LoggingFS case t => super.supervisorStrategy.decider.applyOrElse(t, (_: Any) => Escalate) } - whenUnhandled { - case unhandledMessage => - workflowLogger.warn(s"received an unhandled message: $unhandledMessage") - stay() + whenUnhandled { case unhandledMessage => + workflowLogger.warn(s"received an unhandled message: $unhandledMessage") + stay() } onTransition { @@ -92,7 +93,7 @@ trait WorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends LoggingFS workflowLogger.debug(s"State is transitioning from $fromState to $toState.") } - protected def checkForDoneAndTransition(newData: WorkflowLifecycleActorData): State = { + protected def checkForDoneAndTransition(newData: WorkflowLifecycleActorData): State = if (checkForDone(newData)) { if (newData.failures.isEmpty) { context.parent ! successResponse(newData) @@ -104,7 +105,6 @@ trait WorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends LoggingFS } else { stay() using newData } - } - protected final def checkForDone(stateData: WorkflowLifecycleActorData) = stateData.actors.isEmpty + final protected def checkForDone(stateData: WorkflowLifecycleActorData) = stateData.actors.isEmpty } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala index 56d6012125d..3b32aa1feaf 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala @@ -34,16 +34,15 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, pathBuilders: List[PathBuilder], serviceRegistryActor: ActorRef, ioActorRef: ActorRef, - gcsCommandBuilder: IoCommandBuilder, - ) - extends LoggingFSM[DeleteWorkflowFilesActorState, DeleteWorkflowFilesActorStateData] with IoClientHelper { + gcsCommandBuilder: IoCommandBuilder +) extends LoggingFSM[DeleteWorkflowFilesActorState, DeleteWorkflowFilesActorStateData] + with IoClientHelper { implicit val ec: ExecutionContext = context.dispatcher val asyncIO = new AsyncIo(ioActorRef, gcsCommandBuilder) val callCache = new CallCache(EngineServicesStore.engineDatabaseInterface) - startWith(Pending, NoData) when(Pending) { @@ -55,8 +54,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, if (intermediateOutputs.nonEmpty) { self ! DeleteFiles goto(DeleteIntermediateFiles) using DeletingIntermediateFilesData(intermediateOutputs) - } - else { + } else { log.info(s"Root workflow ${rootWorkflowId.id} does not have any intermediate output files to delete.") respondAndStop(Nil, Nil, Nil) } @@ -112,7 +110,8 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, In both these cases, we consider the deletion process a success, but warn the users of such files not found. 
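// A local-filesystem sketch of the policy described here (assumed names; the real actor
// deletes through the IoActor, not java.nio): a file that is already gone is tracked as
// a warning in `notFound`, while any other failure is a genuine deletion error.
import java.nio.file.{Files, NoSuchFileException, Path, Paths}
import scala.util.{Failure, Success, Try}

object TolerantDeleteSketch extends App {
  final case class Outcome(deleted: List[Path] = Nil, notFound: List[Path] = Nil, errors: List[Throwable] = Nil)

  def deleteAll(paths: List[Path]): Outcome =
    paths.foldLeft(Outcome()) { (acc, p) =>
      Try(Files.delete(p)) match {
        case Success(_) => acc.copy(deleted = p :: acc.deleted)
        case Failure(_: NoSuchFileException) => acc.copy(notFound = p :: acc.notFound) // warn, don't fail
        case Failure(e) => acc.copy(errors = e :: acc.errors)
      }
    }

  // A path that never existed lands in notFound, and the run still counts as a success:
  println(deleteAll(List(Paths.get("/tmp/already-gone"))))
}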
*/ val newDataWithErrorUpdates = error match { - case EnhancedCromwellIoException(_, _: FileNotFoundException) => newData.copy(filesNotFound = newData.filesNotFound :+ command.file) + case EnhancedCromwellIoException(_, _: FileNotFoundException) => + newData.copy(filesNotFound = newData.filesNotFound :+ command.file) case _ => newData.copy(deleteErrors = newData.deleteErrors :+ error) } commandState match { @@ -120,7 +119,9 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, case AllCommandsDone => // once deletion is complete, invalidate call cache entries self ! InvalidateCallCache - goto(InvalidatingCallCache) using InvalidateCallCacheData(newDataWithErrorUpdates.deleteErrors, newDataWithErrorUpdates.filesNotFound) + goto(InvalidatingCallCache) using InvalidateCallCacheData(newDataWithErrorUpdates.deleteErrors, + newDataWithErrorUpdates.filesNotFound + ) } } @@ -159,14 +160,17 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, val (newData: WaitingForInvalidateCCResponsesData, invalidateState) = data.commandComplete(cacheId.id) invalidateState match { case StillWaiting => stay() using newData - case AllCommandsDone => respondAndStop(newData.deleteErrors, newData.filesNotFound, newData.callCacheInvalidationErrors) + case AllCommandsDone => + respondAndStop(newData.deleteErrors, newData.filesNotFound, newData.callCacheInvalidationErrors) } case Event(CallCacheInvalidatedFailure(cacheId, error), data: WaitingForInvalidateCCResponsesData) => val (newData: WaitingForInvalidateCCResponsesData, invalidateState) = data.commandComplete(cacheId.id) - val updatedDataWithError = newData.copy(callCacheInvalidationErrors = newData.callCacheInvalidationErrors :+ error) + val updatedDataWithError = + newData.copy(callCacheInvalidationErrors = newData.callCacheInvalidationErrors :+ error) invalidateState match { case StillWaiting => stay() using updatedDataWithError - case AllCommandsDone => respondAndStop(newData.deleteErrors, newData.filesNotFound, updatedDataWithError.callCacheInvalidationErrors) + case AllCommandsDone => + respondAndStop(newData.deleteErrors, newData.filesNotFound, updatedDataWithError.callCacheInvalidationErrors) } } @@ -176,28 +180,37 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, stay() case Event(ShutdownCommand, _) => stopSelf() case other => - log.error(s"Programmer Error: Unexpected message to ${getClass.getSimpleName} ${self.path.name} in state $stateName with $stateData: ${other.toPrettyElidedString(1000)}") + log.error( + s"Programmer Error: Unexpected message to ${getClass.getSimpleName} ${self.path.name} in state $stateName with $stateData: ${other + .toPrettyElidedString(1000)}" + ) stay() } - private def stopSelf() = { context stop self stay() } - - private def respondAndStop(errors: List[Throwable], filesNotFound: List[Path], callCacheInvalidationErrors: List[Throwable]) = { + private def respondAndStop(errors: List[Throwable], + filesNotFound: List[Path], + callCacheInvalidationErrors: List[Throwable] + ) = { val (metadataEvent, response) = - if (errors.isEmpty) (metadataEventForDeletionStatus(Succeeded), DeleteWorkflowFilesSucceededResponse(filesNotFound, callCacheInvalidationErrors)) - else (metadataEventForDeletionStatus(Failed), DeleteWorkflowFilesFailedResponse(errors, filesNotFound, callCacheInvalidationErrors)) + if (errors.isEmpty) + (metadataEventForDeletionStatus(Succeeded), + DeleteWorkflowFilesSucceededResponse(filesNotFound, callCacheInvalidationErrors) + ) + else + (metadataEventForDeletionStatus(Failed), 
+ DeleteWorkflowFilesFailedResponse(errors, filesNotFound, callCacheInvalidationErrors) + ) serviceRegistryActor ! PutMetadataAction(metadataEvent) context.parent ! response stopSelf() } - private def metadataEventForDeletionStatus(status: FileDeletionStatus): MetadataEvent = { val key = MetadataKey(rootWorkflowId, None, WorkflowMetadataKeys.FileDeletionStatus) val value = MetadataValue(FileDeletionStatus.toDatabaseValue(status)) @@ -205,27 +218,28 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, MetadataEvent(key, value) } - private def fetchCallCacheEntries(callCache: CallCache): Future[Set[Long]] = { - val callCacheEntryIdsFuture = rootAndSubworkflowIds.map(x => callCache.callCacheEntryIdsForWorkflowId(x.toString)).map { f => - f.map { Success(_) }.recover { case t => Failure(t) }} - - Future.sequence(callCacheEntryIdsFuture).map { _.flatMap { - case Success(callCacheEntryIds) => - Option(callCacheEntryIds) - case Failure(e) => - log.error(s"Failed to fetch call cache entry ids for workflow. Error: ${ExceptionUtils.getMessage(e)}") - None - }.flatten} + val callCacheEntryIdsFuture = + rootAndSubworkflowIds.map(x => callCache.callCacheEntryIdsForWorkflowId(x.toString)).map { f => + f.map(Success(_)).recover { case t => Failure(t) } + } + + Future.sequence(callCacheEntryIdsFuture).map { + _.flatMap { + case Success(callCacheEntryIds) => + Option(callCacheEntryIds) + case Failure(e) => + log.error(s"Failed to fetch call cache entry ids for workflow. Error: ${ExceptionUtils.getMessage(e)}") + None + }.flatten + } } - private def toPath(womSingleFile: WomSingleFile): Option[Path] = { + private def toPath(womSingleFile: WomSingleFile): Option[Path] = Try(PathFactory.buildPath(womSingleFile.valueString, pathBuilders)).toOption - } - private def getWomSingleFiles(womValue: WomValue): Seq[WomSingleFile] = { - womValue.collectAsSeq({ case womSingleFile: WomSingleFile => womSingleFile }) - } + private def getWomSingleFiles(womValue: WomValue): Seq[WomSingleFile] = + womValue.collectAsSeq { case womSingleFile: WomSingleFile => womSingleFile } /** * Returns Paths for WomSingleFiles in allOutputs that are not in finalOutputs, verifying that the Paths are contained @@ -236,27 +250,33 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, val allOutputFiles = allOutputs.flatMap(getWomSingleFiles) val finalOutputFiles = finalOutputs.flatMap(getWomSingleFiles) val potentialIntermediaries = allOutputFiles.diff(finalOutputFiles).flatMap(toPath) - val checkedIntermediaries = potentialIntermediaries.filter(p => rootWorkflowRootPaths.exists(r => p.toAbsolutePath.startsWith(r.toAbsolutePath))) - for ( path <- potentialIntermediaries.diff(checkedIntermediaries) ) { - log.info(s"Did not delete $path because it is not contained within a workflow root directory for $rootWorkflowId.") - } + val checkedIntermediaries = potentialIntermediaries.filter(p => + rootWorkflowRootPaths.exists(r => p.toAbsolutePath.startsWith(r.toAbsolutePath)) + ) + for (path <- potentialIntermediaries.diff(checkedIntermediaries)) + log.info( + s"Did not delete $path because it is not contained within a workflow root directory for $rootWorkflowId." 
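// A sketch of the "keep what succeeded, log what failed" technique fetchCallCacheEntries
// uses above: wrapping each Future's result in a Try means Future.sequence can no longer
// fail fast on the first error. Toy values only; nothing here touches a call cache.
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.{Failure, Success, Try}

object LenientSequenceSketch extends App {
  def lenient[A](fs: List[Future[A]]): Future[(List[A], List[Throwable])] = {
    val wrapped: List[Future[Try[A]]] = fs.map(_.map(a => Success(a): Try[A]).recover { case t => Failure(t) })
    Future.sequence(wrapped).map { results =>
      (results.collect { case Success(a) => a }, results.collect { case Failure(t) => t })
    }
  }

  val mixed = List(Future.successful(1), Future.failed[Int](new Exception("boom")), Future.successful(3))
  println(Await.result(lenient(mixed), 5.seconds)) // prints roughly (List(1, 3), List(java.lang.Exception: boom))
}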
+ ) checkedIntermediaries } override def ioActor: ActorRef = ioActorRef - - override protected def onTimeout(message: Any, to: ActorRef): Unit = { + override protected def onTimeout(message: Any, to: ActorRef): Unit = message match { - case delete: IoDeleteCommand => log.error(s"The DeleteWorkflowFilesActor for root workflow $rootWorkflowId timed out " + - s"waiting for a response for deleting file ${delete.file}.") - case other => log.error(s"The DeleteWorkflowFilesActor for root workflow $rootWorkflowId timed out " + - s"waiting for a response for unknown operation: $other.") + case delete: IoDeleteCommand => + log.error( + s"The DeleteWorkflowFilesActor for root workflow $rootWorkflowId timed out " + + s"waiting for a response for deleting file ${delete.file}." + ) + case other => + log.error( + s"The DeleteWorkflowFilesActor for root workflow $rootWorkflowId timed out " + + s"waiting for a response for unknown operation: $other." + ) } - } } - object DeleteWorkflowFilesActor { //@formatter:off @@ -289,63 +309,68 @@ object DeleteWorkflowFilesActor { def setCommandsToWaitFor(updatedCommandsToWaitFor: Set[A]): WaitingForResponseFromActorData[A] - def commandComplete(command: A): (WaitingForResponseFromActorData[A], WaitingForResponseState) = { + def commandComplete(command: A): (WaitingForResponseFromActorData[A], WaitingForResponseState) = if (commandsToWaitFor.isEmpty) (this, AllCommandsDone) else { val updatedCommandsSet = commandsToWaitFor - command val expectedCommandSetSize = updatedCommandsSet.size val requiredCommandSetSize = commandsToWaitFor.size - 1 - require(expectedCommandSetSize == requiredCommandSetSize, assertionFailureMsg(expectedCommandSetSize, requiredCommandSetSize)) + require(expectedCommandSetSize == requiredCommandSetSize, + assertionFailureMsg(expectedCommandSetSize, requiredCommandSetSize) + ) if (updatedCommandsSet.isEmpty) (setCommandsToWaitFor(Set.empty), AllCommandsDone) else (setCommandsToWaitFor(updatedCommandsSet), StillWaiting) } - } } case class WaitingForIoResponsesData(commandsToWaitFor: Set[IoDeleteCommand], deleteErrors: List[Throwable] = List.empty, - filesNotFound: List[Path] = List.empty) - extends WaitingForResponseFromActorData[IoDeleteCommand](commandsToWaitFor) with DeleteWorkflowFilesActorStateData { + filesNotFound: List[Path] = List.empty + ) extends WaitingForResponseFromActorData[IoDeleteCommand](commandsToWaitFor) + with DeleteWorkflowFilesActorStateData { - override def assertionFailureMsg(expectedSize: Int, requiredSize: Int): String = { + override def assertionFailureMsg(expectedSize: Int, requiredSize: Int): String = s"Found updated command set size as $expectedSize instead of $requiredSize. The updated set of commands that " + s"DeleteWorkflowFilesActor has to wait for should be 1 less after removing a completed command." 
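// A stripped-down sketch of the countdown bookkeeping that commandComplete implements
// above: each completed command is removed from an immutable Set, and the caller learns
// whether anything is still outstanding. (The require() size check is omitted here.)
object CountdownSketch extends App {
  sealed trait Progress
  case object StillWaiting extends Progress
  case object AllDone extends Progress

  final case class Pending[A](outstanding: Set[A]) {
    def complete(a: A): (Pending[A], Progress) = {
      val rest = outstanding - a
      (Pending(rest), if (rest.isEmpty) AllDone else StillWaiting)
    }
  }

  val p0 = Pending(Set("delete-a", "delete-b"))
  val (p1, first) = p0.complete("delete-a")
  val (_, second) = p1.complete("delete-b")
  println(s"$first then $second") // StillWaiting then AllDone
}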
- } - override def setCommandsToWaitFor(updatedCommandsToWaitFor: Set[IoDeleteCommand]): WaitingForResponseFromActorData[IoDeleteCommand] = { + override def setCommandsToWaitFor( + updatedCommandsToWaitFor: Set[IoDeleteCommand] + ): WaitingForResponseFromActorData[IoDeleteCommand] = this.copy(commandsToWaitFor = updatedCommandsToWaitFor) - } } case class WaitingForInvalidateCCResponsesData(commandsToWaitFor: Set[Long], deleteErrors: List[Throwable], filesNotFound: List[Path], - callCacheInvalidationErrors: List[Throwable] = List.empty) - extends WaitingForResponseFromActorData[Long](commandsToWaitFor) with DeleteWorkflowFilesActorStateData { + callCacheInvalidationErrors: List[Throwable] = List.empty + ) extends WaitingForResponseFromActorData[Long](commandsToWaitFor) + with DeleteWorkflowFilesActorStateData { - override def assertionFailureMsg(expectedSize: Int, requiredSize: Int): String = { + override def assertionFailureMsg(expectedSize: Int, requiredSize: Int): String = s"Found updated call cache entries set size as $expectedSize instead of $requiredSize. The updated set of call cache entries" + s" that DeleteWorkflowFilesActor has to wait for should be 1 less after a call cache entry is invalidated." - } - override def setCommandsToWaitFor(updatedCommandsToWaitFor: Set[Long]): WaitingForResponseFromActorData[Long] = { + override def setCommandsToWaitFor(updatedCommandsToWaitFor: Set[Long]): WaitingForResponseFromActorData[Long] = this.copy(commandsToWaitFor = updatedCommandsToWaitFor) - } } // Responses sealed trait DeleteWorkflowFilesResponse - case class DeleteWorkflowFilesSucceededResponse(filesNotFound: List[Path], callCacheInvalidationErrors: List[Throwable]) extends DeleteWorkflowFilesResponse - case class DeleteWorkflowFilesFailedResponse(errors: List[Throwable], filesNotFound: List[Path], callCacheInvalidationErrors: List[Throwable]) extends DeleteWorkflowFilesResponse + case class DeleteWorkflowFilesSucceededResponse(filesNotFound: List[Path], + callCacheInvalidationErrors: List[Throwable] + ) extends DeleteWorkflowFilesResponse + case class DeleteWorkflowFilesFailedResponse(errors: List[Throwable], + filesNotFound: List[Path], + callCacheInvalidationErrors: List[Throwable] + ) extends DeleteWorkflowFilesResponse // internal state to keep track of deletion of files and call cache invalidation sealed trait WaitingForResponseState private[deletion] case object StillWaiting extends WaitingForResponseState private[deletion] case object AllCommandsDone extends WaitingForResponseState - def props(rootWorkflowId: RootWorkflowId, rootAndSubworkflowIds: Set[WorkflowId], rootWorkflowRootPaths: Set[Path], @@ -354,18 +379,19 @@ object DeleteWorkflowFilesActor { pathBuilders: List[PathBuilder], serviceRegistryActor: ActorRef, ioActor: ActorRef, - gcsCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder, - ): Props = { - Props(new DeleteWorkflowFilesActor( - rootWorkflowId = rootWorkflowId, - rootAndSubworkflowIds = rootAndSubworkflowIds, - rootWorkflowRootPaths = rootWorkflowRootPaths, - workflowFinalOutputs = workflowFinalOutputs, - workflowAllOutputs = workflowAllOutputs, - pathBuilders = pathBuilders, - serviceRegistryActor = serviceRegistryActor, - ioActorRef = ioActor, - gcsCommandBuilder = gcsCommandBuilder, - )) - } + gcsCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder + ): Props = + Props( + new DeleteWorkflowFilesActor( + rootWorkflowId = rootWorkflowId, + rootAndSubworkflowIds = rootAndSubworkflowIds, + rootWorkflowRootPaths = rootWorkflowRootPaths, + 
workflowFinalOutputs = workflowFinalOutputs, + workflowAllOutputs = workflowAllOutputs, + pathBuilders = pathBuilders, + serviceRegistryActor = serviceRegistryActor, + ioActorRef = ioActor, + gcsCommandBuilder = gcsCommandBuilder + ) + ) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala index 0a7e3760a67..7df49b106e3 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala @@ -21,15 +21,23 @@ trait CallMetadataHelper { def pushNewCallMetadata(callKey: CallKey, backendName: Option[String], serviceRegistryActor: ActorRef) = { val startEvents = List( Option(MetadataEvent(metadataKeyForCall(callKey, CallMetadataKeys.Start), MetadataValue(OffsetDateTime.now))), - Option(MetadataEvent(metadataKeyForCall(callKey, CallMetadataKeys.ExecutionStatus), MetadataValue(ExecutionStatus.QueuedInCromwell))), - backendName map { name => MetadataEvent(metadataKeyForCall(callKey, CallMetadataKeys.Backend), MetadataValue(name)) } + Option( + MetadataEvent(metadataKeyForCall(callKey, CallMetadataKeys.ExecutionStatus), + MetadataValue(ExecutionStatus.QueuedInCromwell) + ) + ), + backendName map { name => + MetadataEvent(metadataKeyForCall(callKey, CallMetadataKeys.Backend), MetadataValue(name)) + } ).flatten serviceRegistryActor ! PutMetadataAction(startEvents) } def pushStartingCallMetadata(callKey: CallKey) = { - val statusChange = MetadataEvent(metadataKeyForCall(callKey, CallMetadataKeys.ExecutionStatus), MetadataValue(ExecutionStatus.Starting)) + val statusChange = MetadataEvent(metadataKeyForCall(callKey, CallMetadataKeys.ExecutionStatus), + MetadataValue(ExecutionStatus.Starting) + ) serviceRegistryActor ! PutMetadataAction(statusChange) } @@ -42,13 +50,16 @@ trait CallMetadataHelper { case empty if empty.isEmpty => List(MetadataEvent.empty(metadataKeyForCall(key, s"${CallMetadataKeys.Inputs}"))) case inputs => - inputs flatMap { - case (inputName, inputValue) => - womValueToMetadataEvents(metadataKeyForCall(key, s"${CallMetadataKeys.Inputs}:${inputName.name}"), inputValue) + inputs flatMap { case (inputName, inputValue) => + womValueToMetadataEvents(metadataKeyForCall(key, s"${CallMetadataKeys.Inputs}:${inputName.name}"), + inputValue + ) } } - val runningEvent = List(MetadataEvent(metadataKeyForCall(key, CallMetadataKeys.ExecutionStatus), MetadataValue(ExecutionStatus.Running))) + val runningEvent = List( + MetadataEvent(metadataKeyForCall(key, CallMetadataKeys.ExecutionStatus), MetadataValue(ExecutionStatus.Running)) + ) serviceRegistryActor ! 
PutMetadataAction(runningEvent ++ inputEvents) alreadyPushedRunningCallMetadata += metadataKeyForUniqueness } @@ -59,7 +70,10 @@ trait CallMetadataHelper { List(MetadataEvent.empty(MetadataKey(workflowIdForCallMetadata, None, WorkflowMetadataKeys.Outputs))) } else { outputs flatMap { case (outputName, outputValue) => - womValueToMetadataEvents(MetadataKey(workflowIdForCallMetadata, None, s"${WorkflowMetadataKeys.Outputs}:$outputName"), outputValue) + womValueToMetadataEvents( + MetadataKey(workflowIdForCallMetadata, None, s"${WorkflowMetadataKeys.Outputs}:$outputName"), + outputValue + ) } } @@ -74,7 +88,10 @@ trait CallMetadataHelper { List(MetadataEvent.empty(metadataKeyForCall(jobKey, s"${CallMetadataKeys.Outputs}"))) case _ => outputs.outputs flatMap { case (outputPort, outputValue) => - womValueToMetadataEvents(metadataKeyForCall(jobKey, s"${CallMetadataKeys.Outputs}:${outputPort.internalName}"), outputValue) + womValueToMetadataEvents( + metadataKeyForCall(jobKey, s"${CallMetadataKeys.Outputs}:${outputPort.internalName}"), + outputValue + ) } } @@ -84,13 +101,15 @@ trait CallMetadataHelper { def pushFailedCallMetadata(jobKey: JobKey, returnCode: Option[Int], failure: Throwable, retryableFailure: Boolean) = { val failedState = if (retryableFailure) ExecutionStatus.RetryableFailure else ExecutionStatus.Failed val completionEvents = completedCallMetadataEvents(jobKey, failedState, returnCode) - val retryableFailureEvent = MetadataEvent(metadataKeyForCall(jobKey, CallMetadataKeys.RetryableFailure), MetadataValue(retryableFailure)) + val retryableFailureEvent = + MetadataEvent(metadataKeyForCall(jobKey, CallMetadataKeys.RetryableFailure), MetadataValue(retryableFailure)) val failureEvents = failure match { // If the job was already failed, don't republish the failure reasons, they're already there case _: JobAlreadyFailedInJobStore => List.empty case _ => - throwableToMetadataEvents(metadataKeyForCall(jobKey, s"${CallMetadataKeys.Failures}"), failure).+:(retryableFailureEvent) + throwableToMetadataEvents(metadataKeyForCall(jobKey, s"${CallMetadataKeys.Failures}"), failure) + .+:(retryableFailureEvent) } serviceRegistryActor ! PutMetadataAction(completionEvents ++ failureEvents) @@ -117,14 +136,13 @@ trait CallMetadataHelper { val now = OffsetDateTime.now.withOffsetSameInstant(offset) val lastEvent = ExecutionEvent("!!Bring Back the Monarchy!!", now) val tailedEventList = sortedEvents :+ lastEvent - val events = tailedEventList.sliding(2) flatMap { - case Seq(eventCurrent, eventNext) => - val eventKey = s"${CallMetadataKeys.ExecutionEvents}[$randomNumberString]" - List( - metadataEvent(s"$eventKey:description", eventCurrent.name), - metadataEvent(s"$eventKey:startTime", eventCurrent.offsetDateTime), - metadataEvent(s"$eventKey:endTime", eventNext.offsetDateTime) - ) ++ (eventCurrent.grouping map { g => metadataEvent(s"$eventKey:grouping", g) }) + val events = tailedEventList.sliding(2) flatMap { case Seq(eventCurrent, eventNext) => + val eventKey = s"${CallMetadataKeys.ExecutionEvents}[$randomNumberString]" + List( + metadataEvent(s"$eventKey:description", eventCurrent.name), + metadataEvent(s"$eventKey:startTime", eventCurrent.offsetDateTime), + metadataEvent(s"$eventKey:endTime", eventNext.offsetDateTime) + ) ++ (eventCurrent.grouping map { g => metadataEvent(s"$eventKey:grouping", g) }) } serviceRegistryActor ! 
PutMetadataAction(events.toList) @@ -144,5 +162,9 @@ trait CallMetadataHelper { private def randomNumberString: String = Random.nextInt().toString.stripPrefix("-") - def metadataKeyForCall(jobKey: JobKey, myKey: String) = MetadataKey(workflowIdForCallMetadata, Option(MetadataJobKey(jobKey.node.fullyQualifiedName, jobKey.index, jobKey.attempt)), myKey) + def metadataKeyForCall(jobKey: JobKey, myKey: String) = MetadataKey( + workflowIdForCallMetadata, + Option(MetadataJobKey(jobKey.node.fullyQualifiedName, jobKey.index, jobKey.attempt)), + myKey + ) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActor.scala index 1cd48695ba2..861989de5f6 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActor.scala @@ -48,7 +48,11 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, rootConfig: Config, totalJobsByRootWf: AtomicInteger, fileHashCacheActor: Option[ActorRef], - blacklistCache: Option[BlacklistCache]) extends LoggingFSM[SubWorkflowExecutionActorState, SubWorkflowExecutionActorData] with JobLogging with WorkflowMetadataHelper with CallMetadataHelper { + blacklistCache: Option[BlacklistCache] +) extends LoggingFSM[SubWorkflowExecutionActorState, SubWorkflowExecutionActorData] + with JobLogging + with WorkflowMetadataHelper + with CallMetadataHelper { override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() { case _ => Escalate } @@ -66,14 +70,13 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, private var eventList: Seq[ExecutionEvent] = Seq(ExecutionEvent(stateName.toString)) - when(SubWorkflowPendingState) { - case Event(Execute, _) => - if (startState.restarted) { - subWorkflowStoreActor ! QuerySubWorkflow(parentWorkflow.id, key) - goto(SubWorkflowCheckingStoreState) - } else { - requestValueStore(createSubWorkflowId()) - } + when(SubWorkflowPendingState) { case Event(Execute, _) => + if (startState.restarted) { + subWorkflowStoreActor ! QuerySubWorkflow(parentWorkflow.id, key) + goto(SubWorkflowCheckingStoreState) + } else { + requestValueStore(createSubWorkflowId()) + } } when(SubWorkflowCheckingStoreState) { @@ -87,18 +90,23 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, } /* - * ! Hot Potato Warning ! - * We ask explicitly for the `ValueStore` so we can use it on the fly and more importantly not store it as a - * variable in this actor, which would prevent it from being garbage collected for the duration of the - * subworkflow and would lead to memory leaks. - */ + * ! Hot Potato Warning ! + * We ask explicitly for the `ValueStore` so we can use it on the fly and more importantly not store it as a + * variable in this actor, which would prevent it from being garbage collected for the duration of the + * subworkflow and would lead to memory leaks. + */ when(WaitingForValueStore) { case Event(valueStore: ValueStore, SubWorkflowExecutionActorLiveData(Some(subWorkflowId), _)) => prepareSubWorkflow(subWorkflowId, valueStore) case Event(_: ValueStore, _) => - context.parent ! SubWorkflowFailedResponse(key, Map.empty, new IllegalStateException( - "This is a programmer error, we're ready to prepare the job and should have" + - " a SubWorkflowId to use by now but somehow haven't. Failing the workflow.")) + context.parent ! 
SubWorkflowFailedResponse( + key, + Map.empty, + new IllegalStateException( + "This is a programmer error, we're ready to prepare the job and should have" + + " a SubWorkflowId to use by now but somehow haven't. Failing the workflow." + ) + ) context stop self stay() } @@ -108,16 +116,36 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, startSubWorkflow(subWorkflowEngineDescriptor, inputs, data) case Event(failure: CallPreparationFailed, data) => // No subworkflow ID yet, so no need to record the status. Fail here and let the parent handle the fallout: - recordTerminalState(SubWorkflowFailedState, SubWorkflowExecutionActorTerminalData(data.subWorkflowId, SubWorkflowFailedResponse(key, Map.empty, failure.throwable))) + recordTerminalState( + SubWorkflowFailedState, + SubWorkflowExecutionActorTerminalData(data.subWorkflowId, + SubWorkflowFailedResponse(key, Map.empty, failure.throwable) + ) + ) } when(SubWorkflowRunningState) { - case Event(WorkflowExecutionSucceededResponse(executedJobKeys, rootAndSubworklowIds, outputs, cumulativeOutputs), data) => - recordTerminalState(SubWorkflowSucceededState, SubWorkflowExecutionActorTerminalData(data.subWorkflowId, SubWorkflowSucceededResponse(key, executedJobKeys, rootAndSubworklowIds, outputs, cumulativeOutputs))) + case Event(WorkflowExecutionSucceededResponse(executedJobKeys, rootAndSubworklowIds, outputs, cumulativeOutputs), + data + ) => + recordTerminalState( + SubWorkflowSucceededState, + SubWorkflowExecutionActorTerminalData( + data.subWorkflowId, + SubWorkflowSucceededResponse(key, executedJobKeys, rootAndSubworklowIds, outputs, cumulativeOutputs) + ) + ) case Event(WorkflowExecutionFailedResponse(executedJobKeys, reason), data) => - recordTerminalState(SubWorkflowFailedState, SubWorkflowExecutionActorTerminalData(data.subWorkflowId, SubWorkflowFailedResponse(key, executedJobKeys, reason))) + recordTerminalState(SubWorkflowFailedState, + SubWorkflowExecutionActorTerminalData(data.subWorkflowId, + SubWorkflowFailedResponse(key, executedJobKeys, reason) + ) + ) case Event(WorkflowExecutionAbortedResponse(executedJobKeys), data) => - recordTerminalState(SubWorkflowAbortedState, SubWorkflowExecutionActorTerminalData(data.subWorkflowId, SubWorkflowAbortedResponse(key, executedJobKeys))) + recordTerminalState( + SubWorkflowAbortedState, + SubWorkflowExecutionActorTerminalData(data.subWorkflowId, SubWorkflowAbortedResponse(key, executedJobKeys)) + ) case Event(EngineLifecycleActorAbortCommand, SubWorkflowExecutionActorLiveData(_, Some(actorRef))) => actorRef ! EngineLifecycleActorAbortCommand stay() @@ -134,14 +162,22 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, // If the final state can't be written, it's fairly likely that other metadata has also been lost, so // the best answer may unfortunately be to fail the workflow rather than retry. 
Assuming the call cache is working // correctly, a retry of the workflow should at least cache-hit instead of having to re-run the work: - recordTerminalState(SubWorkflowFailedState, terminalData.copy(terminalStateResponse = SubWorkflowFailedResponse(key, - terminalData.terminalStateResponse.jobExecutionMap, - new Exception("Sub workflow execution actor unable to write final state to metadata", reason) with NoStackTrace))) + recordTerminalState( + SubWorkflowFailedState, + terminalData.copy(terminalStateResponse = + SubWorkflowFailedResponse( + key, + terminalData.terminalStateResponse.jobExecutionMap, + new Exception("Sub workflow execution actor unable to write final state to metadata", reason) + with NoStackTrace + ) + ) + ) } - when(SubWorkflowSucceededState) { terminalMetadataWriteResponseHandler } - when(SubWorkflowFailedState) { terminalMetadataWriteResponseHandler } - when(SubWorkflowAbortedState) { terminalMetadataWriteResponseHandler } + when(SubWorkflowSucceededState)(terminalMetadataWriteResponseHandler) + when(SubWorkflowFailedState)(terminalMetadataWriteResponseHandler) + when(SubWorkflowAbortedState)(terminalMetadataWriteResponseHandler) whenUnhandled { case Event(SubWorkflowStoreRegisterSuccess(_), _) => @@ -151,10 +187,15 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, jobLogger.error(reason, s"SubWorkflowStore failure for command $command") stay() case Event(EngineLifecycleActorAbortCommand, data) => - recordTerminalState(SubWorkflowAbortedState, SubWorkflowExecutionActorTerminalData(data.subWorkflowId, SubWorkflowAbortedResponse(key, Map.empty))) + recordTerminalState( + SubWorkflowAbortedState, + SubWorkflowExecutionActorTerminalData(data.subWorkflowId, SubWorkflowAbortedResponse(key, Map.empty)) + ) } - def recordTerminalState(terminalState: SubWorkflowTerminalState, newStateData: SubWorkflowExecutionActorTerminalData): State = { + def recordTerminalState(terminalState: SubWorkflowTerminalState, + newStateData: SubWorkflowExecutionActorTerminalData + ): State = newStateData.subWorkflowId match { case Some(id) => pushWorkflowEnd(id) @@ -165,23 +206,27 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, jobLogger.error("Programmer Error: Sub workflow completed without ever having a Sub Workflow UUID.") // Same situation as if we fail to write the final state metadata. Bail out and hope the workflow is more // successful next time: - context.parent ! SubWorkflowFailedResponse(key, + context.parent ! 
SubWorkflowFailedResponse( + key, Map.empty, - new Exception("Programmer Error: Sub workflow completed without ever having a Sub Workflow UUID") with NoStackTrace) + new Exception("Programmer Error: Sub workflow completed without ever having a Sub Workflow UUID") + with NoStackTrace + ) context.stop(self) stay() } - } - onTransition { - case (_, toState) => - eventList :+= ExecutionEvent(toState.toString) - if (!toState.isInstanceOf[SubWorkflowTerminalState]) { - stateData.subWorkflowId foreach { id => pushCurrentStateToMetadataService(id, toState.workflowState) } - } + onTransition { case (_, toState) => + eventList :+= ExecutionEvent(toState.toString) + if (!toState.isInstanceOf[SubWorkflowTerminalState]) { + stateData.subWorkflowId foreach { id => pushCurrentStateToMetadataService(id, toState.workflowState) } + } } - private def startSubWorkflow(subWorkflowEngineDescriptor: EngineWorkflowDescriptor, inputs: WomEvaluatedCallInputs, data: SubWorkflowExecutionActorData) = { + private def startSubWorkflow(subWorkflowEngineDescriptor: EngineWorkflowDescriptor, + inputs: WomEvaluatedCallInputs, + data: SubWorkflowExecutionActorData + ) = { val subWorkflowActor = createSubWorkflowActor(subWorkflowEngineDescriptor) subWorkflowActor ! WorkflowExecutionActor.ExecuteWorkflowCommand @@ -204,14 +249,13 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, goto(WaitingForValueStore) using SubWorkflowExecutionActorLiveData(Option(workflowId), None) } - def createSubWorkflowPreparationActor(subWorkflowId: WorkflowId) = { + def createSubWorkflowPreparationActor(subWorkflowId: WorkflowId) = context.actorOf( SubWorkflowPreparationActor.props(parentWorkflow, expressionLanguageFunctions, key, subWorkflowId), s"$subWorkflowId-SubWorkflowPreparationActor-${key.tag}" ) - } - def createSubWorkflowActor(subWorkflowEngineDescriptor: EngineWorkflowDescriptor) = { + def createSubWorkflowActor(subWorkflowEngineDescriptor: EngineWorkflowDescriptor) = context.actorOf( WorkflowExecutionActor.props( subWorkflowEngineDescriptor, @@ -234,25 +278,37 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, ), s"${subWorkflowEngineDescriptor.id}-SubWorkflowActor-${key.tag}" ) - } - private def pushWorkflowRunningMetadata(subWorkflowDescriptor: BackendWorkflowDescriptor, workflowInputs: WomEvaluatedCallInputs) = { + private def pushWorkflowRunningMetadata(subWorkflowDescriptor: BackendWorkflowDescriptor, + workflowInputs: WomEvaluatedCallInputs + ) = { val subWorkflowId = subWorkflowDescriptor.id - val parentWorkflowMetadataKey = MetadataKey(parentWorkflow.id, Option(MetadataJobKey(key.node.fullyQualifiedName, key.index, key.attempt)), CallMetadataKeys.SubWorkflowId) + val parentWorkflowMetadataKey = MetadataKey( + parentWorkflow.id, + Option(MetadataJobKey(key.node.fullyQualifiedName, key.index, key.attempt)), + CallMetadataKeys.SubWorkflowId + ) val events = List( MetadataEvent(parentWorkflowMetadataKey, MetadataValue(subWorkflowId)), MetadataEvent(MetadataKey(subWorkflowId, None, WorkflowMetadataKeys.Name), MetadataValue(key.node.localName)), - MetadataEvent(MetadataKey(subWorkflowId, None, WorkflowMetadataKeys.ParentWorkflowId), MetadataValue(parentWorkflow.id)), - MetadataEvent(MetadataKey(subWorkflowId, None, WorkflowMetadataKeys.RootWorkflowId), MetadataValue(parentWorkflow.rootWorkflow.id)) + MetadataEvent(MetadataKey(subWorkflowId, None, WorkflowMetadataKeys.ParentWorkflowId), + MetadataValue(parentWorkflow.id) + ), + MetadataEvent(MetadataKey(subWorkflowId, None, WorkflowMetadataKeys.RootWorkflowId), + 
MetadataValue(parentWorkflow.rootWorkflow.id) + ) ) val inputEvents = workflowInputs match { case empty if empty.isEmpty => - List(MetadataEvent.empty(MetadataKey(subWorkflowId, None,WorkflowMetadataKeys.Inputs))) + List(MetadataEvent.empty(MetadataKey(subWorkflowId, None, WorkflowMetadataKeys.Inputs))) case inputs => inputs flatMap { case (inputName, womValue) => - womValueToMetadataEvents(MetadataKey(subWorkflowId, None, s"${WorkflowMetadataKeys.Inputs}:${inputName.name}"), womValue) + womValueToMetadataEvents( + MetadataKey(subWorkflowId, None, s"${WorkflowMetadataKeys.Inputs}:${inputName.name}"), + womValue + ) } } @@ -264,14 +320,17 @@ class SubWorkflowExecutionActor(key: SubWorkflowKey, private def buildWorkflowRootMetadataEvents(subWorkflowDescriptor: BackendWorkflowDescriptor) = { val subWorkflowId = subWorkflowDescriptor.id - factories flatMap { - case (backendName, factory) => - BackendConfiguration.backendConfigurationDescriptor(backendName).toOption map { config => - backendName -> factory.getWorkflowExecutionRootPath(subWorkflowDescriptor, config.backendConfig, initializationData.get(backendName)) - } - } map { - case (backend, wfRoot) => - MetadataEvent(MetadataKey(subWorkflowId, None, s"${WorkflowMetadataKeys.WorkflowRoot}[$backend]"), MetadataValue(wfRoot.toAbsolutePath)) + factories flatMap { case (backendName, factory) => + BackendConfiguration.backendConfigurationDescriptor(backendName).toOption map { config => + backendName -> factory.getWorkflowExecutionRootPath(subWorkflowDescriptor, + config.backendConfig, + initializationData.get(backendName) + ) + } + } map { case (backend, wfRoot) => + MetadataEvent(MetadataKey(subWorkflowId, None, s"${WorkflowMetadataKeys.WorkflowRoot}[$backend]"), + MetadataValue(wfRoot.toAbsolutePath) + ) } } @@ -324,8 +383,12 @@ object SubWorkflowExecutionActor { sealed trait SubWorkflowExecutionActorData { def subWorkflowId: Option[WorkflowId] } - final case class SubWorkflowExecutionActorLiveData(subWorkflowId: Option[WorkflowId], subWorkflowActor: Option[ActorRef]) extends SubWorkflowExecutionActorData - final case class SubWorkflowExecutionActorTerminalData(subWorkflowId: Option[WorkflowId], terminalStateResponse: SubWorkflowTerminalStateResponse) extends SubWorkflowExecutionActorData + final case class SubWorkflowExecutionActorLiveData(subWorkflowId: Option[WorkflowId], + subWorkflowActor: Option[ActorRef] + ) extends SubWorkflowExecutionActorData + final case class SubWorkflowExecutionActorTerminalData(subWorkflowId: Option[WorkflowId], + terminalStateResponse: SubWorkflowTerminalStateResponse + ) extends SubWorkflowExecutionActorData sealed trait EngineWorkflowExecutionActorCommand case object Execute @@ -349,28 +412,30 @@ object SubWorkflowExecutionActor { rootConfig: Config, totalJobsByRootWf: AtomicInteger, fileHashCacheActor: Option[ActorRef], - blacklistCache: Option[BlacklistCache]) = { - Props(new SubWorkflowExecutionActor( - key, - parentWorkflow, - expressionLanguageFunctions, - factories, - ioActor = ioActor, - serviceRegistryActor = serviceRegistryActor, - jobStoreActor = jobStoreActor, - subWorkflowStoreActor = subWorkflowStoreActor, - callCacheReadActor = callCacheReadActor, - callCacheWriteActor = callCacheWriteActor, - workflowDockerLookupActor = workflowDockerLookupActor, - jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, - jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, - backendSingletonCollection, - initializationData, - startState, - rootConfig, - totalJobsByRootWf, - 
fileHashCacheActor = fileHashCacheActor, - blacklistCache = blacklistCache) + blacklistCache: Option[BlacklistCache] + ) = + Props( + new SubWorkflowExecutionActor( + key, + parentWorkflow, + expressionLanguageFunctions, + factories, + ioActor = ioActor, + serviceRegistryActor = serviceRegistryActor, + jobStoreActor = jobStoreActor, + subWorkflowStoreActor = subWorkflowStoreActor, + callCacheReadActor = callCacheReadActor, + callCacheWriteActor = callCacheWriteActor, + workflowDockerLookupActor = workflowDockerLookupActor, + jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, + jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, + backendSingletonCollection, + initializationData, + startState, + rootConfig, + totalJobsByRootWf, + fileHashCacheActor = fileHashCacheActor, + blacklistCache = blacklistCache + ) ).withDispatcher(EngineDispatcher) - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala index dd6f0a42835..106b14a9a0c 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala @@ -28,7 +28,10 @@ import cromwell.engine.backend.{BackendSingletonCollection, CromwellBackends} import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActor._ import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActorData.DataStoreUpdate import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor -import cromwell.engine.workflow.lifecycle.execution.keys.ExpressionKey.{ExpressionEvaluationFailedResponse, ExpressionEvaluationSucceededResponse} +import cromwell.engine.workflow.lifecycle.execution.keys.ExpressionKey.{ + ExpressionEvaluationFailedResponse, + ExpressionEvaluationSucceededResponse +} import cromwell.engine.workflow.lifecycle.execution.keys._ import cromwell.engine.workflow.lifecycle.execution.stores.{ActiveExecutionStore, ExecutionStore} import cromwell.engine.workflow.lifecycle.{EngineLifecycleActorAbortCommand, EngineLifecycleActorAbortedResponse} @@ -52,7 +55,7 @@ import scala.language.postfixOps import scala.util.control.NoStackTrace case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) - extends LoggingFSM[WorkflowExecutionActorState, WorkflowExecutionActorData] + extends LoggingFSM[WorkflowExecutionActorState, WorkflowExecutionActorData] with WorkflowLogging with CallMetadataHelper with StopAndLogSupervisor @@ -71,23 +74,25 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) private val DefaultTotalMaxJobsPerRootWf = 1000000 private val DefaultMaxScatterSize = 1000000 - private val TotalMaxJobsPerRootWf = params.rootConfig.getOrElse("system.total-max-jobs-per-root-workflow", DefaultTotalMaxJobsPerRootWf) - private val MaxScatterWidth = params.rootConfig.getOrElse("system.max-scatter-width-per-scatter", DefaultMaxScatterSize) + private val TotalMaxJobsPerRootWf = + params.rootConfig.getOrElse("system.total-max-jobs-per-root-workflow", DefaultTotalMaxJobsPerRootWf) + private val MaxScatterWidth = + params.rootConfig.getOrElse("system.max-scatter-width-per-scatter", DefaultMaxScatterSize) private val FileHashBatchSize: Int = params.rootConfig.as[Int]("system.file-hash-batch-size") private val backendFactories: Map[String, BackendLifecycleActorFactory] = { val 
factoriesValidation = workflowDescriptor.backendAssignments.values.toList - .traverse[ErrorOr, (String, BackendLifecycleActorFactory)] { - backendName => CromwellBackends.backendLifecycleFactoryActorByName(backendName) map { backendName -> _ } - } + .traverse[ErrorOr, (String, BackendLifecycleActorFactory)] { backendName => + CromwellBackends.backendLifecycleFactoryActorByName(backendName) map { backendName -> _ } + } factoriesValidation .map(_.toMap) .valueOr(errors => throw AggregatedMessageException("Could not instantiate backend factories", errors.toList)) } - - val executionStore: ErrorOr[ActiveExecutionStore] = ExecutionStore(workflowDescriptor.callable, params.totalJobsByRootWf, TotalMaxJobsPerRootWf) + val executionStore: ErrorOr[ActiveExecutionStore] = + ExecutionStore(workflowDescriptor.callable, params.totalJobsByRootWf, TotalMaxJobsPerRootWf) // If executionStore returns a Failure about root workflow creating jobs more than total jobs per root workflow limit, // the WEA will fail by sending WorkflowExecutionFailedResponse to its parent and kill itself @@ -95,7 +100,12 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) case Valid(validExecutionStore) => startWith( WorkflowExecutionPendingState, - WorkflowExecutionActorData(workflowDescriptor, ioEc, new AsyncIo(params.ioActor, GcsBatchCommandBuilder), params.totalJobsByRootWf, validExecutionStore) + WorkflowExecutionActorData(workflowDescriptor, + ioEc, + new AsyncIo(params.ioActor, GcsBatchCommandBuilder), + params.totalJobsByRootWf, + validExecutionStore + ) ) case Invalid(e) => val errorMsg = s"Failed to initialize WorkflowExecutionActor. Error: $e" @@ -106,7 +116,12 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) // it throws NullPointerException as FSM.goto can't find the currentState startWith( WorkflowExecutionFailedState, - WorkflowExecutionActorData(workflowDescriptor, ioEc, new AsyncIo(params.ioActor, GcsBatchCommandBuilder), params.totalJobsByRootWf, ExecutionStore.empty) + WorkflowExecutionActorData(workflowDescriptor, + ioEc, + new AsyncIo(params.ioActor, GcsBatchCommandBuilder), + params.totalJobsByRootWf, + ExecutionStore.empty + ) ) workflowLogger.debug("Actor failed to initialize. Stopping self.") @@ -124,14 +139,14 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) sendHeartBeat() /* - * Note that we don't record the fact that a workflow was failing, therefore we either restart in running or aborting state. - * If the workflow was failing, it means at least one job failed in which case it'll still be failed when we get to it. - * When that happens, we'll go to failing state. - * - * An effect of that is that up until the moment when we come across the failed job, - * all backend jobs will be restarted with a Recover command which could potentially re-execute the job if the backend doesn't support - * job recovery. A better way would be to record that the workflow was failing an restart in failing mode. However there will always be a gap - * between when a job has failed and Cromwell is aware of it, or has time to persist that information. + * Note that we don't record the fact that a workflow was failing, therefore we either restart in running or aborting state. + * If the workflow was failing, it means at least one job failed in which case it'll still be failed when we get to it. + * When that happens, we'll go to failing state. 
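// A toy version (invented registry; not CromwellBackends) of the error-accumulating
// traverse used above to build backendFactories. Cromwell's ErrorOr is a ValidatedNel
// alias, so every unknown backend is reported, not just the first one encountered.
import cats.data.ValidatedNel
import cats.syntax.option._
import cats.syntax.traverse._

object TraverseBackendsSketch extends App {
  type ErrorOr[A] = ValidatedNel[String, A]

  val registry = Map("Local" -> "LocalFactory", "Batch" -> "BatchFactory")

  def factoryFor(name: String): ErrorOr[(String, String)] =
    registry.get(name).map(name -> _).toValidNel(s"Backend '$name' is not configured")

  // Valid with a Map for known backends; Invalid carrying *both* messages when two lookups fail:
  println(List("Local", "Batch").traverse(factoryFor).map(_.toMap))
  println(List("Local", "Nope", "AlsoNope").traverse(factoryFor))
}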
+ * + * An effect of that is that up until the moment when we come across the failed job, + * all backend jobs will be restarted with a Recover command which could potentially re-execute the job if the backend doesn't support + * job recovery. A better way would be to record that the workflow was failing an restart in failing mode. However there will always be a gap + * between when a job has failed and Cromwell is aware of it, or has time to persist that information. */ params.startState match { case RestartableAborting => goto(WorkflowExecutionAbortingState) @@ -165,8 +180,9 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) // A job not found here means we were trying to reconnect to a job that was likely never started. Indicate this in the message. case Event(JobFailedNonRetryableResponse(jobKey, _: JobNotFoundException, _), _) if restarting => val benignException = - new Exception("Cromwell server was restarted while this workflow was running. As part of the restart process, Cromwell attempted to reconnect to this job, however it was never started in the first place. This is a benign failure and not the cause of failure for this workflow, it can be safely ignored.") - with NoStackTrace + new Exception( + "Cromwell server was restarted while this workflow was running. As part of the restart process, Cromwell attempted to reconnect to this job, however it was never started in the first place. This is a benign failure and not the cause of failure for this workflow, it can be safely ignored." + ) with NoStackTrace handleNonRetryableFailure(stateData, jobKey, benignException) } @@ -187,20 +203,28 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) handleCallAborted(stateData, jobKey, executedKeys) // Here we can't really know what the status of the job is. 
For now declare it aborted anyway but add some info in the metadata - case Event(JobFailedNonRetryableResponse(jobKey: BackendJobDescriptorKey, failure: JobReconnectionNotSupportedException, _), _) if restarting => + case Event(JobFailedNonRetryableResponse(jobKey: BackendJobDescriptorKey, + failure: JobReconnectionNotSupportedException, + _ + ), + _ + ) if restarting => pushBackendStatusUnknown(jobKey) handleNonRetryableFailure(stateData, jobKey, failure, None) } - when(WorkflowExecutionSuccessfulState) { FSM.NullFunction } - when(WorkflowExecutionFailedState) { FSM.NullFunction } - when(WorkflowExecutionAbortedState) { FSM.NullFunction } + when(WorkflowExecutionSuccessfulState)(FSM.NullFunction) + when(WorkflowExecutionFailedState)(FSM.NullFunction) + when(WorkflowExecutionAbortedState)(FSM.NullFunction) var previousHeartbeatTime: Option[OffsetDateTime] = None def measureTimeBetweenHeartbeats(): Unit = { val now = OffsetDateTime.now previousHeartbeatTime foreach { previous => - sendGauge(NonEmptyList("workflows", List("workflowexecutionactor", "heartbeat", "interval_millis", "set")), now.toInstant.toEpochMilli - previous.toInstant.toEpochMilli) + sendGauge( + NonEmptyList("workflows", List("workflowexecutionactor", "heartbeat", "interval_millis", "set")), + now.toInstant.toEpochMilli - previous.toInstant.toEpochMilli + ) } previousHeartbeatTime = Option(now) } @@ -225,15 +249,20 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) stay() using stateData .mergeExecutionDiff(WorkflowExecutionDiff(Map(key -> ExecutionStatus.Running))) - //Success + // Success // Job case Event(r: JobSucceededResponse, stateData) => if (r.resultGenerationMode != RunOnBackend) { - workflowLogger.info(s"Job results retrieved (${r.resultGenerationMode}): '${r.jobKey.call.fullyQualifiedName}' (scatter index: ${r.jobKey.index}, attempt ${r.jobKey.attempt})") + workflowLogger.info( + s"Job results retrieved (${r.resultGenerationMode}): '${r.jobKey.call.fullyQualifiedName}' (scatter index: ${r.jobKey.index}, attempt ${r.jobKey.attempt})" + ) } handleCallSuccessful(r.jobKey, r.jobOutputs, r.returnCode, stateData, Map.empty, Set(workflowDescriptor.id)) // Sub Workflow - case Event(SubWorkflowSucceededResponse(jobKey, descendantJobKeys, rootAndSubworklowIds, callOutputs, cumulativeOutputs), currentStateData) => + case Event( + SubWorkflowSucceededResponse(jobKey, descendantJobKeys, rootAndSubworklowIds, callOutputs, cumulativeOutputs), + currentStateData + ) => // Update call outputs to come from sub-workflow output ports: val subworkflowOutputs: Map[OutputPort, WomValue] = callOutputs.outputs flatMap { case (port, value) => jobKey.node.subworkflowCallOutputPorts collectFirst { @@ -241,16 +270,30 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) } } - if(subworkflowOutputs.size == callOutputs.outputs.size) { - handleCallSuccessful(jobKey, CallOutputs(subworkflowOutputs), None, currentStateData, descendantJobKeys, rootAndSubworklowIds, cumulativeOutputs) + if (subworkflowOutputs.size == callOutputs.outputs.size) { + handleCallSuccessful(jobKey, + CallOutputs(subworkflowOutputs), + None, + currentStateData, + descendantJobKeys, + rootAndSubworklowIds, + cumulativeOutputs + ) } else { - handleNonRetryableFailure(currentStateData, jobKey, new Exception(s"Subworkflow produced outputs: [${callOutputs.outputs.keys.mkString(", ")}], but we expected all of [${jobKey.node.subworkflowCallOutputPorts.map(_.internalName)}]")) + handleNonRetryableFailure( + currentStateData, + jobKey, 
+ new Exception(s"Subworkflow produced outputs: [${callOutputs.outputs.keys + .mkString(", ")}], but we expected all of [${jobKey.node.subworkflowCallOutputPorts.map(_.internalName)}]") + ) } // Expression case Event(ExpressionEvaluationSucceededResponse(expressionKey, callOutputs), stateData) => expressionKey.node match { case _: ExposedExpressionNode | _: ExpressionBasedGraphOutputNode => - workflowLogger.debug(s"Expression evaluation succeeded: '${expressionKey.node.fullyQualifiedName}' (scatter index: ${expressionKey.index}, attempt: ${expressionKey.attempt})") + workflowLogger.debug( + s"Expression evaluation succeeded: '${expressionKey.node.fullyQualifiedName}' (scatter index: ${expressionKey.index}, attempt: ${expressionKey.attempt})" + ) case _ => // No logging; anonymous node } @@ -314,11 +357,23 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) override protected def onFailure(actorRef: ActorRef, throwable: => Throwable) = { // Both of these Should Never Happen (tm), assuming the state data is set correctly on EJEA creation. // If they do, it's a big programmer error and the workflow execution fails. - val jobKey = stateData.jobKeyActorMappings.getOrElse(actorRef, throw new RuntimeException("Programmer Error: A job or sub workflow actor has terminated but was not assigned a jobKey")) - val jobStatus = stateData.executionStore.jobStatus(jobKey).getOrElse(throw new RuntimeException(s"Programmer Error: An actor representing ${jobKey.tag} which this workflow is not running has sent up a terminated message.")) + val jobKey = stateData.jobKeyActorMappings.getOrElse( + actorRef, + throw new RuntimeException( + "Programmer Error: A job or sub workflow actor has terminated but was not assigned a jobKey" + ) + ) + val jobStatus = stateData.executionStore + .jobStatus(jobKey) + .getOrElse( + throw new RuntimeException( + s"Programmer Error: An actor representing ${jobKey.tag} which this workflow is not running has sent up a terminated message." + ) + ) if (!jobStatus.isTerminalOrRetryable) { - val terminationException = new RuntimeException(s"Unexpected failure or termination of the actor monitoring ${jobKey.tag}", throwable) + val terminationException = + new RuntimeException(s"Unexpected failure or termination of the actor monitoring ${jobKey.tag}", throwable) self ! JobFailedNonRetryableResponse(jobKey, terminationException, None) } } @@ -328,7 +383,9 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) // Send the abort to all children context.children foreach { _ ! EngineLifecycleActorAbortCommand } // As well as all backend singleton actors - params.backendSingletonCollection.backendSingletonActors.values.flatten.foreach { _ ! BackendSingletonActorAbortWorkflow(workflowIdForLogging) } + params.backendSingletonCollection.backendSingletonActors.values.flatten.foreach { + _ ! 
BackendSingletonActorAbortWorkflow(workflowIdForLogging) + } // Only seal the execution store if we're not restarting, otherwise we could miss some jobs that have been started before the // restart but are not started yet at this point @@ -344,8 +401,8 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) import spray.json._ def handleSuccessfulWorkflowOutputs(outputs: Map[GraphOutputNode, WomValue]) = { - val fullyQualifiedOutputs = outputs map { - case (outputNode, value) => outputNode.identifier.fullyQualifiedName.value -> value + val fullyQualifiedOutputs = outputs map { case (outputNode, value) => + outputNode.identifier.fullyQualifiedName.value -> value } // Publish fully qualified workflow outputs to log and metadata workflowLogger.info( @@ -354,15 +411,19 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) ) pushWorkflowOutputMetadata(fullyQualifiedOutputs) - val localOutputs = CallOutputs(outputs map { - case (outputNode, value) => outputNode.graphOutputPort -> value + val localOutputs = CallOutputs(outputs map { case (outputNode, value) => + outputNode.graphOutputPort -> value }) val currentCumulativeOutputs = data.cumulativeOutputs ++ localOutputs.outputs.values val currentRootAndSubworkflowIds = data.rootAndSubworkflowIds + workflowDescriptor.id - context.parent ! WorkflowExecutionSucceededResponse(data.jobExecutionMap, currentRootAndSubworkflowIds, localOutputs, currentCumulativeOutputs) + context.parent ! WorkflowExecutionSucceededResponse(data.jobExecutionMap, + currentRootAndSubworkflowIds, + localOutputs, + currentCumulativeOutputs + ) goto(WorkflowExecutionSuccessfulState) using data } @@ -380,14 +441,13 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) val workflowOutputValuesValidation = workflowOutputNodes // Try to find a value for each port in the value store - .map(outputNode => - outputNode -> data.valueStore.get(outputNode.graphOutputPort, None) - ) - .toList.traverse[IOChecked, (GraphOutputNode, WomValue)]({ - case (name, Some(value)) => value.initialize(data.expressionLanguageFunctions).map(name -> _) - case (name, None) => - s"Cannot find an output value for ${name.identifier.fullyQualifiedName.value}".invalidIOChecked - }) + .map(outputNode => outputNode -> data.valueStore.get(outputNode.graphOutputPort, None)) + .toList + .traverse[IOChecked, (GraphOutputNode, WomValue)] { + case (name, Some(value)) => value.initialize(data.expressionLanguageFunctions).map(name -> _) + case (name, None) => + s"Cannot find an output value for ${name.identifier.fullyQualifiedName.value}".invalidIOChecked + } // Convert the list of tuples to a Map .map(_.toMap) @@ -410,7 +470,8 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) failedJobKey: JobKey, reason: Throwable, returnCode: Option[Int] = None, - jobExecutionMap: JobExecutionMap = Map.empty) = { + jobExecutionMap: JobExecutionMap = Map.empty + ) = { pushFailedCallMetadata(failedJobKey, returnCode, reason, retryableFailure = false) val dataWithFailure = stateData.executionFailure(failedJobKey, reason, jobExecutionMap) @@ -442,7 +503,7 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) * and for which reconnection will fail. When that happens we'll fail the job (see failing state). * * This guarantees that we'll try to reconnect to all potentially running jobs on restart. 
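     *
     * As a minimal sketch of what sealing means here (assuming the ExecutionStore#seal helper that
     * the data class wraps as sealExecutionStore; the exact semantics are an assumption from the
     * surrounding code, not quoted from this hunk): a sealed store keeps its existing entries but
     * stops surfacing NotStarted keys as runnable, i.e. roughly
     *   dataWithFailure.sealExecutionStore // ~ data.copy(executionStore = executionStore.seal)
     * so no new job can start while the already in-flight work drains.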
- */ + */ val newData = if (workflowDescriptor.failureMode.allowNewCallsAfterFailure || restarting) { dataWithFailure } else { @@ -458,7 +519,10 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) goto(nextState) using newData } - private def handleCallAborted(data: WorkflowExecutionActorData, jobKey: JobKey, jobExecutionMap: JobExecutionMap = Map.empty) = { + private def handleCallAborted(data: WorkflowExecutionActorData, + jobKey: JobKey, + jobExecutionMap: JobExecutionMap = Map.empty + ) = { pushAbortedCallMetadata(jobKey) workflowLogger.info(s"$tag aborted: ${jobKey.tag}") val newStateData = data @@ -469,19 +533,20 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) } private def pushBackendStatusUnknown(jobKey: BackendJobDescriptorKey): Unit = { - val unknownBackendStatus = MetadataEvent(metadataKeyForCall(jobKey, CallMetadataKeys.BackendStatus), MetadataValue("Unknown")) + val unknownBackendStatus = + MetadataEvent(metadataKeyForCall(jobKey, CallMetadataKeys.BackendStatus), MetadataValue("Unknown")) serviceRegistryActor ! PutMetadataAction(unknownBackendStatus) } - private def handleRetryableFailure(jobKey: BackendJobDescriptorKey, - reason: Throwable, - returnCode: Option[Int]) = { + private def handleRetryableFailure(jobKey: BackendJobDescriptorKey, reason: Throwable, returnCode: Option[Int]) = { pushFailedCallMetadata(jobKey, returnCode, reason, retryableFailure = true) val newJobKey = jobKey.copy(attempt = jobKey.attempt + 1) workflowLogger.info(s"Retrying job execution for ${newJobKey.tag}") // Update current key to RetryableFailure status and add new key with attempt incremented and NotStarted status - val executionDiff = WorkflowExecutionDiff(Map(jobKey -> ExecutionStatus.RetryableFailure, newJobKey -> ExecutionStatus.NotStarted)) + val executionDiff = WorkflowExecutionDiff( + Map(jobKey -> ExecutionStatus.RetryableFailure, newJobKey -> ExecutionStatus.NotStarted) + ) stay() using stateData.mergeExecutionDiff(executionDiff) } @@ -492,20 +557,30 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) data: WorkflowExecutionActorData, jobExecutionMap: JobExecutionMap, rootAndSubworkflowIds: Set[WorkflowId], - cumulativeOutputs: Set[WomValue] = Set.empty) = { + cumulativeOutputs: Set[WomValue] = Set.empty + ) = { pushSuccessfulCallMetadata(jobKey, returnCode, outputs) - stay() using data.callExecutionSuccess(jobKey, outputs, cumulativeOutputs, rootAndSubworkflowIds).addExecutions(jobExecutionMap) + stay() using data + .callExecutionSuccess(jobKey, outputs, cumulativeOutputs, rootAndSubworkflowIds) + .addExecutions(jobExecutionMap) } - private def handleDeclarationEvaluationSuccessful(key: ExpressionKey, values: Map[OutputPort, WomValue], data: WorkflowExecutionActorData) = { + private def handleDeclarationEvaluationSuccessful(key: ExpressionKey, + values: Map[OutputPort, WomValue], + data: WorkflowExecutionActorData + ) = stay() using data.expressionEvaluationSuccess(key, values) - } - override def receive: Receive = { - case msg => - val starttime = OffsetDateTime.now - super[LoggingFSM].receive(msg) - sendGauge(NonEmptyList("workflows", List("workflowexecutionactor", this.stateName.toString, msg.getClass.getSimpleName , "processing_millis", "set")), OffsetDateTime.now.toInstant.toEpochMilli - starttime.toInstant.toEpochMilli) + override def receive: Receive = { case msg => + val starttime = OffsetDateTime.now + super[LoggingFSM].receive(msg) + sendGauge( + NonEmptyList( + "workflows", + 
List("workflowexecutionactor", this.stateName.toString, msg.getClass.getSimpleName, "processing_millis", "set") + ), + OffsetDateTime.now.toInstant.toEpochMilli - starttime.toInstant.toEpochMilli + ) } /** @@ -517,10 +592,14 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) val startRunnableStartTimestamp = OffsetDateTime.now - def updateExecutionStore(diffs: List[WorkflowExecutionDiff], updatedData: WorkflowExecutionActorData): WorkflowExecutionActorData = { - val notStartedBackendJobs = diffs.flatMap(d => d.executionStoreChanges.collect{ - case (key: BackendJobDescriptorKey, status: ExecutionStatus.NotStarted.type) => (key, status) - }.keys) + def updateExecutionStore(diffs: List[WorkflowExecutionDiff], + updatedData: WorkflowExecutionActorData + ): WorkflowExecutionActorData = { + val notStartedBackendJobs = diffs.flatMap(d => + d.executionStoreChanges.collect { + case (key: BackendJobDescriptorKey, status: ExecutionStatus.NotStarted.type) => (key, status) + }.keys + ) val notStartedBackendJobsCt = notStartedBackendJobs.size // this limits the total max jobs that can be created by a root workflow @@ -528,56 +607,66 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) // Since the root workflow tried creating jobs more than the total max jobs allowed per root workflow // we fail all the BackendJobDescriptorKey which are in 'Not Started' state, and update the execution // store with the status update of remaining keys - val updatedDiffs = diffs.map(d => d.copy(executionStoreChanges = d.executionStoreChanges -- notStartedBackendJobs)) + val updatedDiffs = + diffs.map(d => d.copy(executionStoreChanges = d.executionStoreChanges -- notStartedBackendJobs)) - notStartedBackendJobs.foreach(key => { - val errorMsg = s"Job $key failed to be created! Error: Root workflow tried creating ${data.totalJobsByRootWf.get} jobs, which is more than $TotalMaxJobsPerRootWf, the max cumulative jobs allowed per root workflow" + notStartedBackendJobs.foreach { key => + val errorMsg = + s"Job $key failed to be created! Error: Root workflow tried creating ${data.totalJobsByRootWf.get} jobs, which is more than $TotalMaxJobsPerRootWf, the max cumulative jobs allowed per root workflow" workflowLogger.error(errorMsg) self ! 
JobFailedNonRetryableResponse(key, new Exception(errorMsg), None) - }) + } updatedData.mergeExecutionDiffs(updatedDiffs) - } - else updatedData.mergeExecutionDiffs(diffs) + } else updatedData.mergeExecutionDiffs(diffs) } val DataStoreUpdate(runnableKeys, _, updatedData) = data.executionStoreUpdate val runnableCalls = runnableKeys.view - .collect({ case k: BackendJobDescriptorKey => k }) + .collect { case k: BackendJobDescriptorKey => k } .groupBy(_.node) - .map({ - case (node, keys) => - val tag = node.fullyQualifiedName - val shardCount = keys.map(_.index).toList.distinct.size - if (shardCount == 1) tag - else s"$tag ($shardCount shards)" - }) + .map { case (node, keys) => + val tag = node.fullyQualifiedName + val shardCount = keys.map(_.index).toList.distinct.size + if (shardCount == 1) tag + else s"$tag ($shardCount shards)" + } val mode = if (restarting) "Restarting" else "Starting" if (runnableCalls.nonEmpty) workflowLogger.info(s"$mode " + runnableCalls.mkString(", ")) - val keyStartDiffs: List[WorkflowExecutionDiff] = runnableKeys map { k => k -> (k match { - case key: BackendJobDescriptorKey => processRunnableJob(key, data) - case key: SubWorkflowKey => processRunnableSubWorkflow(key, data) - case key: ConditionalCollectorKey => key.processRunnable(data) - case key: ConditionalKey => key.processRunnable(data, workflowLogger) - case key @ ExpressionKey(expr: TaskCallInputExpressionNode, _) => processRunnableTaskCallInputExpression(key, data, expr) - case key: ExpressionKey => key.processRunnable(data.expressionLanguageFunctions, data.valueStore, self) - case key: ScatterCollectorKey => key.processRunnable(data) - case key: ScatteredCallCompletionKey => key.processRunnable(data) - case key: ScatterKey => key.processRunnable(data, self, MaxScatterWidth) - case other => - workflowLogger.error(s"${other.tag} is not a runnable key") - WorkflowExecutionDiff.empty.validNel - })} map { - case (key: JobKey, value: ErrorOr[WorkflowExecutionDiff]) => value.valueOr(errors => { - self ! JobFailedNonRetryableResponse(key, new Exception(errors.toList.mkString(System.lineSeparator)) with NoStackTrace, None) + val keyStartDiffs: List[WorkflowExecutionDiff] = runnableKeys map { k => + k -> (k match { + case key: BackendJobDescriptorKey => processRunnableJob(key, data) + case key: SubWorkflowKey => processRunnableSubWorkflow(key, data) + case key: ConditionalCollectorKey => key.processRunnable(data) + case key: ConditionalKey => key.processRunnable(data, workflowLogger) + case key @ ExpressionKey(expr: TaskCallInputExpressionNode, _) => + processRunnableTaskCallInputExpression(key, data, expr) + case key: ExpressionKey => key.processRunnable(data.expressionLanguageFunctions, data.valueStore, self) + case key: ScatterCollectorKey => key.processRunnable(data) + case key: ScatteredCallCompletionKey => key.processRunnable(data) + case key: ScatterKey => key.processRunnable(data, self, MaxScatterWidth) + case other => + workflowLogger.error(s"${other.tag} is not a runnable key") + WorkflowExecutionDiff.empty.validNel + }) + } map { case (key: JobKey, value: ErrorOr[WorkflowExecutionDiff]) => + value.valueOr { errors => + self ! 
JobFailedNonRetryableResponse(key, + new Exception(errors.toList.mkString(System.lineSeparator)) + with NoStackTrace, + None + ) // Don't update the execution store now - the failure message we just sent to ourselves will take care of that: WorkflowExecutionDiff.empty - }) + } } // Merge the execution diffs upon success val result = updateExecutionStore(keyStartDiffs, updatedData) - sendGauge(NonEmptyList("workflows", List("workflowexecutionactor", "startRunnableNodes", "duration_millis", "set")), OffsetDateTime.now.toInstant.toEpochMilli - startRunnableStartTimestamp.toInstant.toEpochMilli) + sendGauge( + NonEmptyList("workflows", List("workflowexecutionactor", "startRunnableNodes", "duration_millis", "set")), + OffsetDateTime.now.toInstant.toEpochMilli - startRunnableStartTimestamp.toInstant.toEpochMilli + ) result } @@ -587,35 +676,45 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) */ private def processRunnableTaskCallInputExpression(key: ExpressionKey, data: WorkflowExecutionActorData, - expressionNode: TaskCallInputExpressionNode): ErrorOr[WorkflowExecutionDiff] = { + expressionNode: TaskCallInputExpressionNode + ): ErrorOr[WorkflowExecutionDiff] = { import cats.syntax.either._ val taskCallNode: CommandCallNode = expressionNode.taskCallNodeReceivingInput.get(()) (for { - backendJobDescriptorKey <- data.executionStore.backendJobDescriptorKeyForNode(taskCallNode) toChecked s"No BackendJobDescriptorKey found for call node ${taskCallNode.identifier.fullyQualifiedName}" + backendJobDescriptorKey <- data.executionStore.backendJobDescriptorKeyForNode( + taskCallNode + ) toChecked s"No BackendJobDescriptorKey found for call node ${taskCallNode.identifier.fullyQualifiedName}" factory <- backendFactoryForTaskCallNode(taskCallNode) backendInitializationData = params.initializationData.get(factory.name) - functions = factory.expressionLanguageFunctions(workflowDescriptor.backendDescriptor, backendJobDescriptorKey, backendInitializationData, params.ioActor, ioEc) + functions = factory.expressionLanguageFunctions(workflowDescriptor.backendDescriptor, + backendJobDescriptorKey, + backendInitializationData, + params.ioActor, + ioEc + ) diff <- key.processRunnable(functions, data.valueStore, self).toEither } yield diff).toValidated } - private def backendFactoryForTaskCallNode(taskCallNode: CommandCallNode): Checked[BackendLifecycleActorFactory] = { + private def backendFactoryForTaskCallNode(taskCallNode: CommandCallNode): Checked[BackendLifecycleActorFactory] = for { - name <- workflowDescriptor - .backendAssignments.get(taskCallNode).toChecked(s"Cannot find an assigned backend for call ${taskCallNode.fullyQualifiedName}") + name <- workflowDescriptor.backendAssignments + .get(taskCallNode) + .toChecked(s"Cannot find an assigned backend for call ${taskCallNode.fullyQualifiedName}") factory <- backendFactories.get(name).toChecked(s"Cannot find a backend factory for backend $name") } yield factory - } /* - * Job and Sub Workflow processing - * - * Unlike other job keys, those methods are not embedded in the key class itself because they require creating a child actor. - * While it would be possible to extract those methods from the WEA as well and provide them with an actor factory, the majority of the objects needed to create - * the children actors are attributes of this class, so it makes more sense to keep the functions here. 
+ * Job and Sub Workflow processing + * + * Unlike other job keys, those methods are not embedded in the key class itself because they require creating a child actor. + * While it would be possible to extract those methods from the WEA as well and provide them with an actor factory, the majority of the objects needed to create + * the children actors are attributes of this class, so it makes more sense to keep the functions here. */ - private def processRunnableJob(key: BackendJobDescriptorKey, data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = { + private def processRunnableJob(key: BackendJobDescriptorKey, + data: WorkflowExecutionActorData + ): ErrorOr[WorkflowExecutionDiff] = { import cats.syntax.either._ import common.validation.Checked._ @@ -650,7 +749,8 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) private def startEJEA(jobKey: BackendJobDescriptorKey, backendLifecycleActorFactory: BackendLifecycleActorFactory, - command: BackendJobExecutionActorCommand): WorkflowExecutionDiff = { + command: BackendJobExecutionActorCommand + ): WorkflowExecutionDiff = { val ejeaName = s"${workflowDescriptor.id}-EngineJobExecutionActor-${jobKey.tag}" val backendName = backendLifecycleActorFactory.name val backendSingleton = params.backendSingletonCollection.backendSingletonActors(backendName) @@ -695,9 +795,11 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) } /* - * Creates another WEA to process the SubWorkflowKey + * Creates another WEA to process the SubWorkflowKey */ - private def processRunnableSubWorkflow(key:SubWorkflowKey, data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = { + private def processRunnableSubWorkflow(key: SubWorkflowKey, + data: WorkflowExecutionActorData + ): ErrorOr[WorkflowExecutionDiff] = { val sweaRef = context.actorOf( SubWorkflowExecutionActor.props( key, @@ -719,7 +821,9 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) params.rootConfig, params.totalJobsByRootWf, fileHashCacheActor = params.fileHashCacheActor, - blacklistCache = params.blacklistCache), s"$workflowIdForLogging-SubWorkflowExecutionActor-${key.tag}" + blacklistCache = params.blacklistCache + ), + s"$workflowIdForLogging-SubWorkflowExecutionActor-${key.tag}" ) context watch sweaRef @@ -784,17 +888,19 @@ object WorkflowExecutionActor { case class WorkflowExecutionSucceededResponse(jobExecutionMap: JobExecutionMap, rootAndSubworklowIds: Set[WorkflowId], outputs: CallOutputs, - cumulativeOutputs: Set[WomValue] = Set.empty) - extends WorkflowExecutionActorResponse { + cumulativeOutputs: Set[WomValue] = Set.empty + ) extends WorkflowExecutionActorResponse { override def toString = "WorkflowExecutionSucceededResponse" } case class WorkflowExecutionAbortedResponse(jobExecutionMap: JobExecutionMap) - extends WorkflowExecutionActorResponse with EngineLifecycleActorAbortedResponse { + extends WorkflowExecutionActorResponse + with EngineLifecycleActorAbortedResponse { override def toString = "WorkflowExecutionAbortedResponse" } - final case class WorkflowExecutionFailedResponse(jobExecutionMap: JobExecutionMap, reason: Throwable) extends WorkflowExecutionActorResponse { + final case class WorkflowExecutionFailedResponse(jobExecutionMap: JobExecutionMap, reason: Throwable) + extends WorkflowExecutionActorResponse { override def toString = "WorkflowExecutionFailedResponse" } @@ -822,31 +928,34 @@ object WorkflowExecutionActor { jobExecutionMap: JobExecutionMap, rootAndSubworklowIds: Set[WorkflowId], 
outputs: CallOutputs, - cumulativeOutputs: Set[WomValue] = Set.empty) extends SubWorkflowTerminalStateResponse + cumulativeOutputs: Set[WomValue] = Set.empty + ) extends SubWorkflowTerminalStateResponse - case class SubWorkflowFailedResponse(key: SubWorkflowKey, jobExecutionMap: JobExecutionMap, reason: Throwable) extends SubWorkflowTerminalStateResponse + case class SubWorkflowFailedResponse(key: SubWorkflowKey, jobExecutionMap: JobExecutionMap, reason: Throwable) + extends SubWorkflowTerminalStateResponse - case class SubWorkflowAbortedResponse(key: SubWorkflowKey, jobExecutionMap: JobExecutionMap) extends SubWorkflowTerminalStateResponse + case class SubWorkflowAbortedResponse(key: SubWorkflowKey, jobExecutionMap: JobExecutionMap) + extends SubWorkflowTerminalStateResponse case class WorkflowExecutionActorParams( - workflowDescriptor: EngineWorkflowDescriptor, - ioActor: ActorRef, - serviceRegistryActor: ActorRef, - jobStoreActor: ActorRef, - subWorkflowStoreActor: ActorRef, - callCacheReadActor: ActorRef, - callCacheWriteActor: ActorRef, - workflowDockerLookupActor: ActorRef, - jobRestartCheckTokenDispenserActor: ActorRef, - jobExecutionTokenDispenserActor: ActorRef, - backendSingletonCollection: BackendSingletonCollection, - initializationData: AllBackendInitializationData, - startState: StartableState, - rootConfig: Config, - totalJobsByRootWf: AtomicInteger, - fileHashCacheActor: Option[ActorRef], - blacklistCache: Option[BlacklistCache] - ) + workflowDescriptor: EngineWorkflowDescriptor, + ioActor: ActorRef, + serviceRegistryActor: ActorRef, + jobStoreActor: ActorRef, + subWorkflowStoreActor: ActorRef, + callCacheReadActor: ActorRef, + callCacheWriteActor: ActorRef, + workflowDockerLookupActor: ActorRef, + jobRestartCheckTokenDispenserActor: ActorRef, + jobExecutionTokenDispenserActor: ActorRef, + backendSingletonCollection: BackendSingletonCollection, + initializationData: AllBackendInitializationData, + startState: StartableState, + rootConfig: Config, + totalJobsByRootWf: AtomicInteger, + fileHashCacheActor: Option[ActorRef], + blacklistCache: Option[BlacklistCache] + ) def props(workflowDescriptor: EngineWorkflowDescriptor, ioActor: ActorRef, @@ -864,7 +973,8 @@ object WorkflowExecutionActor { rootConfig: Config, totalJobsByRootWf: AtomicInteger, fileHashCacheActor: Option[ActorRef], - blacklistCache: Option[BlacklistCache]): Props = { + blacklistCache: Option[BlacklistCache] + ): Props = Props( WorkflowExecutionActor( WorkflowExecutionActorParams( @@ -888,7 +998,6 @@ object WorkflowExecutionActor { ) ) ).withDispatcher(EngineDispatcher) - } implicit class EnhancedWorkflowOutputs(val outputs: Map[LocallyQualifiedName, WomValue]) extends AnyVal { def maxStringLength = 1000 diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala index ce00523a6e9..75e29db9c2f 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala @@ -20,15 +20,16 @@ import scala.concurrent.ExecutionContext object WorkflowExecutionDiff { def empty = WorkflowExecutionDiff(Map.empty) } + /** Data differential between current execution data, and updates performed in a method that needs to be merged. 
*/ final case class WorkflowExecutionDiff(executionStoreChanges: Map[JobKey, ExecutionStatus], jobKeyActorMappings: Map[ActorRef, JobKey] = Map.empty, valueStoreAdditions: Map[ValueKey, WomValue] = Map.empty, cumulativeOutputsChanges: Set[WomValue] = Set.empty, - rootAndSubworkflowIds: Set[WorkflowId] = Set.empty) { - def containsNewEntry: Boolean = { + rootAndSubworkflowIds: Set[WorkflowId] = Set.empty +) { + def containsNewEntry: Boolean = executionStoreChanges.exists(esc => esc._2 == NotStarted) || valueStoreAdditions.nonEmpty - } } object WorkflowExecutionActorData { @@ -36,7 +37,8 @@ object WorkflowExecutionActorData { ec: ExecutionContext, asyncIo: AsyncIo, totalJobsByRootWf: AtomicInteger, - executionStore: ExecutionStore): WorkflowExecutionActorData = { + executionStore: ExecutionStore + ): WorkflowExecutionActorData = WorkflowExecutionActorData( workflowDescriptor, executionStore, @@ -46,9 +48,11 @@ object WorkflowExecutionActorData { totalJobsByRootWf = totalJobsByRootWf, rootAndSubworkflowIds = Set(workflowDescriptor.id) ) - } - final case class DataStoreUpdate(runnableKeys: List[JobKey], statusChanges: Map[JobKey, ExecutionStatus], newData: WorkflowExecutionActorData) + final case class DataStoreUpdate(runnableKeys: List[JobKey], + statusChanges: Map[JobKey, ExecutionStatus], + newData: WorkflowExecutionActorData + ) } case class WorkflowExecutionActorData(workflowDescriptor: EngineWorkflowDescriptor, @@ -61,7 +65,8 @@ case class WorkflowExecutionActorData(workflowDescriptor: EngineWorkflowDescript downstreamExecutionMap: JobExecutionMap = Map.empty, totalJobsByRootWf: AtomicInteger, cumulativeOutputs: Set[WomValue] = Set.empty, - rootAndSubworkflowIds: Set[WorkflowId]) { + rootAndSubworkflowIds: Set[WorkflowId] +) { val expressionLanguageFunctions = new EngineIoFunctions(workflowDescriptor.pathBuilders, asyncIo, ec) @@ -69,54 +74,63 @@ case class WorkflowExecutionActorData(workflowDescriptor: EngineWorkflowDescript executionStore = executionStore.seal ) - def callExecutionSuccess(jobKey: JobKey, outputs: CallOutputs, cumulativeOutputs: Set[WomValue], rootAndSubworkflowIds: Set[WorkflowId]): WorkflowExecutionActorData = { - mergeExecutionDiff(WorkflowExecutionDiff( - executionStoreChanges = Map(jobKey -> Done), - valueStoreAdditions = toValuesMap(jobKey, outputs), - cumulativeOutputsChanges = cumulativeOutputs ++ outputs.outputs.values, - rootAndSubworkflowIds = rootAndSubworkflowIds - )) - } - - final def expressionEvaluationSuccess(expressionKey: ExpressionKey, values: Map[OutputPort, WomValue]): WorkflowExecutionActorData = { - val valueStoreAdditions = values.map({ - case (outputPort, value) => ValueKey(outputPort, expressionKey.index) -> value - }) - mergeExecutionDiff(WorkflowExecutionDiff( - executionStoreChanges = Map(expressionKey -> Done), - valueStoreAdditions =valueStoreAdditions - )) - } + def callExecutionSuccess(jobKey: JobKey, + outputs: CallOutputs, + cumulativeOutputs: Set[WomValue], + rootAndSubworkflowIds: Set[WorkflowId] + ): WorkflowExecutionActorData = + mergeExecutionDiff( + WorkflowExecutionDiff( + executionStoreChanges = Map(jobKey -> Done), + valueStoreAdditions = toValuesMap(jobKey, outputs), + cumulativeOutputsChanges = cumulativeOutputs ++ outputs.outputs.values, + rootAndSubworkflowIds = rootAndSubworkflowIds + ) + ) - def executionFailure(failedJobKey: JobKey, reason: Throwable, jobExecutionMap: JobExecutionMap): WorkflowExecutionActorData = { - mergeExecutionDiff(WorkflowExecutionDiff( - executionStoreChanges = Map(failedJobKey -> 
ExecutionStatus.Failed)) - ).addExecutions(jobExecutionMap) - .copy( - jobFailures = jobFailures + (failedJobKey -> reason) + final def expressionEvaluationSuccess(expressionKey: ExpressionKey, + values: Map[OutputPort, WomValue] + ): WorkflowExecutionActorData = { + val valueStoreAdditions = values.map { case (outputPort, value) => + ValueKey(outputPort, expressionKey.index) -> value + } + mergeExecutionDiff( + WorkflowExecutionDiff( + executionStoreChanges = Map(expressionKey -> Done), + valueStoreAdditions = valueStoreAdditions + ) ) } - def executionFailed(jobKey: JobKey): WorkflowExecutionActorData = mergeExecutionDiff(WorkflowExecutionDiff(Map(jobKey -> ExecutionStatus.Failed))) + def executionFailure(failedJobKey: JobKey, + reason: Throwable, + jobExecutionMap: JobExecutionMap + ): WorkflowExecutionActorData = + mergeExecutionDiff(WorkflowExecutionDiff(executionStoreChanges = Map(failedJobKey -> ExecutionStatus.Failed))) + .addExecutions(jobExecutionMap) + .copy( + jobFailures = jobFailures + (failedJobKey -> reason) + ) + + def executionFailed(jobKey: JobKey): WorkflowExecutionActorData = mergeExecutionDiff( + WorkflowExecutionDiff(Map(jobKey -> ExecutionStatus.Failed)) + ) /** Converts call outputs to a ValueStore entries */ - private def toValuesMap(jobKey: JobKey, outputs: CallOutputs): Map[ValueKey, WomValue] = { - outputs.outputs.map({ - case (outputPort, jobOutput) => ValueKey(outputPort, jobKey.index) -> jobOutput - }) - } + private def toValuesMap(jobKey: JobKey, outputs: CallOutputs): Map[ValueKey, WomValue] = + outputs.outputs.map { case (outputPort, jobOutput) => + ValueKey(outputPort, jobKey.index) -> jobOutput + } - def addExecutions(jobExecutionMap: JobExecutionMap): WorkflowExecutionActorData = { + def addExecutions(jobExecutionMap: JobExecutionMap): WorkflowExecutionActorData = this.copy(downstreamExecutionMap = downstreamExecutionMap ++ jobExecutionMap) - } - def removeJobKeyActor(actorRef: ActorRef): WorkflowExecutionActorData = { + def removeJobKeyActor(actorRef: ActorRef): WorkflowExecutionActorData = this.copy( jobKeyActorMappings = jobKeyActorMappings - actorRef ) - } - def mergeExecutionDiff(diff: WorkflowExecutionDiff): WorkflowExecutionActorData = { + def mergeExecutionDiff(diff: WorkflowExecutionDiff): WorkflowExecutionActorData = this.copy( executionStore = executionStore.updateKeys(diff.executionStoreChanges), valueStore = valueStore.add(diff.valueStoreAdditions), @@ -124,15 +138,12 @@ case class WorkflowExecutionActorData(workflowDescriptor: EngineWorkflowDescript cumulativeOutputs = cumulativeOutputs ++ diff.cumulativeOutputsChanges, rootAndSubworkflowIds = rootAndSubworkflowIds ++ diff.rootAndSubworkflowIds ) - } - def mergeExecutionDiffs(diffs: Iterable[WorkflowExecutionDiff]): WorkflowExecutionActorData = { + def mergeExecutionDiffs(diffs: Iterable[WorkflowExecutionDiff]): WorkflowExecutionActorData = diffs.foldLeft(this)((newData, diff) => newData.mergeExecutionDiff(diff)) - } - def jobExecutionMap: JobExecutionMap = { + def jobExecutionMap: JobExecutionMap = downstreamExecutionMap updated (workflowDescriptor.backendDescriptor, executionStore.startedJobs) - } def executionStoreUpdate: DataStoreUpdate = { val update = executionStore.update diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala index 5e23a69d0c7..d904333bc22 100644 --- 
a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala @@ -33,7 +33,8 @@ class CallCache(database: CallCachingSqlDatabase) { jobIndex = b.jobIndex.fromIndex, jobAttempt = b.jobAttempt, returnCode = b.returnCode, - allowResultReuse = b.allowResultReuse) + allowResultReuse = b.allowResultReuse + ) val result = b.callOutputs.outputs.simplify val jobDetritus = b.jobDetritusFiles.getOrElse(Map.empty) buildCallCachingJoin(metaInfo, b.callCacheHashes, result, jobDetritus) @@ -42,70 +43,81 @@ class CallCache(database: CallCachingSqlDatabase) { database.addCallCaching(joins, batchSize) } - private def buildCallCachingJoin(callCachingEntry: CallCachingEntry, callCacheHashes: CallCacheHashes, - result: Iterable[WomValueSimpleton], jobDetritus: Map[String, Path]): CallCachingJoin = { + private def buildCallCachingJoin(callCachingEntry: CallCachingEntry, + callCacheHashes: CallCacheHashes, + result: Iterable[WomValueSimpleton], + jobDetritus: Map[String, Path] + ): CallCachingJoin = { - val hashesToInsert: Iterable[CallCachingHashEntry] = { + val hashesToInsert: Iterable[CallCachingHashEntry] = callCacheHashes.hashes map { hash => CallCachingHashEntry(hash.hashKey.key, hash.hashValue.value) } - } - val aggregatedHashesToInsert: Option[CallCachingAggregationEntry] = { - Option(CallCachingAggregationEntry( - baseAggregation = callCacheHashes.aggregatedInitialHash, - inputFilesAggregation = callCacheHashes.fileHashes.map(_.aggregatedHash) - )) - } + val aggregatedHashesToInsert: Option[CallCachingAggregationEntry] = + Option( + CallCachingAggregationEntry( + baseAggregation = callCacheHashes.aggregatedInitialHash, + inputFilesAggregation = callCacheHashes.fileHashes.map(_.aggregatedHash) + ) + ) - val resultToInsert: Iterable[CallCachingSimpletonEntry] = { - result map { - case WomValueSimpleton(simpletonKey, wdlPrimitive) => - CallCachingSimpletonEntry(simpletonKey, wdlPrimitive.valueString.toClobOption, wdlPrimitive.womType.stableName) + val resultToInsert: Iterable[CallCachingSimpletonEntry] = + result map { case WomValueSimpleton(simpletonKey, wdlPrimitive) => + CallCachingSimpletonEntry(simpletonKey, wdlPrimitive.valueString.toClobOption, wdlPrimitive.womType.stableName) } - } - val jobDetritusToInsert: Iterable[CallCachingDetritusEntry] = { - jobDetritus map { - case (fileName, filePath) => CallCachingDetritusEntry(fileName, filePath.pathAsString.toClobOption) + val jobDetritusToInsert: Iterable[CallCachingDetritusEntry] = + jobDetritus map { case (fileName, filePath) => + CallCachingDetritusEntry(fileName, filePath.pathAsString.toClobOption) } - } - CallCachingJoin(callCachingEntry, hashesToInsert.toSeq, aggregatedHashesToInsert, resultToInsert.toSeq, jobDetritusToInsert.toSeq) + CallCachingJoin(callCachingEntry, + hashesToInsert.toSeq, + aggregatedHashesToInsert, + resultToInsert.toSeq, + jobDetritusToInsert.toSeq + ) } - def hasBaseAggregatedHashMatch(baseAggregatedHash: String, hints: List[CacheHitHint])(implicit ec: ExecutionContext): Future[Boolean] = { + def hasBaseAggregatedHashMatch(baseAggregatedHash: String, hints: List[CacheHitHint])(implicit + ec: ExecutionContext + ): Future[Boolean] = { val ccpp = hints collectFirst { case h: CallCachePathPrefixes => h.prefixes } database.hasMatchingCallCachingEntriesForBaseAggregation(baseAggregatedHash, ccpp) } - def callCachingHitForAggregatedHashes(aggregatedCallHashes: AggregatedCallHashes, prefixesHint: 
Option[CallCachePathPrefixes], excludedIds: Set[CallCachingEntryId]) - (implicit ec: ExecutionContext): Future[Option[CallCachingEntryId]] = { - database.findCacheHitForAggregation( - baseAggregationHash = aggregatedCallHashes.baseAggregatedHash, - inputFilesAggregationHash = aggregatedCallHashes.inputFilesAggregatedHash, - callCachePathPrefixes = prefixesHint.map(_.prefixes), - excludedIds.map(_.id)).map(_ map CallCachingEntryId.apply) - } + def callCachingHitForAggregatedHashes(aggregatedCallHashes: AggregatedCallHashes, + prefixesHint: Option[CallCachePathPrefixes], + excludedIds: Set[CallCachingEntryId] + )(implicit ec: ExecutionContext): Future[Option[CallCachingEntryId]] = + database + .findCacheHitForAggregation( + baseAggregationHash = aggregatedCallHashes.baseAggregatedHash, + inputFilesAggregationHash = aggregatedCallHashes.inputFilesAggregatedHash, + callCachePathPrefixes = prefixesHint.map(_.prefixes), + excludedIds.map(_.id) + ) + .map(_ map CallCachingEntryId.apply) - def fetchCachedResult(callCachingEntryId: CallCachingEntryId)(implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] = { + def fetchCachedResult(callCachingEntryId: CallCachingEntryId)(implicit + ec: ExecutionContext + ): Future[Option[CallCachingJoin]] = database.queryResultsForCacheId(callCachingEntryId.id) - } - def callCachingJoinForCall(workflowUuid: String, callFqn: String, index: Int)(implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] = { + def callCachingJoinForCall(workflowUuid: String, callFqn: String, index: Int)(implicit + ec: ExecutionContext + ): Future[Option[CallCachingJoin]] = database.callCacheJoinForCall(workflowUuid, callFqn, index) - } - def invalidate(callCachingEntryId: CallCachingEntryId)(implicit ec: ExecutionContext) = { + def invalidate(callCachingEntryId: CallCachingEntryId)(implicit ec: ExecutionContext) = database.invalidateCall(callCachingEntryId.id) - } - def callCacheEntryIdsForWorkflowId(workflowId: String)(implicit ec: ExecutionContext) = { + def callCacheEntryIdsForWorkflowId(workflowId: String)(implicit ec: ExecutionContext) = database.callCacheEntryIdsForWorkflowId(workflowId) - } } object CallCache { object CallCacheHashBundle { - def apply(workflowId: WorkflowId, callCacheHashes: CallCacheHashes, jobSucceededResponse: JobSucceededResponse) = { + def apply(workflowId: WorkflowId, callCacheHashes: CallCacheHashes, jobSucceededResponse: JobSucceededResponse) = new CallCacheHashBundle( workflowId = workflowId, callCacheHashes = callCacheHashes, @@ -117,9 +129,11 @@ object CallCache { callOutputs = jobSucceededResponse.jobOutputs, jobDetritusFiles = jobSucceededResponse.jobDetritusFiles ) - } - def apply(workflowId: WorkflowId, callCacheHashes: CallCacheHashes, jobFailedNonRetryableResponse: JobFailedNonRetryableResponse) = { + def apply(workflowId: WorkflowId, + callCacheHashes: CallCacheHashes, + jobFailedNonRetryableResponse: JobFailedNonRetryableResponse + ) = new CallCacheHashBundle( workflowId = workflowId, callCacheHashes = callCacheHashes, @@ -131,47 +145,58 @@ object CallCache { callOutputs = CallOutputs.empty, jobDetritusFiles = None ) - } } case class CallCacheHashBundle private ( - workflowId: WorkflowId, - callCacheHashes: CallCacheHashes, - fullyQualifiedName: FullyQualifiedName, - jobIndex: ExecutionIndex, - jobAttempt: Option[Int], - returnCode: Option[Int], - allowResultReuse: Boolean, - callOutputs: CallOutputs, - jobDetritusFiles: Option[Map[String, Path]] - ) + workflowId: WorkflowId, + callCacheHashes: CallCacheHashes, + 
fullyQualifiedName: FullyQualifiedName, + jobIndex: ExecutionIndex, + jobAttempt: Option[Int], + returnCode: Option[Int], + allowResultReuse: Boolean, + callOutputs: CallOutputs, + jobDetritusFiles: Option[Map[String, Path]] + ) implicit class EnhancedCallCachingJoin(val callCachingJoin: CallCachingJoin) extends AnyVal { def toJobSuccess(key: BackendJobDescriptorKey, pathBuilders: List[PathBuilder]): JobSucceededResponse = { import cromwell.Simpletons._ import cromwell.core.path.PathFactory._ - val detritus = callCachingJoin.callCachingDetritusEntries.map({ jobDetritusEntry => + val detritus = callCachingJoin.callCachingDetritusEntries.map { jobDetritusEntry => jobDetritusEntry.detritusKey -> buildPath(jobDetritusEntry.detritusValue.toRawString, pathBuilders) - }).toMap - - val outputs = if (callCachingJoin.callCachingSimpletonEntries.isEmpty) CallOutputs(Map.empty) - else WomValueBuilder.toJobOutputs(key.call.outputPorts, callCachingJoin.callCachingSimpletonEntries map toSimpleton) - - JobSucceededResponse(key, callCachingJoin.callCachingEntry.returnCode,outputs, Option(detritus), Seq.empty, None, resultGenerationMode = CallCached) + }.toMap + + val outputs = + if (callCachingJoin.callCachingSimpletonEntries.isEmpty) CallOutputs(Map.empty) + else + WomValueBuilder.toJobOutputs(key.call.outputPorts, + callCachingJoin.callCachingSimpletonEntries map toSimpleton + ) + + JobSucceededResponse(key, + callCachingJoin.callCachingEntry.returnCode, + outputs, + Option(detritus), + Seq.empty, + None, + resultGenerationMode = CallCached + ) } def callCacheHashes: Set[HashResult] = { - val hashResults = callCachingJoin.callCachingHashEntries.map({ - case CallCachingHashEntry(k, v, _, _) => HashResult(HashKey.deserialize(k), HashValue(v)) - }) ++ callCachingJoin.callCachingAggregationEntry.collect({ - case CallCachingAggregationEntry(k, Some(v), _, _) => HashResult(HashKey.deserialize(k), HashValue(v)) - }) + val hashResults = callCachingJoin.callCachingHashEntries.map { case CallCachingHashEntry(k, v, _, _) => + HashResult(HashKey.deserialize(k), HashValue(v)) + } ++ callCachingJoin.callCachingAggregationEntry.collect { case CallCachingAggregationEntry(k, Some(v), _, _) => + HashResult(HashKey.deserialize(k), HashValue(v)) + } hashResults.toSet } } sealed trait CacheHitHint - case class CallCachePathPrefixes(callCacheRootPrefix: Option[String], workflowOptionPrefixes: List[String]) extends CacheHitHint { + case class CallCachePathPrefixes(callCacheRootPrefix: Option[String], workflowOptionPrefixes: List[String]) + extends CacheHitHint { lazy val prefixes: List[String] = (callCacheRootPrefix.toList ++ workflowOptionPrefixes) map { _.ensureSlashed } } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala index 22a1612c672..f99ff6ee675 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala @@ -14,35 +14,43 @@ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffAct import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffQueryParameter.CallCacheDiffQueryCall import cromwell.services.metadata.MetadataService.GetMetadataAction import cromwell.services.metadata._ -import cromwell.services.{SuccessfulMetadataJsonResponse, 
FailedMetadataJsonResponse} +import cromwell.services.{FailedMetadataJsonResponse, SuccessfulMetadataJsonResponse} import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsString, JsValue} -class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[CallCacheDiffActorState, CallCacheDiffActorData] { +class CallCacheDiffActor(serviceRegistryActor: ActorRef) + extends LoggingFSM[CallCacheDiffActorState, CallCacheDiffActorData] { startWith(Idle, CallCacheDiffNoData) - when(Idle) { - case Event(CallCacheDiffQueryParameter(callA, callB), CallCacheDiffNoData) => - val queryA = makeMetadataQuery(callA) - val queryB = makeMetadataQuery(callB) - serviceRegistryActor ! GetMetadataAction(queryA) - serviceRegistryActor ! GetMetadataAction(queryB) - goto(WaitingForMetadata) using CallCacheDiffWithRequest(queryA, queryB, None, None, sender()) + when(Idle) { case Event(CallCacheDiffQueryParameter(callA, callB), CallCacheDiffNoData) => + val queryA = makeMetadataQuery(callA) + val queryB = makeMetadataQuery(callB) + serviceRegistryActor ! GetMetadataAction(queryA) + serviceRegistryActor ! GetMetadataAction(queryB) + goto(WaitingForMetadata) using CallCacheDiffWithRequest(queryA, queryB, None, None, sender()) } when(WaitingForMetadata) { // First Response // Response A - case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), data@CallCacheDiffWithRequest(queryA, _, None, None, _)) if queryA == originalQuery => + case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), + data @ CallCacheDiffWithRequest(queryA, _, None, None, _) + ) if queryA == originalQuery => stay() using data.copy(responseA = Option(WorkflowMetadataJson(responseJson))) // Response B - case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), data@CallCacheDiffWithRequest(_, queryB, None, None, _)) if queryB == originalQuery => + case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), + data @ CallCacheDiffWithRequest(_, queryB, None, None, _) + ) if queryB == originalQuery => stay() using data.copy(responseB = Option(WorkflowMetadataJson(responseJson))) // Second Response // Response A - case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), CallCacheDiffWithRequest(queryA, queryB, None, Some(responseB), replyTo)) if queryA == originalQuery => + case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), + CallCacheDiffWithRequest(queryA, queryB, None, Some(responseB), replyTo) + ) if queryA == originalQuery => buildDiffAndRespond(queryA, queryB, WorkflowMetadataJson(responseJson), responseB, replyTo) // Response B - case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), CallCacheDiffWithRequest(queryA, queryB, Some(responseA), None, replyTo)) if queryB == originalQuery => + case Event(SuccessfulMetadataJsonResponse(GetMetadataAction(originalQuery, _), responseJson), + CallCacheDiffWithRequest(queryA, queryB, Some(responseA), None, replyTo) + ) if queryB == originalQuery => buildDiffAndRespond(queryA, queryB, responseA, WorkflowMetadataJson(responseJson), replyTo) case Event(FailedMetadataJsonResponse(_, failure), data: CallCacheDiffWithRequest) => data.replyTo ! 
FailedCallCacheDiffResponse(failure) @@ -50,10 +58,11 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call stay() } - whenUnhandled { - case Event(oops, oopsData) => - log.error(s"Programmer Error: Unexpected event received by ${this.getClass.getSimpleName}: $oops / $oopsData (in state $stateName)") - stay() + whenUnhandled { case Event(oops, oopsData) => + log.error( + s"Programmer Error: Unexpected event received by ${this.getClass.getSimpleName}: $oops / $oopsData (in state $stateName)" + ) + stay() } @@ -61,17 +70,22 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call queryB: MetadataQuery, responseA: WorkflowMetadataJson, responseB: WorkflowMetadataJson, - replyTo: ActorRef) = { + replyTo: ActorRef + ) = { - def describeCallFromQuery(query: MetadataQuery): String = s"${query.workflowId} / ${query.jobKey.map(_.callFqn).getOrElse("<>")}:${query.jobKey.map(_.index.getOrElse(-1)).getOrElse("<>")}" + def describeCallFromQuery(query: MetadataQuery): String = + s"${query.workflowId} / ${query.jobKey.map(_.callFqn).getOrElse("<>")}:${query.jobKey.map(_.index.getOrElse(-1)).getOrElse("<>")}" - val callACachingMetadata = extractCallMetadata(queryA, responseA).contextualizeErrors(s"extract relevant metadata for call A (${describeCallFromQuery(queryA)})") - val callBCachingMetadata = extractCallMetadata(queryB, responseB).contextualizeErrors(s"extract relevant metadata for call B (${describeCallFromQuery(queryB)})") + val callACachingMetadata = extractCallMetadata(queryA, responseA).contextualizeErrors( + s"extract relevant metadata for call A (${describeCallFromQuery(queryA)})" + ) + val callBCachingMetadata = extractCallMetadata(queryB, responseB).contextualizeErrors( + s"extract relevant metadata for call B (${describeCallFromQuery(queryB)})" + ) val response = (callACachingMetadata, callBCachingMetadata) flatMapN { case (callA, callB) => - - val callADetails = extractCallDetails(queryA, callA) - val callBDetails = extractCallDetails(queryB, callB) + val callADetails = extractCallDetails(queryA, callA) + val callBDetails = extractCallDetails(queryB, callB) (callADetails, callBDetails) mapN { (cad, cbd) => val callAHashes = callA.callCachingMetadataJson.hashes @@ -79,8 +93,10 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call SuccessfulCallCacheDiffResponse(cad, cbd, calculateHashDifferential(callAHashes, callBHashes)) } - } valueOr { - e => FailedCallCacheDiffResponse(AggregatedMessageException("Failed to calculate diff for call A and call B", e.toList)) + } valueOr { e => + FailedCallCacheDiffResponse( + AggregatedMessageException("Failed to calculate diff for call A and call B", e.toList) + ) } replyTo ! 
response @@ -90,7 +106,6 @@ class CallCacheDiffActor(serviceRegistryActor: ActorRef) extends LoggingFSM[Call } } - object CallCacheDiffActor { final case class CachedCallNotFoundException(message: String) extends Exception { @@ -108,18 +123,26 @@ object CallCacheDiffActor { responseA: Option[WorkflowMetadataJson], responseB: Option[WorkflowMetadataJson], replyTo: ActorRef - ) extends CallCacheDiffActorData + ) extends CallCacheDiffActorData sealed abstract class CallCacheDiffActorResponse case class FailedCallCacheDiffResponse(reason: Throwable) extends CallCacheDiffActorResponse - final case class SuccessfulCallCacheDiffResponse(callA: CallDetails, callB: CallDetails, hashDifferential: List[HashDifference]) extends CallCacheDiffActorResponse - def props(serviceRegistryActor: ActorRef) = Props(new CallCacheDiffActor(serviceRegistryActor)).withDispatcher(EngineDispatcher) - - final case class CallDetails(executionStatus: String, allowResultReuse: Boolean, callFqn: String, jobIndex: Int, workflowId: String) + final case class SuccessfulCallCacheDiffResponse(callA: CallDetails, + callB: CallDetails, + hashDifferential: List[HashDifference] + ) extends CallCacheDiffActorResponse + def props(serviceRegistryActor: ActorRef) = + Props(new CallCacheDiffActor(serviceRegistryActor)).withDispatcher(EngineDispatcher) + + final case class CallDetails(executionStatus: String, + allowResultReuse: Boolean, + callFqn: String, + jobIndex: Int, + workflowId: String + ) final case class HashDifference(hashKey: String, callA: Option[String], callB: Option[String]) - /** * Create a Metadata query from a CallCacheDiffQueryCall */ @@ -135,15 +158,16 @@ object CallCacheDiffActor { // These simple case classes are just to help apply a little type safety to input and output types: final case class WorkflowMetadataJson(value: JsObject) extends AnyVal - final case class CallMetadataJson(rawValue: JsObject, jobKey: MetadataQueryJobKey, callCachingMetadataJson: CallCachingMetadataJson) + final case class CallMetadataJson(rawValue: JsObject, + jobKey: MetadataQueryJobKey, + callCachingMetadataJson: CallCachingMetadataJson + ) final case class CallCachingMetadataJson(rawValue: JsObject, hashes: Map[String, String]) - /* * Takes in the JsObject returned from a metadata query and filters out only the appropriate call's callCaching section */ - def extractCallMetadata(query: MetadataQuery, response: WorkflowMetadataJson): ErrorOr[CallMetadataJson] = { - + def extractCallMetadata(query: MetadataQuery, response: WorkflowMetadataJson): ErrorOr[CallMetadataJson] = for { // Sanity Checks: _ <- response.value.validateNonEmptyResponse() @@ -158,17 +182,18 @@ object CallCacheDiffActor { callCachingElement <- onlyShardElement.fieldAsObject(CallMetadataKeys.CallCaching) hashes <- extractHashes(callCachingElement) } yield CallMetadataJson(onlyShardElement, jobKey, CallCachingMetadataJson(callCachingElement, hashes)) - } def extractHashes(callCachingMetadataJson: JsObject): ErrorOr[Map[String, String]] = { - def processField(keyPrefix: String)(fieldValue: (String, JsValue)): ErrorOr[Map[String, String]] = fieldValue match { - case (key, hashString: JsString) => Map(keyPrefix + key -> hashString.value).validNel - case (key, subObject: JsObject) => extractHashEntries(s"$keyPrefix$key:", subObject) - case (key, jsArray: JsArray) => - val subObjectElements = jsArray.elements.zipWithIndex.map { case (element, index) => (s"[$index]", element) } - extractHashEntries(keyPrefix + key, JsObject(subObjectElements: _*)) - case (key, otherValue) => 
s"Cannot extract hashes for $key. Expected JsString, JsObject, or JsArray but got ${otherValue.getClass.getSimpleName} $otherValue".invalidNel - } + def processField(keyPrefix: String)(fieldValue: (String, JsValue)): ErrorOr[Map[String, String]] = + fieldValue match { + case (key, hashString: JsString) => Map(keyPrefix + key -> hashString.value).validNel + case (key, subObject: JsObject) => extractHashEntries(s"$keyPrefix$key:", subObject) + case (key, jsArray: JsArray) => + val subObjectElements = jsArray.elements.zipWithIndex.map { case (element, index) => (s"[$index]", element) } + extractHashEntries(keyPrefix + key, JsObject(subObjectElements: _*)) + case (key, otherValue) => + s"Cannot extract hashes for $key. Expected JsString, JsObject, or JsArray but got ${otherValue.getClass.getSimpleName} $otherValue".invalidNel + } def extractHashEntries(keyPrefix: String, jsObject: JsObject): ErrorOr[Map[String, String]] = { val traversed = jsObject.fields.toList.traverse(processField(keyPrefix)) @@ -215,10 +240,12 @@ object CallCacheDiffActor { def fieldAsBoolean(field: String): ErrorOr[JsBoolean] = jsObject.getField(field) flatMap { _.mapToJsBoolean } def checkFieldValue(field: String, expectation: String): ErrorOr[Unit] = jsObject.getField(field) flatMap { case v: JsValue if v.toString == expectation => ().validNel - case other => s"Unexpected value '${other.toString}' for metadata field '$field', should have been '$expectation'".invalidNel + case other => + s"Unexpected value '${other.toString}' for metadata field '$field', should have been '$expectation'".invalidNel } - def validateNonEmptyResponse(): ErrorOr[Unit] = if (jsObject.fields.nonEmpty) { ().validNel } else { + def validateNonEmptyResponse(): ErrorOr[Unit] = if (jsObject.fields.nonEmpty) { ().validNel } + else { "No metadata was found for that workflow/call/index combination. Check that the workflow ID is correct, that the call name is formatted like 'workflowname.callname' and that an index is provided if this was a scattered task. (NOTE: the default index is -1, i.e. 
non-scattered)".invalidNel } } @@ -231,18 +258,20 @@ object CallCacheDiffActor { attempt <- asObject.fieldAsNumber("attempt") } yield (attempt.value.intValue, asObject) - def foldFunction(accumulator: ErrorOr[(Int, JsObject)], nextElement: JsValue): ErrorOr[(Int, JsObject)] = { - (accumulator, extractAttemptAndObject(nextElement)) mapN { case ((previousHighestAttempt, previousJsObject), (nextAttempt, nextJsObject)) => - if (previousHighestAttempt > nextAttempt) { - (previousHighestAttempt, previousJsObject) - } else { - (nextAttempt, nextJsObject) - } + def foldFunction(accumulator: ErrorOr[(Int, JsObject)], nextElement: JsValue): ErrorOr[(Int, JsObject)] = + (accumulator, extractAttemptAndObject(nextElement)) mapN { + case ((previousHighestAttempt, previousJsObject), (nextAttempt, nextJsObject)) => + if (previousHighestAttempt > nextAttempt) { + (previousHighestAttempt, previousJsObject) + } else { + (nextAttempt, nextJsObject) + } } - } for { - attemptListNel <- NonEmptyList.fromList(jsArray.elements.toList).toErrorOr("Expected at least one attempt but found 0") + attemptListNel <- NonEmptyList + .fromList(jsArray.elements.toList) + .toErrorOr("Expected at least one attempt but found 0") highestAttempt <- attemptListNel.toList.foldLeft(extractAttemptAndObject(attemptListNel.head))(foldFunction) } yield highestAttempt._2 } @@ -251,23 +280,28 @@ object CallCacheDiffActor { implicit class EnhancedJsValue(val jsValue: JsValue) extends AnyVal { def mapToJsObject: ErrorOr[JsObject] = jsValue match { case obj: JsObject => obj.validNel - case other => s"Invalid value type. Expected JsObject but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel + case other => + s"Invalid value type. Expected JsObject but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel } def mapToJsArray: ErrorOr[JsArray] = jsValue match { case arr: JsArray => arr.validNel - case other => s"Invalid value type. Expected JsArray but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel + case other => + s"Invalid value type. Expected JsArray but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel } def mapToJsString: ErrorOr[JsString] = jsValue match { case str: JsString => str.validNel - case other => s"Invalid value type. Expected JsString but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel + case other => + s"Invalid value type. Expected JsString but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel } def mapToJsBoolean: ErrorOr[JsBoolean] = jsValue match { case boo: JsBoolean => boo.validNel - case other => s"Invalid value type. Expected JsBoolean but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel + case other => + s"Invalid value type. Expected JsBoolean but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel } def mapToJsNumber: ErrorOr[JsNumber] = jsValue match { case boo: JsNumber => boo.validNel - case other => s"Invalid value type. Expected JsNumber but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel + case other => + s"Invalid value type. 
Expected JsNumber but got ${other.getClass.getSimpleName}: ${other.prettyPrint}".invalidNel } } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorJsonFormatting.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorJsonFormatting.scala index 11ba8bfc3a4..8012c8951d8 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorJsonFormatting.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorJsonFormatting.scala @@ -1,6 +1,10 @@ package cromwell.engine.workflow.lifecycle.execution.callcaching -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffActor.{CallDetails, HashDifference, SuccessfulCallCacheDiffResponse} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffActor.{ + CallDetails, + HashDifference, + SuccessfulCallCacheDiffResponse +} import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport import org.apache.commons.lang3.NotImplementedException import spray.json._ @@ -13,14 +17,18 @@ object CallCacheDiffActorJsonFormatting extends SprayJsonSupport with DefaultJso implicit val hashDifferenceJsonFormatter = new RootJsonFormat[HashDifference] { override def write(hashDifference: HashDifference): JsValue = { def fromOption(opt: Option[String]) = opt.map(JsString.apply).getOrElse(JsNull) - JsObject(Map( - "hashKey" -> JsString(hashDifference.hashKey), - "callA" -> fromOption(hashDifference.callA), - "callB" -> fromOption(hashDifference.callB) - )) + JsObject( + Map( + "hashKey" -> JsString(hashDifference.hashKey), + "callA" -> fromOption(hashDifference.callA), + "callB" -> fromOption(hashDifference.callB) + ) + ) } override def read(json: JsValue): HashDifference = - throw new NotImplementedException("Programmer Error: No reader for HashDifferentials written. It was not expected to be required") + throw new NotImplementedException( + "Programmer Error: No reader for HashDifferentials written. 
It was not expected to be required" + ) } implicit val successfulResponseJsonFormatter = jsonFormat3(SuccessfulCallCacheDiffResponse) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffQueryParameter.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffQueryParameter.scala index ca094105988..6b32632a5a9 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffQueryParameter.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffQueryParameter.scala @@ -15,30 +15,29 @@ object CallCacheDiffQueryParameter { private def missingWorkflowError(attribute: String) = s"missing $attribute query parameter".invalidNel def fromParameters(parameters: Seq[(String, String)]): ErrorOr[CallCacheDiffQueryParameter] = { - def extractIndex(parameter: String): ErrorOr[Option[Int]] = { + def extractIndex(parameter: String): ErrorOr[Option[Int]] = parameters.find(_._1 == parameter) match { - case Some((_, value)) => Try(value.trim.toInt) match { - case Success(index) => Option(index).validNel - case Failure(f) => f.getMessage.invalidNel - } + case Some((_, value)) => + Try(value.trim.toInt) match { + case Success(index) => Option(index).validNel + case Failure(f) => f.getMessage.invalidNel + } case None => None.validNel } - } - def extractAttribute(parameter: String): ErrorOr[String] = { + def extractAttribute(parameter: String): ErrorOr[String] = parameters.find(_._1 == parameter) match { case Some((_, value)) => value.validNel case None => missingWorkflowError(parameter) } - } - + def validateWorkflowId(parameter: String): ErrorOr[WorkflowId] = for { workflowIdString <- extractAttribute(parameter) workflowId <- fromTry(Try(WorkflowId.fromString(workflowIdString.trim))) .leftMap(_.getMessage) .toValidatedNel[String, WorkflowId] } yield workflowId - + val workflowAValidation = validateWorkflowId("workflowA") val workflowBValidation = validateWorkflowId("workflowB") @@ -49,15 +48,16 @@ object CallCacheDiffQueryParameter { val indexBValidation: ErrorOr[Option[Int]] = extractIndex("indexB") (workflowAValidation, - callAValidation, - indexAValidation, - workflowBValidation, - callBValidation, - indexBValidation) mapN { (workflowA, callA, indexA, workflowB, callB, indexB) => - CallCacheDiffQueryParameter( - CallCacheDiffQueryCall(workflowA, callA, indexA), - CallCacheDiffQueryCall(workflowB, callB, indexB) - ) + callAValidation, + indexAValidation, + workflowBValidation, + callBValidation, + indexBValidation + ) mapN { (workflowA, callA, indexA, workflowB, callB, indexB) => + CallCacheDiffQueryParameter( + CallCacheDiffQueryCall(workflowA, callA, indexA), + CallCacheDiffQueryCall(workflowB, callB, indexB) + ) } } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActor.scala index 2329ad1331b..ff36d6deb2f 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActor.scala @@ -17,7 +17,6 @@ import wom.values._ import java.security.MessageDigest import javax.xml.bind.DatatypeConverter - /** * Actor responsible for calculating individual as well as aggregated hashes for a job. 
* First calculate the initial hashes (individual and aggregated), and send them to its parent @@ -42,7 +41,7 @@ class CallCacheHashingJobActor(jobDescriptor: BackendJobDescriptor, callCachingActivity: CallCachingActivity, callCachePathPrefixes: Option[CallCachePathPrefixes], fileHashBatchSize: Int - ) extends LoggingFSM[CallCacheHashingJobActorState, CallCacheHashingJobActorData] { +) extends LoggingFSM[CallCacheHashingJobActorState, CallCacheHashingJobActorData] { val fileHashingActor: ActorRef = makeFileHashingActor() @@ -103,9 +102,8 @@ class CallCacheHashingJobActor(jobDescriptor: BackendJobDescriptor, } // In its own function so it can be overridden in the test - private [callcaching] def addFileHash(hashResult: HashResult, data: CallCacheHashingJobActorData) = { + private[callcaching] def addFileHash(hashResult: HashResult, data: CallCacheHashingJobActorData) = data.withFileHash(hashResult) - } private def stopAndStay(fileHashResult: Option[FinalFileHashingResult]) = { fileHashResult foreach { context.parent ! _ } @@ -114,14 +112,15 @@ class CallCacheHashingJobActor(jobDescriptor: BackendJobDescriptor, stay() } - private def sendToCallCacheReadingJobActor(message: Any, data: CallCacheHashingJobActorData): Unit = { + private def sendToCallCacheReadingJobActor(message: Any, data: CallCacheHashingJobActorData): Unit = data.callCacheReadingJobActor foreach { _ ! message } - } private def initializeCCHJA(): Unit = { import cromwell.core.simpleton.WomValueSimpleton._ - val unqualifiedInputs = jobDescriptor.evaluatedTaskInputs map { case (declaration, value) => declaration.name -> value } + val unqualifiedInputs = jobDescriptor.evaluatedTaskInputs map { case (declaration, value) => + declaration.name -> value + } val inputSimpletons = unqualifiedInputs.simplifyForCaching val (fileInputSimpletons, nonFileInputSimpletons) = inputSimpletons partition { @@ -131,11 +130,12 @@ class CallCacheHashingJobActor(jobDescriptor: BackendJobDescriptor, val initialHashes = calculateInitialHashes(nonFileInputSimpletons, fileInputSimpletons) - val fileHashRequests = fileInputSimpletons collect { - case WomValueSimpleton(name, x: WomFile) => SingleFileHashRequest(jobDescriptor.key, HashKey(true, "input", s"File $name"), x, initializationData) + val fileHashRequests = fileInputSimpletons collect { case WomValueSimpleton(name, x: WomFile) => + SingleFileHashRequest(jobDescriptor.key, HashKey(true, "input", s"File $name"), x, initializationData) } - val hashingJobActorData = CallCacheHashingJobActorData(fileHashRequests.toList, callCacheReadingJobActor, fileHashBatchSize) + val hashingJobActorData = + CallCacheHashingJobActorData(fileHashRequests.toList, callCacheReadingJobActor, fileHashBatchSize) startWith(WaitingForHashFileRequest, hashingJobActorData) val aggregatedBaseHash = calculateHashAggregation(initialHashes, MessageDigest.getInstance("MD5")) @@ -149,39 +149,65 @@ class CallCacheHashingJobActor(jobDescriptor: BackendJobDescriptor, if (hashingJobActorData.callCacheReadingJobActor.isEmpty) self ! 
NextBatchOfFileHashesRequest } - private def calculateInitialHashes(nonFileInputs: Iterable[WomValueSimpleton], fileInputs: Iterable[WomValueSimpleton]): Set[HashResult] = { + private def calculateInitialHashes(nonFileInputs: Iterable[WomValueSimpleton], + fileInputs: Iterable[WomValueSimpleton] + ): Set[HashResult] = { - val commandTemplateHash = HashResult(HashKey("command template"), jobDescriptor.taskCall.callable.commandTemplateString(jobDescriptor.evaluatedTaskInputs).md5HashValue) + val commandTemplateHash = HashResult( + HashKey("command template"), + jobDescriptor.taskCall.callable.commandTemplateString(jobDescriptor.evaluatedTaskInputs).md5HashValue + ) val backendNameHash = HashResult(HashKey("backend name"), backendNameForCallCachingPurposes.md5HashValue) - val inputCountHash = HashResult(HashKey("input count"), (nonFileInputs.size + fileInputs.size).toString.md5HashValue) - val outputCountHash = HashResult(HashKey("output count"), jobDescriptor.taskCall.callable.outputs.size.toString.md5HashValue) - - val runtimeAttributeHashes = runtimeAttributeDefinitions map { definition => jobDescriptor.runtimeAttributes.get(definition.name) match { - case Some(_) if definition.name == RuntimeAttributesKeys.DockerKey && callCachingEligible.dockerHash.isDefined => - HashResult(HashKey(definition.usedInCallCaching, "runtime attribute", definition.name), callCachingEligible.dockerHash.get.md5HashValue) - case Some(womValue) => HashResult(HashKey(definition.usedInCallCaching, "runtime attribute", definition.name), womValue.valueString.md5HashValue) - case None => HashResult(HashKey(definition.usedInCallCaching, "runtime attribute", definition.name), UnspecifiedRuntimeAttributeHashValue) - }} - - val inputHashResults = nonFileInputs map { - case WomValueSimpleton(name, value) => - val womTypeHashKeyString = value.womType.toHashKeyString - log.debug("Hashing input expression as {} {}", womTypeHashKeyString, name) - HashResult(HashKey("input", s"$womTypeHashKeyString $name"), value.toWomString.md5HashValue) + val inputCountHash = + HashResult(HashKey("input count"), (nonFileInputs.size + fileInputs.size).toString.md5HashValue) + val outputCountHash = + HashResult(HashKey("output count"), jobDescriptor.taskCall.callable.outputs.size.toString.md5HashValue) + + val runtimeAttributeHashes = runtimeAttributeDefinitions map { definition => + jobDescriptor.runtimeAttributes.get(definition.name) match { + case Some(_) + if definition.name == RuntimeAttributesKeys.DockerKey && callCachingEligible.dockerHash.isDefined => + HashResult(HashKey(definition.usedInCallCaching, "runtime attribute", definition.name), + callCachingEligible.dockerHash.get.md5HashValue + ) + case Some(womValue) => + HashResult(HashKey(definition.usedInCallCaching, "runtime attribute", definition.name), + womValue.valueString.md5HashValue + ) + case None => + HashResult(HashKey(definition.usedInCallCaching, "runtime attribute", definition.name), + UnspecifiedRuntimeAttributeHashValue + ) + } + } + + val inputHashResults = nonFileInputs map { case WomValueSimpleton(name, value) => + val womTypeHashKeyString = value.womType.toHashKeyString + log.debug("Hashing input expression as {} {}", womTypeHashKeyString, name) + HashResult(HashKey("input", s"$womTypeHashKeyString $name"), value.toWomString.md5HashValue) } val outputExpressionHashResults = jobDescriptor.taskCall.callable.outputs map { output => val womTypeHashKeyString = output.womType.toHashKeyString val outputExpressionCacheString = output.expression.cacheString - 
log.debug("Hashing output expression type as '{}' and value as '{}'", womTypeHashKeyString, outputExpressionCacheString) - HashResult(HashKey("output expression", s"$womTypeHashKeyString ${output.name}"), outputExpressionCacheString.md5HashValue) + log.debug("Hashing output expression type as '{}' and value as '{}'", + womTypeHashKeyString, + outputExpressionCacheString + ) + HashResult(HashKey("output expression", s"$womTypeHashKeyString ${output.name}"), + outputExpressionCacheString.md5HashValue + ) } // Build these all together for the final set of initial hashes: - Set(commandTemplateHash, backendNameHash, inputCountHash, outputCountHash) ++ runtimeAttributeHashes ++ inputHashResults ++ outputExpressionHashResults + Set(commandTemplateHash, + backendNameHash, + inputCountHash, + outputCountHash + ) ++ runtimeAttributeHashes ++ inputHashResults ++ outputExpressionHashResults } - private [callcaching] def makeFileHashingActor() = { + private[callcaching] def makeFileHashingActor() = { val fileHashingActorName = s"FileHashingActor_for_${jobDescriptor.key.tag}" context.actorOf(fileHashingActorProps, fileHashingActorName) } @@ -199,18 +225,20 @@ object CallCacheHashingJobActor { callCachingActivity: CallCachingActivity, callCachePathPrefixes: Option[CallCachePathPrefixes], fileHashBatchSize: Int - ): Props = Props(new CallCacheHashingJobActor( - jobDescriptor, - callCacheReadingJobActor, - initializationData, - runtimeAttributeDefinitions, - backendNameForCallCachingPurposes, - fileHashingActorProps, - callCachingEligible, - callCachingActivity, - callCachePathPrefixes, - fileHashBatchSize - )).withDispatcher(EngineDispatcher) + ): Props = Props( + new CallCacheHashingJobActor( + jobDescriptor, + callCacheReadingJobActor, + initializationData, + runtimeAttributeDefinitions, + backendNameForCallCachingPurposes, + fileHashingActorProps, + callCachingEligible, + callCachingActivity, + callCachePathPrefixes, + fileHashBatchSize + ) + ).withDispatcher(EngineDispatcher) sealed trait CallCacheHashingJobActorState case object WaitingForHashFileRequest extends CallCacheHashingJobActorState @@ -227,22 +255,29 @@ object CallCacheHashingJobActor { val sortedHashes = hashes.toList .filter(_.hashKey.checkForHitOrMiss) .sortBy(_.hashKey.key) - .map({ case HashResult(hashKey, HashValue(hashValue)) => hashKey.key + hashValue }) + .map { case HashResult(hashKey, HashValue(hashValue)) => hashKey.key + hashValue } .map(_.getBytes) sortedHashes foreach messageDigest.update DatatypeConverter.printHexBinary(messageDigest.digest()) } object CallCacheHashingJobActorData { - def apply(fileHashRequestsRemaining: List[SingleFileHashRequest], callCacheReadingJobActor: Option[ActorRef], batchSize: Int): CallCacheHashingJobActorData = { - new CallCacheHashingJobActorData(fileHashRequestsRemaining.grouped(batchSize).toList, List.empty, callCacheReadingJobActor, batchSize) - } + def apply(fileHashRequestsRemaining: List[SingleFileHashRequest], + callCacheReadingJobActor: Option[ActorRef], + batchSize: Int + ): CallCacheHashingJobActorData = + new CallCacheHashingJobActorData(fileHashRequestsRemaining.grouped(batchSize).toList, + List.empty, + callCacheReadingJobActor, + batchSize + ) } final case class CallCacheHashingJobActorData(fileHashRequestsRemaining: List[List[SingleFileHashRequest]], fileHashResults: List[HashResult], callCacheReadingJobActor: Option[ActorRef], - batchSize: Int) { + batchSize: Int + ) { private val md5Digest = MessageDigest.getInstance("MD5") /** @@ -259,7 +294,14 @@ object 
CallCacheHashingJobActor { val updatedBatch = lastBatch.filterNot(_.hashKey == hashResult.hashKey) // If we're processing the last batch, and it's now empty, then we're done // In that case compute the aggregated hash and send that - if (updatedBatch.isEmpty) (List.empty, Option(CompleteFileHashingResult(newFileHashResults.toSet, calculateHashAggregation(newFileHashResults, md5Digest)))) + if (updatedBatch.isEmpty) + (List.empty, + Option( + CompleteFileHashingResult(newFileHashResults.toSet, + calculateHashAggregation(newFileHashResults, md5Digest) + ) + ) + ) // Otherwise just return the updated batch and no message else (List(updatedBatch), None) case currentBatch :: otherBatches => @@ -275,7 +317,9 @@ object CallCacheHashingJobActor { else (updatedBatch :: otherBatches, None) } - (this.copy(fileHashRequestsRemaining = updatedRequestsList, fileHashResults = newFileHashResults), responseMessage) + (this.copy(fileHashRequestsRemaining = updatedRequestsList, fileHashResults = newFileHashResults), + responseMessage + ) } } @@ -285,14 +329,18 @@ object CallCacheHashingJobActor { case object NextBatchOfFileHashesRequest extends CCHJARequest sealed trait CCHJAResponse - case class InitialHashingResult(initialHashes: Set[HashResult], aggregatedBaseHash: String, cacheHitHints: List[CacheHitHint] = List.empty) extends CCHJAResponse + case class InitialHashingResult(initialHashes: Set[HashResult], + aggregatedBaseHash: String, + cacheHitHints: List[CacheHitHint] = List.empty + ) extends CCHJAResponse // File Hashing responses sealed trait CCHJAFileHashResponse extends CCHJAResponse case class PartialFileHashingResult(initialHashes: NonEmptyList[HashResult]) extends CCHJAFileHashResponse sealed trait FinalFileHashingResult extends CCHJAFileHashResponse - case class CompleteFileHashingResult(fileHashes: Set[HashResult], aggregatedFileHash: String) extends FinalFileHashingResult + case class CompleteFileHashingResult(fileHashes: Set[HashResult], aggregatedFileHash: String) + extends FinalFileHashingResult case object NoFileHashesResult extends FinalFileHashingResult implicit class StringMd5er(val unhashedString: String) extends AnyVal { @@ -303,11 +351,11 @@ object CallCacheHashingJobActor { } implicit class WomTypeHashString(val womType: WomType) extends AnyVal { - def toHashKeyString: String = { + def toHashKeyString: String = womType match { case c: WomCompositeType => - val fieldTypes = c.typeMap map { - case (key, value) => s"$key -> ${value.stableName}" + val fieldTypes = c.typeMap map { case (key, value) => + s"$key -> ${value.stableName}" } "CompositeType_digest_" + fieldTypes.mkString("\n").md5Sum case a: WomArrayType => @@ -321,6 +369,5 @@ object CallCacheHashingJobActor { s"Coproduct($hashStrings)" case o => o.stableName } - } } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheInvalidateActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheInvalidateActor.scala index 16bb0fb9caa..f61a15cf3de 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheInvalidateActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheInvalidateActor.scala @@ -23,18 +23,19 @@ class CallCacheInvalidateActor(callCache: CallCache, cacheId: CallCachingEntryId context.stop(self) } - override def receive: Receive = { - case any => log.error("Unexpected message to InvalidateCallCacheActor: " + any) + override def receive: 
Receive = { case any => + log.error("Unexpected message to InvalidateCallCacheActor: " + any) } } object CallCacheInvalidateActor { - def props(callCache: CallCache, cacheId: CallCachingEntryId) = { - Props(new CallCacheInvalidateActor(callCache: CallCache, cacheId: CallCachingEntryId)).withDispatcher(EngineDispatcher) - } + def props(callCache: CallCache, cacheId: CallCachingEntryId) = + Props(new CallCacheInvalidateActor(callCache: CallCache, cacheId: CallCachingEntryId)) + .withDispatcher(EngineDispatcher) } sealed trait CallCacheInvalidatedResponse -case class CallCacheInvalidatedSuccess(cacheId: CallCachingEntryId, maybeEntry: Option[CallCachingEntry]) extends CallCacheInvalidatedResponse +case class CallCacheInvalidatedSuccess(cacheId: CallCachingEntryId, maybeEntry: Option[CallCachingEntry]) + extends CallCacheInvalidatedResponse case object CallCacheInvalidationUnnecessary extends CallCacheInvalidatedResponse case class CallCacheInvalidatedFailure(cacheId: CallCachingEntryId, t: Throwable) extends CallCacheInvalidatedResponse diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala index 16c4ce39ed7..63cc33d76b5 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala @@ -20,10 +20,8 @@ import cromwell.services.CallCaching.CallCachingEntryId * * Would be nice if instead there was a pull- rather than push-based mailbox but I can't find one... */ -class CallCacheReadActor(cache: CallCache, - override val serviceRegistryActor: ActorRef, - override val threshold: Int) - extends EnhancedThrottlerActor[CommandAndReplyTo[CallCacheReadActorRequest]] +class CallCacheReadActor(cache: CallCache, override val serviceRegistryActor: ActorRef, override val threshold: Int) + extends EnhancedThrottlerActor[CommandAndReplyTo[CallCacheReadActorRequest]] with ActorLogging { override def routed = true override def processHead(request: CommandAndReplyTo[CallCacheReadActorRequest]): Future[Int] = instrumentedProcess { @@ -58,29 +56,34 @@ class CallCacheReadActor(cache: CallCache, override def receive: Receive = enhancedReceive.orElse(super.receive) override protected def instrumentationPath = NonEmptyList.of("callcaching", "read") override protected def instrumentationPrefix = InstrumentationPrefixes.JobPrefix - override def commandToData(snd: ActorRef) = { - case request: CallCacheReadActorRequest => CommandAndReplyTo(request, snd) + override def commandToData(snd: ActorRef) = { case request: CallCacheReadActorRequest => + CommandAndReplyTo(request, snd) } } object CallCacheReadActor { - def props(callCache: CallCache, serviceRegistryActor: ActorRef): Props = { - Props(new CallCacheReadActor(callCache, serviceRegistryActor, LoadConfig.CallCacheReadThreshold)).withDispatcher(EngineDispatcher) - } + def props(callCache: CallCache, serviceRegistryActor: ActorRef): Props = + Props(new CallCacheReadActor(callCache, serviceRegistryActor, LoadConfig.CallCacheReadThreshold)) + .withDispatcher(EngineDispatcher) private[CallCacheReadActor] case class RequestTuple(requester: ActorRef, request: CallCacheReadActorRequest) object AggregatedCallHashes { - def apply(baseAggregatedHash: String, inputFilesAggregatedHash: String) = { + def apply(baseAggregatedHash: String, 
inputFilesAggregatedHash: String) = new AggregatedCallHashes(baseAggregatedHash, Option(inputFilesAggregatedHash)) - } } case class AggregatedCallHashes(baseAggregatedHash: String, inputFilesAggregatedHash: Option[String]) sealed trait CallCacheReadActorRequest - final case class CacheLookupRequest(aggregatedCallHashes: AggregatedCallHashes, excludedIds: Set[CallCachingEntryId], prefixesHint: Option[CallCachePathPrefixes]) extends CallCacheReadActorRequest - final case class HasMatchingInitialHashLookup(aggregatedTaskHash: String, cacheHitHints: List[CacheHitHint] = List.empty) extends CallCacheReadActorRequest - final case class CallCacheEntryForCall(workflowId: WorkflowId, jobKey: BackendJobDescriptorKey) extends CallCacheReadActorRequest + final case class CacheLookupRequest(aggregatedCallHashes: AggregatedCallHashes, + excludedIds: Set[CallCachingEntryId], + prefixesHint: Option[CallCachePathPrefixes] + ) extends CallCacheReadActorRequest + final case class HasMatchingInitialHashLookup(aggregatedTaskHash: String, + cacheHitHints: List[CacheHitHint] = List.empty + ) extends CallCacheReadActorRequest + final case class CallCacheEntryForCall(workflowId: WorkflowId, jobKey: BackendJobDescriptorKey) + extends CallCacheReadActorRequest sealed trait CallCacheReadActorResponse // Responses on whether or not there is at least one matching entry (can for initial matches of file matches) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala index 953da95042f..dee9dd2aeea 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala @@ -4,7 +4,12 @@ import akka.actor.{ActorRef, LoggingFSM, Props} import cromwell.core.Dispatcher.EngineDispatcher import cromwell.core.callcaching.HashingFailedMessage import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCache.CallCachePathPrefixes -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{CompleteFileHashingResult, InitialHashingResult, NextBatchOfFileHashesRequest, NoFileHashesResult} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{ + CompleteFileHashingResult, + InitialHashingResult, + NextBatchOfFileHashesRequest, + NoFileHashesResult +} import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{CacheHit, CacheMiss, HashError} @@ -24,16 +29,16 @@ import cromwell.services.CallCaching.CallCachingEntryId * Sends the response to its parent. * In case of a CacheHit, stays alive in case using the hit fails and it needs to fetch the next one. Otherwise just dies. 
*/ -class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Option[CallCachePathPrefixes]) extends LoggingFSM[CallCacheReadingJobActorState, CCRJAData] { - +class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Option[CallCachePathPrefixes]) + extends LoggingFSM[CallCacheReadingJobActorState, CCRJAData] { + startWith(WaitingForInitialHash, CCRJANoData) - - when(WaitingForInitialHash) { - case Event(InitialHashingResult(_, aggregatedBaseHash, hints), CCRJANoData) => - callCacheReadActor ! HasMatchingInitialHashLookup(aggregatedBaseHash, hints) - goto(WaitingForHashCheck) using CCRJAWithData(sender(), aggregatedBaseHash, fileHash = None, seenCaches = Set.empty) + + when(WaitingForInitialHash) { case Event(InitialHashingResult(_, aggregatedBaseHash, hints), CCRJANoData) => + callCacheReadActor ! HasMatchingInitialHashLookup(aggregatedBaseHash, hints) + goto(WaitingForHashCheck) using CCRJAWithData(sender(), aggregatedBaseHash, fileHash = None, seenCaches = Set.empty) } - + when(WaitingForHashCheck) { case Event(HasMatchingEntries, CCRJAWithData(hashingActor, _, _, _)) => hashingActor ! NextBatchOfFileHashesRequest @@ -41,16 +46,22 @@ class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Optio case Event(NoMatchingEntries, _) => cacheMiss } - + when(WaitingForFileHashes) { case Event(CompleteFileHashingResult(_, aggregatedFileHash), data: CCRJAWithData) => - callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, aggregatedFileHash), data.seenCaches, prefixesHint) + callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, aggregatedFileHash), + data.seenCaches, + prefixesHint + ) goto(WaitingForCacheHitOrMiss) using data.withFileHash(aggregatedFileHash) case Event(NoFileHashesResult, data: CCRJAWithData) => - callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, None), data.seenCaches, prefixesHint) + callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, None), + data.seenCaches, + prefixesHint + ) goto(WaitingForCacheHitOrMiss) } - + when(WaitingForCacheHitOrMiss) { case Event(CacheLookupNextHit(hit), data: CCRJAWithData) => context.parent ! CacheHit(hit) @@ -58,7 +69,10 @@ class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Optio case Event(CacheLookupNoHit, _) => cacheMiss case Event(NextHit, CCRJAWithData(_, aggregatedInitialHash, aggregatedFileHash, seenCaches)) => - callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), seenCaches, prefixesHint) + callCacheReadActor ! 
CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), + seenCaches, + prefixesHint + ) stay() } @@ -80,9 +94,8 @@ class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Optio object CallCacheReadingJobActor { - def props(callCacheReadActor: ActorRef, prefixesHint: Option[CallCachePathPrefixes]) = { + def props(callCacheReadActor: ActorRef, prefixesHint: Option[CallCachePathPrefixes]) = Props(new CallCacheReadingJobActor(callCacheReadActor, prefixesHint)).withDispatcher(EngineDispatcher) - } sealed trait CallCacheReadingJobActorState case object WaitingForInitialHash extends CallCacheReadingJobActorState @@ -92,7 +105,11 @@ object CallCacheReadingJobActor { sealed trait CCRJAData case object CCRJANoData extends CCRJAData - case class CCRJAWithData(hashingActor: ActorRef, initialHash: String, fileHash: Option[String], seenCaches: Set[CallCachingEntryId]) extends CCRJAData { + case class CCRJAWithData(hashingActor: ActorRef, + initialHash: String, + fileHash: Option[String], + seenCaches: Set[CallCachingEntryId] + ) extends CCRJAData { def withSeenCache(id: CallCachingEntryId): CCRJAWithData = this.copy(seenCaches = seenCaches + id) def withFileHash(aggregatedFileHash: String): CCRJAWithData = this.copy(fileHash = Option(aggregatedFileHash)) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheWriteActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheWriteActor.scala index 06d48fcdea0..012f2b0a7c0 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheWriteActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheWriteActor.scala @@ -16,18 +16,22 @@ import scala.concurrent.duration._ import scala.language.postfixOps case class CallCacheWriteActor(callCache: CallCache, serviceRegistryActor: ActorRef, threshold: Int) - extends EnhancedBatchActor[CommandAndReplyTo[SaveCallCacheHashes]]( - CallCacheWriteActor.dbFlushRate, - CallCacheWriteActor.dbBatchSize) { + extends EnhancedBatchActor[CommandAndReplyTo[SaveCallCacheHashes]](CallCacheWriteActor.dbFlushRate, + CallCacheWriteActor.dbBatchSize + ) { override protected def process(data: NonEmptyVector[CommandAndReplyTo[SaveCallCacheHashes]]) = instrumentedProcess { log.debug("Flushing {} call cache hashes sets to the DB", data.length) // Collect all the bundles of hashes that should be written and all the senders which should be informed of // success or failure. - val (bundles, replyTos) = data.toList.foldMap { case CommandAndReplyTo(s: SaveCallCacheHashes, r: ActorRef) => (List(s.bundle), List(r)) } + val (bundles, replyTos) = data.toList.foldMap { case CommandAndReplyTo(s: SaveCallCacheHashes, r: ActorRef) => + (List(s.bundle), List(r)) + } if (bundles.nonEmpty) { - val futureMessage = callCache.addToCache(bundles, batchSize) map { _ => CallCacheWriteSuccess } recover { case t => CallCacheWriteFailure(t) } + val futureMessage = callCache.addToCache(bundles, batchSize) map { _ => CallCacheWriteSuccess } recover { + case t => CallCacheWriteFailure(t) + } futureMessage map { message => replyTos foreach { _ ! 
message } } @@ -46,9 +50,9 @@ case class CallCacheWriteActor(callCache: CallCache, serviceRegistryActor: Actor } object CallCacheWriteActor { - def props(callCache: CallCache, registryActor: ActorRef): Props = { - Props(CallCacheWriteActor(callCache, registryActor, LoadConfig.CallCacheWriteThreshold)).withDispatcher(EngineDispatcher) - } + def props(callCache: CallCache, registryActor: ActorRef): Props = + Props(CallCacheWriteActor(callCache, registryActor, LoadConfig.CallCacheWriteThreshold)) + .withDispatcher(EngineDispatcher) case class SaveCallCacheHashes(bundle: CallCacheHashBundle) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActor.scala index 698f1d20d4d..d1d5a9a6db0 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActor.scala @@ -8,7 +8,12 @@ import cromwell.core.callcaching._ import cromwell.core.logging.JobLogging import cromwell.engine.workflow.lifecycle.execution.CallMetadataHelper import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCache.CallCachePathPrefixes -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{CompleteFileHashingResult, FinalFileHashingResult, InitialHashingResult, NoFileHashesResult} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{ + CompleteFileHashingResult, + FinalFileHashingResult, + InitialHashingResult, + NoFileHashesResult +} import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor.NextHit import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor._ import cromwell.services.CallCaching.CallCachingEntryId @@ -31,7 +36,11 @@ class EngineJobHashingActor(receiver: ActorRef, activity: CallCachingActivity, callCachingEligible: CallCachingEligible, callCachePathPrefixes: Option[CallCachePathPrefixes], - fileHashBatchSize: Int) extends Actor with ActorLogging with JobLogging with CallMetadataHelper { + fileHashBatchSize: Int +) extends Actor + with ActorLogging + with JobLogging + with CallMetadataHelper { override val jobTag = jobDescriptor.key.tag val workflowId = jobDescriptor.workflowDescriptor.id @@ -39,25 +48,28 @@ class EngineJobHashingActor(receiver: ActorRef, override val rootWorkflowIdForLogging = jobDescriptor.workflowDescriptor.rootWorkflowId override val workflowIdForCallMetadata: WorkflowId = workflowId - private [callcaching] var initialHash: Option[InitialHashingResult] = None + private[callcaching] var initialHash: Option[InitialHashingResult] = None - private [callcaching] val callCacheReadingJobActor = if (activity.readFromCache) { + private[callcaching] val callCacheReadingJobActor = if (activity.readFromCache) { Option(context.actorOf(callCacheReadingJobActorProps, s"CCReadingJobActor-${workflowId.shortString}-$jobTag")) } else None override def preStart(): Unit = { - context.actorOf(CallCacheHashingJobActor.props( - jobDescriptor, - callCacheReadingJobActor, - initializationData, - runtimeAttributeDefinitions, - backendNameForCallCachingPurposes, - fileHashingActorProps, - callCachingEligible, - activity, - callCachePathPrefixes, - fileHashBatchSize - ), s"CCHashingJobActor-${workflowId.shortString}-$jobTag") + context.actorOf( + 
CallCacheHashingJobActor.props( + jobDescriptor, + callCacheReadingJobActor, + initializationData, + runtimeAttributeDefinitions, + backendNameForCallCachingPurposes, + fileHashingActorProps, + callCachingEligible, + activity, + callCachePathPrefixes, + fileHashBatchSize + ), + s"CCHashingJobActor-${workflowId.shortString}-$jobTag" + ) super.preStart() } @@ -81,7 +93,10 @@ class EngineJobHashingActor(receiver: ActorRef, private def publishHashFailure(failure: Throwable) = { import cromwell.services.metadata.MetadataService._ - val failureAsEvents = throwableToMetadataEvents(metadataKeyForCall(jobDescriptor.key, CallMetadataKeys.CallCachingKeys.HashFailuresKey), failure) + val failureAsEvents = throwableToMetadataEvents( + metadataKeyForCall(jobDescriptor.key, CallMetadataKeys.CallCachingKeys.HashFailuresKey), + failure + ) serviceRegistryActor ! PutMetadataAction(failureAsEvents) } @@ -125,7 +140,10 @@ object EngineJobHashingActor { case class CacheHit(cacheResultId: CallCachingEntryId) extends EJHAResponse case class HashError(reason: Throwable) extends EJHAResponse case class FileHashes(hashes: Set[HashResult], aggregatedHash: String) - case class CallCacheHashes(initialHashes: Set[HashResult], aggregatedInitialHash: String, fileHashes: Option[FileHashes]) extends EJHAResponse { + case class CallCacheHashes(initialHashes: Set[HashResult], + aggregatedInitialHash: String, + fileHashes: Option[FileHashes] + ) extends EJHAResponse { val hashes = initialHashes ++ fileHashes.map(_.hashes).getOrElse(Set.empty) def aggregatedHashString: String = { val file = fileHashes match { @@ -147,17 +165,21 @@ object EngineJobHashingActor { activity: CallCachingActivity, callCachingEligible: CallCachingEligible, callCachePathPrefixes: Option[CallCachePathPrefixes], - fileHashBatchSize: Int): Props = Props(new EngineJobHashingActor( - receiver = receiver, - serviceRegistryActor = serviceRegistryActor, - jobDescriptor = jobDescriptor, - initializationData = initializationData, - fileHashingActorProps = fileHashingActorProps, - callCacheReadingJobActorProps = callCacheReadingJobActorProps, - runtimeAttributeDefinitions = runtimeAttributeDefinitions, - backendNameForCallCachingPurposes = backendNameForCallCachingPurposes, - activity = activity, - callCachingEligible = callCachingEligible, - callCachePathPrefixes = callCachePathPrefixes, - fileHashBatchSize = fileHashBatchSize)).withDispatcher(EngineDispatcher) + fileHashBatchSize: Int + ): Props = Props( + new EngineJobHashingActor( + receiver = receiver, + serviceRegistryActor = serviceRegistryActor, + jobDescriptor = jobDescriptor, + initializationData = initializationData, + fileHashingActorProps = fileHashingActorProps, + callCacheReadingJobActorProps = callCacheReadingJobActorProps, + runtimeAttributeDefinitions = runtimeAttributeDefinitions, + backendNameForCallCachingPurposes = backendNameForCallCachingPurposes, + activity = activity, + callCachingEligible = callCachingEligible, + callCachePathPrefixes = callCachePathPrefixes, + fileHashBatchSize = fileHashBatchSize + ) + ).withDispatcher(EngineDispatcher) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/FetchCachedResultsActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/FetchCachedResultsActor.scala index 89745e6523d..5d30adf9ce0 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/FetchCachedResultsActor.scala +++ 
b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/FetchCachedResultsActor.scala @@ -5,7 +5,10 @@ import cromwell.Simpletons._ import cromwell.core.Dispatcher.EngineDispatcher import cromwell.core.simpleton.WomValueSimpleton import cromwell.database.sql.SqlConverters._ -import cromwell.engine.workflow.lifecycle.execution.callcaching.FetchCachedResultsActor.{CachedOutputLookupFailed, CachedOutputLookupSucceeded} +import cromwell.engine.workflow.lifecycle.execution.callcaching.FetchCachedResultsActor.{ + CachedOutputLookupFailed, + CachedOutputLookupSucceeded +} import cromwell.services.CallCaching.CallCachingEntryId import scala.concurrent.ExecutionContext @@ -16,14 +19,19 @@ object FetchCachedResultsActor { Props(new FetchCachedResultsActor(callCachingEntryId, replyTo, callCache)).withDispatcher(EngineDispatcher) sealed trait CachedResultResponse - case class CachedOutputLookupFailed(callCachingEntryId: CallCachingEntryId, failure: Throwable) extends CachedResultResponse - case class CachedOutputLookupSucceeded(simpletons: Seq[WomValueSimpleton], callOutputFiles: Map[String,String], - returnCode: Option[Int], cacheHit: CallCachingEntryId, cacheHitDetails: String) extends CachedResultResponse + case class CachedOutputLookupFailed(callCachingEntryId: CallCachingEntryId, failure: Throwable) + extends CachedResultResponse + case class CachedOutputLookupSucceeded(simpletons: Seq[WomValueSimpleton], + callOutputFiles: Map[String, String], + returnCode: Option[Int], + cacheHit: CallCachingEntryId, + cacheHitDetails: String + ) extends CachedResultResponse } - class FetchCachedResultsActor(cacheResultId: CallCachingEntryId, replyTo: ActorRef, callCache: CallCache) - extends Actor with ActorLogging { + extends Actor + with ActorLogging { { implicit val ec: ExecutionContext = context.dispatcher @@ -36,12 +44,16 @@ class FetchCachedResultsActor(cacheResultId: CallCachingEntryId, replyTo: ActorR } val sourceCacheDetails = Seq(result.callCachingEntry.workflowExecutionUuid, - result.callCachingEntry.callFullyQualifiedName, - result.callCachingEntry.jobIndex.toString).mkString(":") + result.callCachingEntry.callFullyQualifiedName, + result.callCachingEntry.jobIndex.toString + ).mkString(":") - CachedOutputLookupSucceeded(simpletons, jobDetritusFiles.toMap, - result.callCachingEntry.returnCode, - cacheResultId, sourceCacheDetails) + CachedOutputLookupSucceeded(simpletons, + jobDetritusFiles.toMap, + result.callCachingEntry.returnCode, + cacheResultId, + sourceCacheDetails + ) case None => val reason = new RuntimeException(s"Cache hit vanished between discovery and retrieval: $cacheResultId") CachedOutputLookupFailed(cacheResultId, reason) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/package.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/package.scala index 1913e64c9a9..917b559693f 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/package.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/package.scala @@ -1,6 +1,3 @@ package cromwell.engine.workflow.lifecycle.execution -package object callcaching { - - -} +package object callcaching {} diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala index 4423de202cc..b6db1f05fcd 100644 --- 
a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala @@ -3,7 +3,13 @@ package cromwell.engine.workflow.lifecycle.execution.job import akka.actor.SupervisorStrategy.{Escalate, Stop} import akka.actor.{ActorInitializationException, ActorRef, LoggingFSM, OneForOneStrategy, Props} import cats.data.NonEmptyList -import cromwell.backend.BackendCacheHitCopyingActor.{CacheCopyFailure, CopyOutputsCommand, CopyingOutputsFailedResponse, CopyAttemptError, BlacklistSkip} +import cromwell.backend.BackendCacheHitCopyingActor.{ + BlacklistSkip, + CacheCopyFailure, + CopyAttemptError, + CopyingOutputsFailedResponse, + CopyOutputsCommand +} import cromwell.backend.BackendJobExecutionActor._ import cromwell.backend.BackendLifecycleActor.AbortJobCommand import cromwell.backend.MetricableCacheCopyErrorCategory.MetricableCacheCopyErrorCategory @@ -27,10 +33,16 @@ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadAct import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor.NextHit import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheWriteActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor._ -import cromwell.engine.workflow.lifecycle.execution.callcaching.FetchCachedResultsActor.{CachedOutputLookupFailed, CachedOutputLookupSucceeded} +import cromwell.engine.workflow.lifecycle.execution.callcaching.FetchCachedResultsActor.{ + CachedOutputLookupFailed, + CachedOutputLookupSucceeded +} import cromwell.engine.workflow.lifecycle.execution.callcaching._ import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor._ -import cromwell.engine.workflow.lifecycle.execution.job.preparation.CallPreparation.{BackendJobPreparationSucceeded, CallPreparationFailed} +import cromwell.engine.workflow.lifecycle.execution.job.preparation.CallPreparation.{ + BackendJobPreparationSucceeded, + CallPreparationFailed +} import cromwell.engine.workflow.lifecycle.execution.job.preparation.{CallPreparation, JobPreparationActor} import cromwell.engine.workflow.lifecycle.execution.stores.ValueStore import cromwell.engine.workflow.lifecycle.{EngineLifecycleActorAbortCommand, TimedFSM} @@ -63,12 +75,13 @@ class EngineJobExecutionActor(replyTo: ActorRef, jobExecutionTokenDispenserActor: ActorRef, backendSingletonActor: Option[ActorRef], command: BackendJobExecutionActorCommand, - callCachingParameters: CallCachingParameters) extends LoggingFSM[EngineJobExecutionActorState, EJEAData] - with WorkflowLogging - with CallMetadataHelper - with JobInstrumentation - with CromwellInstrumentation - with TimedFSM[EngineJobExecutionActorState] { + callCachingParameters: CallCachingParameters +) extends LoggingFSM[EngineJobExecutionActorState, EJEAData] + with WorkflowLogging + with CallMetadataHelper + with JobInstrumentation + with CromwellInstrumentation + with TimedFSM[EngineJobExecutionActorState] { override val workflowIdForLogging = workflowDescriptor.possiblyNotRootWorkflowId override val rootWorkflowIdForLogging = workflowDescriptor.rootWorkflowId @@ -85,21 +98,21 @@ class EngineJobExecutionActor(replyTo: ActorRef, super.supervisorStrategy.decider.applyOrElse(t, (_: Any) => Escalate) } - val jobTag = s"${workflowIdForLogging.shortString}:${jobDescriptorKey.call.fullyQualifiedName}:${jobDescriptorKey.index.fromIndex}:${jobDescriptorKey.attempt}" + val jobTag = + 
s"${workflowIdForLogging.shortString}:${jobDescriptorKey.call.fullyQualifiedName}:${jobDescriptorKey.index.fromIndex}:${jobDescriptorKey.attempt}" val tag = s"EJEA_$jobTag" - //noinspection ActorMutableStateInspection + // noinspection ActorMutableStateInspection // There's no need to check for a cache hit again if we got preempted, or if there's no result copying actor defined // NB: this can also change (e.g. if we have a HashError we just force this to CallCachingOff) - private[execution] var effectiveCallCachingMode = { + private[execution] var effectiveCallCachingMode = if (backendLifecycleActorFactory.fileHashingActorProps.isEmpty) CallCachingOff else if (jobDescriptorKey.node.callable.meta.get("volatile").contains(MetaValueElementBoolean(true))) CallCachingOff else if (backendLifecycleActorFactory.cacheHitCopyingActorProps.isEmpty || jobDescriptorKey.attempt > 1) { callCachingParameters.mode.withoutRead } else callCachingParameters.mode - } - //noinspection ActorMutableStateInspection + // noinspection ActorMutableStateInspection // If this actor is currently holding a job token, the token dispenser to which the token should be returned. private var currentTokenDispenser: Option[ActorRef] = None @@ -128,48 +141,46 @@ class EngineJobExecutionActor(replyTo: ActorRef, implicit val ec: ExecutionContext = context.dispatcher - override def preStart() = { + override def preStart() = log.debug(s"$tag: $effectiveCallCachingKey: $effectiveCallCachingMode") - } startWith(Pending, NoData) - //noinspection ActorMutableStateInspection + // noinspection ActorMutableStateInspection private var eventList: Seq[ExecutionEvent] = Seq(ExecutionEvent(stateName.toString)) - override def onTimedTransition(from: EngineJobExecutionActorState, to: EngineJobExecutionActorState, duration: FiniteDuration) = { + override def onTimedTransition(from: EngineJobExecutionActorState, + to: EngineJobExecutionActorState, + duration: FiniteDuration + ) = // Send to StatsD recordExecutionStepTiming(from.toString, duration) - } // When Pending, the FSM always has NoData - when(Pending) { - case Event(Execute, NoData) => - increment(NonEmptyList("jobs", List("ejea", "executing", "starting"))) - if (restarting) { - requestRestartCheckToken() - goto(RequestingRestartCheckToken) - } else { - requestExecutionToken() - goto(RequestingExecutionToken) - } + when(Pending) { case Event(Execute, NoData) => + increment(NonEmptyList("jobs", List("ejea", "executing", "starting"))) + if (restarting) { + requestRestartCheckToken() + goto(RequestingRestartCheckToken) + } else { + requestExecutionToken() + goto(RequestingExecutionToken) + } } // This condition only applies for restarts - when(RequestingRestartCheckToken) { - case Event(JobTokenDispensed, NoData) => - currentTokenDispenser = Option(jobRestartCheckTokenDispenserActor) - replyTo ! JobStarting(jobDescriptorKey) - val jobStoreKey = jobDescriptorKey.toJobStoreKey(workflowIdForLogging) - jobStoreActor ! QueryJobCompletion(jobStoreKey, jobDescriptorKey.call.outputPorts.toSeq) - goto(CheckingJobStore) + when(RequestingRestartCheckToken) { case Event(JobTokenDispensed, NoData) => + currentTokenDispenser = Option(jobRestartCheckTokenDispenserActor) + replyTo ! JobStarting(jobDescriptorKey) + val jobStoreKey = jobDescriptorKey.toJobStoreKey(workflowIdForLogging) + jobStoreActor ! 
QueryJobCompletion(jobStoreKey, jobDescriptorKey.call.outputPorts.toSeq) + goto(CheckingJobStore) } - when(RequestingExecutionToken) { - case Event(JobTokenDispensed, NoData) => - currentTokenDispenser = Option(jobExecutionTokenDispenserActor) - if (!restarting) - replyTo ! JobStarting(jobDescriptorKey) - requestValueStore() + when(RequestingExecutionToken) { case Event(JobTokenDispensed, NoData) => + currentTokenDispenser = Option(jobExecutionTokenDispenserActor) + if (!restarting) + replyTo ! JobStarting(jobDescriptorKey) + requestValueStore() } // When CheckingJobStore, the FSM always has NoData @@ -196,37 +207,45 @@ class EngineJobExecutionActor(replyTo: ActorRef, when(CheckingCacheEntryExistence) { // There was already a cache entry for this job case Event(join: CallCachingJoin, NoData) => - Try(join.toJobSuccess(jobDescriptorKey, backendLifecycleActorFactory.pathBuilders(initializationData))).map({ jobSuccess => - // We can't create a CallCacheHashes to give to the SucceededResponseData here because it involves knowledge of - // which hashes are file hashes and which are not. We can't know that (nor do we care) when pulling them from the - // database. So instead manually publish the hashes here. - publishHashResultsToMetadata(Option(Success(join.callCacheHashes))) - saveJobCompletionToJobStore(SucceededResponseData(jobSuccess, None)) - }).recover({ - case f => + Try(join.toJobSuccess(jobDescriptorKey, backendLifecycleActorFactory.pathBuilders(initializationData))) + .map { jobSuccess => + // We can't create a CallCacheHashes to give to the SucceededResponseData here because it involves knowledge of + // which hashes are file hashes and which are not. We can't know that (nor do we care) when pulling them from the + // database. So instead manually publish the hashes here. + publishHashResultsToMetadata(Option(Success(join.callCacheHashes))) + saveJobCompletionToJobStore(SucceededResponseData(jobSuccess, None)) + } + .recover { case f => // If for some reason the above fails, fail the job cleanly - saveJobCompletionToJobStore(FailedResponseData(JobFailedNonRetryableResponse(jobDescriptorKey, f, None), None)) - }).get + saveJobCompletionToJobStore( + FailedResponseData(JobFailedNonRetryableResponse(jobDescriptorKey, f, None), None) + ) + } + .get // No cache entry for this job - keep going case Event(NoCallCacheEntry(_), NoData) => returnCurrentToken() requestExecutionToken() goto(RequestingExecutionToken) case Event(CacheResultLookupFailure(reason), NoData) => - log.error(reason, "{}: Failure checking for cache entry existence: {}. Attempting to resume job anyway.", jobTag, reason.getMessage) + log.error(reason, + "{}: Failure checking for cache entry existence: {}. Attempting to resume job anyway.", + jobTag, + reason.getMessage + ) returnCurrentToken() requestExecutionToken() goto(RequestingExecutionToken) } /* - * ! Hot Potato Warning ! - * We ask explicitly for the output store so we can use it on the fly and more importantly not store it as a - * variable in this actor, which would prevent it from being garbage collected for the duration of the - * job and would lead to memory leaks. - */ - when(WaitingForValueStore) { - case Event(valueStore: ValueStore, NoData) => prepareJob(valueStore) + * ! Hot Potato Warning ! + * We ask explicitly for the output store so we can use it on the fly and more importantly not store it as a + * variable in this actor, which would prevent it from being garbage collected for the duration of the + * job and would lead to memory leaks. 
+ */ + when(WaitingForValueStore) { case Event(valueStore: ValueStore, NoData) => + prepareJob(valueStore) } // When PreparingJob, the FSM always has NoData @@ -234,7 +253,8 @@ class EngineJobExecutionActor(replyTo: ActorRef, case Event(BackendJobPreparationSucceeded(jobDescriptor, bjeaProps), NoData) => val updatedData = ResponsePendingData(jobDescriptor, bjeaProps) effectiveCallCachingMode match { - case activity: CallCachingActivity if activity.readFromCache => handleReadFromCacheOn(jobDescriptor, activity, updatedData) + case activity: CallCachingActivity if activity.readFromCache => + handleReadFromCacheOn(jobDescriptor, activity, updatedData) case activity: CallCachingActivity => handleReadFromCacheOff(jobDescriptor, activity, updatedData) case CallCachingOff => runJob(updatedData) } @@ -244,9 +264,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, when(CheckingCallCache) { case Event(CacheMiss, data: ResponsePendingData) => - writeToMetadata(Map( - callCachingHitResultMetadataKey -> false, - callCachingReadResultMetadataKey -> "Cache Miss")) + writeToMetadata(Map(callCachingHitResultMetadataKey -> false, callCachingReadResultMetadataKey -> "Cache Miss")) if (data.cacheHitFailureCount > 0) { val totalHits = data.cacheHitFailureCount @@ -259,7 +277,12 @@ class EngineJobExecutionActor(replyTo: ActorRef, s"Falling back to running job." ) val template = s"BT-322 {} cache hit copying failure: {} failed copy attempts of maximum {} with {}." - log.info(template, jobTag, data.failedCopyAttempts, callCachingParameters.maxFailedCopyAttempts, data.aggregatedHashString) + log.info(template, + jobTag, + data.failedCopyAttempts, + callCachingParameters.maxFailedCopyAttempts, + data.aggregatedHashString + ) } else { log.info(s"BT-322 {} cache hit copying nomatch: could not find a suitable cache hit.", jobTag) workflowLogger.info("Could not copy a suitable cache hit for {}. No copy attempts were made.", arg = jobTag) @@ -280,17 +303,25 @@ class EngineJobExecutionActor(replyTo: ActorRef, when(FetchingCachedOutputsFromDatabase) { case Event( - CachedOutputLookupSucceeded(womValueSimpletons, jobDetritus, returnCode, cacheResultId, cacheHitDetails), - data@ResponsePendingData(_, _, _, _, Some(ejeaCacheHit), _, _, _), - ) => + CachedOutputLookupSucceeded(womValueSimpletons, jobDetritus, returnCode, cacheResultId, cacheHitDetails), + data @ ResponsePendingData(_, _, _, _, Some(ejeaCacheHit), _, _, _) + ) => if (cacheResultId != ejeaCacheHit.hit.cacheResultId) { // Sanity check: was this the right set of results (a false here is a BAD thing!): - log.error(s"Received incorrect call cache results from FetchCachedResultsActor. Expected ${ejeaCacheHit.hit} but got $cacheResultId. Running job") + log.error( + s"Received incorrect call cache results from FetchCachedResultsActor. Expected ${ejeaCacheHit.hit} but got $cacheResultId. Running job" + ) // Treat this like the "CachedOutputLookupFailed" event: runJob(data) } else { log.debug("Cache hit for {}! Fetching cached result {}", jobTag, cacheResultId) - makeBackendCopyCacheHit(womValueSimpletons, jobDetritus, returnCode, data, cacheResultId, ejeaCacheHit.hitNumber) using data.withCacheDetails(cacheHitDetails) + makeBackendCopyCacheHit(womValueSimpletons, + jobDetritus, + returnCode, + data, + cacheResultId, + ejeaCacheHit.hitNumber + ) using data.withCacheDetails(cacheHitDetails) } case Event(CachedOutputLookupFailed(_, error), data: ResponsePendingData) => log.warning("Can't fetch a list of cached outputs to copy for {} due to {}. 
Running job.", jobTag, error) @@ -306,12 +337,13 @@ class EngineJobExecutionActor(replyTo: ActorRef, when(BackendIsCopyingCachedOutputs) { // Backend copying response: case Event( - response: JobSucceededResponse, - data@ResponsePendingData(_, _, Some(Success(hashes)), _, _, _, _, _), - ) => + response: JobSucceededResponse, + data @ ResponsePendingData(_, _, Some(Success(hashes)), _, _, _, _, _) + ) => logCacheHitSuccessAndNotifyMetadata(data) saveCacheResults(hashes, data.withSuccessResponse(response)) - case Event(response: JobSucceededResponse, data: ResponsePendingData) if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => + case Event(response: JobSucceededResponse, data: ResponsePendingData) + if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => logCacheHitSuccessAndNotifyMetadata(data) // Wait for the CallCacheHashes stay() using data.withSuccessResponse(response) @@ -319,9 +351,9 @@ class EngineJobExecutionActor(replyTo: ActorRef, logCacheHitSuccessAndNotifyMetadata(data) saveJobCompletionToJobStore(data.withSuccessResponse(response)) case Event( - CopyingOutputsFailedResponse(_, cacheCopyAttempt, reason), - data@ResponsePendingData(_, _, _, _, Some(cacheHit), _, _, _) - ) if cacheCopyAttempt == cacheHit.hitNumber => + CopyingOutputsFailedResponse(_, cacheCopyAttempt, reason), + data @ ResponsePendingData(_, _, _, _, Some(cacheHit), _, _, _) + ) if cacheCopyAttempt == cacheHit.hitNumber => invalidateCacheHitAndTransition(cacheHit, data, reason) // Hashes arrive: @@ -359,16 +391,17 @@ class EngineJobExecutionActor(replyTo: ActorRef, val jobSuccessHandler: StateFunction = { // writeToCache is true and all hashes have already been retrieved - save to the cache case Event( - response: JobSucceededResponse, - data@ResponsePendingData(_, _, Some(Success(hashes)), _, _, _, _, _) - ) if effectiveCallCachingMode.writeToCache => + response: JobSucceededResponse, + data @ ResponsePendingData(_, _, Some(Success(hashes)), _, _, _, _, _) + ) if effectiveCallCachingMode.writeToCache => eventList ++= response.executionEvents // Publish the image used now that we have it as we might lose the information if Cromwell is restarted // in between writing to the cache and writing to the job store response.dockerImageUsed foreach publishDockerImageUsed saveCacheResults(hashes, data.withSuccessResponse(response)) // Hashes are still missing and we want them (writeToCache is true) - wait for them - case Event(response: JobSucceededResponse, data: ResponsePendingData) if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => + case Event(response: JobSucceededResponse, data: ResponsePendingData) + if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => eventList ++= response.executionEvents stay() using data.withSuccessResponse(response) // Hashes are missing but writeToCache is OFF - complete the job @@ -381,12 +414,13 @@ class EngineJobExecutionActor(replyTo: ActorRef, val jobFailedHandler: StateFunction = { // writeToCache is true and all hashes already retrieved - save to job store case Event( - response: BackendJobFailedResponse, - data@ResponsePendingData(_, _, Some(Success(_)), _, _, _, _, _) - ) if effectiveCallCachingMode.writeToCache => + response: BackendJobFailedResponse, + data @ ResponsePendingData(_, _, Some(Success(_)), _, _, _, _, _) + ) if effectiveCallCachingMode.writeToCache => saveJobCompletionToJobStore(data.withFailedResponse(response)) // Hashes are still missing and we want them (writeToCache is true) - wait for them - case 
Event(response: BackendJobFailedResponse, data: ResponsePendingData) if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => + case Event(response: BackendJobFailedResponse, data: ResponsePendingData) + if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => stay() using data.withFailedResponse(response) // Hashes are missing but writeToCache is OFF - complete the job case Event(response: BackendJobFailedResponse, data: ResponsePendingData) => @@ -448,7 +482,13 @@ class EngineJobExecutionActor(replyTo: ActorRef, stay() using data.copy(hashes = Option(Failure(t))) } - when(RunningJob)(jobSuccessHandler.orElse(jobFailedHandler).orElse(jobAbortedHandler).orElse(hashSuccessResponseHandler).orElse(hashFailureResponseHandler)) + when(RunningJob)( + jobSuccessHandler + .orElse(jobFailedHandler) + .orElse(jobAbortedHandler) + .orElse(hashSuccessResponseHandler) + .orElse(hashFailureResponseHandler) + ) // When UpdatingCallCache, the FSM always has SucceededResponseData. when(UpdatingCallCache) { @@ -464,16 +504,20 @@ class EngineJobExecutionActor(replyTo: ActorRef, case Event(JobStoreWriteSuccess(_), data: ResponseData) => forwardAndStop(data.response) case Event(JobStoreWriteFailure(t), _: ResponseData) => - respondAndStop(JobFailedNonRetryableResponse(jobDescriptorKey, new Exception(s"JobStore write failure: ${t.getMessage}", t), None)) + respondAndStop( + JobFailedNonRetryableResponse(jobDescriptorKey, + new Exception(s"JobStore write failure: ${t.getMessage}", t), + None + ) + ) } - onTransition { - case fromState -> toState => - log.debug("Transitioning from {}({}) to {}({})", fromState, stateData, toState, nextStateData) + onTransition { case fromState -> toState => + log.debug("Transitioning from {}({}) to {}({})", fromState, stateData, toState, nextStateData) - EngineJobExecutionActorState.transitionEventString(fromState, toState) foreach { - eventList :+= ExecutionEvent(_) - } + EngineJobExecutionActorState.transitionEventString(fromState, toState) foreach { + eventList :+= ExecutionEvent(_) + } } @@ -502,23 +546,35 @@ class EngineJobExecutionActor(replyTo: ActorRef, // due to timeouts). 
That's ok, we just ignore this message in any other situation: stay() case Event(msg, _) => - log.error("Bad message from {} to EngineJobExecutionActor in state {}(with data {}): {}", sender(), stateName, stateData, msg) + log.error("Bad message from {} to EngineJobExecutionActor in state {}(with data {}): {}", + sender(), + stateName, + stateData, + msg + ) stay() } - private def publishHashesToMetadata(maybeHashes: Option[Try[CallCacheHashes]]) = publishHashResultsToMetadata(maybeHashes.map(_.map(_.hashes))) + private def publishHashesToMetadata(maybeHashes: Option[Try[CallCacheHashes]]) = publishHashResultsToMetadata( + maybeHashes.map(_.map(_.hashes)) + ) private def publishDockerImageUsed(image: String) = writeToMetadata(Map("dockerImageUsed" -> image)) private def publishHashResultsToMetadata(maybeHashes: Option[Try[Set[HashResult]]]) = maybeHashes match { case Some(Success(hashes)) => - val hashMap = hashes.collect({ + val hashMap = hashes.collect { case HashResult(HashKey(useInCallCaching, keyComponents), HashValue(value)) if useInCallCaching => - (callCachingHashes + MetadataKey.KeySeparator + keyComponents.mkString(MetadataKey.KeySeparator.toString)) -> value - }).toMap + (callCachingHashes + MetadataKey.KeySeparator + keyComponents.mkString( + MetadataKey.KeySeparator.toString + )) -> value + }.toMap writeToMetadata(hashMap) case _ => } - private def handleReadFromCacheOn(jobDescriptor: BackendJobDescriptor, activity: CallCachingActivity, updatedData: ResponsePendingData) = { + private def handleReadFromCacheOn(jobDescriptor: BackendJobDescriptor, + activity: CallCachingActivity, + updatedData: ResponsePendingData + ) = jobDescriptor.maybeCallCachingEligible match { // If the job is eligible, initialize job hashing and go to CheckingCallCache state case eligible: CallCachingEligible => @@ -538,21 +594,24 @@ class EngineJobExecutionActor(replyTo: ActorRef, disableCallCaching() runJob(updatedData) } - } - private def handleReadFromCacheOff(jobDescriptor: BackendJobDescriptor, activity: CallCachingActivity, updatedData: ResponsePendingData) = { + private def handleReadFromCacheOff(jobDescriptor: BackendJobDescriptor, + activity: CallCachingActivity, + updatedData: ResponsePendingData + ) = { jobDescriptor.maybeCallCachingEligible match { // If the job is eligible, initialize job hashing so it can be written to the cache - case eligible: CallCachingEligible => initializeJobHashing(jobDescriptor, activity, eligible) match { - case Failure(failure) => - log.warning(s"BT-322 {} failed to initialize job hashing", jobTag) - // This condition in `handleReadFromCacheOn` ends in a `respondAndStop(JobFailedNonRetryableResponse(...))`, - // but with cache reading off Cromwell instead logs this condition and runs the job. - log.error(failure, "Failed to initialize job hashing. The job will not be written to the cache") - case _ => - val template = s"BT-322 {} is eligible for call caching with read = {} and write = {}" - log.info(template, jobTag, activity.readFromCache, activity.writeToCache) - } + case eligible: CallCachingEligible => + initializeJobHashing(jobDescriptor, activity, eligible) match { + case Failure(failure) => + log.warning(s"BT-322 {} failed to initialize job hashing", jobTag) + // This condition in `handleReadFromCacheOn` ends in a `respondAndStop(JobFailedNonRetryableResponse(...))`, + // but with cache reading off Cromwell instead logs this condition and runs the job. + log.error(failure, "Failed to initialize job hashing. 
The job will not be written to the cache") + case _ => + val template = s"BT-322 {} is eligible for call caching with read = {} and write = {}" + log.info(template, jobTag, activity.readFromCache, activity.writeToCache) + } // Don't even initialize hashing to write to the cache if the job is ineligible case _ => log.info(s"BT-322 {} is not eligible for call caching", jobTag) @@ -562,16 +621,20 @@ class EngineJobExecutionActor(replyTo: ActorRef, runJob(updatedData) } - private def requestRestartCheckToken(): Unit = { - jobRestartCheckTokenDispenserActor ! JobTokenRequest(workflowDescriptor.backendDescriptor.hogGroup, backendLifecycleActorFactory.jobRestartCheckTokenType) - } + private def requestRestartCheckToken(): Unit = + jobRestartCheckTokenDispenserActor ! JobTokenRequest(workflowDescriptor.backendDescriptor.hogGroup, + backendLifecycleActorFactory.jobRestartCheckTokenType + ) - private def requestExecutionToken(): Unit = { - jobExecutionTokenDispenserActor ! JobTokenRequest(workflowDescriptor.backendDescriptor.hogGroup, backendLifecycleActorFactory.jobExecutionTokenType) - } + private def requestExecutionToken(): Unit = + jobExecutionTokenDispenserActor ! JobTokenRequest(workflowDescriptor.backendDescriptor.hogGroup, + backendLifecycleActorFactory.jobExecutionTokenType + ) // Return any currently held job restart check or execution token. - private def returnCurrentToken(): Unit = if (stateName != Pending && stateName != RequestingRestartCheckToken && stateName != RequestingExecutionToken) { + private def returnCurrentToken(): Unit = if ( + stateName != Pending && stateName != RequestingRestartCheckToken && stateName != RequestingExecutionToken + ) { currentTokenDispenser foreach { _ ! JobTokenReturn } currentTokenDispenser = None } @@ -597,9 +660,8 @@ class EngineJobExecutionActor(replyTo: ActorRef, } // Note: StatsD will automatically add a counter value so ne need to separately increment a counter. - private def instrumentJobComplete(response: BackendJobExecutionResponse) = { + private def instrumentJobComplete(response: BackendJobExecutionResponse) = setJobTimePerState(response, (System.currentTimeMillis() - jobStartTime).millis) - } private def disableCallCaching(reason: Option[Throwable] = None) = { log.warning(s"BT-322 {} disabling call caching due to error", jobTag) @@ -626,8 +688,16 @@ class EngineJobExecutionActor(replyTo: ActorRef, def prepareJob(valueStore: ValueStore) = { writeCallCachingModeToMetadata() val jobPreparationActorName = s"BackendPreparationActor_for_$jobTag" - val jobPrepProps = JobPreparationActor.props(workflowDescriptor, jobDescriptorKey, backendLifecycleActorFactory, workflowDockerLookupActor = workflowDockerLookupActor, - initializationData, serviceRegistryActor = serviceRegistryActor, ioActor = ioActor, backendSingletonActor = backendSingletonActor) + val jobPrepProps = JobPreparationActor.props( + workflowDescriptor, + jobDescriptorKey, + backendLifecycleActorFactory, + workflowDockerLookupActor = workflowDockerLookupActor, + initializationData, + serviceRegistryActor = serviceRegistryActor, + ioActor = ioActor, + backendSingletonActor = backendSingletonActor + ) val jobPreparationActor = createJobPreparationActor(jobPrepProps, jobPreparationActorName) jobPreparationActor ! 
CallPreparation.Start(valueStore) goto(PreparingJob) @@ -638,9 +708,17 @@ class EngineJobExecutionActor(replyTo: ActorRef, goto(WaitingForValueStore) } - def initializeJobHashing(jobDescriptor: BackendJobDescriptor, activity: CallCachingActivity, callCachingEligible: CallCachingEligible): Try[ActorRef] = { + def initializeJobHashing(jobDescriptor: BackendJobDescriptor, + activity: CallCachingActivity, + callCachingEligible: CallCachingEligible + ): Try[ActorRef] = { val maybeFileHashingActorProps = backendLifecycleActorFactory.fileHashingActorProps map { - _.apply(jobDescriptor, initializationData, serviceRegistryActor, ioActor, callCachingParameters.fileHashCacheActor) + _.apply(jobDescriptor, + initializationData, + serviceRegistryActor, + ioActor, + callCachingParameters.fileHashCacheActor + ) } maybeFileHashingActorProps match { @@ -667,41 +745,55 @@ class EngineJobExecutionActor(replyTo: ActorRef, } def makeFetchCachedResultsActor(callCachingEntryId: CallCachingEntryId): Unit = { - context.actorOf(FetchCachedResultsActor.props(callCachingEntryId, self, - new CallCache(EngineServicesStore.engineDatabaseInterface))) + context.actorOf( + FetchCachedResultsActor.props(callCachingEntryId, + self, + new CallCache(EngineServicesStore.engineDatabaseInterface) + ) + ) () } - private def fetchCachedResults( callCachingEntryId: CallCachingEntryId, data: ResponsePendingData) = { + private def fetchCachedResults(callCachingEntryId: CallCachingEntryId, data: ResponsePendingData) = { makeFetchCachedResultsActor(callCachingEntryId) goto(FetchingCachedOutputsFromDatabase) using data } private def makeBackendCopyCacheHit(womValueSimpletons: Seq[WomValueSimpleton], - jobDetritusFiles: Map[String,String], + jobDetritusFiles: Map[String, String], returnCode: Option[Int], data: ResponsePendingData, cacheResultId: CallCachingEntryId, - cacheCopyAttempt: Int) = { + cacheCopyAttempt: Int + ) = backendLifecycleActorFactory.cacheHitCopyingActorProps match { case Some(propsMaker) => - val backendCacheHitCopyingActorProps = propsMaker(data.jobDescriptor, initializationData, serviceRegistryActor, ioActor, cacheCopyAttempt, callCachingParameters.blacklistCache) - val cacheHitCopyActor = context.actorOf(backendCacheHitCopyingActorProps, buildCacheHitCopyingActorName(data.jobDescriptor, cacheResultId)) + val backendCacheHitCopyingActorProps = propsMaker(data.jobDescriptor, + initializationData, + serviceRegistryActor, + ioActor, + cacheCopyAttempt, + callCachingParameters.blacklistCache + ) + val cacheHitCopyActor = context.actorOf(backendCacheHitCopyingActorProps, + buildCacheHitCopyingActorName(data.jobDescriptor, cacheResultId) + ) cacheHitCopyActor ! CopyOutputsCommand(womValueSimpletons, jobDetritusFiles, cacheResultId, returnCode) replyTo ! JobRunning(data.jobDescriptor.key, data.jobDescriptor.evaluatedTaskInputs) goto(BackendIsCopyingCachedOutputs) case None => // This should be impossible with the FSM, but luckily, we CAN recover if some foolish future programmer makes this happen: - val errorMessage = "Call caching copying should never have even been attempted with no copy actor props! (Programmer error!)" + val errorMessage = + "Call caching copying should never have even been attempted with no copy actor props! (Programmer error!)" log.error(errorMessage) self ! 
JobFailedNonRetryableResponse(data.jobDescriptor.key, new RuntimeException(errorMessage), None) goto(BackendIsCopyingCachedOutputs) } - } - private [job] def createBackendJobExecutionActor(data: ResponsePendingData) = { - context.actorOf(data.bjeaProps, BackendJobExecutionActor.buildJobExecutionActorName(workflowIdForLogging, data.jobDescriptor.key)) - } + private[job] def createBackendJobExecutionActor(data: ResponsePendingData) = + context.actorOf(data.bjeaProps, + BackendJobExecutionActor.buildJobExecutionActorName(workflowIdForLogging, data.jobDescriptor.key) + ) private def runJob(data: ResponsePendingData) = { val backendJobExecutionActor = createBackendJobExecutionActor(data) @@ -724,7 +816,8 @@ class EngineJobExecutionActor(replyTo: ActorRef, } response match { - case CallCacheInvalidatedFailure(_, failure) => log.error(failure, "Failed to invalidate cache entry for job: {}", jobDescriptorKey) + case CallCacheInvalidatedFailure(_, failure) => + log.error(failure, "Failed to invalidate cache entry for job: {}", jobDescriptorKey) case CallCacheInvalidatedSuccess(_, Some(entry)) => updateMetadataForInvalidatedEntry(entry) case _ => } @@ -735,25 +828,36 @@ class EngineJobExecutionActor(replyTo: ActorRef, ejha ! NextHit goto(CheckingCallCache) using data case Some(_) => - writeToMetadata(Map( - callCachingHitResultMetadataKey -> false, - callCachingReadResultMetadataKey -> s"Cache Miss (${callCachingParameters.maxFailedCopyAttempts} failed copy attempts)")) - log.warning("BT-322 {} cache hit copying maxfail: Cache miss due to exceeding the maximum of {} failed copy attempts.", jobTag, callCachingParameters.maxFailedCopyAttempts) + writeToMetadata( + Map( + callCachingHitResultMetadataKey -> false, + callCachingReadResultMetadataKey -> s"Cache Miss (${callCachingParameters.maxFailedCopyAttempts} failed copy attempts)" + ) + ) + log.warning( + "BT-322 {} cache hit copying maxfail: Cache miss due to exceeding the maximum of {} failed copy attempts.", + jobTag, + callCachingParameters.maxFailedCopyAttempts + ) publishCopyAttemptAbandonedMetrics(data) runJob(data) case _ => - workflowLogger.error("Programmer error: We got a cache failure but there was no hashing actor scanning for hits. Falling back to running job") + workflowLogger.error( + "Programmer error: We got a cache failure but there was no hashing actor scanning for hits. 
Falling back to running job" + ) runJob(data) } } - private def buildCacheHitCopyingActorName(jobDescriptor: BackendJobDescriptor, cacheResultId: CallCachingEntryId) = { + private def buildCacheHitCopyingActorName(jobDescriptor: BackendJobDescriptor, cacheResultId: CallCachingEntryId) = s"$workflowIdForLogging-BackendCacheHitCopyingActor-$jobTag-${cacheResultId.id}" - } private def logCacheHitSuccessAndNotifyMetadata(data: ResponsePendingData): Unit = { - val metadataMap = Map[String, Any](callCachingHitResultMetadataKey -> true) ++ data.ejeaCacheHit.flatMap(_.details).map(details => callCachingReadResultMetadataKey -> s"Cache Hit: $details").toMap + val metadataMap = Map[String, Any](callCachingHitResultMetadataKey -> true) ++ data.ejeaCacheHit + .flatMap(_.details) + .map(details => callCachingReadResultMetadataKey -> s"Cache Hit: $details") + .toMap writeToMetadata(metadataMap) @@ -786,19 +890,15 @@ class EngineJobExecutionActor(replyTo: ActorRef, workflowLogger.info( s"Failure copying cache results for job $jobDescriptorKey (${reason.getClass.getSimpleName}: ${reason.getMessage})" - + multipleFailuresContext + + multipleFailuresContext ) } private def publishCopyAttemptFailuresMetrics(data: ResponsePendingData): Unit = { val copyErrorsPerHitPath: NonEmptyList[String] = - NonEmptyList.of( - "job", - "callcaching", "read", "error", "invalidhits", "copyerrors") + NonEmptyList.of("job", "callcaching", "read", "error", "invalidhits", "copyerrors") val copyBlacklistsPerHitPath: NonEmptyList[String] = - NonEmptyList.of( - "job", - "callcaching", "read", "error", "invalidhits", "blacklisted") + NonEmptyList.of("job", "callcaching", "read", "error", "invalidhits", "blacklisted") sendGauge(copyErrorsPerHitPath, data.failedCopyAttempts.longValue) sendGauge(copyBlacklistsPerHitPath, data.cacheHitFailureCount - data.failedCopyAttempts.longValue) @@ -806,24 +906,33 @@ class EngineJobExecutionActor(replyTo: ActorRef, private def publishCopyAttemptAbandonedMetrics(data: ResponsePendingData): Unit = { val cacheCopyAttemptAbandonedPath: NonEmptyList[String] = - NonEmptyList.of( - "job", - "callcaching", "read", "error", "invalidhits", "abandonments") + NonEmptyList.of("job", "callcaching", "read", "error", "invalidhits", "abandonments") increment(cacheCopyAttemptAbandonedPath) // Also publish the attempt failure metrics publishCopyAttemptFailuresMetrics(data) } - private def publishBlacklistReadMetrics(data: ResponsePendingData, failureCategory: MetricableCacheCopyErrorCategory): Unit = { + private def publishBlacklistReadMetrics(data: ResponsePendingData, + failureCategory: MetricableCacheCopyErrorCategory + ): Unit = { val callCachingErrorsMetricPath: NonEmptyList[String] = NonEmptyList.of( "job", - "callcaching", "read", "error", failureCategory.toString, data.jobDescriptor.taskCall.localName, data.jobDescriptor.workflowDescriptor.hogGroup.value) + "callcaching", + "read", + "error", + failureCategory.toString, + data.jobDescriptor.taskCall.localName, + data.jobDescriptor.workflowDescriptor.hogGroup.value + ) increment(callCachingErrorsMetricPath) } - private def invalidateCacheHitAndTransition(ejeaCacheHit: EJEACacheHit, data: ResponsePendingData, reason: CacheCopyFailure) = { + private def invalidateCacheHitAndTransition(ejeaCacheHit: EJEACacheHit, + data: ResponsePendingData, + reason: CacheCopyFailure + ) = { val copyAttemptIncrement = reason match { case CopyAttemptError(failure) => logCacheHitFailure(data, failure) @@ -837,10 +946,13 @@ class EngineJobExecutionActor(replyTo: ActorRef, // 
Increment the total failure count and actual copy failure count as appropriate. val updatedData = data.copy(cacheHitFailureCount = data.cacheHitFailureCount + 1, - failedCopyAttempts = data.failedCopyAttempts + copyAttemptIncrement) + failedCopyAttempts = data.failedCopyAttempts + copyAttemptIncrement + ) if (invalidationRequired) { - workflowLogger.warn(s"Invalidating cache entry ${ejeaCacheHit.hit.cacheResultId} (Cache entry details: ${ejeaCacheHit.details})") + workflowLogger.warn( + s"Invalidating cache entry ${ejeaCacheHit.hit.cacheResultId} (Cache entry details: ${ejeaCacheHit.details})" + ) invalidateCacheHit(ejeaCacheHit.hit.cacheResultId) goto(InvalidatingCacheEntry) using updatedData } else { @@ -860,7 +972,9 @@ class EngineJobExecutionActor(replyTo: ActorRef, } private def saveCacheResults(hashes: CallCacheHashes, data: SucceededResponseData) = { - callCachingParameters.writeActor ! SaveCallCacheHashes(CallCacheHashBundle(workflowIdForLogging, hashes, data.response)) + callCachingParameters.writeActor ! SaveCallCacheHashes( + CallCacheHashBundle(workflowIdForLogging, hashes, data.response) + ) val updatedData = data.copy(hashes = Option(Success(hashes))) goto(UpdatingCallCache) using updatedData } @@ -890,7 +1004,11 @@ class EngineJobExecutionActor(replyTo: ActorRef, jobStoreActor ! RegisterJobCompleted(jobStoreKey, jobStoreResult) } - private def saveUnsuccessfulJobResults(jobKey: JobKey, returnCode: Option[Int], reason: Throwable, retryable: Boolean) = { + private def saveUnsuccessfulJobResults(jobKey: JobKey, + returnCode: Option[Int], + reason: Throwable, + retryable: Boolean + ) = { val jobStoreKey = jobKey.toJobStoreKey(workflowIdForLogging) val jobStoreResult = JobResultFailure(returnCode, reason, retryable) jobStoreActor ! RegisterJobCompleted(jobStoreKey, jobStoreResult) @@ -912,6 +1030,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, } object EngineJobExecutionActor { + /** States */ sealed trait EngineJobExecutionActorState case object Pending extends EngineJobExecutionActorState @@ -930,7 +1049,9 @@ object EngineJobExecutionActor { case object InvalidatingCacheEntry extends EngineJobExecutionActorState object EngineJobExecutionActorState { - def transitionEventString(fromState: EngineJobExecutionActorState, toState: EngineJobExecutionActorState): Option[String] = { + def transitionEventString(fromState: EngineJobExecutionActorState, + toState: EngineJobExecutionActorState + ): Option[String] = { def callCacheStateGroup: Set[EngineJobExecutionActorState] = Set( CheckingCallCache, @@ -948,14 +1069,14 @@ object EngineJobExecutionActor { } case class CallCachingParameters( - mode: CallCachingMode, - readActor: ActorRef, - writeActor: ActorRef, - fileHashCacheActor: Option[ActorRef], - maxFailedCopyAttempts: Int, - blacklistCache: Option[BlacklistCache], - fileHashBatchSize: Int - ) + mode: CallCachingMode, + readActor: ActorRef, + writeActor: ActorRef, + fileHashCacheActor: Option[ActorRef], + maxFailedCopyAttempts: Int, + blacklistCache: Option[BlacklistCache], + fileHashBatchSize: Int + ) /** Commands */ sealed trait EngineJobExecutionActorCommand @@ -977,29 +1098,31 @@ object EngineJobExecutionActor { jobExecutionTokenDispenserActor: ActorRef, backendSingletonActor: Option[ActorRef], command: BackendJobExecutionActorCommand, - callCachingParameters: EngineJobExecutionActor.CallCachingParameters) = { - - Props(new EngineJobExecutionActor( - replyTo = replyTo, - jobDescriptorKey = jobDescriptorKey, - workflowDescriptor = workflowDescriptor, - 
backendLifecycleActorFactory = backendLifecycleActorFactory, - initializationData = initializationData, - restarting = restarting, - serviceRegistryActor = serviceRegistryActor, - ioActor = ioActor, - jobStoreActor = jobStoreActor, - workflowDockerLookupActor = workflowDockerLookupActor, - jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, - jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, - backendSingletonActor = backendSingletonActor, - command = command, - callCachingParameters = callCachingParameters)).withDispatcher(EngineDispatcher) - } + callCachingParameters: EngineJobExecutionActor.CallCachingParameters + ) = + Props( + new EngineJobExecutionActor( + replyTo = replyTo, + jobDescriptorKey = jobDescriptorKey, + workflowDescriptor = workflowDescriptor, + backendLifecycleActorFactory = backendLifecycleActorFactory, + initializationData = initializationData, + restarting = restarting, + serviceRegistryActor = serviceRegistryActor, + ioActor = ioActor, + jobStoreActor = jobStoreActor, + workflowDockerLookupActor = workflowDockerLookupActor, + jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, + jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, + backendSingletonActor = backendSingletonActor, + command = command, + callCachingParameters = callCachingParameters + ) + ).withDispatcher(EngineDispatcher) case class EJEACacheHit(hit: CacheHit, hitNumber: Int, details: Option[String]) - private[execution] sealed trait EJEAData { + sealed private[execution] trait EJEAData { override def toString = getClass.getSimpleName } @@ -1013,13 +1136,14 @@ object EngineJobExecutionActor { backendJobActor: Option[ActorRef] = None, cacheHitFailureCount: Int = 0, failedCopyAttempts: Int = 0 - ) extends EJEAData { + ) extends EJEAData { def withEJHA(ejha: ActorRef): EJEAData = this.copy(ejha = Option(ejha)) def withBackendActor(actorRef: ActorRef) = this.copy(backendJobActor = Option(actorRef)) - def withSuccessResponse(success: JobSucceededResponse): SucceededResponseData = SucceededResponseData(success, hashes) + def withSuccessResponse(success: JobSucceededResponse): SucceededResponseData = + SucceededResponseData(success, hashes) def withFailedResponse(failed: BackendJobFailedResponse): FailedResponseData = FailedResponseData(failed, hashes) def withAbortedResponse(aborted: JobAbortedResponse): AbortedResponseData = AbortedResponseData(aborted, hashes) @@ -1031,7 +1155,8 @@ object EngineJobExecutionActor { this.copy(ejeaCacheHit = Option(newEjeaCacheHit)) } - def withCacheDetails(details: String) = this.copy(ejeaCacheHit = ejeaCacheHit.map(_.copy(details = Option(details)))) + def withCacheDetails(details: String) = + this.copy(ejeaCacheHit = ejeaCacheHit.map(_.copy(details = Option(details)))) def aggregatedHashString: String = hashes match { case Some(Success(hashes)) => hashes.aggregatedHashString @@ -1051,14 +1176,16 @@ object EngineJobExecutionActor { private[execution] trait ShouldBeSavedToJobStoreResponseData extends ResponseData private[execution] case class SucceededResponseData(successResponse: JobSucceededResponse, - hashes: Option[Try[CallCacheHashes]] = None) extends ShouldBeSavedToJobStoreResponseData { + hashes: Option[Try[CallCacheHashes]] = None + ) extends ShouldBeSavedToJobStoreResponseData { override def response = successResponse override def dockerImageUsed = successResponse.dockerImageUsed override def withHashes(hashes: Option[Try[CallCacheHashes]]) = copy(hashes = hashes) } private[execution] case 
class FailedResponseData(failedResponse: BackendJobFailedResponse, - hashes: Option[Try[CallCacheHashes]] = None) extends ShouldBeSavedToJobStoreResponseData { + hashes: Option[Try[CallCacheHashes]] = None + ) extends ShouldBeSavedToJobStoreResponseData { override def response = failedResponse // Seems like we should be able to get the docker image used even if the job failed override def dockerImageUsed = None @@ -1066,7 +1193,8 @@ object EngineJobExecutionActor { } private[execution] case class AbortedResponseData(abortedResponse: JobAbortedResponse, - hashes: Option[Try[CallCacheHashes]] = None) extends ResponseData { + hashes: Option[Try[CallCacheHashes]] = None + ) extends ResponseData { override def response = abortedResponse override def dockerImageUsed = None override def withHashes(hashes: Option[Try[CallCacheHashes]]) = copy(hashes = hashes) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparation.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparation.scala index e9665aa01b6..efdee8df7a0 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparation.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparation.scala @@ -15,15 +15,15 @@ object CallPreparation { trait CallPreparationActorResponse - case class BackendJobPreparationSucceeded(jobDescriptor: BackendJobDescriptor, bjeaProps: Props) extends CallPreparationActorResponse + case class BackendJobPreparationSucceeded(jobDescriptor: BackendJobDescriptor, bjeaProps: Props) + extends CallPreparationActorResponse case class JobCallPreparationFailed(jobKey: JobKey, throwable: Throwable) extends CallPreparationActorResponse case class CallPreparationFailed(jobKey: JobKey, throwable: Throwable) extends CallPreparationActorResponse def resolveAndEvaluateInputs(callKey: CallKey, expressionLanguageFunctions: IoFunctionSet, - valueStore: ValueStore): ErrorOr[WomEvaluatedCallInputs] = { - + valueStore: ValueStore + ): ErrorOr[WomEvaluatedCallInputs] = CallNode.resolveAndEvaluateInputs(callKey.node, expressionLanguageFunctions, valueStore.resolve(callKey.index)) - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala index 8540717a073..b621c26ed4d 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala @@ -13,7 +13,7 @@ import cromwell.core.Dispatcher.EngineDispatcher import cromwell.core.callcaching._ import cromwell.core.logging.WorkflowLogging import cromwell.core.{Dispatcher, DockerConfiguration} -import cromwell.docker.DockerInfoActor.{DockerInfoSuccessResponse, DockerInformation, DockerSize} +import cromwell.docker.DockerInfoActor.{DockerInformation, DockerInfoSuccessResponse, DockerSize} import cromwell.docker._ import cromwell.engine.EngineWorkflowDescriptor import cromwell.engine.workflow.WorkflowDockerLookupActor.{WorkflowDockerLookupFailure, WorkflowDockerTerminalFailure} @@ -51,8 +51,10 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, initializationData: Option[BackendInitializationData], val serviceRegistryActor: ActorRef, ioActor: ActorRef, 
- backendSingletonActor: Option[ActorRef]) - extends FSM[JobPreparationActorState, JobPreparationActorData] with WorkflowLogging with CallMetadataHelper { + backendSingletonActor: Option[ActorRef] +) extends FSM[JobPreparationActorState, JobPreparationActorData] + with WorkflowLogging + with CallMetadataHelper { override lazy val workflowIdForLogging = workflowDescriptor.possiblyNotRootWorkflowId override lazy val workflowIdForCallMetadata = workflowDescriptor.possiblyNotRootWorkflowId @@ -62,29 +64,39 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, private[preparation] val ioEc = context.system.dispatchers.lookup(Dispatcher.IoDispatcher) private[preparation] lazy val expressionLanguageFunctions = { - val ioFunctionSet: IoFunctionSet = factory.expressionLanguageFunctions(workflowDescriptor.backendDescriptor, jobKey, initializationData, ioActor, ioEc) + val ioFunctionSet: IoFunctionSet = factory.expressionLanguageFunctions(workflowDescriptor.backendDescriptor, + jobKey, + initializationData, + ioActor, + ioEc + ) ioFunctionSet.makeInputSpecificFunctions() } - private[preparation] lazy val dockerHashCredentials = factory.dockerHashCredentials(workflowDescriptor.backendDescriptor, initializationData) + private[preparation] lazy val dockerHashCredentials = + factory.dockerHashCredentials(workflowDescriptor.backendDescriptor, initializationData) private[preparation] lazy val runtimeAttributeDefinitions = factory.runtimeAttributeDefinitions(initializationData) - private[preparation] lazy val hasDockerDefinition = runtimeAttributeDefinitions.exists(_.name == DockerValidation.instance.key) + private[preparation] lazy val hasDockerDefinition = + runtimeAttributeDefinitions.exists(_.name == DockerValidation.instance.key) startWith(Idle, JobPreparationActorNoData) - when(Idle) { - case Event(Start(valueStore), JobPreparationActorNoData) => - evaluateInputsAndAttributes(valueStore) match { - case Valid((inputs, attributes)) => fetchDockerHashesIfNecessary(inputs, attributes) - case Invalid(failure) => sendFailureAndStop(new MessageAggregation with NoStackTrace { - override def exceptionContext: String = s"Call input and runtime attributes evaluation failed for ${jobKey.call.localName}" + when(Idle) { case Event(Start(valueStore), JobPreparationActorNoData) => + evaluateInputsAndAttributes(valueStore) match { + case Valid((inputs, attributes)) => fetchDockerHashesIfNecessary(inputs, attributes) + case Invalid(failure) => + sendFailureAndStop(new MessageAggregation with NoStackTrace { + override def exceptionContext: String = + s"Call input and runtime attributes evaluation failed for ${jobKey.call.localName}" override def errorMessages: Iterable[String] = failure.toList }) - } + } } when(WaitingForDockerHash) { - case Event(DockerInfoSuccessResponse(DockerInformation(dockerHash, dockerSize), _), data: JobPreparationDockerLookupData) => + case Event(DockerInfoSuccessResponse(DockerInformation(dockerHash, dockerSize), _), + data: JobPreparationDockerLookupData + ) => handleDockerHashSuccess(dockerHash, dockerSize, data) case Event(WorkflowDockerLookupFailure(reason, _, _), data: JobPreparationDockerLookupData) => workflowLogger.warn("Docker lookup failed", reason) @@ -94,32 +106,45 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } when(FetchingKeyValueStoreEntries) { - case Event(kvResponse: KvResponse, data @ JobPreparationKeyLookupData(keyLookups, maybeCallCachingEligible, dockerSize, inputs, attributes)) => + case Event(kvResponse: KvResponse, + 
data @ JobPreparationKeyLookupData(keyLookups, maybeCallCachingEligible, dockerSize, inputs, attributes) + ) => keyLookups.withResponse(kvResponse.key, kvResponse) match { case newPartialLookup: PartialKeyValueLookups => stay() using data.copy(keyLookups = newPartialLookup) case finished: KeyValueLookupResults => - sendResponseAndStop(prepareBackendDescriptor(inputs, attributes, maybeCallCachingEligible, finished.unscoped, dockerSize)) + sendResponseAndStop( + prepareBackendDescriptor(inputs, attributes, maybeCallCachingEligible, finished.unscoped, dockerSize) + ) } } - whenUnhandled { - case Event(unexpectedMessage, _) => - workflowLogger.warn(s"JobPreparation actor received an unexpected message in state $stateName: $unexpectedMessage") - stay() + whenUnhandled { case Event(unexpectedMessage, _) => + workflowLogger.warn(s"JobPreparation actor received an unexpected message in state $stateName: $unexpectedMessage") + stay() } - private[preparation] lazy val kvStoreKeysToPrefetch: Seq[String] = factory.requestedKeyValueStoreKeys ++ factory.defaultKeyValueStoreKeys + private[preparation] lazy val kvStoreKeysToPrefetch: Seq[String] = + factory.requestedKeyValueStoreKeys ++ factory.defaultKeyValueStoreKeys private[preparation] def scopedKey(key: String) = ScopedKey(workflowDescriptor.id, KvJobKey(jobKey), key) private[preparation] def lookupKeyValueEntries(inputs: WomEvaluatedCallInputs, attributes: Map[LocallyQualifiedName, WomValue], maybeCallCachingEligible: MaybeCallCachingEligible, - dockerSize: Option[DockerSize]) = { + dockerSize: Option[DockerSize] + ) = { val keysToLookup = kvStoreKeysToPrefetch map scopedKey keysToLookup foreach { serviceRegistryActor ! KvGet(_) } - goto(FetchingKeyValueStoreEntries) using JobPreparationKeyLookupData(PartialKeyValueLookups(Map.empty, keysToLookup), maybeCallCachingEligible, dockerSize, inputs, attributes) + goto(FetchingKeyValueStoreEntries) using JobPreparationKeyLookupData( + PartialKeyValueLookups(Map.empty, keysToLookup), + maybeCallCachingEligible, + dockerSize, + inputs, + attributes + ) } - private [preparation] def evaluateInputsAndAttributes(valueStore: ValueStore): ErrorOr[(WomEvaluatedCallInputs, Map[LocallyQualifiedName, WomValue])] = { + private[preparation] def evaluateInputsAndAttributes( + valueStore: ValueStore + ): ErrorOr[(WomEvaluatedCallInputs, Map[LocallyQualifiedName, WomValue])] = { import common.validation.ErrorOr.{ShortCircuitingFlatMap, NestedErrorOr} for { evaluatedInputs <- ErrorOr(resolveAndEvaluateInputs(jobKey, expressionLanguageFunctions, valueStore)).flatten @@ -127,7 +152,9 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } yield (evaluatedInputs, runtimeAttributes) } - private def fetchDockerHashesIfNecessary(inputs: WomEvaluatedCallInputs, attributes: Map[LocallyQualifiedName, WomValue]) = { + private def fetchDockerHashesIfNecessary(inputs: WomEvaluatedCallInputs, + attributes: Map[LocallyQualifiedName, WomValue] + ) = { def sendDockerRequest(dockerImageId: DockerImageIdentifier) = { val dockerHashRequest = DockerInfoRequest(dockerImageId, dockerHashCredentials) val newData = JobPreparationDockerLookupData(dockerHashRequest, inputs, attributes) @@ -136,12 +163,17 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } def handleDockerValue(value: String) = DockerImageIdentifier.fromString(value) match { - // If the backend supports docker, lookup is enabled, and we got a tag - we need to lookup the hash - case Success(dockerImageId: DockerImageIdentifierWithoutHash) 
if hasDockerDefinition && DockerConfiguration.instance.enabled => + // If the backend supports docker, lookup is enabled, and we got a tag - we need to lookup the hash + case Success(dockerImageId: DockerImageIdentifierWithoutHash) + if hasDockerDefinition && DockerConfiguration.instance.enabled => sendDockerRequest(dockerImageId) - // If the backend supports docker, we got a tag but lookup is disabled, continue with no call caching and no hash + // If the backend supports docker, we got a tag but lookup is disabled, continue with no call caching and no hash case Success(dockerImageId: DockerImageIdentifierWithoutHash) if hasDockerDefinition => - lookupKvsOrBuildDescriptorAndStop(inputs, attributes, FloatingDockerTagWithoutHash(dockerImageId.fullName), None) + lookupKvsOrBuildDescriptorAndStop(inputs, + attributes, + FloatingDockerTagWithoutHash(dockerImageId.fullName), + None + ) // If the backend doesn't support docker - no need to lookup and we're ok for call caching case Success(_: DockerImageIdentifierWithoutHash) if !hasDockerDefinition => @@ -165,8 +197,9 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } private def updateRuntimeMemory(runtimeAttributes: Map[LocallyQualifiedName, WomValue], - memoryMultiplierOption: Option[Double]): Map[LocallyQualifiedName, WomValue] = { - def multiplyRuntimeMemory(multiplier: Double): Map[LocallyQualifiedName, WomValue] = { + memoryMultiplierOption: Option[Double] + ): Map[LocallyQualifiedName, WomValue] = { + def multiplyRuntimeMemory(multiplier: Double): Map[LocallyQualifiedName, WomValue] = runtimeAttributes.get(RuntimeAttributesKeys.MemoryKey) match { case Some(WomString(memory)) => MemorySize.parse(memory) match { @@ -177,7 +210,6 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } case _ => runtimeAttributes } - } memoryMultiplierOption match { case None => runtimeAttributes @@ -192,12 +224,16 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, private def lookupKvsOrBuildDescriptorAndStop(inputs: WomEvaluatedCallInputs, attributes: Map[LocallyQualifiedName, WomValue], maybeCallCachingEligible: MaybeCallCachingEligible, - dockerSize: Option[DockerSize]) = { + dockerSize: Option[DockerSize] + ) = if (kvStoreKeysToPrefetch.nonEmpty) lookupKeyValueEntries(inputs, attributes, maybeCallCachingEligible, dockerSize) - else sendResponseAndStop(prepareBackendDescriptor(inputs, attributes, maybeCallCachingEligible, Map.empty, dockerSize)) - } + else + sendResponseAndStop(prepareBackendDescriptor(inputs, attributes, maybeCallCachingEligible, Map.empty, dockerSize)) - private def handleDockerHashSuccess(dockerHashResult: DockerHashResult, dockerSize: Option[DockerSize], data: JobPreparationDockerLookupData) = { + private def handleDockerHashSuccess(dockerHashResult: DockerHashResult, + dockerSize: Option[DockerSize], + data: JobPreparationDockerLookupData + ) = { val hashValue = data.dockerHashRequest.dockerImageID match { case withoutHash: DockerImageIdentifierWithoutHash => withoutHash.withHash(dockerHashResult) case withHash => withHash @@ -207,13 +243,19 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } private def sendCompressedDockerSizeToMetadata(dockerSize: DockerSize) = { - val event = MetadataEvent(metadataKeyForCall(jobKey, CallMetadataKeys.CompressedDockerSize), MetadataValue(dockerSize.compressedSize)) + val event = MetadataEvent(metadataKeyForCall(jobKey, CallMetadataKeys.CompressedDockerSize), + MetadataValue(dockerSize.compressedSize) + 
) serviceRegistryActor ! PutMetadataAction(event) } private def handleDockerHashFailed(data: JobPreparationDockerLookupData) = { val floatingDockerTag = data.dockerHashRequest.dockerImageID.fullName - lookupKvsOrBuildDescriptorAndStop(data.inputs, data.attributes, FloatingDockerTagWithoutHash(floatingDockerTag), None) + lookupKvsOrBuildDescriptorAndStop(data.inputs, + data.attributes, + FloatingDockerTagWithoutHash(floatingDockerTag), + None + ) } private def sendResponseAndStop(response: CallPreparationActorResponse) = { @@ -221,44 +263,70 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, stay() } - private def sendFailureAndStop(failure: Throwable) = { + private def sendFailureAndStop(failure: Throwable) = sendResponseAndStop(CallPreparationFailed(jobKey, failure)) - } // 'jobExecutionProps' is broken into a separate function for TestJobPreparationActor to override: private[preparation] def jobExecutionProps(jobDescriptor: BackendJobDescriptor, initializationData: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActor: Option[ActorRef]) = factory.jobExecutionActorProps(jobDescriptor, initializationData, serviceRegistryActor, ioActor, backendSingletonActor) + backendSingletonActor: Option[ActorRef] + ) = factory.jobExecutionActorProps(jobDescriptor, + initializationData, + serviceRegistryActor, + ioActor, + backendSingletonActor + ) private[preparation] def prepareBackendDescriptor(inputEvaluation: WomEvaluatedCallInputs, runtimeAttributes: Map[LocallyQualifiedName, WomValue], maybeCallCachingEligible: MaybeCallCachingEligible, prefetchedJobStoreEntries: Map[String, KvResponse], - dockerSize: Option[DockerSize]): BackendJobPreparationSucceeded = { + dockerSize: Option[DockerSize] + ): BackendJobPreparationSucceeded = { val memoryMultiplier: Option[Double] = prefetchedJobStoreEntries.get(MemoryMultiplierKey) match { - case Some(KvPair(_,v)) => Try(v.toDouble) match { - case Success(m) => Option(m) - case Failure(e) => - // should not happen as we are converting a value that Cromwell put in DB after validation - log.error(e, s"Programmer error: unexpected failure attempting to convert value of MemoryMultiplierKey from JOB_KEY_VALUE_ENTRY table to Double.") - None - } + case Some(KvPair(_, v)) => + Try(v.toDouble) match { + case Success(m) => Option(m) + case Failure(e) => + // should not happen as we are converting a value that Cromwell put in DB after validation + log.error( + e, + s"Programmer error: unexpected failure attempting to convert value of MemoryMultiplierKey from JOB_KEY_VALUE_ENTRY table to Double." 
+ ) + None + } case _ => None } val updatedRuntimeAttributes = updateRuntimeMemory(runtimeAttributes, memoryMultiplier) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor.backendDescriptor, jobKey, updatedRuntimeAttributes, inputEvaluation, maybeCallCachingEligible, dockerSize, prefetchedJobStoreEntries) - BackendJobPreparationSucceeded(jobDescriptor, jobExecutionProps(jobDescriptor, initializationData, serviceRegistryActor, ioActor, backendSingletonActor)) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor.backendDescriptor, + jobKey, + updatedRuntimeAttributes, + inputEvaluation, + maybeCallCachingEligible, + dockerSize, + prefetchedJobStoreEntries + ) + BackendJobPreparationSucceeded( + jobDescriptor, + jobExecutionProps(jobDescriptor, initializationData, serviceRegistryActor, ioActor, backendSingletonActor) + ) } - private [preparation] def prepareRuntimeAttributes(inputEvaluation: Map[InputDefinition, WomValue]): ErrorOr[Map[LocallyQualifiedName, WomValue]] = { + private[preparation] def prepareRuntimeAttributes( + inputEvaluation: Map[InputDefinition, WomValue] + ): ErrorOr[Map[LocallyQualifiedName, WomValue]] = { import RuntimeAttributeDefinition.{addDefaultsToAttributes, evaluateRuntimeAttributes} - val curriedAddDefaultsToAttributes = addDefaultsToAttributes(runtimeAttributeDefinitions, workflowDescriptor.backendDescriptor.workflowOptions) _ + val curriedAddDefaultsToAttributes = + addDefaultsToAttributes(runtimeAttributeDefinitions, workflowDescriptor.backendDescriptor.workflowOptions) _ val unevaluatedRuntimeAttributes = jobKey.call.callable.runtimeAttributes - evaluateRuntimeAttributes(unevaluatedRuntimeAttributes, expressionLanguageFunctions, inputEvaluation) map curriedAddDefaultsToAttributes + evaluateRuntimeAttributes(unevaluatedRuntimeAttributes, + expressionLanguageFunctions, + inputEvaluation + ) map curriedAddDefaultsToAttributes } } @@ -266,14 +334,16 @@ object JobPreparationActor { sealed trait JobPreparationActorData case object JobPreparationActorNoData extends JobPreparationActorData - private final case class JobPreparationKeyLookupData(keyLookups: PartialKeyValueLookups, + final private case class JobPreparationKeyLookupData(keyLookups: PartialKeyValueLookups, maybeCallCachingEligible: MaybeCallCachingEligible, dockerSize: Option[DockerSize], inputs: WomEvaluatedCallInputs, - attributes: Map[LocallyQualifiedName, WomValue]) extends JobPreparationActorData - private final case class JobPreparationDockerLookupData(dockerHashRequest: DockerInfoRequest, + attributes: Map[LocallyQualifiedName, WomValue] + ) extends JobPreparationActorData + final private case class JobPreparationDockerLookupData(dockerHashRequest: DockerInfoRequest, inputs: WomEvaluatedCallInputs, - attributes: Map[LocallyQualifiedName, WomValue]) extends JobPreparationActorData + attributes: Map[LocallyQualifiedName, WomValue] + ) extends JobPreparationActorData sealed trait JobPreparationActorState case object Idle extends JobPreparationActorState @@ -287,16 +357,20 @@ object JobPreparationActor { initializationData: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActor: Option[ActorRef]) = { + backendSingletonActor: Option[ActorRef] + ) = // Note that JobPreparationActor doesn't run on the engine dispatcher as it mostly executes backend-side code // (WDL expression evaluation using Backend's expressionLanguageFunctions) - Props(new JobPreparationActor(workflowDescriptor, - jobKey, - factory, - workflowDockerLookupActor = 
workflowDockerLookupActor, - initializationData, - serviceRegistryActor = serviceRegistryActor, - ioActor = ioActor, - backendSingletonActor = backendSingletonActor)).withDispatcher(EngineDispatcher) - } + Props( + new JobPreparationActor( + workflowDescriptor, + jobKey, + factory, + workflowDockerLookupActor = workflowDockerLookupActor, + initializationData, + serviceRegistryActor = serviceRegistryActor, + ioActor = ioActor, + backendSingletonActor = backendSingletonActor + ) + ).withDispatcher(EngineDispatcher) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/KeyValueLookups.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/KeyValueLookups.scala index f4d5786223b..73014807d26 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/KeyValueLookups.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/KeyValueLookups.scala @@ -5,9 +5,11 @@ import cromwell.services.keyvalue.KeyValueServiceActor.{KvResponse, ScopedKey} /** * Handles the determination of when we know key lookups are successful. */ -private sealed trait KeyValueLookups +sealed private trait KeyValueLookups -private[preparation] final case class PartialKeyValueLookups(responses: Map[ScopedKey, KvResponse], awaiting: Seq[ScopedKey]) { +final private[preparation] case class PartialKeyValueLookups(responses: Map[ScopedKey, KvResponse], + awaiting: Seq[ScopedKey] +) { def withResponse(key: ScopedKey, response: KvResponse) = { val newResponses = responses + (key -> response) val newAwaiting = awaiting diff List(key) @@ -19,6 +21,6 @@ private[preparation] final case class PartialKeyValueLookups(responses: Map[Scop } } -private final case class KeyValueLookupResults(values: Map[ScopedKey, KvResponse]) { +final private case class KeyValueLookupResults(values: Map[ScopedKey, KvResponse]) { def unscoped: Map[String, KvResponse] = values map { case (k, v) => k.key -> v } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala index f14c00965d7..8165b13128b 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala @@ -20,7 +20,9 @@ import wom.values.{WomEvaluatedCallInputs, WomValue} class SubWorkflowPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, expressionLanguageFunctions: EngineIoFunctions, callKey: SubWorkflowKey, - subWorkflowId: WorkflowId) extends Actor with WorkflowLogging { + subWorkflowId: WorkflowId +) extends Actor + with WorkflowLogging { override lazy val workflowIdForLogging = workflowDescriptor.possiblyNotRootWorkflowId override lazy val rootWorkflowIdForLogging = workflowDescriptor.rootWorkflowId @@ -33,9 +35,13 @@ class SubWorkflowPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, id = subWorkflowId, callable = callKey.node.callable, knownValues = startingValues, - breadCrumbs = oldBackendDescriptor.breadCrumbs :+ BackendJobBreadCrumb(workflowDescriptor.callable, workflowDescriptor.id, callKey) + breadCrumbs = oldBackendDescriptor.breadCrumbs :+ BackendJobBreadCrumb(workflowDescriptor.callable, + workflowDescriptor.id, + callKey + ) ) - val 
engineDescriptor = workflowDescriptor.copy(backendDescriptor = newBackendDescriptor, parentWorkflow = Option(workflowDescriptor)) + val engineDescriptor = + workflowDescriptor.copy(backendDescriptor = newBackendDescriptor, parentWorkflow = Option(workflowDescriptor)) SubWorkflowPreparationSucceeded(engineDescriptor, inputEvaluation) } } @@ -43,7 +49,9 @@ class SubWorkflowPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, /** * Work out a set of "workflow inputs" to pass to this subworkflow as though it were a top-level workflow receiving inputs */ - private def evaluateStartingKnownValues(inputEvaluation: WomEvaluatedCallInputs, workflowInputs: Set[GraphInputNode]): ErrorOr[Map[OutputPort, WomValue]] = { + private def evaluateStartingKnownValues(inputEvaluation: WomEvaluatedCallInputs, + workflowInputs: Set[GraphInputNode] + ): ErrorOr[Map[OutputPort, WomValue]] = { // Find the values in the provided inputs that match up with subworkflow inputs. Silently drop the rest on the floor. val providedInputs: Map[OutputPort, WomValue] = inputEvaluation.toList.flatMap { case (name, value) => @@ -59,9 +67,14 @@ class SubWorkflowPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } // Make sure the subworkflow will be getting a value for every required input: - NonEmptyList.fromList((workflowInputs.toSet[GraphNode] diff providedInputs.keySet.map(_.graphNode) diff optionalsAndDefaults).toList) match { + NonEmptyList.fromList( + (workflowInputs.toSet[GraphNode] diff providedInputs.keySet.map(_.graphNode) diff optionalsAndDefaults).toList + ) match { case None => Valid(providedInputs) - case Some(missingNodeNel) => Invalid(missingNodeNel map (n => s"Couldn't find starting value for subworkflow input: ${n.identifier.localName}")) + case Some(missingNodeNel) => + Invalid( + missingNodeNel map (n => s"Couldn't find starting value for subworkflow input: ${n.identifier.localName}") + ) } } @@ -71,10 +84,14 @@ class SubWorkflowPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, import common.validation.ErrorOr._ evaluatedInputs.flatMap(prepareExecutionActor) match { case Valid(response) => context.parent ! response - case Invalid(f) => context.parent ! CallPreparationFailed(callKey, new MessageAggregation { - override def exceptionContext: String = "Failed to evaluate inputs for sub workflow" - override def errorMessages: Iterable[String] = f.toList - }) + case Invalid(f) => + context.parent ! 
CallPreparationFailed( + callKey, + new MessageAggregation { + override def exceptionContext: String = "Failed to evaluate inputs for sub workflow" + override def errorMessages: Iterable[String] = f.toList + } + ) } context stop self @@ -83,14 +100,17 @@ class SubWorkflowPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, } object SubWorkflowPreparationActor { - case class SubWorkflowPreparationSucceeded(workflowDescriptor: EngineWorkflowDescriptor, inputs: WomEvaluatedCallInputs) extends CallPreparationActorResponse + case class SubWorkflowPreparationSucceeded(workflowDescriptor: EngineWorkflowDescriptor, + inputs: WomEvaluatedCallInputs + ) extends CallPreparationActorResponse def props(workflowDescriptor: EngineWorkflowDescriptor, expressionLanguageFunctions: EngineIoFunctions, key: SubWorkflowKey, - subWorkflowId: WorkflowId) = { + subWorkflowId: WorkflowId + ) = // Note that JobPreparationActor doesn't run on the engine dispatcher as it mostly executes backend-side code // (WDL expression evaluation using Backend's expressionLanguageFunctions) - Props(new SubWorkflowPreparationActor(workflowDescriptor, expressionLanguageFunctions, key, subWorkflowId)).withDispatcher(EngineDispatcher) - } + Props(new SubWorkflowPreparationActor(workflowDescriptor, expressionLanguageFunctions, key, subWorkflowId)) + .withDispatcher(EngineDispatcher) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalCollectorKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalCollectorKey.scala index 8645ce0f6a5..244c1dba99b 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalCollectorKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalCollectorKey.scala @@ -11,19 +11,19 @@ import wom.graph.GraphNodePort.ConditionalOutputPort * Key that becomes runnable when a node inside a conditional node is complete. * This is needed so that the ConditionalOutputPort of the conditional can be given a value. */ -private [execution] case class ConditionalCollectorKey(conditionalOutputPort: ConditionalOutputPort, - index: ExecutionIndex) extends JobKey { +private[execution] case class ConditionalCollectorKey(conditionalOutputPort: ConditionalOutputPort, + index: ExecutionIndex +) extends JobKey { val outputNodeToCollect: PortBasedGraphOutputNode = conditionalOutputPort.outputToExpose override val node: GraphNode = conditionalOutputPort.outputToExpose override val attempt = 1 override val tag = s"Collector-${node.localName}" - def processRunnable(data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = { + def processRunnable(data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = data.valueStore.collectConditional(this) map { outputs => WorkflowExecutionDiff( executionStoreChanges = Map(this -> ExecutionStatus.Done), valueStoreAdditions = outputs ) } - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalKey.scala index a263601f488..7cc18c3b964 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ConditionalKey.scala @@ -16,7 +16,7 @@ import wom.values.{WomBoolean, WomValue} * Represents a conditional node in the execution store. 
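 * A toy, WOM-free rendition of the bypass decision made in populate below (the names and simplified types
 * are illustrative only, not the real ExecutionStore API):
 * {{{
 *   sealed trait Status
 *   case object NotStarted extends Status
 *   case object Bypassed extends Status
 *
 *   // conditionHolds = false means the 'if' evaluated to false, so every inner node is bypassed.
 *   def populate(innerNodes: List[String], conditionHolds: Boolean): Map[String, Status] = {
 *     val status = if (conditionHolds) NotStarted else Bypassed
 *     innerNodes.map(_ -> status).toMap
 *   }
 *
 *   populate(List("task_a", "collector"), conditionHolds = false)
 *   // Map(task_a -> Bypassed, collector -> Bypassed)
 * }}}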
* Runnable when the associated expression (represented by an expression node in the graph) is done. */ -private [execution] case class ConditionalKey(node: ConditionalNode, index: ExecutionIndex) extends JobKey { +private[execution] case class ConditionalKey(node: ConditionalNode, index: ExecutionIndex) extends JobKey { override val tag = node.localName override val attempt = 1 @@ -30,10 +30,10 @@ private [execution] case class ConditionalKey(node: ConditionalNode, index: Exec * @return ExecutionStore of scattered children. */ def populate(bypassed: Boolean): Map[JobKey, ExecutionStatus.Value] = { - val conditionalKeys = node.innerGraph.nodes.flatMap({ node => keyify(node) }) + val conditionalKeys = node.innerGraph.nodes.flatMap(node => keyify(node)) val finalStatus = if (bypassed) ExecutionStatus.NotStarted else ExecutionStatus.Bypassed - (conditionalKeys ++ collectors).map({ _ -> finalStatus }).toMap + (conditionalKeys ++ collectors).map(_ -> finalStatus).toMap } /** @@ -48,12 +48,16 @@ private [execution] case class ConditionalKey(node: ConditionalNode, index: Exec case _: GraphInputNode => None case _: PortBasedGraphOutputNode => None case _: ScatterNode => - throw new UnsupportedOperationException("Nested Scatters are not supported (yet) ... but you might try a sub workflow to achieve the same effect!") + throw new UnsupportedOperationException( + "Nested Scatters are not supported (yet) ... but you might try a sub workflow to achieve the same effect!" + ) case e => throw new UnsupportedOperationException(s"Scope ${e.getClass.getName} is not supported in an If block.") } - def processRunnable(data: WorkflowExecutionActorData, workflowLogger: WorkflowLogger): ErrorOr[WorkflowExecutionDiff] = { + def processRunnable(data: WorkflowExecutionActorData, + workflowLogger: WorkflowLogger + ): ErrorOr[WorkflowExecutionDiff] = { // This is the output port from the conditional's 'condition' input: val conditionOutputPort = node.conditionExpression.singleOutputPort data.valueStore.get(conditionOutputPort, index) match { @@ -64,9 +68,9 @@ private [execution] case class ConditionalKey(node: ConditionalNode, index: Exec node.conditionalOutputPorts.map(op => ValueKey(op, index) -> op.womType.none).toMap } else Map.empty - WorkflowExecutionDiff( - executionStoreChanges = populate(b.value) + (this -> conditionalStatus), - valueStoreAdditions = valueStoreAdditions).validNel + WorkflowExecutionDiff(executionStoreChanges = populate(b.value) + (this -> conditionalStatus), + valueStoreAdditions = valueStoreAdditions + ).validNel case Some(v: WomValue) => s"'if' condition ${node.conditionExpression.womExpression.sourceString} must evaluate to a boolean but instead got ${v.womType.stableName}".invalidNel case None => diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ExpressionKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ExpressionKey.scala index 5978ec8c254..5fd8e36cac5 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ExpressionKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ExpressionKey.scala @@ -7,7 +7,10 @@ import common.validation.Validation._ import cromwell.core.ExecutionIndex._ import cromwell.core.{ExecutionStatus, JobKey} import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionDiff -import cromwell.engine.workflow.lifecycle.execution.keys.ExpressionKey.{ExpressionEvaluationFailedResponse, ExpressionEvaluationSucceededResponse} +import 
cromwell.engine.workflow.lifecycle.execution.keys.ExpressionKey.{ + ExpressionEvaluationFailedResponse, + ExpressionEvaluationSucceededResponse +} import cromwell.engine.workflow.lifecycle.execution.stores.ValueStore import wom.expression.IoFunctionSet import wom.graph.GraphNodePort.OutputPort @@ -18,14 +21,17 @@ final case class ExpressionKey(node: ExpressionNodeLike, index: ExecutionIndex) override val attempt = 1 override lazy val tag = s"Expression-${node.localName}:${index.fromIndex}:$attempt" - def processRunnable(ioFunctionSet: IoFunctionSet, valueStore: ValueStore, workflowExecutionActor: ActorRef): ErrorOr[WorkflowExecutionDiff] = { + def processRunnable(ioFunctionSet: IoFunctionSet, + valueStore: ValueStore, + workflowExecutionActor: ActorRef + ): ErrorOr[WorkflowExecutionDiff] = { // Send a message to self in case we decide to change evaluate to return asynchronously, if we don't we could // directly add the value to the value store in the execution diff node .evaluate(valueStore.resolve(index), ioFunctionSet) .contextualizeErrors(s"evaluate '${node.fullyQualifiedName}'") match { case Right(result) => workflowExecutionActor ! ExpressionEvaluationSucceededResponse(this, result) - case Left(f) => + case Left(f) => workflowExecutionActor ! ExpressionEvaluationFailedResponse(this, new RuntimeException(f.toList.mkString(", "))) } @@ -34,6 +40,8 @@ final case class ExpressionKey(node: ExpressionNodeLike, index: ExecutionIndex) } object ExpressionKey { - private [execution] case class ExpressionEvaluationSucceededResponse(expressionKey: ExpressionKey, values: Map[OutputPort, WomValue]) - private [execution] case class ExpressionEvaluationFailedResponse(expressionKey: ExpressionKey, reason: Throwable) + private[execution] case class ExpressionEvaluationSucceededResponse(expressionKey: ExpressionKey, + values: Map[OutputPort, WomValue] + ) + private[execution] case class ExpressionEvaluationFailedResponse(expressionKey: ExpressionKey, reason: Throwable) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterCollectorKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterCollectorKey.scala index 353971bd652..aa50be93144 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterCollectorKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterCollectorKey.scala @@ -11,21 +11,21 @@ import wom.graph.{GraphNode, PortBasedGraphOutputNode} * Key that becomes runnable when all shards of a collectible node are complete and need to be collected to form the output of this * call outside the scatter block. 
*/ -private [execution] case class ScatterCollectorKey(scatterGatherPort: ScatterGathererPort, - scatterWidth: Int, - scatterCollectionFunction: ScatterCollectionFunction) extends JobKey { +private[execution] case class ScatterCollectorKey(scatterGatherPort: ScatterGathererPort, + scatterWidth: Int, + scatterCollectionFunction: ScatterCollectionFunction +) extends JobKey { val outputNodeToGather: PortBasedGraphOutputNode = scatterGatherPort.outputToGather override val node: GraphNode = outputNodeToGather override val index = None override val attempt = 1 override val tag = s"Collector-${node.localName}" - def processRunnable(data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = { + def processRunnable(data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = data.valueStore.collectShards(this) map { outputs => WorkflowExecutionDiff( executionStoreChanges = Map(this -> ExecutionStatus.Done), valueStoreAdditions = outputs ) } - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterKey.scala index b3e4529b9d3..0bd8a14f3a3 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterKey.scala @@ -20,18 +20,19 @@ import wom.values.WomValue import scala.language.postfixOps -private [execution] case class ScatterKey(node: ScatterNode) extends JobKey { +private[execution] case class ScatterKey(node: ScatterNode) extends JobKey { // When scatters are nested, this might become Some(_) override val index = None override val attempt = 1 override val tag = node.localName - def makeCollectors(count: Int, scatterCollectionFunction: ScatterCollectionFunction): Set[ScatterCollectorKey] = (node.outputMapping.groupBy(_.outputToGather.source.graphNode) flatMap { - case (_: CallNode | _: ExposedExpressionNode | _: ConditionalNode, scatterGatherPorts) => - scatterGatherPorts.map(sgp => ScatterCollectorKey(sgp, count, scatterCollectionFunction)) - case _ => Set.empty[ScatterCollectorKey] - }).toSet + def makeCollectors(count: Int, scatterCollectionFunction: ScatterCollectionFunction): Set[ScatterCollectorKey] = + (node.outputMapping.groupBy(_.outputToGather.source.graphNode) flatMap { + case (_: CallNode | _: ExposedExpressionNode | _: ConditionalNode, scatterGatherPorts) => + scatterGatherPorts.map(sgp => ScatterCollectorKey(sgp, count, scatterCollectionFunction)) + case _ => Set.empty[ScatterCollectorKey] + }).toSet /** * Creates a sub-ExecutionStore with Starting entries for each of the scoped children. @@ -54,12 +55,17 @@ private [execution] case class ScatterKey(node: ScatterNode) extends JobKey { case _: GraphInputNode => List.empty case _: PortBasedGraphOutputNode => List.empty case _: ScatterNode => - throw new UnsupportedOperationException("Nested Scatters are not supported (yet) ... but you might try a sub workflow to achieve the same effect!") + throw new UnsupportedOperationException( + "Nested Scatters are not supported (yet) ... but you might try a sub workflow to achieve the same effect!" 
+ ) case e => throw new UnsupportedOperationException(s"Scope ${e.getClass.getName} is not supported.") } - def processRunnable(data: WorkflowExecutionActorData, workflowExecutionActor: ActorRef, maxScatterWidth: Int): ErrorOr[WorkflowExecutionDiff] = { + def processRunnable(data: WorkflowExecutionActorData, + workflowExecutionActor: ActorRef, + maxScatterWidth: Int + ): ErrorOr[WorkflowExecutionDiff] = { import cats.syntax.traverse._ def getScatterArray(scatterVariableNode: ScatterVariableNode): ErrorOr[ScatterVariableAndValue] = { @@ -84,26 +90,29 @@ private [execution] case class ScatterKey(node: ScatterNode) extends JobKey { // Execution changes (for execution store and value store) generated by the scatter iteration nodes def buildExecutionChanges(scatterVariableAndValues: List[ScatterVariableAndValue]) = { - val (executionStoreChanges, valueStoreChanges) = scatterVariableAndValues.map({ + val (executionStoreChanges, valueStoreChanges) = scatterVariableAndValues.map { case ScatterVariableAndValue(scatterVariableNode, arrayValue) => val executionStoreChange = ScatterVariableInputKey(scatterVariableNode, arrayValue) -> ExecutionStatus.Done val valueStoreChange = ValueKey(scatterVariableNode.singleOutputPort, None) -> arrayValue executionStoreChange -> valueStoreChange - }).unzip + }.unzip executionStoreChanges.toMap -> valueStoreChanges.toMap } // Checks the scatter width of a scatter node and builds WorkflowExecutionDiff accordingly // If scatter width is more than max allowed limit, it fails the ScatterNode key - def buildExecutionDiff(scatterSize: Int, arrays: List[ScatterVariableAndValue]): WorkflowExecutionDiff = { - if(scatterSize > maxScatterWidth) { - workflowExecutionActor ! JobFailedNonRetryableResponse(this, new Exception(s"Workflow scatter width of $scatterSize exceeds configured limit of $maxScatterWidth."), None) + def buildExecutionDiff(scatterSize: Int, arrays: List[ScatterVariableAndValue]): WorkflowExecutionDiff = + if (scatterSize > maxScatterWidth) { + workflowExecutionActor ! 
JobFailedNonRetryableResponse( + this, + new Exception(s"Workflow scatter width of $scatterSize exceeds configured limit of $maxScatterWidth."), + None + ) WorkflowExecutionDiff(Map(this -> ExecutionStatus.Failed)) - } - else { + } else { val (scatterVariablesExecutionChanges, valueStoreChanges) = buildExecutionChanges(arrays) val executionStoreChanges = populate( scatterSize, @@ -115,8 +124,6 @@ private [execution] case class ScatterKey(node: ScatterNode) extends JobKey { valueStoreAdditions = valueStoreChanges ) } - } - (for { arrays <- scatterArraysValuesCheck diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterVariableInputKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterVariableInputKey.scala index 202e1e2db7b..6480c643ea0 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterVariableInputKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatterVariableInputKey.scala @@ -4,7 +4,7 @@ import cromwell.core.JobKey import wom.graph.GraphInputNode import wom.values.WomArray.WomArrayLike -private [execution] case class ScatterVariableInputKey(node: GraphInputNode, womArrayLike: WomArrayLike) extends JobKey { +private[execution] case class ScatterVariableInputKey(node: GraphInputNode, womArrayLike: WomArrayLike) extends JobKey { override def index: Option[Int] = None override def attempt: Int = 1 override def tag: String = node.localName diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatteredCallCompletionKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatteredCallCompletionKey.scala index c58982e807c..e418cbe1d43 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatteredCallCompletionKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/ScatteredCallCompletionKey.scala @@ -9,15 +9,13 @@ import wom.graph.{CallNode, GraphNode} /** * Key that should become runnable when all shards of a scattered call are complete. 
*/ -private [execution] case class ScatteredCallCompletionKey(call: CallNode, - scatterWidth: Int) extends JobKey { +private[execution] case class ScatteredCallCompletionKey(call: CallNode, scatterWidth: Int) extends JobKey { override val node: GraphNode = call override val index = None override val attempt = 1 override val totalIndices = scatterWidth override val tag = s"CallCompletion-${node.localName}" - def processRunnable(data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = { + def processRunnable(data: WorkflowExecutionActorData): ErrorOr[WorkflowExecutionDiff] = WorkflowExecutionDiff(executionStoreChanges = Map(this -> ExecutionStatus.Done)).validNel - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/SubWorkflowKey.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/SubWorkflowKey.scala index 44c2f0a57e2..f66f7f2fade 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/SubWorkflowKey.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/SubWorkflowKey.scala @@ -4,6 +4,7 @@ import cromwell.core.CallKey import cromwell.core.ExecutionIndex._ import wom.graph.WorkflowCallNode -private [execution] case class SubWorkflowKey(node: WorkflowCallNode, index: ExecutionIndex, attempt: Int) extends CallKey { +private[execution] case class SubWorkflowKey(node: WorkflowCallNode, index: ExecutionIndex, attempt: Int) + extends CallKey { override val tag = s"SubWorkflow-${node.localName}:${index.fromIndex}:$attempt" } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/package.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/package.scala index 95d00892c4a..4ddef0c980d 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/package.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/keys/package.scala @@ -15,9 +15,8 @@ package object keys { ).validNel } - private def bypassedScopeResults(jobKey: JobKey): Map[ValueKey, WomOptionalValue] = { - jobKey.node.outputPorts.map({ outputPort => - ValueKey(outputPort, jobKey.index) -> WomOptionalValue.none(outputPort.womType) - }).toMap - } + private def bypassedScopeResults(jobKey: JobKey): Map[ValueKey, WomOptionalValue] = + jobKey.node.outputPorts.map { outputPort => + ValueKey(outputPort, jobKey.index) -> WomOptionalValue.none(outputPort.womType) + }.toMap } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala index 32d9aa2ca4c..e6445ffda23 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala @@ -25,6 +25,7 @@ object ExecutionStore { val MaxJobsToStartPerTick = 1000 implicit class EnhancedJobKey(val key: JobKey) extends AnyVal { + /** * Given a StatusStable, return true if all dependencies of this key are in the table (and therefore are in this status), * false otherwise. @@ -42,26 +43,30 @@ object ExecutionStore { case scatterCollector: ScatterCollectorKey => // The outputToGather is the PortBasedGraphOutputNode of the inner graph that we're collecting. Go one step upstream and then // find the node which will have entries in the execution store. 
If that has 'n' entries, then we're good to start collecting, - statusTable.row(scatterCollector.outputNodeToGather.singleUpstreamPort.executionNode).size == scatterCollector.scatterWidth + statusTable + .row(scatterCollector.outputNodeToGather.singleUpstreamPort.executionNode) + .size == scatterCollector.scatterWidth case conditionalCollector: ConditionalCollectorKey => val upstreamPort = conditionalCollector.outputNodeToCollect.singleUpstreamPort upstreamPort.executionNode.isInStatus(chooseIndex(upstreamPort), statusTable) // In the general case, the dependencies are held by the upstreamPorts case _ => key.node.upstreamPorts forall { p => - p.executionNode.isInStatus(chooseIndex(p), statusTable) + p.executionNode.isInStatus(chooseIndex(p), statusTable) } } } def nonStartableOutputKeys: Set[JobKey] = key match { - case scatterKey: ScatterKey => scatterKey.makeCollectors(0, scatterKey.node.scatterCollectionFunctionBuilder(List.empty)).toSet[JobKey] + case scatterKey: ScatterKey => + scatterKey.makeCollectors(0, scatterKey.node.scatterCollectionFunctionBuilder(List.empty)).toSet[JobKey] case conditionalKey: ConditionalKey => conditionalKey.collectors.toSet[JobKey] case _ => Set.empty[JobKey] } } implicit class EnhancedOutputPort(val outputPort: OutputPort) extends AnyVal { + /** * Node that should be considered to determine upstream dependencies */ @@ -78,18 +83,25 @@ object ExecutionStore { case svn: ScatterVariableNode => table.contains(svn.linkToOuterGraph.graphNode, None) // OuterGraphInputNodes signal that an input comes from outside the graph. // Depending on whether or not this input is outside of a scatter graph will change the index which we need to look at - case ogin: OuterGraphInputNode if !ogin.preserveScatterIndex => ogin.linkToOuterGraph.executionNode.isInStatus(None, table) + case ogin: OuterGraphInputNode if !ogin.preserveScatterIndex => + ogin.linkToOuterGraph.executionNode.isInStatus(None, table) case ogin: OuterGraphInputNode => ogin.linkToOuterGraph.executionNode.isInStatus(index, table) case _: GraphInputNode => true case _ => table.contains(graphNode, index) } } - case class ExecutionStoreUpdate(runnableKeys: List[JobKey], updatedStore: ExecutionStore, statusChanges: Map[JobKey, ExecutionStatus]) + case class ExecutionStoreUpdate(runnableKeys: List[JobKey], + updatedStore: ExecutionStore, + statusChanges: Map[JobKey, ExecutionStatus] + ) def empty = ActiveExecutionStore(Map.empty[JobKey, ExecutionStatus], needsUpdate = false) - def apply(callable: ExecutableCallable, totalJobsByRootWf: AtomicInteger, totalMaxJobsPerRootWf: Int): ErrorOr[ActiveExecutionStore] = { + def apply(callable: ExecutableCallable, + totalJobsByRootWf: AtomicInteger, + totalMaxJobsPerRootWf: Int + ): ErrorOr[ActiveExecutionStore] = { // Keys that are added in a NotStarted Status val notStartedKeys = callable.graph.nodes collect { case call: CommandCallNode => BackendJobDescriptorKey(call, None, attempt = 1) @@ -118,21 +130,25 @@ object ExecutionStore { /** * Execution store in its nominal state */ -final case class ActiveExecutionStore private[stores](private val statusStore: Map[JobKey, ExecutionStatus], override val needsUpdate: Boolean) extends ExecutionStore(statusStore, needsUpdate) { +final case class ActiveExecutionStore private[stores] (private val statusStore: Map[JobKey, ExecutionStatus], + override val needsUpdate: Boolean +) extends ExecutionStore(statusStore, needsUpdate) { override def toString: String = { import io.circe.syntax._ import io.circe.Printer - statusStore.map { - case 
(k, v) if k.isShard => s"${k.node.fullyQualifiedName}:${k.index.get}" -> v.toString - case (k, v) => k.node.fullyQualifiedName -> v.toString - }.asJson.printWith(Printer.spaces2.copy(dropNullValues = true, colonLeft = "")) + statusStore + .map { + case (k, v) if k.isShard => s"${k.node.fullyQualifiedName}:${k.index.get}" -> v.toString + case (k, v) => k.node.fullyQualifiedName -> v.toString + } + .asJson + .printWith(Printer.spaces2.copy(dropNullValues = true, colonLeft = "")) } - override def updateKeys(values: Map[JobKey, ExecutionStatus], needsUpdate: Boolean): ActiveExecutionStore = { + override def updateKeys(values: Map[JobKey, ExecutionStatus], needsUpdate: Boolean): ActiveExecutionStore = this.copy(statusStore = statusStore ++ values, needsUpdate = needsUpdate) - } override def seal: SealedExecutionStore = SealedExecutionStore(statusStore.filterNot(_._2 == NotStarted), needsUpdate) override def withNeedsUpdateFalse: ExecutionStore = if (!needsUpdate) this else this.copy(needsUpdate = false) override def withNeedsUpdateTrue: ExecutionStore = if (needsUpdate) this else this.copy(needsUpdate = true) @@ -142,12 +158,13 @@ final case class ActiveExecutionStore private[stores](private val statusStore: M * Execution store when the workflow is in either Failing or Aborting state. Keys in NotStarted state have been removed and * no new NotStarted key can be added. Other statuses can still be updated. */ -final case class SealedExecutionStore private[stores](private val statusStore: Map[JobKey, ExecutionStatus], override val needsUpdate: Boolean) extends ExecutionStore(statusStore, false) { +final case class SealedExecutionStore private[stores] (private val statusStore: Map[JobKey, ExecutionStatus], + override val needsUpdate: Boolean +) extends ExecutionStore(statusStore, false) { - override def updateKeys(values: Map[JobKey, ExecutionStatus], needsUpdate: Boolean): SealedExecutionStore = { + override def updateKeys(values: Map[JobKey, ExecutionStatus], needsUpdate: Boolean): SealedExecutionStore = // Don't allow NotStarted keys in sealed mode this.copy(statusStore = statusStore ++ values.filterNot(_._2 == NotStarted), needsUpdate = needsUpdate) - } override def seal: SealedExecutionStore = this override def withNeedsUpdateFalse: ExecutionStore = this.copy(needsUpdate = false) override def withNeedsUpdateTrue: ExecutionStore = this.copy(needsUpdate = true) @@ -162,13 +179,14 @@ final case class SealedExecutionStore private[stores](private val statusStore: M * when true, something happened since the last update that could yield new runnable keys, so update should be called * when false, nothing happened between the last update and now that will yield different results so no need to call the update method */ -sealed abstract class ExecutionStore private[stores](statusStore: Map[JobKey, ExecutionStatus], val needsUpdate: Boolean) { +sealed abstract class ExecutionStore private[stores] (statusStore: Map[JobKey, ExecutionStatus], + val needsUpdate: Boolean +) { // View of the statusStore more suited for lookup based on status lazy val store: Map[ExecutionStatus, List[JobKey]] = statusStore.groupBy(_._2).safeMapValues(_.keys.toList) - def backendJobDescriptorKeyForNode(node: GraphNode): Option[BackendJobDescriptorKey] = { + def backendJobDescriptorKeyForNode(node: GraphNode): Option[BackendJobDescriptorKey] = statusStore.keys collectFirst { case k: BackendJobDescriptorKey if k.node eq node => k } - } /** * Number of queued jobs @@ -183,10 +201,9 @@ sealed abstract class ExecutionStore 
private[stores](statusStore: Map[JobKey, Ex /** * Update key statuses */ - def updateKeys(values: Map[JobKey, ExecutionStatus]): ExecutionStore = { + def updateKeys(values: Map[JobKey, ExecutionStatus]): ExecutionStore = // The store might newly need updating now if a job has completed because downstream jobs might now be runnable updateKeys(values, needsUpdate || values.values.exists(_.isTerminalOrRetryable)) - } /** * Returns a SealedExecutionStore: all NotStarted keys will be removed and no new NotStarted keys can be added after that @@ -204,23 +221,23 @@ sealed abstract class ExecutionStore private[stores](statusStore: Map[JobKey, Ex protected def withNeedsUpdateFalse: ExecutionStore /* - * Create 2 Tables, one for keys in done status and one for keys in terminal status. - * A Table is nothing more than a Map[R, Map[C, V]], see Table trait for more details - * In this case, rows are GraphNodes, columns are ExecutionIndexes, and values are JobKeys - * This allows for quick lookup of all shards for a node, as well as accessing a specific key with a - * (node, index) pair + * Create 2 Tables, one for keys in done status and one for keys in terminal status. + * A Table is nothing more than a Map[R, Map[C, V]], see Table trait for more details + * In this case, rows are GraphNodes, columns are ExecutionIndexes, and values are JobKeys + * This allows for quick lookup of all shards for a node, as well as accessing a specific key with a + * (node, index) pair */ lazy val (doneStatus, terminalStatus) = { def toTableEntry(key: JobKey) = (key.node, key.index, key) - store.foldLeft((Table.empty[GraphNode, ExecutionIndex, JobKey], Table.empty[GraphNode, ExecutionIndex, JobKey]))({ - case ((done, terminal), (status, keys)) => + store.foldLeft((Table.empty[GraphNode, ExecutionIndex, JobKey], Table.empty[GraphNode, ExecutionIndex, JobKey])) { + case ((done, terminal), (status, keys)) => lazy val newMapEntries = keys map toTableEntry val newDone = if (status.isDoneOrBypassed) done.addAll(newMapEntries) else done val newTerminal = if (status.isTerminal) terminal.addAll(newMapEntries) else terminal newDone -> newTerminal - }) + } } private def keysWithStatus(status: ExecutionStatus) = store.getOrElse(status, List.empty) @@ -229,29 +246,28 @@ sealed abstract class ExecutionStore private[stores](statusStore: Map[JobKey, Ex * We're done when all the keys have a terminal status, * which is equivalent to non of them being in a non-terminal status and faster to verify */ - def isDone: Boolean = { + def isDone: Boolean = NonTerminalStatuses.toList.map(keysWithStatus).forall(_.isEmpty) - } - def isStalled: Boolean = { + def isStalled: Boolean = !isDone && !needsUpdate && ActiveStatuses.map(keysWithStatus).forall(_.isEmpty) - } def unstarted = keysWithStatus(NotStarted) def jobStatus(jobKey: JobKey): Option[ExecutionStatus] = statusStore.get(jobKey) - def startedJobs: List[BackendJobDescriptorKey] = { - store.filterNot({ case (s, _) => s == NotStarted}).values.toList.flatten collect { + def startedJobs: List[BackendJobDescriptorKey] = + store.filterNot { case (s, _) => s == NotStarted }.values.toList.flatten collect { case k: BackendJobDescriptorKey => k } - } override def toString: String = s""" |ExecutionStore( | statusStore = { - | ${store.map { case (j, s) => s"$j -> ${s.mkString(System.lineSeparator + " ", ", " + System.lineSeparator + " ", "")}" } mkString("," + System.lineSeparator + " ")} + | ${store.map { case (j, s) => + s"$j -> ${s.mkString(System.lineSeparator + " ", ", " + System.lineSeparator + " ", 
"")}" + } mkString ("," + System.lineSeparator + " ")} | }, | needsUpdate = $needsUpdate |)""".stripMargin diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ValueStore.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ValueStore.scala index a5c1d6fc995..778e434f634 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ValueStore.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ValueStore.scala @@ -39,9 +39,8 @@ case class ValueStore(store: Table[OutputPort, ExecutionIndex, WomValue]) { values.asJson.printWith(Printer.spaces2.copy(dropNullValues = true, colonLeft = "")) } - final def add(values: Map[ValueKey, WomValue]): ValueStore = { - this.copy(store = store.addAll(values.map({ case (key, value) => (key.port, key.index, value) }))) - } + final def add(values: Map[ValueKey, WomValue]): ValueStore = + this.copy(store = store.addAll(values.map { case (key, value) => (key.port, key.index, value) })) final def get(outputKey: ValueKey): Option[WomValue] = store.getValue(outputKey.port, outputKey.index) @@ -76,7 +75,8 @@ case class ValueStore(store: Table[OutputPort, ExecutionIndex, WomValue]) { } } else { // If we don't find enough shards, this collector was found "runnable" when it shouldn't have - s"Some shards are missing from the value store for node ${collector.node.fullyQualifiedName}, expected ${collector.scatterWidth} shards but only got ${collectedValue.size}: ${collectedValue.mkString(", ")}".invalidNel + s"Some shards are missing from the value store for node ${collector.node.fullyQualifiedName}, expected ${collector.scatterWidth} shards but only got ${collectedValue.size}: ${collectedValue + .mkString(", ")}".invalidNel } case None if collector.scatterWidth == 0 => // If there's nothing, the scatter was empty, let the scatterCollectionFunction deal with it and just pass it an empty shard list @@ -92,8 +92,10 @@ case class ValueStore(store: Table[OutputPort, ExecutionIndex, WomValue]) { val conditionalPort = collector.conditionalOutputPort val sourcePort = conditionalPort.outputToExpose.source store.getValue(sourcePort, collector.index) match { - case Some(womValue) => Map(ValueKey(conditionalPort, collector.index) -> WomOptionalValue(womValue).flattenOptional).validNel - case None => s"Conditional collector cannot find a value for output port ${sourcePort.identifier.fullyQualifiedName.value} in value store: $this".invalidNel + case Some(womValue) => + Map(ValueKey(conditionalPort, collector.index) -> WomOptionalValue(womValue).flattenOptional).validNel + case None => + s"Conditional collector cannot find a value for output port ${sourcePort.identifier.fullyQualifiedName.value} in value store: $this".invalidNel } } @@ -107,12 +109,16 @@ case class ValueStore(store: Table[OutputPort, ExecutionIndex, WomValue]) { case Some(womValue: WomArrayLike) => index match { case Some(jobIndex) => - womValue.asArray.value.lift(svn.indexForShard(jobIndex, womValue.asArray.value.size)) + womValue.asArray.value + .lift(svn.indexForShard(jobIndex, womValue.asArray.value.size)) .toValidNel(s"Shard index $jobIndex exceeds scatter array length: ${womValue.asArray.value.size}") - case None => s"Unsharded execution key references a scatter variable: ${p.identifier.fullyQualifiedName}".invalidNel + case None => + s"Unsharded execution key references a scatter variable: ${p.identifier.fullyQualifiedName}".invalidNel } - case Some(other) => s"Value for scatter collection 
${p.identifier.fullyQualifiedName} is not an array: ${other.womType.stableName}".invalidNel - case None => s"Can't find a value for scatter collection ${p.identifier.fullyQualifiedName} (looking for index $index)".invalidNel + case Some(other) => + s"Value for scatter collection ${p.identifier.fullyQualifiedName} is not an array: ${other.womType.stableName}".invalidNel + case None => + s"Can't find a value for scatter collection ${p.identifier.fullyQualifiedName} (looking for index $index)".invalidNel } } @@ -123,7 +129,7 @@ case class ValueStore(store: Table[OutputPort, ExecutionIndex, WomValue]) { s"Can't find a ValueStore value for $p at index $index in $this".invalidNel } - def findValueStorePort(p: OutputPort, index: ExecutionIndex): ErrorOr[WomValue] = { + def findValueStorePort(p: OutputPort, index: ExecutionIndex): ErrorOr[WomValue] = p.graphNode match { case svn: ScatterVariableNode => forScatterVariable(svn) case ogin: OuterGraphInputNode if ogin.preserveScatterIndex => findValueStorePort(ogin.linkToOuterGraph, index) @@ -131,7 +137,6 @@ case class ValueStore(store: Table[OutputPort, ExecutionIndex, WomValue]) { case _: GraphInputNode => forGraphNodePort(p, None) // Must be a workflow input, which never have indices case _ => forGraphNodePort(p, index) } - } findValueStorePort(outputPort, index) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActor.scala index 467cc1c2586..f29482cb75e 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActor.scala @@ -22,14 +22,13 @@ object CopyWorkflowLogsActor { // Commands case class Copy(workflowId: WorkflowId, destinationDirPath: Path) - val strategy: OneForOneStrategy = OneForOneStrategy(maxNrOfRetries = 3) { - case _: IOException => Restart + val strategy: OneForOneStrategy = OneForOneStrategy(maxNrOfRetries = 3) { case _: IOException => + Restart } def props(serviceRegistryActor: ActorRef, ioActor: ActorRef, - workflowLogConfigurationOption: Option[WorkflowLogConfiguration] = - WorkflowLogger.workflowLogConfiguration, + workflowLogConfigurationOption: Option[WorkflowLogConfiguration] = WorkflowLogger.workflowLogConfiguration, /* The theory is that the `GcsBatchCommandBuilder` copies the temporary workflow logs from the local disk to GCS. Then later, the separate `DefaultIOCommandBuilder` deletes files from the local disk. @@ -47,16 +46,17 @@ object CopyWorkflowLogsActor { implemented, It Works (TM), and I'm not changing it for now. 
*/ copyCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder, - deleteCommandBuilder: IoCommandBuilder = DefaultIoCommandBuilder, - ): Props = { - Props(new CopyWorkflowLogsActor( - serviceRegistryActor = serviceRegistryActor, - ioActor = ioActor, - workflowLogConfigurationOption = workflowLogConfigurationOption, - copyCommandBuilder = copyCommandBuilder, - deleteCommandBuilder = deleteCommandBuilder, - )).withDispatcher(IoDispatcher) - } + deleteCommandBuilder: IoCommandBuilder = DefaultIoCommandBuilder + ): Props = + Props( + new CopyWorkflowLogsActor( + serviceRegistryActor = serviceRegistryActor, + ioActor = ioActor, + workflowLogConfigurationOption = workflowLogConfigurationOption, + copyCommandBuilder = copyCommandBuilder, + deleteCommandBuilder = deleteCommandBuilder + ) + ).withDispatcher(IoDispatcher) } // This could potentially be turned into a more generic "Copy/Move something from A to B" @@ -65,9 +65,12 @@ class CopyWorkflowLogsActor(override val serviceRegistryActor: ActorRef, override val ioActor: ActorRef, workflowLogConfigurationOption: Option[WorkflowLogConfiguration], copyCommandBuilder: IoCommandBuilder, - deleteCommandBuilder: IoCommandBuilder, - ) extends Actor - with ActorLogging with IoClientHelper with WorkflowMetadataHelper with MonitoringCompanionHelper { + deleteCommandBuilder: IoCommandBuilder +) extends Actor + with ActorLogging + with IoClientHelper + with WorkflowMetadataHelper + with MonitoringCompanionHelper { implicit val ec: ExecutionContext = context.dispatcher @@ -87,9 +90,10 @@ class CopyWorkflowLogsActor(override val serviceRegistryActor: ActorRef, removeWork() } } else removeWork() - + private def updateLogsPathInMetadata(workflowId: WorkflowId, path: Path): Unit = { - val metadataEventMsg = MetadataEvent(MetadataKey(workflowId, None, WorkflowMetadataKeys.WorkflowLog), MetadataValue(path.pathAsString)) + val metadataEventMsg = + MetadataEvent(MetadataKey(workflowId, None, WorkflowMetadataKeys.WorkflowLog), MetadataValue(path.pathAsString)) serviceRegistryActor ! PutMetadataAction(metadataEventMsg) } @@ -111,30 +115,31 @@ class CopyWorkflowLogsActor(override val serviceRegistryActor: ActorRef, case Failure(failure) => log.error( cause = failure, - message = - s"Failed to copy workflow logs from ${src.pathAsString} to ${destPath.pathAsString}: " + - s"${failure.getMessage}", + message = s"Failed to copy workflow logs from ${src.pathAsString} to ${destPath.pathAsString}: " + + s"${failure.getMessage}" ) deleteLog(src) case Success(_) => - // Deliberately not deleting the file here, that will be done in batch in `deleteLog` - // after the copy is terminal. + // Deliberately not deleting the file here, that will be done in batch in `deleteLog` + // after the copy is terminal. 
} workflowLogger.close() } } - + case (workflowId: WorkflowId, IoSuccess(copy: IoCopyCommand, _)) => updateLogsPathInMetadata(workflowId, copy.destination) deleteLog(copy.source) - + case (workflowId: WorkflowId, IoFailAck(copy: IoCopyCommand, failure)) => pushWorkflowFailures(workflowId, List(new IOException("Could not copy workflow logs", failure))) - log.error(failure, s"Failed to copy workflow logs from ${copy.source.pathAsString} to ${copy.destination.pathAsString}") + log.error(failure, + s"Failed to copy workflow logs from ${copy.source.pathAsString} to ${copy.destination.pathAsString}" + ) deleteLog(copy.source) - + case IoSuccess(_: IoDeleteCommand, _) => removeWork() - + case IoFailAck(delete: IoDeleteCommand, failure) => removeWork() log.error(failure, s"Failed to delete workflow logs from ${delete.file.pathAsString}") @@ -148,13 +153,14 @@ class CopyWorkflowLogsActor(override val serviceRegistryActor: ActorRef, override def receive: Receive = monitoringReceive orElse ioReceive orElse copyLogsReceive /*_*/ - override def preRestart(t: Throwable, message: Option[Any]): Unit = { + override def preRestart(t: Throwable, message: Option[Any]): Unit = message foreach self.forward - } override protected def onTimeout(message: Any, to: ActorRef): Unit = message match { case copy: IoCopyCommand => - log.error(s"Failed to copy workflow logs from ${copy.source.pathAsString} to ${copy.destination.pathAsString}: Timeout") + log.error( + s"Failed to copy workflow logs from ${copy.source.pathAsString} to ${copy.destination.pathAsString}: Timeout" + ) deleteLog(copy.source) case delete: IoDeleteCommand => log.error(s"Failed to delete workflow logs from ${delete.file.pathAsString}: Timeout") diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala index b6fa6b285a6..142a6623fcd 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala @@ -3,8 +3,18 @@ package cromwell.engine.workflow.lifecycle.finalization import akka.actor.{Actor, ActorLogging, ActorRef, Props} import akka.event.LoggingReceive import cromwell.backend.BackendLifecycleActor.BackendWorkflowLifecycleActorResponse -import cromwell.backend.BackendWorkflowFinalizationActor.{FinalizationFailed, FinalizationResponse, FinalizationSuccess, Finalize} -import cromwell.backend.{AllBackendInitializationData, BackendConfigurationDescriptor, BackendInitializationData, BackendLifecycleActorFactory} +import cromwell.backend.BackendWorkflowFinalizationActor.{ + FinalizationFailed, + FinalizationResponse, + FinalizationSuccess, + Finalize +} +import cromwell.backend.{ + AllBackendInitializationData, + BackendConfigurationDescriptor, + BackendInitializationData, + BackendLifecycleActorFactory +} import cromwell.core.Dispatcher.IoDispatcher import cromwell.core.WorkflowOptions._ import cromwell.core._ @@ -19,26 +29,36 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} object CopyWorkflowOutputsActor { - def props(workflowId: WorkflowId, ioActor: ActorRef, workflowDescriptor: EngineWorkflowDescriptor, workflowOutputs: CallOutputs, - initializationData: AllBackendInitializationData) = Props( + def props(workflowId: WorkflowId, + ioActor: ActorRef, + workflowDescriptor: EngineWorkflowDescriptor, + 
workflowOutputs: CallOutputs, + initializationData: AllBackendInitializationData + ) = Props( new CopyWorkflowOutputsActor(workflowId, ioActor, workflowDescriptor, workflowOutputs, initializationData) ).withDispatcher(IoDispatcher) } -class CopyWorkflowOutputsActor(workflowId: WorkflowId, override val ioActor: ActorRef, val workflowDescriptor: EngineWorkflowDescriptor, workflowOutputs: CallOutputs, - initializationData: AllBackendInitializationData) - extends Actor with ActorLogging with PathFactory with AsyncIoActorClient { +class CopyWorkflowOutputsActor(workflowId: WorkflowId, + override val ioActor: ActorRef, + val workflowDescriptor: EngineWorkflowDescriptor, + workflowOutputs: CallOutputs, + initializationData: AllBackendInitializationData +) extends Actor + with ActorLogging + with PathFactory + with AsyncIoActorClient { override lazy val ioCommandBuilder = GcsBatchCommandBuilder implicit val ec = context.dispatcher override val pathBuilders = workflowDescriptor.pathBuilders - override def receive = LoggingReceive { - case Finalize => performActionThenRespond(afterAll()(context.dispatcher), FinalizationFailed)(context.dispatcher) + override def receive = LoggingReceive { case Finalize => + performActionThenRespond(afterAll()(context.dispatcher), FinalizationFailed)(context.dispatcher) } private def performActionThenRespond(operation: => Future[BackendWorkflowLifecycleActorResponse], - onFailure: (Throwable) => BackendWorkflowLifecycleActorResponse) - (implicit ec: ExecutionContext) = { + onFailure: (Throwable) => BackendWorkflowLifecycleActorResponse + )(implicit ec: ExecutionContext) = { val respondTo: ActorRef = sender() operation onComplete { case Success(r) => respondTo ! r @@ -53,34 +73,36 @@ class CopyWorkflowOutputsActor(workflowId: WorkflowId, override val ioActor: Act // Check if there are duplicated destination paths and throw an exception if that is the case. // This creates a map of destinations and source paths which point to them in cases where there are multiple // source paths that point to the same destination. - val duplicatedDestPaths: Map[Path, List[Path]] = outputFilePaths.groupBy{ case (_, destPath) => destPath}.collect { - case (destPath, list) if list.size > 1 => destPath -> list.map {case (source, _) => source} - } + val duplicatedDestPaths: Map[Path, List[Path]] = + outputFilePaths.groupBy { case (_, destPath) => destPath }.collect { + case (destPath, list) if list.size > 1 => destPath -> list.map { case (source, _) => source } + } if (duplicatedDestPaths.nonEmpty) { val formattedCollidingCopyOptions = duplicatedDestPaths.toList - .sortBy{case(dest, _) => dest.pathAsString} // Sort by destination path + .sortBy { case (dest, _) => dest.pathAsString } // Sort by destination path // Make a '/my/src -> /my/dest' copy tape string for each source and destination. Use flat map to get a single list // srcList is also sorted to get a deterministic output order. This is necessary for making sure the tests // for the error always succeed. 
- .flatMap{ case (dest, srcList) => srcList.sortBy(_.pathAsString).map(_.pathAsString + s" -> $dest")} + .flatMap { case (dest, srcList) => srcList.sortBy(_.pathAsString).map(_.pathAsString + s" -> $dest") } throw new IllegalStateException( "Cannot copy output files to given final_workflow_outputs_dir" + - s" as multiple files will be copied to the same path: \n${formattedCollidingCopyOptions.mkString("\n")}")} + s" as multiple files will be copied to the same path: \n${formattedCollidingCopyOptions.mkString("\n")}" + ) + } - val copies = outputFilePaths map { - case (srcPath, dstPath) => asyncIo.copyAsync(srcPath, dstPath) + val copies = outputFilePaths map { case (srcPath, dstPath) => + asyncIo.copyAsync(srcPath, dstPath) } Future.sequence(copies) } - private def findFiles(values: Seq[WomValue]): Seq[WomSingleFile] = { + private def findFiles(values: Seq[WomValue]): Seq[WomSingleFile] = values flatMap { - _.collectAsSeq { - case file: WomSingleFile => file + _.collectAsSeq { case file: WomSingleFile => + file } } - } private def getOutputFilePaths(workflowOutputsPath: Path): List[(Path, Path)] = { @@ -100,42 +122,37 @@ class CopyWorkflowOutputsActor(workflowId: WorkflowId, override val ioActor: Act // "execution" should be optional, because its not created on AWS. // Also cacheCopy or attempt- folders are optional. lazy val truncateRegex = ".*/call-[^/]*/(shard-[0-9]+/)?(cacheCopy/)?(attempt-[0-9]+/)?(execution/)?".r - val outputFileDestinations = rootAndFiles flatMap { - case (workflowRoot, outputs) => - outputs map { output => - val outputPath = PathFactory.buildPath(output, pathBuilders) - outputPath -> { - if (useRelativeOutputPaths) { - val pathRelativeToExecDir = truncateRegex.replaceFirstIn(outputPath.pathAsString, "") - workflowOutputsPath.resolve(pathRelativeToExecDir) - } - else PathCopier.getDestinationFilePath(workflowRoot, outputPath, workflowOutputsPath) - } + val outputFileDestinations = rootAndFiles flatMap { case (workflowRoot, outputs) => + outputs map { output => + val outputPath = PathFactory.buildPath(output, pathBuilders) + outputPath -> { + if (useRelativeOutputPaths) { + val pathRelativeToExecDir = truncateRegex.replaceFirstIn(outputPath.pathAsString, "") + workflowOutputsPath.resolve(pathRelativeToExecDir) + } else PathCopier.getDestinationFilePath(workflowRoot, outputPath, workflowOutputsPath) } + } } outputFileDestinations.distinct.toList } - private def getBackendRootPath(backend: String, config: BackendConfigurationDescriptor): Option[Path] = { + private def getBackendRootPath(backend: String, config: BackendConfigurationDescriptor): Option[Path] = getBackendFactory(backend) map getRootPath(config, initializationData.get(backend)) - } - private def getBackendFactory(backend: String): Option[BackendLifecycleActorFactory] = { + private def getBackendFactory(backend: String): Option[BackendLifecycleActorFactory] = CromwellBackends.backendLifecycleFactoryActorByName(backend).toOption - } - private def getRootPath(config: BackendConfigurationDescriptor, initializationData: Option[BackendInitializationData]) - (backendFactory: BackendLifecycleActorFactory): Path = { + private def getRootPath(config: BackendConfigurationDescriptor, + initializationData: Option[BackendInitializationData] + )(backendFactory: BackendLifecycleActorFactory): Path = backendFactory.getExecutionRootPath(workflowDescriptor.backendDescriptor, config.backendConfig, initializationData) - } /** * Happens after everything else runs */ - final def afterAll()(implicit ec: ExecutionContext): 
Future[FinalizationResponse] = { + final def afterAll()(implicit ec: ExecutionContext): Future[FinalizationResponse] = workflowDescriptor.getWorkflowOption(FinalWorkflowOutputsDir) match { case Some(outputs) => copyWorkflowOutputs(outputs) map { _ => FinalizationSuccess } case None => Future.successful(FinalizationSuccess) } - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala index c8eb1790365..bd5547e0252 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActor.scala @@ -36,16 +36,16 @@ import java.time.Instant import java.util.concurrent.Executors import scala.util.{Failure, Success} - case class WorkflowCallbackConfig(enabled: Boolean, numThreads: Int, retryBackoff: SimpleExponentialBackoff, maxRetries: Int, defaultUri: Option[URI], // May be overridden by workflow options - authMethod: Option[WorkflowCallbackConfig.AuthMethod]) + authMethod: Option[WorkflowCallbackConfig.AuthMethod] +) object WorkflowCallbackConfig extends LazyLogging { - sealed trait AuthMethod { def getAccessToken: ErrorOr.ErrorOr[String] } + sealed trait AuthMethod { def getAccessToken: ErrorOr.ErrorOr[String] } case object AzureAuth extends AuthMethod { override def getAccessToken: ErrorOr.ErrorOr[String] = AzureCredentials.getAccessToken() } @@ -55,13 +55,19 @@ object WorkflowCallbackConfig extends LazyLogging { private lazy val defaultMaxRetries = 10 def empty: WorkflowCallbackConfig = WorkflowCallbackConfig( - false, defaultNumThreads, defaultRetryBackoff, defaultMaxRetries, None, None + false, + defaultNumThreads, + defaultRetryBackoff, + defaultMaxRetries, + None, + None ) def apply(config: Config): WorkflowCallbackConfig = { val enabled = config.as[Boolean]("enabled") val numThreads = config.as[Option[Int]]("num-threads").getOrElse(defaultNumThreads) - val backoff = config.as[Option[Config]]("request-backoff").map(SimpleExponentialBackoff(_)).getOrElse(defaultRetryBackoff) + val backoff = + config.as[Option[Config]]("request-backoff").map(SimpleExponentialBackoff(_)).getOrElse(defaultRetryBackoff) val maxRetries = config.as[Option[Int]]("max-retries").getOrElse(defaultMaxRetries) val uri = config.as[Option[String]]("endpoint").flatMap(createAndValidateUri) @@ -79,14 +85,13 @@ object WorkflowCallbackConfig extends LazyLogging { ) } - def createAndValidateUri(uriString: String): Option[URI] = { + def createAndValidateUri(uriString: String): Option[URI] = Try(new URI(uriString)) match { case Success(uri) => Option(uri) case Failure(err) => logger.warn(s"Failed to parse provided workflow callback URI (${uriString}): $err") None } - } } /** @@ -100,23 +105,22 @@ object WorkflowCallbackActor { uri: Option[String], terminalState: WorkflowState, workflowOutputs: CallOutputs, - failureMessage: List[String]) + failureMessage: List[String] + ) def props(serviceRegistryActor: ActorRef, callbackConfig: WorkflowCallbackConfig, httpClient: CallbackHttpHandler = CallbackHttpHandlerImpl - ) = Props( - new WorkflowCallbackActor( - serviceRegistryActor, - callbackConfig, - httpClient) + ) = Props( + new WorkflowCallbackActor(serviceRegistryActor, callbackConfig, httpClient) ).withDispatcher(IoDispatcher) } class WorkflowCallbackActor(serviceRegistryActor: ActorRef, config: WorkflowCallbackConfig, - httpClient: 
CallbackHttpHandler) - extends Actor with ActorLogging { + httpClient: CallbackHttpHandler +) extends Actor + with ActorLogging { // Create a dedicated thread pool for this actor so its damage is limited if we end up with // too many threads all taking a long time to do callbacks. If we're frequently saturating @@ -130,32 +134,50 @@ class WorkflowCallbackActor(serviceRegistryActor: ActorRef, case PerformCallbackCommand(workflowId, requestedCallbackUri, terminalState, outputs, failures) => // If no uri was provided to us here, fall back to the one in config. If there isn't // one there, do not perform a callback. - val callbackUri: Option[URI] = requestedCallbackUri.map(WorkflowCallbackConfig.createAndValidateUri).getOrElse(config.defaultUri) - callbackUri.map { uri => - performCallback(workflowId, uri, terminalState, outputs, failures) onComplete { - case Success(_) => - log.info(s"Successfully sent callback for workflow for workflow $workflowId in state $terminalState to $uri") - sendMetadata(workflowId, successful = true, uri) - case Failure(t) => - log.warning(s"Permanently failed to send callback for workflow $workflowId in state $terminalState to $uri: ${t.getMessage}") - sendMetadata(workflowId, successful = false, uri) + val callbackUri: Option[URI] = + requestedCallbackUri.map(WorkflowCallbackConfig.createAndValidateUri).getOrElse(config.defaultUri) + callbackUri + .map { uri => + performCallback(workflowId, uri, terminalState, outputs, failures) onComplete { + case Success(_) => + log.info( + s"Successfully sent callback for workflow for workflow $workflowId in state $terminalState to $uri" + ) + sendMetadata(workflowId, successful = true, uri) + case Failure(t) => + log.warning( + s"Permanently failed to send callback for workflow $workflowId in state $terminalState to $uri: ${t.getMessage}" + ) + sendMetadata(workflowId, successful = false, uri) + } } - }.getOrElse(()) + .getOrElse(()) case Broadcast(ShutdownCommand) | ShutdownCommand => context stop self case other => log.warning(s"WorkflowCallbackActor received an unexpected message: $other") } - private def makeHeaders: Future[List[HttpHeader]] = { - config.authMethod.toList.map(_.getAccessToken).map { - case Valid(header) => Future.successful(header) - case Invalid(err) => Future.failed(new RuntimeException(err.toString)) - } + private def makeHeaders: Future[List[HttpHeader]] = + config.authMethod.toList + .map(_.getAccessToken) + .map { + case Valid(header) => Future.successful(header) + case Invalid(err) => Future.failed(new RuntimeException(err.toString)) + } .map(t => t.map(t => RawHeader("Authorization", s"Bearer $t"))) .traverse(identity) - } - private def performCallback(workflowId: WorkflowId, callbackUri: URI, terminalState: WorkflowState, outputs: CallOutputs, failures: List[String]): Future[Done] = { - val callbackPostBody = CallbackMessage(workflowId.toString, terminalState.toString, outputs.outputs.map(entry => (entry._1.identifier.fullyQualifiedName.value, entry._2)), failures) + private def performCallback(workflowId: WorkflowId, + callbackUri: URI, + terminalState: WorkflowState, + outputs: CallOutputs, + failures: List[String] + ): Future[Done] = { + val callbackPostBody = CallbackMessage( + workflowId.toString, + terminalState.toString, + outputs.outputs.map(entry => (entry._1.identifier.fullyQualifiedName.value, entry._2)), + failures + ) for { entity <- Marshal(callbackPostBody).to[RequestEntity] headers <- makeHeaders @@ -164,7 +186,10 @@ class WorkflowCallbackActor(serviceRegistryActor: ActorRef, () => 
sendRequestOrFail(request), backoff = config.retryBackoff, maxRetries = Option(config.maxRetries), - onRetry = err => log.warning(s"Will retry after failure to send workflow callback for workflow $workflowId in state $terminalState to $callbackUri : $err") + onRetry = err => + log.warning( + s"Will retry after failure to send workflow callback for workflow $workflowId in state $terminalState to $callbackUri : $err" + ) ) result <- // Akka will get upset if we have a response body and leave it totally unread. @@ -174,15 +199,17 @@ class WorkflowCallbackActor(serviceRegistryActor: ActorRef, } private def sendRequestOrFail(request: HttpRequest): Future[HttpResponse] = - httpClient.sendRequest(request).flatMap(response => - if (response.status.isFailure()) { - response.entity.dataBytes.runFold(ByteString(""))(_ ++ _).map(_.utf8String) flatMap { errorBody => - Future.failed( - new RuntimeException(s"HTTP ${response.status.value}: $errorBody") - ) - } - } else Future.successful(response) - ) + httpClient + .sendRequest(request) + .flatMap(response => + if (response.status.isFailure()) { + response.entity.dataBytes.runFold(ByteString(""))(_ ++ _).map(_.utf8String) flatMap { errorBody => + Future.failed( + new RuntimeException(s"HTTP ${response.status.value}: $errorBody") + ) + } + } else Future.successful(response) + ) private def sendMetadata(workflowId: WorkflowId, successful: Boolean, uri: URI): Unit = { val events = List( @@ -210,7 +237,6 @@ trait CallbackHttpHandler { } object CallbackHttpHandlerImpl extends CallbackHttpHandler { - override def sendRequest(httpRequest: HttpRequest)(implicit actorSystem: ActorSystem): Future[HttpResponse] = { + override def sendRequest(httpRequest: HttpRequest)(implicit actorSystem: ActorSystem): Future[HttpResponse] = Http().singleRequest(httpRequest) - } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackJsonSupport.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackJsonSupport.scala index eb9b1c7ea9e..42eb4991c47 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackJsonSupport.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackJsonSupport.scala @@ -7,7 +7,8 @@ import wom.values.WomValue final case class CallbackMessage(workflowId: String, state: String, outputs: Map[String, WomValue], - failures: List[String]) + failures: List[String] +) object WorkflowCallbackJsonSupport extends DefaultJsonProtocol { implicit val callbackMessageFormat = jsonFormat4(CallbackMessage) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala index 8731367cfb7..d94c8033584 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala @@ -46,16 +46,18 @@ object WorkflowFinalizationActor { jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, initializationData: AllBackendInitializationData, - copyWorkflowOutputsActor: Option[Props]): Props = { - Props(new WorkflowFinalizationActor( - workflowDescriptor, - ioActor, - jobExecutionMap, - workflowOutputs, - initializationData, - copyWorkflowOutputsActor - )).withDispatcher(EngineDispatcher) - } + copyWorkflowOutputsActor: 
Option[Props] + ): Props = + Props( + new WorkflowFinalizationActor( + workflowDescriptor, + ioActor, + jobExecutionMap, + workflowOutputs, + initializationData, + copyWorkflowOutputsActor + ) + ).withDispatcher(EngineDispatcher) } case class WorkflowFinalizationActor(workflowDescriptor: EngineWorkflowDescriptor, @@ -63,8 +65,8 @@ case class WorkflowFinalizationActor(workflowDescriptor: EngineWorkflowDescripto jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, initializationData: AllBackendInitializationData, - copyWorkflowOutputsActorProps: Option[Props]) - extends WorkflowLifecycleActor[WorkflowFinalizationActorState] { + copyWorkflowOutputsActorProps: Option[Props] +) extends WorkflowLifecycleActor[WorkflowFinalizationActorState] { override lazy val workflowIdForLogging = workflowDescriptor.possiblyNotRootWorkflowId override lazy val rootWorkflowIdForLogging = workflowDescriptor.rootWorkflowId @@ -79,63 +81,72 @@ case class WorkflowFinalizationActor(workflowDescriptor: EngineWorkflowDescripto override def failureResponse(reasons: Seq[Throwable]) = WorkflowFinalizationFailedResponse(reasons) // If an engine or backend finalization actor (children of this actor) dies, send ourselves the failure and stop the child actor - override def supervisorStrategy = OneForOneStrategy() { - case failure => - self.tell(FinalizationFailed(failure), sender()) - Stop + override def supervisorStrategy = OneForOneStrategy() { case failure => + self.tell(FinalizationFailed(failure), sender()) + Stop } startWith(FinalizationPendingState, WorkflowLifecycleActorData.empty) - when(FinalizationPendingState) { - case Event(StartFinalizationCommand, _) => - val backendFinalizationActors = Try { - for { - (backend, calls) <- workflowDescriptor.backendAssignments.groupBy(_._2).safeMapValues(_.keySet) - props <- CromwellBackends.backendLifecycleFactoryActorByName(backend).map( - _.workflowFinalizationActorProps(workflowDescriptor.backendDescriptor, ioActor, calls, filterJobExecutionsForBackend(calls), workflowOutputs, initializationData.get(backend)) - ).valueOr(errors => throw AggregatedMessageException("Cannot validate backend factories", errors.toList)) - actor = context.actorOf(props, backend) - } yield actor - } - - val engineFinalizationActor = Try { copyWorkflowOutputsActorProps.map(context.actorOf(_, "CopyWorkflowOutputsActor")).toList } - - val allActors = for { - backendFinalizationActorsFromTry <- backendFinalizationActors - engineFinalizationActorFromTry <- engineFinalizationActor - } yield backendFinalizationActorsFromTry.toList ++ engineFinalizationActorFromTry - - allActors match { - case Failure(ex) => - sender() ! WorkflowFinalizationFailedResponse(Seq(ex)) - goto(WorkflowFinalizationFailedState) - case Success(actors) if actors.isEmpty => - sender() ! WorkflowFinalizationSucceededResponse - goto(FinalizationSucceededState) - case Success(actors) => - val actorSet = actors.toSet - actorSet.foreach(_ ! 
Finalize) - goto(FinalizationInProgressState) using stateData.withActors(actorSet) - case _ => - goto(WorkflowFinalizationFailedState) - } + when(FinalizationPendingState) { case Event(StartFinalizationCommand, _) => + val backendFinalizationActors = Try { + for { + (backend, calls) <- workflowDescriptor.backendAssignments.groupBy(_._2).safeMapValues(_.keySet) + props <- CromwellBackends + .backendLifecycleFactoryActorByName(backend) + .map( + _.workflowFinalizationActorProps(workflowDescriptor.backendDescriptor, + ioActor, + calls, + filterJobExecutionsForBackend(calls), + workflowOutputs, + initializationData.get(backend) + ) + ) + .valueOr(errors => throw AggregatedMessageException("Cannot validate backend factories", errors.toList)) + actor = context.actorOf(props, backend) + } yield actor + } + + val engineFinalizationActor = Try { + copyWorkflowOutputsActorProps.map(context.actorOf(_, "CopyWorkflowOutputsActor")).toList + } + + val allActors = for { + backendFinalizationActorsFromTry <- backendFinalizationActors + engineFinalizationActorFromTry <- engineFinalizationActor + } yield backendFinalizationActorsFromTry.toList ++ engineFinalizationActorFromTry + + allActors match { + case Failure(ex) => + sender() ! WorkflowFinalizationFailedResponse(Seq(ex)) + goto(WorkflowFinalizationFailedState) + case Success(actors) if actors.isEmpty => + sender() ! WorkflowFinalizationSucceededResponse + goto(FinalizationSucceededState) + case Success(actors) => + val actorSet = actors.toSet + actorSet.foreach(_ ! Finalize) + goto(FinalizationInProgressState) using stateData.withActors(actorSet) + case _ => + goto(WorkflowFinalizationFailedState) + } } // Only send to each backend the jobs that it executed - private def filterJobExecutionsForBackend(calls: Set[CommandCallNode]): JobExecutionMap = { - jobExecutionMap map { - case (wd, executedKeys) => wd -> (executedKeys filter { jobKey => calls.contains(jobKey.call) }) - } filter { - case (_, keys) => keys.nonEmpty + private def filterJobExecutionsForBackend(calls: Set[CommandCallNode]): JobExecutionMap = + jobExecutionMap map { case (wd, executedKeys) => + wd -> (executedKeys filter { jobKey => calls.contains(jobKey.call) }) + } filter { case (_, keys) => + keys.nonEmpty } - } when(FinalizationInProgressState) { case Event(FinalizationSuccess, stateData) => checkForDoneAndTransition(stateData.withSuccess(sender())) - case Event(FinalizationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender(), reason)) + case Event(FinalizationFailed(reason), stateData) => + checkForDoneAndTransition(stateData.withFailure(sender(), reason)) } - when(FinalizationSucceededState) { FSM.NullFunction } - when(WorkflowFinalizationFailedState) { FSM.NullFunction } + when(FinalizationSucceededState)(FSM.NullFunction) + when(WorkflowFinalizationFailedState)(FSM.NullFunction) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala index 2c0e8cb5fd3..6af8d7e6116 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala @@ -12,7 +12,11 @@ import cromwell.engine.EngineWorkflowDescriptor import cromwell.engine.backend.CromwellBackends import cromwell.engine.workflow.lifecycle.WorkflowLifecycleActor._ import 
cromwell.engine.workflow.lifecycle.initialization.WorkflowInitializationActor._ -import cromwell.engine.workflow.lifecycle.{AbortableWorkflowLifecycleActor, EngineLifecycleActorAbortCommand, EngineLifecycleActorAbortedResponse} +import cromwell.engine.workflow.lifecycle.{ + AbortableWorkflowLifecycleActor, + EngineLifecycleActorAbortCommand, + EngineLifecycleActorAbortedResponse +} import scala.util.{Failure, Success, Try} @@ -22,7 +26,9 @@ object WorkflowInitializationActor { * States */ sealed trait WorkflowInitializationActorState extends WorkflowLifecycleActorState - sealed trait WorkflowInitializationActorTerminalState extends WorkflowInitializationActorState with WorkflowLifecycleActorTerminalState + sealed trait WorkflowInitializationActorTerminalState + extends WorkflowInitializationActorState + with WorkflowLifecycleActorTerminalState case object InitializationPendingState extends WorkflowInitializationActorState case object InitializationInProgressState extends WorkflowInitializationActorState @@ -41,25 +47,33 @@ object WorkflowInitializationActor { * Responses */ sealed trait WorkflowInitializationResponse - final case class WorkflowInitializationSucceededResponse(initializationData: AllBackendInitializationData) extends WorkflowLifecycleSuccessResponse with WorkflowInitializationResponse - case object WorkflowInitializationAbortedResponse extends EngineLifecycleActorAbortedResponse with WorkflowInitializationResponse - final case class WorkflowInitializationFailedResponse(reasons: Seq[Throwable]) extends WorkflowLifecycleFailureResponse with WorkflowInitializationResponse + final case class WorkflowInitializationSucceededResponse(initializationData: AllBackendInitializationData) + extends WorkflowLifecycleSuccessResponse + with WorkflowInitializationResponse + case object WorkflowInitializationAbortedResponse + extends EngineLifecycleActorAbortedResponse + with WorkflowInitializationResponse + final case class WorkflowInitializationFailedResponse(reasons: Seq[Throwable]) + extends WorkflowLifecycleFailureResponse + with WorkflowInitializationResponse def props(workflowIdForLogging: PossiblyNotRootWorkflowId, rootWorkflowIdForLogging: RootWorkflowId, workflowDescriptor: EngineWorkflowDescriptor, ioActor: ActorRef, serviceRegistryActor: ActorRef, - restarting: Boolean): Props = { - Props(new WorkflowInitializationActor( - workflowIdForLogging = workflowIdForLogging, - rootWorkflowIdForLogging = rootWorkflowIdForLogging, - workflowDescriptor = workflowDescriptor, - ioActor = ioActor, - serviceRegistryActor = serviceRegistryActor, - restarting = restarting - )).withDispatcher(EngineDispatcher) - } + restarting: Boolean + ): Props = + Props( + new WorkflowInitializationActor( + workflowIdForLogging = workflowIdForLogging, + rootWorkflowIdForLogging = rootWorkflowIdForLogging, + workflowDescriptor = workflowDescriptor, + ioActor = ioActor, + serviceRegistryActor = serviceRegistryActor, + restarting = restarting + ) + ).withDispatcher(EngineDispatcher) case class BackendActorAndBackend(actor: ActorRef, backend: String) } @@ -69,8 +83,8 @@ case class WorkflowInitializationActor(workflowIdForLogging: PossiblyNotRootWork workflowDescriptor: EngineWorkflowDescriptor, ioActor: ActorRef, serviceRegistryActor: ActorRef, - restarting: Boolean) - extends AbortableWorkflowLifecycleActor[WorkflowInitializationActorState] { + restarting: Boolean +) extends AbortableWorkflowLifecycleActor[WorkflowInitializationActorState] { startWith(InitializationPendingState, WorkflowLifecycleActorData.empty) 
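
[Note: the WorkflowFinalizationActor and WorkflowInitializationActor hunks above reformat the same Akka classic FSM lifecycle pattern without changing it: start in a pending state, fan work out to one child actor per backend, count successes and failures back in, and park in a terminal state that accepts no further events. The following is a minimal, self-contained sketch of that pattern, with hypothetical state, data, and message names rather than Cromwell's real classes.]

    import akka.actor.FSM

    // Hypothetical, simplified stand-ins for the lifecycle states, data and messages.
    sealed trait LifecycleState
    case object Pending extends LifecycleState
    case object InProgress extends LifecycleState
    case object Succeeded extends LifecycleState
    case object Failed extends LifecycleState

    final case class LifecycleData(remaining: Int, failures: List[Throwable])

    case object Start
    case object ChildSucceeded
    final case class ChildFailed(reason: Throwable)

    class LifecycleFsm(childCount: Int) extends FSM[LifecycleState, LifecycleData] {

      startWith(Pending, LifecycleData(childCount, Nil))

      when(Pending) { case Event(Start, data) =>
        // Cromwell spawns one child actor per backend here and sends each a command.
        goto(InProgress) using data
      }

      when(InProgress) {
        case Event(ChildSucceeded, data) =>
          checkDone(data.copy(remaining = data.remaining - 1))
        case Event(ChildFailed(reason), data) =>
          checkDone(data.copy(remaining = data.remaining - 1, failures = reason :: data.failures))
      }

      // Terminal states install an empty handler, the `when(State)(FSM.NullFunction)`
      // idiom seen throughout the diff above.
      when(Succeeded)(FSM.NullFunction)
      when(Failed)(FSM.NullFunction)

      initialize()

      private def checkDone(data: LifecycleData): State =
        if (data.remaining > 0) stay() using data
        else if (data.failures.isEmpty) goto(Succeeded) using data
        else goto(Failed) using data
    }

[Because `FSM.NullFunction` is a partial function defined at nothing, any message arriving in a terminal state falls through to `whenUnhandled`, which is exactly the behavior the reformatted Cromwell actors rely on ("Let these fall through to the whenUnhandled handler").]
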
val tag = self.path.name @@ -83,7 +97,9 @@ case class WorkflowInitializationActor(workflowIdForLogging: PossiblyNotRootWork override def successResponse(data: WorkflowLifecycleActorData) = { val actorsToBackends = backendActorsAndBackends.map(ab => ab.actor -> ab.backend).toMap val actorsToData = data.successes.map(ad => ad.actor -> ad.data).toMap - val allBackendInitializationData = AllBackendInitializationData(actorsToBackends collect { case (a, b) => b -> actorsToData(a) }) + val allBackendInitializationData = AllBackendInitializationData(actorsToBackends collect { case (a, b) => + b -> actorsToData(a) + }) WorkflowInitializationSucceededResponse(allBackendInitializationData) } override def failureResponse(reasons: Seq[Throwable]) = WorkflowInitializationFailedResponse(reasons) @@ -96,9 +112,17 @@ case class WorkflowInitializationActor(workflowIdForLogging: PossiblyNotRootWork val backendInitializationActors = Try { for { (backend, calls) <- workflowDescriptor.backendAssignments.groupBy(_._2).safeMapValues(_.keySet) - props <- CromwellBackends.backendLifecycleFactoryActorByName(backend).map(factory => - factory.workflowInitializationActorProps(workflowDescriptor.backendDescriptor, ioActor, calls, serviceRegistryActor, restarting) - ).valueOr(errors => throw AggregatedMessageException("Cannot validate backend factories", errors.toList)) + props <- CromwellBackends + .backendLifecycleFactoryActorByName(backend) + .map(factory => + factory.workflowInitializationActorProps(workflowDescriptor.backendDescriptor, + ioActor, + calls, + serviceRegistryActor, + restarting + ) + ) + .valueOr(errors => throw AggregatedMessageException("Cannot validate backend factories", errors.toList)) actor = context.actorOf(props, backend) } yield BackendActorAndBackend(actor, backend) } @@ -124,20 +148,24 @@ case class WorkflowInitializationActor(workflowIdForLogging: PossiblyNotRootWork } when(InitializationInProgressState) { - case Event(InitializationSuccess(initData), stateData) => checkForDoneAndTransition(stateData.withSuccess(sender(), initData)) - case Event(InitializationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender(), reason)) + case Event(InitializationSuccess(initData), stateData) => + checkForDoneAndTransition(stateData.withSuccess(sender(), initData)) + case Event(InitializationFailed(reason), stateData) => + checkForDoneAndTransition(stateData.withFailure(sender(), reason)) case Event(EngineLifecycleActorAbortCommand, stateData) => stateData.actors foreach { _ ! 
BackendWorkflowInitializationActor.Abort } goto(InitializationAbortingState) } when(InitializationAbortingState) { - case Event(InitializationSuccess(initData), stateData) => checkForDoneAndTransition(stateData.withSuccess(sender(), initData)) - case Event(InitializationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender(), reason)) + case Event(InitializationSuccess(initData), stateData) => + checkForDoneAndTransition(stateData.withSuccess(sender(), initData)) + case Event(InitializationFailed(reason), stateData) => + checkForDoneAndTransition(stateData.withFailure(sender(), reason)) case Event(BackendActorAbortedResponse, stateData) => checkForDoneAndTransition(stateData.withAborted(sender())) } - when(InitializationSucceededState) { FSM.NullFunction } - when(InitializationFailedState) { FSM.NullFunction } - when(InitializationsAbortedState) { FSM.NullFunction } + when(InitializationSucceededState)(FSM.NullFunction) + when(InitializationFailedState)(FSM.NullFunction) + when(InitializationsAbortedState)(FSM.NullFunction) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala index 70365819fe5..6b515114149 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala @@ -63,10 +63,22 @@ object MaterializeWorkflowDescriptorActor { // exception if not initialized yet. private def cromwellBackends = CromwellBackends.instance.get - def props(serviceRegistryActor: ActorRef, workflowId: WorkflowId, cromwellBackends: => CromwellBackends = cromwellBackends, - importLocalFilesystem: Boolean, ioActorProxy: ActorRef, hogGroup: HogGroup): Props = { - Props(new MaterializeWorkflowDescriptorActor(serviceRegistryActor, workflowId, cromwellBackends, importLocalFilesystem, ioActorProxy, hogGroup)).withDispatcher(EngineDispatcher) - } + def props(serviceRegistryActor: ActorRef, + workflowId: WorkflowId, + cromwellBackends: => CromwellBackends = cromwellBackends, + importLocalFilesystem: Boolean, + ioActorProxy: ActorRef, + hogGroup: HogGroup + ): Props = + Props( + new MaterializeWorkflowDescriptorActor(serviceRegistryActor, + workflowId, + cromwellBackends, + importLocalFilesystem, + ioActorProxy, + hogGroup + ) + ).withDispatcher(EngineDispatcher) /* Commands @@ -75,15 +87,19 @@ object MaterializeWorkflowDescriptorActor { case class MaterializeWorkflowDescriptorCommand(workflowSourceFiles: WorkflowSourceFilesCollection, conf: Config, callCachingEnabled: Boolean, - invalidateBadCacheResults: Boolean) extends MaterializeWorkflowDescriptorActorMessage + invalidateBadCacheResults: Boolean + ) extends MaterializeWorkflowDescriptorActorMessage case object MaterializeWorkflowDescriptorAbortCommand /* Responses */ sealed trait WorkflowDescriptorMaterializationResult extends MaterializeWorkflowDescriptorActorMessage - case class MaterializeWorkflowDescriptorSuccessResponse(workflowDescriptor: EngineWorkflowDescriptor) extends WorkflowDescriptorMaterializationResult - case class MaterializeWorkflowDescriptorFailureResponse(reason: Throwable) extends Exception with WorkflowDescriptorMaterializationResult + case class MaterializeWorkflowDescriptorSuccessResponse(workflowDescriptor: EngineWorkflowDescriptor) + extends 
WorkflowDescriptorMaterializationResult + case class MaterializeWorkflowDescriptorFailureResponse(reason: Throwable) + extends Exception + with WorkflowDescriptorMaterializationResult /* States @@ -102,30 +118,30 @@ object MaterializeWorkflowDescriptorActor { private[lifecycle] def validateCallCachingMode(workflowOptions: WorkflowOptions, callCachingEnabled: Boolean, - invalidateBadCacheResults: Boolean): ErrorOr[CallCachingMode] = { + invalidateBadCacheResults: Boolean + ): ErrorOr[CallCachingMode] = { - def readOptionalOption(option: WorkflowOption): ErrorOr[Boolean] = { + def readOptionalOption(option: WorkflowOption): ErrorOr[Boolean] = workflowOptions.getBoolean(option.name) match { case Success(x) => x.validNel case Failure(_: OptionNotFoundException) => true.validNel case Failure(t) => t.getMessage.invalidNel } - } if (callCachingEnabled) { val readFromCache = readOptionalOption(ReadFromCache) val writeToCache = readOptionalOption(WriteToCache) - def errorOrCallCachingMode(callCachingOptions: CallCachingOptions): ErrorOr[CallCachingMode] = { + def errorOrCallCachingMode(callCachingOptions: CallCachingOptions): ErrorOr[CallCachingMode] = (readFromCache, writeToCache) mapN { case (false, false) => CallCachingOff case (true, false) => CallCachingActivity(ReadCache, callCachingOptions) case (false, true) => CallCachingActivity(WriteCache, callCachingOptions) case (true, true) => CallCachingActivity(ReadAndWriteCache, callCachingOptions) } - } - val errorOrMaybePrefixes: ErrorOr[Option[Vector[String]]] = workflowOptions.getVectorOfStrings("call_cache_hit_path_prefixes") + val errorOrMaybePrefixes: ErrorOr[Option[Vector[String]]] = + workflowOptions.getVectorOfStrings("call_cache_hit_path_prefixes") val errorOrCallCachingOptions: ErrorOr[CallCachingOptions] = errorOrMaybePrefixes.map { maybePrefixes => CallCachingOptions(invalidateBadCacheResults, maybePrefixes) @@ -134,8 +150,7 @@ object MaterializeWorkflowDescriptorActor { options <- errorOrCallCachingOptions mode <- errorOrCallCachingMode(options) } yield mode - } - else { + } else { CallCachingOff.validNel } } @@ -144,23 +159,25 @@ object MaterializeWorkflowDescriptorActor { def validateMemoryRetryMultiplier(workflowOptions: WorkflowOptions): ErrorOr[Unit] = { val optionName = WorkflowOptions.MemoryRetryMultiplier.name - def refineMultiplier(value: Double): ErrorOr[Unit] = { + def refineMultiplier(value: Double): ErrorOr[Unit] = refineV[MemoryRetryMultiplier](value.toDouble) match { case Left(_) => s"Workflow option '$optionName' is invalid. It should be in the range 1.0 ≤ n ≤ 99.0".invalidNel case Right(_) => ().validNel } - } workflowOptions.get(optionName) match { - case Success(value) => Try(value.toDouble) match { - case Success(v) => refineMultiplier(v) - case Failure(e) => (s"Workflow option '$optionName' is invalid. It should be of type Double and in the range " + - s"1.0 ≤ n ≤ 99.0. Error: ${ExceptionUtils.getMessage(e)}").invalidNel - } + case Success(value) => + Try(value.toDouble) match { + case Success(v) => refineMultiplier(v) + case Failure(e) => + (s"Workflow option '$optionName' is invalid. It should be of type Double and in the range " + + s"1.0 ≤ n ≤ 99.0. Error: ${ExceptionUtils.getMessage(e)}").invalidNel + } case Failure(OptionNotFoundException(_)) => // This is an optional... 
option, so "not found" is fine ().validNel - case Failure(e) => s"'$optionName' is specified in workflow options but value is not of expected Double type: ${e.getMessage}".invalidNel + case Failure(e) => + s"'$optionName' is specified in workflow options but value is not of expected Double type: ${e.getMessage}".invalidNel } } } @@ -171,7 +188,10 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, cromwellBackends: => CromwellBackends, importLocalFilesystem: Boolean, ioActorProxy: ActorRef, - hogGroup: HogGroup) extends LoggingFSM[MaterializeWorkflowDescriptorActorState, Unit] with StrictLogging with WorkflowLogging { + hogGroup: HogGroup +) extends LoggingFSM[MaterializeWorkflowDescriptorActorState, Unit] + with StrictLogging + with WorkflowLogging { import MaterializeWorkflowDescriptorActor._ val tag = self.path.name @@ -186,27 +206,32 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, startWith(ReadyToMaterializeState, ()) - when(ReadyToMaterializeState) { - case Event(MaterializeWorkflowDescriptorCommand(workflowSourceFiles, conf, callCachingEnabled, invalidateBadCacheResults), _) => + case Event(MaterializeWorkflowDescriptorCommand(workflowSourceFiles, + conf, + callCachingEnabled, + invalidateBadCacheResults + ), + _ + ) => val replyTo = sender() workflowOptionsAndPathBuilders(workflowSourceFiles) match { case (workflowOptions, pathBuilders) => val futureDescriptor: Future[ErrorOr[EngineWorkflowDescriptor]] = pathBuilders flatMap { pb => - val engineIoFunctions = new EngineIoFunctions(pb, new AsyncIo(ioActorProxy, GcsBatchCommandBuilder), iOExecutionContext) - buildWorkflowDescriptor( - workflowId, - workflowSourceFiles, - conf, - callCachingEnabled, - invalidateBadCacheResults, - workflowOptions, - pb, - engineIoFunctions) - .value - .unsafeToFuture(). - map(_.toValidated) + val engineIoFunctions = + new EngineIoFunctions(pb, new AsyncIo(ioActorProxy, GcsBatchCommandBuilder), iOExecutionContext) + buildWorkflowDescriptor(workflowId, + workflowSourceFiles, + conf, + callCachingEnabled, + invalidateBadCacheResults, + workflowOptions, + pb, + engineIoFunctions + ).value + .unsafeToFuture() + .map(_.toValidated) } // Pipe the response to self, but make it look like it comes from the sender of the command @@ -223,18 +248,21 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, case Event(Valid(descriptor: EngineWorkflowDescriptor), _) => sender() ! 
MaterializeWorkflowDescriptorSuccessResponse(descriptor) goto(MaterializationSuccessfulState) - case Event(Invalid(error: NonEmptyList[String]@unchecked), _) => + case Event(Invalid(error: NonEmptyList[String] @unchecked), _) => workflowInitializationFailed(error, sender()) goto(MaterializationFailedState) case Event(Status.Failure(failure), _) => - workflowInitializationFailed(NonEmptyList.of(failure.getMessage, failure.getStackTrace.toList.map(_.toString):_*), sender()) + workflowInitializationFailed( + NonEmptyList.of(failure.getMessage, failure.getStackTrace.toList.map(_.toString): _*), + sender() + ) goto(MaterializationFailedState) } // Let these fall through to the whenUnhandled handler: - when(MaterializationSuccessfulState) { FSM.NullFunction } - when(MaterializationFailedState) { FSM.NullFunction } - when(MaterializationAbortedState) { FSM.NullFunction } + when(MaterializationSuccessfulState)(FSM.NullFunction) + when(MaterializationFailedState)(FSM.NullFunction) + when(MaterializationAbortedState)(FSM.NullFunction) onTransition { case oldState -> terminalState if terminalState.terminal => @@ -252,17 +280,18 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, stay() } - private def workflowInitializationFailed(errors: NonEmptyList[String], replyTo: ActorRef) = { - sender() ! MaterializeWorkflowDescriptorFailureResponse( - new IllegalArgumentException with MessageAggregation { - val exceptionContext = "Workflow input processing failed" - val errorMessages = errors.toList - }) - } + private def workflowInitializationFailed(errors: NonEmptyList[String], replyTo: ActorRef) = + sender() ! MaterializeWorkflowDescriptorFailureResponse(new IllegalArgumentException with MessageAggregation { + val exceptionContext = "Workflow input processing failed" + val errorMessages = errors.toList + }) - private def workflowOptionsAndPathBuilders(sourceFiles: WorkflowSourceFilesCollection): (WorkflowOptions, Future[List[PathBuilder]]) = { + private def workflowOptionsAndPathBuilders( + sourceFiles: WorkflowSourceFilesCollection + ): (WorkflowOptions, Future[List[PathBuilder]]) = { sourceFiles.workflowOptions - val pathBuilders = EngineFilesystems.pathBuildersForWorkflow(sourceFiles.workflowOptions, pathBuilderFactories)(context.system) + val pathBuilders = + EngineFilesystems.pathBuildersForWorkflow(sourceFiles.workflowOptions, pathBuilderFactories)(context.system) (sourceFiles.workflowOptions, pathBuilders) } @@ -273,7 +302,8 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, invalidateBadCacheResults: Boolean, workflowOptions: WorkflowOptions, pathBuilders: List[PathBuilder], - engineIoFunctions: EngineIoFunctions): IOChecked[EngineWorkflowDescriptor] = { + engineIoFunctions: EngineIoFunctions + ): IOChecked[EngineWorkflowDescriptor] = { def findFactory(workflowSource: WorkflowSource): ErrorOr[LanguageFactory] = { @@ -287,7 +317,10 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, factory } - def buildValidatedNamespace(factory: LanguageFactory, workflowSource: WorkflowSource, importResolvers: List[ImportResolver]): IOChecked[ValidatedWomNamespace] = { + def buildValidatedNamespace(factory: LanguageFactory, + workflowSource: WorkflowSource, + importResolvers: List[ImportResolver] + ): IOChecked[ValidatedWomNamespace] = factory.validateNamespace( sourceFiles, workflowSource, @@ -297,16 +330,16 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, engineIoFunctions, importResolvers ) - } val 
localFilesystemResolvers = if (importLocalFilesystem) DirectoryResolver.localFilesystemResolvers(None) else List.empty - val zippedResolverCheck: IOChecked[Option[DirectoryResolver]] = fromEither[IO](sourceFiles.importsZipFileOption match { - case None => None.validNelCheck - case Some(zipContent) => zippedImportResolver(zipContent, workflowId).toEither.map(Option.apply) - }) + val zippedResolverCheck: IOChecked[Option[DirectoryResolver]] = + fromEither[IO](sourceFiles.importsZipFileOption match { + case None => None.validNelCheck + case Some(zipContent) => zippedImportResolver(zipContent, workflowId).toEither.map(Option.apply) + }) val labels = convertJsonToLabels(sourceFiles.labelsJson) @@ -314,15 +347,25 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, _ <- publishLabelsToMetadata(id, labels.asMap, serviceRegistryActor) zippedImportResolver <- zippedResolverCheck importResolvers = zippedImportResolver.toList ++ localFilesystemResolvers :+ HttpResolver(None, Map.empty) - sourceAndResolvers <- fromEither[IO](LanguageFactoryUtil.findWorkflowSource(sourceFiles.workflowSource, sourceFiles.workflowUrl, importResolvers)) - _ = if(sourceFiles.workflowUrl.isDefined) publishWorkflowSourceToMetadata(id, sourceAndResolvers._1) + sourceAndResolvers <- fromEither[IO]( + LanguageFactoryUtil.findWorkflowSource(sourceFiles.workflowSource, sourceFiles.workflowUrl, importResolvers) + ) + _ = if (sourceFiles.workflowUrl.isDefined) publishWorkflowSourceToMetadata(id, sourceAndResolvers._1) factory <- findFactory(sourceAndResolvers._1).toIOChecked outputRuntimeExtractor <- factory.womOutputRuntimeExtractor.toValidated.toIOChecked validatedNamespace <- buildValidatedNamespace(factory, sourceAndResolvers._1, sourceAndResolvers._2) closeResult = sourceAndResolvers._2.traverse(_.cleanupIfNecessary()) _ = pushNamespaceMetadata(validatedNamespace) - ewd <- buildWorkflowDescriptor(id, validatedNamespace, workflowOptions, labels, conf, callCachingEnabled, - invalidateBadCacheResults, pathBuilders, outputRuntimeExtractor).toIOChecked + ewd <- buildWorkflowDescriptor(id, + validatedNamespace, + workflowOptions, + labels, + conf, + callCachingEnabled, + invalidateBadCacheResults, + pathBuilders, + outputRuntimeExtractor + ).toIOChecked } yield { closeResult match { case Valid(_) => () @@ -335,7 +378,10 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, } private def publishWorkflowSourceToMetadata(id: WorkflowId, workflowSource: WorkflowSource): Unit = { - val event = MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Workflow), MetadataValue(workflowSource)) + val event = MetadataEvent( + MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Workflow), + MetadataValue(workflowSource) + ) serviceRegistryActor ! 
PutMetadataAction(event) } @@ -347,7 +393,7 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, } private def pushLanguageToMetadata(languageName: String, languageVersion: String): Unit = { - val events = List ( + val events = List( MetadataEvent(MetadataKey(workflowId, None, WorkflowMetadataKeys.LanguageName), MetadataValue(languageName)), MetadataEvent( MetadataKey(workflowId, None, WorkflowMetadataKeys.LanguageVersionName), @@ -366,7 +412,9 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, case inputs => inputs flatMap { case (outputPort, womValue) => val inputName = outputPort.fullyQualifiedName - womValueToMetadataEvents(MetadataKey(workflowId, None, s"${WorkflowMetadataKeys.Inputs}:$inputName"), womValue) + womValueToMetadataEvents(MetadataKey(workflowId, None, s"${WorkflowMetadataKeys.Inputs}:$inputName"), + womValue + ) } } } @@ -385,28 +433,27 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, imported map { case (uri, value) => metadataEventForImportedFile(uri, value) } } - private def wfNameMetadata(name: String): MetadataEvent = { + private def wfNameMetadata(name: String): MetadataEvent = // Workflow name: MetadataEvent(MetadataKey(workflowId, None, WorkflowMetadataKeys.Name), MetadataValue(name)) - } - private def convertJsonToLabels(json: String): Labels = { + private def convertJsonToLabels(json: String): Labels = json.parseJson match { - case JsObject(inputs) => Labels(inputs.toVector.collect({ - case (key, JsString(value)) => Label(key, value) - })) + case JsObject(inputs) => + Labels(inputs.toVector.collect { case (key, JsString(value)) => + Label(key, value) + }) case _ => Labels(Vector.empty) } - } // Perform a fail-fast validation that the `use_reference_disks` workflow option is boolean if present. - private def validateUseReferenceDisks(workflowOptions: WorkflowOptions) : ErrorOr[Unit] = { + private def validateUseReferenceDisks(workflowOptions: WorkflowOptions): ErrorOr[Unit] = { val optionName = WorkflowOptions.UseReferenceDisks.name workflowOptions.getBoolean(optionName) match { case Success(_) => // If present must be boolean ().validNel - case Failure (OptionNotFoundException(_)) => + case Failure(OptionNotFoundException(_)) => // This is an optional... 
option, so "not found" is fine ().validNel case Failure(e) => @@ -423,29 +470,51 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, callCachingEnabled: Boolean, invalidateBadCacheResults: Boolean, pathBuilders: List[PathBuilder], - outputRuntimeExtractor: Option[WomOutputRuntimeExtractor]): ErrorOr[EngineWorkflowDescriptor] = { + outputRuntimeExtractor: Option[WomOutputRuntimeExtractor] + ): ErrorOr[EngineWorkflowDescriptor] = { val taskCalls = womNamespace.executable.graph.allNodes collect { case taskNode: CommandCallNode => taskNode } val defaultBackendName = conf.as[Option[String]]("backend.default") val failureModeValidation = validateWorkflowFailureMode(workflowOptions, conf) val backendAssignmentsValidation = validateBackendAssignments(taskCalls, workflowOptions, defaultBackendName) - val callCachingModeValidation = validateCallCachingMode(workflowOptions, callCachingEnabled, - invalidateBadCacheResults) + val callCachingModeValidation = + validateCallCachingMode(workflowOptions, callCachingEnabled, invalidateBadCacheResults) val useReferenceDisksValidation: ErrorOr[Unit] = validateUseReferenceDisks(workflowOptions) val memoryRetryMultiplierValidation: ErrorOr[Unit] = validateMemoryRetryMultiplier(workflowOptions) - (failureModeValidation, backendAssignmentsValidation, callCachingModeValidation, useReferenceDisksValidation, memoryRetryMultiplierValidation) mapN { - case (failureMode, backendAssignments, callCachingMode, _, _) => - val callable = womNamespace.executable.entryPoint - val backendDescriptor = BackendWorkflowDescriptor(id, callable, womNamespace.womValueInputs, workflowOptions, labels, hogGroup, List.empty, outputRuntimeExtractor) - EngineWorkflowDescriptor(callable, backendDescriptor, backendAssignments, failureMode, pathBuilders, callCachingMode) + (failureModeValidation, + backendAssignmentsValidation, + callCachingModeValidation, + useReferenceDisksValidation, + memoryRetryMultiplierValidation + ) mapN { case (failureMode, backendAssignments, callCachingMode, _, _) => + val callable = womNamespace.executable.entryPoint + val backendDescriptor = BackendWorkflowDescriptor(id, + callable, + womNamespace.womValueInputs, + workflowOptions, + labels, + hogGroup, + List.empty, + outputRuntimeExtractor + ) + EngineWorkflowDescriptor(callable, + backendDescriptor, + backendAssignments, + failureMode, + pathBuilders, + callCachingMode + ) } } - private def validateBackendAssignments(calls: Set[CommandCallNode], workflowOptions: WorkflowOptions, defaultBackendName: Option[String]): ErrorOr[Map[CommandCallNode, String]] = { + private def validateBackendAssignments(calls: Set[CommandCallNode], + workflowOptions: WorkflowOptions, + defaultBackendName: Option[String] + ): ErrorOr[Map[CommandCallNode, String]] = { val callToBackendMap = Try { calls map { call => val backendPriorities = Seq( @@ -456,7 +525,10 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, backendPriorities.flatten.headOption match { case Some(backendName) if cromwellBackends.isValidBackendName(backendName) => call -> backendName - case Some(backendName) => throw new Exception(s"Backend for call ${call.fullyQualifiedName} ('$backendName') not registered in configuration file") + case Some(backendName) => + throw new Exception( + s"Backend for call ${call.fullyQualifiedName} ('$backendName') not registered in configuration file" + ) case None => throw new Exception(s"No backend could be found for call ${call.fullyQualifiedName}") } } toMap @@ -464,7 +536,7 @@ class 
MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, callToBackendMap match { case Success(backendMap) => - val backendMapAsString = backendMap.map({case (k, v) => s"${k.fullyQualifiedName} -> $v"}).mkString(", ") + val backendMapAsString = backendMap.map { case (k, v) => s"${k.fullyQualifiedName} -> $v" }.mkString(", ") workflowLogger.info(s"Call-to-Backend assignments: $backendMapAsString") backendMap.validNel case Failure(t) => t.getMessage.invalidNel @@ -476,25 +548,34 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, */ private def assignBackendUsingRuntimeAttrs(call: CommandCallNode): Option[String] = { val runtimeAttributesMap = call.callable.runtimeAttributes.attributes - runtimeAttributesMap.get(RuntimeBackendKey) map { wdlExpr => evaluateBackendNameExpression(call.fullyQualifiedName, wdlExpr) } + runtimeAttributesMap.get(RuntimeBackendKey) map { wdlExpr => + evaluateBackendNameExpression(call.fullyQualifiedName, wdlExpr) + } } - private def evaluateBackendNameExpression(callName: String, backendNameAsExp: WomExpression): String = { + private def evaluateBackendNameExpression(callName: String, backendNameAsExp: WomExpression): String = backendNameAsExp.evaluateValue(Map.empty, NoIoFunctionSet) match { case Valid(runtimeString: WomString) => runtimeString.valueString case Valid(x: WomValue) => - throw new Exception(s"Non-string values are not currently supported for backends! Cannot use backend '${x.valueString}' to backend to Call: $callName") + throw new Exception( + s"Non-string values are not currently supported for backends! Cannot use backend '${x.valueString}' to backend to Call: $callName" + ) case Invalid(errors) => // TODO WOM: need access to a "source string" for WomExpressions // TODO WOM: ErrorOrify this ? - throw AggregatedMessageException(s"Dynamic backends are not currently supported! Cannot assign backend '$backendNameAsExp' for Call: $callName", errors.toList) + throw AggregatedMessageException( + s"Dynamic backends are not currently supported! Cannot assign backend '$backendNameAsExp' for Call: $callName", + errors.toList + ) } - } - private def validateWorkflowFailureMode(workflowOptions: WorkflowOptions, conf: Config): ErrorOr[WorkflowFailureMode] = { + private def validateWorkflowFailureMode(workflowOptions: WorkflowOptions, + conf: Config + ): ErrorOr[WorkflowFailureMode] = { val modeString: Try[String] = workflowOptions.get(WorkflowOptions.WorkflowFailureMode) match { case Success(x) => Success(x) - case Failure(_: OptionNotFoundException) => Success(conf.as[Option[String]]("workflow-options.workflow-failure-mode") getOrElse DefaultWorkflowFailureMode) + case Failure(_: OptionNotFoundException) => + Success(conf.as[Option[String]]("workflow-options.workflow-failure-mode") getOrElse DefaultWorkflowFailureMode) case Failure(t) => Failure(t) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/DynamicRateLimiter.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/DynamicRateLimiter.scala index 6f1c068af5a..16b3026fc16 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/DynamicRateLimiter.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/DynamicRateLimiter.scala @@ -28,9 +28,8 @@ trait DynamicRateLimiter { this: Actor with Timers with ActorLogging => timers.startPeriodicTimer(ResetKey, ResetAction, dispensingRate.per) } - private def releaseTokens() = { + private def releaseTokens() = self ! 
TokensAvailable(dispensingRate.n) - } // When load is high, freeze token distribution private def highLoad(doLogging: Boolean = true) = { diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala index e97ae9974f4..e201d0917ba 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala @@ -29,23 +29,25 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, override val dispensingRate: DynamicRateLimiter.Rate, logInterval: Option[FiniteDuration], dispenserType: String, - tokenAllocatedDescription: String) - extends Actor + tokenAllocatedDescription: String +) extends Actor with ActorLogging with JobInstrumentation with CromwellInstrumentationScheduler with Timers with DynamicRateLimiter - with CromwellInstrumentation -{ + with CromwellInstrumentation { // Metrics paths are based on the dispenser type private val tokenDispenserMetricsBasePath: NonEmptyList[String] = NonEmptyList.of("token_dispenser", dispenserType) - private val tokenLeaseDurationMetricPath: NonEmptyList[String] = tokenDispenserMetricsBasePath :+ "token_hold_duration" + private val tokenLeaseDurationMetricPath: NonEmptyList[String] = + tokenDispenserMetricsBasePath :+ "token_hold_duration" - private val tokenDispenserMetricsActivityRates: NonEmptyList[String] = tokenDispenserMetricsBasePath :+ "activity_rate" - private val requestsEnqueuedMetricPath: NonEmptyList[String] = tokenDispenserMetricsActivityRates :+ "requests_enqueued" + private val tokenDispenserMetricsActivityRates: NonEmptyList[String] = + tokenDispenserMetricsBasePath :+ "activity_rate" + private val requestsEnqueuedMetricPath: NonEmptyList[String] = + tokenDispenserMetricsActivityRates :+ "requests_enqueued" private val tokensLeasedMetricPath: NonEmptyList[String] = tokenDispenserMetricsActivityRates :+ "tokens_dispensed" private val tokensReturnedMetricPath: NonEmptyList[String] = tokenDispenserMetricsActivityRates :+ "tokens_returned" @@ -90,7 +92,8 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, super.preStart() } - override def receive: Actor.Receive = tokenDispensingReceive.orElse(rateReceive).orElse(instrumentationReceive(instrumentationAction)) + override def receive: Actor.Receive = + tokenDispensingReceive.orElse(rateReceive).orElse(instrumentationReceive(instrumentationAction)) private def tokenDispensingReceive: Receive = { case JobTokenRequest(hogGroup, tokenType) => enqueue(sender(), hogGroup.value, tokenType) @@ -111,7 +114,7 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, count(tokensReturnedMetricPath, 0L, ServicesPrefix) } - private def enqueue(sndr: ActorRef, hogGroup: String, tokenType: JobTokenType): Unit = { + private def enqueue(sndr: ActorRef, hogGroup: String, tokenType: JobTokenType): Unit = if (tokenAssignments.contains(sndr)) { sndr ! 
JobTokenDispensed } else { @@ -121,12 +124,12 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, increment(requestsEnqueuedMetricPath, ServicesPrefix) () } - } private def dispense(n: Int) = if (tokenQueues.nonEmpty) { // Sort by backend name to avoid re-ordering across iterations: - val iterator = new RoundRobinQueueIterator(tokenQueues.toList.sortBy(_._1.backend).map(_._2), currentTokenQueuePointer) + val iterator = + new RoundRobinQueueIterator(tokenQueues.toList.sortBy(_._1.backend).map(_._2), currentTokenQueuePointer) // In rare cases, an abort might empty an inner queue between "available" and "dequeue", which could cause an // exception. @@ -139,11 +142,12 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, } if (nextTokens.nonEmpty) { - val hogGroupCounts = nextTokens.groupBy(t => t.queuePlaceholder.hogGroup).map { case (hogGroup, list) => s"$hogGroup: ${list.size}" } + val hogGroupCounts = + nextTokens.groupBy(t => t.queuePlaceholder.hogGroup).map { case (hogGroup, list) => s"$hogGroup: ${list.size}" } log.info(s"Assigned new job $dispenserType tokens to the following groups: ${hogGroupCounts.mkString(", ")}") } - nextTokens.foreach({ + nextTokens.foreach { case LeasedActor(queuePlaceholder, lease) if !tokenAssignments.contains(queuePlaceholder.actor) => tokenAssignments = tokenAssignments + (queuePlaceholder.actor -> TokenLeaseRecord(lease, OffsetDateTime.now())) incrementJob("Started") @@ -151,19 +155,21 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, queuePlaceholder.actor ! JobTokenDispensed // Only one token per actor, so if you've already got one, we don't need to use this new one: case LeasedActor(queuePlaceholder, lease) => - log.error(s"Programmer Error: Actor ${queuePlaceholder.actor.path} requested a job $dispenserType token more than once.") + log.error( + s"Programmer Error: Actor ${queuePlaceholder.actor.path} requested a job $dispenserType token more than once." + ) // Because this actor already has a lease assigned to it: // a) tell the actor that it has a lease // b) don't hold onto this new lease - release it and let some other actor take it instead queuePlaceholder.actor ! 
JobTokenDispensed lease.release() - }) + } tokenQueues = iterator.updatedQueues.map(queue => queue.tokenType -> queue).toMap currentTokenQueuePointer = iterator.updatedPointer } - private def release(actor: ActorRef): Unit = { + private def release(actor: ActorRef): Unit = tokenAssignments.get(actor) match { case Some(TokenLeaseRecord(leasedToken, timestamp)) => tokenAssignments -= actor @@ -175,7 +181,6 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, case None => log.error(s"Job {} token returned from incorrect actor: {}", dispenserType, actor.path.name) } - } private def onTerminate(terminee: ActorRef): Unit = { tokenAssignments.get(terminee) match { @@ -185,8 +190,8 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, case None => log.debug("Actor {} stopped before receiving a token, removing it from any queues if necessary", terminee) // This is a very inefficient way to remove the actor from the queue and can lead to very poor performance for a large queue and a large number of actors to remove - tokenQueues = tokenQueues map { - case (tokenType, tokenQueue) => tokenType -> tokenQueue.removeTokenlessActor(terminee) + tokenQueues = tokenQueues map { case (tokenType, tokenQueue) => + tokenType -> tokenQueue.removeTokenlessActor(terminee) } } context.unwatch(terminee) @@ -214,7 +219,9 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, } // Schedule the next log event: - context.system.scheduler.scheduleOnce(someInterval) { self ! LogJobTokenAllocation(someInterval) }(context.dispatcher) + context.system.scheduler.scheduleOnce(someInterval)(self ! LogJobTokenAllocation(someInterval))( + context.dispatcher + ) () } @@ -225,19 +232,23 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, so it's desirable that a group submitting to two or more backends pause workflow pickup globally when it exhausts tokens in one of the backends */ - private def tokenExhaustedGroups: ReplyLimitedGroups = { + private def tokenExhaustedGroups: ReplyLimitedGroups = ReplyLimitedGroups( tokenQueues.values.flatMap(_.eventLogger.tokenExhaustedGroups).toSet ) - } } object JobTokenDispenserActor { case object TokensTimerKey - def props(serviceRegistryActor: ActorRef, rate: DynamicRateLimiter.Rate, logInterval: Option[FiniteDuration], - dispenserType: String, tokenAllocatedDescription: String): Props = - Props(new JobTokenDispenserActor(serviceRegistryActor, rate, logInterval, dispenserType, tokenAllocatedDescription)).withDispatcher(EngineDispatcher) + def props(serviceRegistryActor: ActorRef, + rate: DynamicRateLimiter.Rate, + logInterval: Option[FiniteDuration], + dispenserType: String, + tokenAllocatedDescription: String + ): Props = + Props(new JobTokenDispenserActor(serviceRegistryActor, rate, logInterval, dispenserType, tokenAllocatedDescription)) + .withDispatcher(EngineDispatcher) case class JobTokenRequest(hogGroup: HogGroup, jobTokenType: JobTokenType) @@ -250,7 +261,11 @@ object JobTokenDispenserActor { implicit val tokenEncoder = deriveEncoder[JobTokenType] @JsonCodec(encodeOnly = true) - final case class TokenDispenserState(dispenserType: String, tokenTypes: Vector[TokenTypeState], pointer: Int, leased: Int) + final case class TokenDispenserState(dispenserType: String, + tokenTypes: Vector[TokenTypeState], + pointer: Int, + leased: Int + ) @JsonCodec(encodeOnly = true) final case class TokenTypeState(tokenType: JobTokenType, queue: TokenQueueState) diff --git 
a/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala index 36e555be98b..472ca190912 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala @@ -7,7 +7,8 @@ import cromwell.engine.workflow.tokens.TokenQueue.{DequeueResult, LeasedActor} * It will keep rotating the list until it finds a queue with an element that can be dequeued. * If no queue can be dequeued, the iterator is empty. */ -final class RoundRobinQueueIterator(initialTokenQueue: List[TokenQueue], initialPointer: Int) extends Iterator[LeasedActor] { +final class RoundRobinQueueIterator(initialTokenQueue: List[TokenQueue], initialPointer: Int) + extends Iterator[LeasedActor] { // Assumes the number of queues won't change during iteration (it really shouldn't !) private val numberOfQueues = initialTokenQueue.size // Indicate the index of next queue to try to dequeue from. @@ -45,14 +46,14 @@ final class RoundRobinQueueIterator(initialTokenQueue: List[TokenQueue], initial val indexStream = ((pointer until numberOfQueues) ++ (0 until pointer)).to(LazyList) val dequeuedTokenStream = indexStream.map(index => tokenQueues(index).dequeue -> index) - val firstLeasedActor = dequeuedTokenStream.collectFirst({ + val firstLeasedActor = dequeuedTokenStream.collectFirst { case (DequeueResult(Some(dequeuedActor), newTokenQueue), index) => // Update the tokenQueues with the new queue tokenQueues = tokenQueues.updated(index, newTokenQueue) // Update the index. Add 1 to force trying all the queues as we call next, even if the first one is available pointer = (index + 1) % numberOfQueues dequeuedActor - }) + } firstLeasedActor } diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala index 7ee35f86cd5..e61c17c1949 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala @@ -22,29 +22,28 @@ case object NullTokenEventLogger extends TokenEventLogger { class CachingTokenEventLogger(cacheEntryTTL: FiniteDuration) extends TokenEventLogger { - private val groupCache = CacheBuilder.newBuilder() + private val groupCache = CacheBuilder + .newBuilder() .expireAfterWrite(cacheEntryTTL._1, cacheEntryTTL._2) .maximumSize(10000) .build[String, Object]() - override def flagTokenHog(hogGroup: String): Unit = { + override def flagTokenHog(hogGroup: String): Unit = groupCache.put(hogGroup, new Object()) - } override def tokenExhaustedGroups: Set[String] = { import scala.jdk.CollectionConverters._ groupCache.asMap().keySet().asScala.toSet } - - private val backendCache = CacheBuilder.newBuilder() + private val backendCache = CacheBuilder + .newBuilder() .expireAfterWrite(cacheEntryTTL._1, cacheEntryTTL._2) .maximumSize(10000) .build[String, Object]() - override def outOfTokens(backend: String): Unit = { + override def outOfTokens(backend: String): Unit = backendCache.put(backend, new Object()) - } override def tokenExhaustedBackends: Set[String] = { import scala.jdk.CollectionConverters._ diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenQueue.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenQueue.scala index af31384f415..c16ae9dad91 100644 --- 
a/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenQueue.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenQueue.scala @@ -18,7 +18,8 @@ import scala.collection.immutable.Queue final case class TokenQueue(queues: Map[String, Queue[TokenQueuePlaceholder]], queueOrder: Vector[String], eventLogger: TokenEventLogger, - private [tokens] val pool: UnhoggableTokenPool) extends StrictLogging { + private[tokens] val pool: UnhoggableTokenPool +) extends StrictLogging { val tokenType = pool.tokenType /** @@ -31,7 +32,7 @@ final case class TokenQueue(queues: Map[String, Queue[TokenQueuePlaceholder]], * * @return the new token queue */ - def enqueue(placeholder: TokenQueuePlaceholder): TokenQueue = { + def enqueue(placeholder: TokenQueuePlaceholder): TokenQueue = if (queues.contains(placeholder.hogGroup)) { this.copy( queues = queues + (placeholder.hogGroup -> queues(placeholder.hogGroup).enqueue(placeholder)) @@ -42,7 +43,6 @@ final case class TokenQueue(queues: Map[String, Queue[TokenQueuePlaceholder]], queueOrder = queueOrder :+ placeholder.hogGroup ) } - } /** * Returns a dequeue'd actor if one exists and there's a token available for it @@ -57,7 +57,10 @@ final case class TokenQueue(queues: Map[String, Queue[TokenQueuePlaceholder]], recursingDequeue(guaranteedNonEmptyQueues, Vector.empty, queueOrder) } - private def recursingDequeue(queues: Map[String, Queue[TokenQueuePlaceholder]], queuesTried: Vector[String], queuesRemaining: Vector[String]): DequeueResult = { + private def recursingDequeue(queues: Map[String, Queue[TokenQueuePlaceholder]], + queuesTried: Vector[String], + queuesRemaining: Vector[String] + ): DequeueResult = if (queuesRemaining.isEmpty) { DequeueResult(None, this) } else { @@ -69,7 +72,9 @@ final case class TokenQueue(queues: Map[String, Queue[TokenQueuePlaceholder]], if (oldQueue.isEmpty) { // We should have caught this above. 
But just in case: - logger.warn(s"Programmer error: Empty token queue value still present in TokenQueue: $hogGroup *and* made it through into recursiveDequeue(!): $hogGroup") + logger.warn( + s"Programmer error: Empty token queue value still present in TokenQueue: $hogGroup *and* made it through into recursiveDequeue(!): $hogGroup" + ) recursingDequeue(queues, queuesTried :+ hogGroup, remainingHogGroups) } else { leaseTry match { @@ -80,7 +85,9 @@ final case class TokenQueue(queues: Map[String, Queue[TokenQueuePlaceholder]], } else { (queues + (hogGroup -> newQueue), remainingHogGroups ++ queuesTried :+ hogGroup) } - DequeueResult(Option(LeasedActor(placeholder, thl)), TokenQueue(newQueues, newQueueOrder, eventLogger, pool)) + DequeueResult(Option(LeasedActor(placeholder, thl)), + TokenQueue(newQueues, newQueueOrder, eventLogger, pool) + ) case TokenTypeExhausted => // The pool is completely full right now, so there's no benefit trying the other hog groups: eventLogger.outOfTokens(tokenType.backend) @@ -91,7 +98,6 @@ final case class TokenQueue(queues: Map[String, Queue[TokenQueuePlaceholder]], } } } - } def removeTokenlessActor(actor: ActorRef): TokenQueue = { val actorRemovedQueues = queues.map { case (hogGroup, queue) => @@ -139,7 +145,8 @@ object TokenQueue { case class LeasedActor(queuePlaceholder: TokenQueuePlaceholder, lease: Lease[JobToken]) { def actor: ActorRef = queuePlaceholder.actor } - def apply(tokenType: JobTokenType, logger: TokenEventLogger) = new TokenQueue(Map.empty, Vector.empty, logger, new UnhoggableTokenPool(tokenType)) + def apply(tokenType: JobTokenType, logger: TokenEventLogger) = + new TokenQueue(Map.empty, Vector.empty, logger, new UnhoggableTokenPool(tokenType)) final case class TokenQueuePlaceholder(actor: ActorRef, hogGroup: String) @JsonCodec diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala index 761748abb49..e4c3885b60c 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala @@ -12,13 +12,15 @@ import io.github.andrebeat.pool._ import scala.collection.immutable.HashSet import scala.collection.mutable -final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[JobToken]( - capacity = tokenType.maxPoolSize.getOrElse(UnhoggableTokenPool.MaxCapacity), - referenceType = ReferenceType.Strong, - _factory = () => JobToken(tokenType, UUID.randomUUID()), - _reset = Function.const(()), - _dispose = Function.const(()), - _healthCheck = Function.const(true)) { +final class UnhoggableTokenPool(val tokenType: JobTokenType) + extends SimplePool[JobToken]( + capacity = tokenType.maxPoolSize.getOrElse(UnhoggableTokenPool.MaxCapacity), + referenceType = ReferenceType.Strong, + _factory = () => JobToken(tokenType, UUID.randomUUID()), + _reset = Function.const(()), + _dispose = Function.const(()), + _healthCheck = Function.const(true) + ) { lazy val hogLimitOption: Option[Int] = tokenType match { case JobTokenType(_, Some(limit), hogFactor) if hogFactor > 1 => @@ -28,10 +30,11 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ private[this] val hogGroupAssignments: mutable.Map[String, HashSet[JobToken]] = mutable.Map.empty - override def tryAcquire(): Option[Lease[JobToken]] = throw new UnsupportedOperationException("Use tryAcquire(hogGroup)") - - def available(hogGroup: String): 
UnhoggableTokenPoolAvailability = { + override def tryAcquire(): Option[Lease[JobToken]] = throw new UnsupportedOperationException( + "Use tryAcquire(hogGroup)" + ) + def available(hogGroup: String): UnhoggableTokenPoolAvailability = hogLimitOption match { case None if leased() < capacity => TokensAvailable case None => TokenTypeExhausted @@ -46,10 +49,8 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ } } else TokenTypeExhausted } - } - - def tryAcquire(hogGroup: String): UnhoggableTokenPoolResult = { + def tryAcquire(hogGroup: String): UnhoggableTokenPoolResult = hogLimitOption match { case Some(hogLimit) => synchronized { @@ -75,9 +76,8 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ case None => TokenTypeExhausted } } - } - def unhog(hogGroup: String, lease: Lease[JobToken]): Unit = { + def unhog(hogGroup: String, lease: Lease[JobToken]): Unit = hogLimitOption foreach { _ => synchronized { val newAssignment = hogGroupAssignments.getOrElse(hogGroup, HashSet.empty) - lease.get() @@ -89,15 +89,15 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ } } } - } def poolState: TokenPoolState = { val (hogGroupUsages, hogLimitValue): (Option[Set[HogGroupState]], Option[Int]) = hogLimitOption match { case Some(hogLimit) => synchronized { - val entries: Set[HogGroupState] = hogGroupAssignments.toSet[(String, HashSet[JobToken])].map { case (hogGroup, set) => - HogGroupState(hogGroup, set.size, !hogGroupAssignments.get(hogGroup).forall(_.size < hogLimit)) - } + val entries: Set[HogGroupState] = + hogGroupAssignments.toSet[(String, HashSet[JobToken])].map { case (hogGroup, set) => + HogGroupState(hogGroup, set.size, !hogGroupAssignments.get(hogGroup).forall(_.size < hogLimit)) + } (Option(entries), Option(hogLimit)) } case None => (None, None) @@ -113,7 +113,9 @@ object UnhoggableTokenPool { sealed trait UnhoggableTokenPoolResult - final class TokenHoggingLease(lease: Lease[JobToken], hogGroup: String, pool: UnhoggableTokenPool) extends Lease[JobToken] with UnhoggableTokenPoolResult { + final class TokenHoggingLease(lease: Lease[JobToken], hogGroup: String, pool: UnhoggableTokenPool) + extends Lease[JobToken] + with UnhoggableTokenPoolResult { private[this] val dirty = new AtomicBoolean(false) override protected[this] def a: JobToken = lease.get() @@ -153,6 +155,11 @@ object UnhoggableTokenPool { final case class HogGroupState(hogGroup: String, used: Int, atLimit: Boolean) @JsonCodec - final case class TokenPoolState(hogGroups: Option[Set[HogGroupState]], hogLimit: Option[Int], capacity: Int, leased: Int, available: Boolean) + final case class TokenPoolState(hogGroups: Option[Set[HogGroupState]], + hogLimit: Option[Int], + capacity: Int, + leased: Int, + available: Boolean + ) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala index 1e565886941..6e8a8f96ea3 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala @@ -5,19 +5,26 @@ import com.google.common.cache.CacheBuilder import cromwell.core.{CacheConfig, WorkflowId} import cromwell.engine.workflow.WorkflowManagerActor import cromwell.engine.workflow.workflowstore.AbortRequestScanningActor.{AbortConfig, RunScan} -import 
cromwell.engine.workflow.workflowstore.WorkflowStoreActor.{FindWorkflowsWithAbortRequested, FindWorkflowsWithAbortRequestedFailure, FindWorkflowsWithAbortRequestedSuccess} +import cromwell.engine.workflow.workflowstore.WorkflowStoreActor.{ + FindWorkflowsWithAbortRequested, + FindWorkflowsWithAbortRequestedFailure, + FindWorkflowsWithAbortRequestedSuccess +} import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ - class AbortRequestScanningActor(workflowStoreActor: ActorRef, workflowManagerActor: ActorRef, abortConfig: AbortConfig, - workflowHeartbeatConfig: WorkflowHeartbeatConfig) extends Actor with Timers with ActorLogging { + workflowHeartbeatConfig: WorkflowHeartbeatConfig +) extends Actor + with Timers + with ActorLogging { private val cache = { val cacheConfig = abortConfig.cacheConfig - CacheBuilder.newBuilder() + CacheBuilder + .newBuilder() .concurrencyLevel(cacheConfig.concurrency) .expireAfterWrite(cacheConfig.ttl.length, cacheConfig.ttl.unit) .build[WorkflowId, java.lang.Boolean]() @@ -52,6 +59,10 @@ object AbortRequestScanningActor { case object RunScan - def props(workflowStoreActor: ActorRef, workflowManagerActor: ActorRef, abortConfig: AbortConfig, workflowHeartbeatConfig: WorkflowHeartbeatConfig): Props = + def props(workflowStoreActor: ActorRef, + workflowManagerActor: ActorRef, + abortConfig: AbortConfig, + workflowHeartbeatConfig: WorkflowHeartbeatConfig + ): Props = Props(new AbortRequestScanningActor(workflowStoreActor, workflowManagerActor, abortConfig, workflowHeartbeatConfig)) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemorySubWorkflowStore.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemorySubWorkflowStore.scala index c17a38b36fc..16557207b1b 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemorySubWorkflowStore.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemorySubWorkflowStore.scala @@ -14,7 +14,8 @@ class InMemorySubWorkflowStore(workflowStore: InMemoryWorkflowStore) extends Sub callFullyQualifiedName: String, jobIndex: Int, jobAttempt: Int, - subWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Unit] = { + subWorkflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Unit] = if (workflowStore.workflowStore.exists { case (wf, _) => wf.id.toString == rootWorkflowExecutionUuid }) { subWorkflowStore = subWorkflowStore + SubWorkflowStoreEntry( @@ -27,24 +28,24 @@ class InMemorySubWorkflowStore(workflowStore: InMemoryWorkflowStore) extends Sub ) Future.successful(()) } else Future.failed(new Throwable(s"No such root workflow: $rootWorkflowExecutionUuid")) - } override def querySubWorkflowStore(parentWorkflowExecutionUuid: String, callFqn: String, jobIndex: Int, - jobAttempt: Int)(implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]] = { + jobAttempt: Int + )(implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]] = Future.successful( - subWorkflowStore.find( - k => - k.parentWorkflowExecutionUuid == parentWorkflowExecutionUuid && - k.callFullyQualifiedName == callFqn && - k.callIndex == jobIndex && - k.callAttempt == jobAttempt + subWorkflowStore.find(k => + k.parentWorkflowExecutionUuid == parentWorkflowExecutionUuid && + k.callFullyQualifiedName == callFqn && + k.callIndex == jobIndex && + k.callAttempt == jobAttempt ) ) - } - override def removeSubWorkflowStoreEntries(parentWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Int] = { + override 
def removeSubWorkflowStoreEntries( + parentWorkflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Int] = { val toRemove = subWorkflowStore.filter(k => k.parentWorkflowExecutionUuid == parentWorkflowExecutionUuid) subWorkflowStore = subWorkflowStore -- toRemove Future.successful(toRemove.size) diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemoryWorkflowStore.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemoryWorkflowStore.scala index b6913f6172c..ebce8c92720 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemoryWorkflowStore.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/InMemoryWorkflowStore.scala @@ -5,7 +5,11 @@ import cats.data.NonEmptyList import cromwell.core.{HogGroup, WorkflowId, WorkflowSourceFilesCollection} import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreAbortResponse.WorkflowStoreAbortResponse import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreState.WorkflowStoreState -import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.{WorkflowStoreAbortResponse, WorkflowStoreState, WorkflowSubmissionResponse} +import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.{ + WorkflowStoreAbortResponse, + WorkflowStoreState, + WorkflowSubmissionResponse +} import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ExecutionContext, Future} @@ -18,12 +22,15 @@ class InMemoryWorkflowStore extends WorkflowStore { * Adds the requested WorkflowSourceFiles to the store and returns a WorkflowId for each one (in order) * for tracking purposes. */ - override def add(sources: NonEmptyList[WorkflowSourceFilesCollection])(implicit ec: ExecutionContext): Future[NonEmptyList[WorkflowSubmissionResponse]] = { - val actualWorkflowState = if (sources.head.workflowOnHold) WorkflowStoreState.OnHold else WorkflowStoreState.Submitted + override def add( + sources: NonEmptyList[WorkflowSourceFilesCollection] + )(implicit ec: ExecutionContext): Future[NonEmptyList[WorkflowSubmissionResponse]] = { + val actualWorkflowState = + if (sources.head.workflowOnHold) WorkflowStoreState.OnHold else WorkflowStoreState.Submitted val addedWorkflows = sources map { WorkflowIdAndSources(WorkflowId.randomId(), _) -> actualWorkflowState } workflowStore ++= addedWorkflows.toList.toMap - Future.successful(addedWorkflows map { - case (WorkflowIdAndSources(id, _), _) => WorkflowSubmissionResponse(actualWorkflowState, id) + Future.successful(addedWorkflows map { case (WorkflowIdAndSources(id, _), _) => + WorkflowSubmissionResponse(actualWorkflowState, id) }) } @@ -31,9 +38,15 @@ class InMemoryWorkflowStore extends WorkflowStore { * Retrieves up to n workflows which have not already been pulled into the engine and sets their pickedUp * flag to true */ - override def fetchStartableWorkflows(n: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String])(implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = { + override def fetchStartableWorkflows(n: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = { if (excludedGroups.nonEmpty) - throw new UnsupportedOperationException("Programmer Error: group filtering not supported for single-tenant/in-memory workflow store") + throw new UnsupportedOperationException( + "Programmer Error: group filtering not supported for single-tenant/in-memory workflow store" + 
) val startableWorkflows = workflowStore filter { _._2 == WorkflowStoreState.Submitted } take n val updatedWorkflows = startableWorkflows map { _._1 -> WorkflowStoreState.Running } @@ -51,17 +64,18 @@ class InMemoryWorkflowStore extends WorkflowStore { override def initialize(implicit ec: ExecutionContext): Future[Unit] = Future.successful(()) - override def stats(implicit ec: ExecutionContext): Future[Map[WorkflowStoreState, Int]] = Future.successful(Map(WorkflowStoreState.Submitted -> workflowStore.size)) + override def stats(implicit ec: ExecutionContext): Future[Map[WorkflowStoreState, Int]] = + Future.successful(Map(WorkflowStoreState.Submitted -> workflowStore.size)) override def abortAllRunning()(implicit ec: ExecutionContext): Future[Unit] = { - workflowStore = workflowStore.map({ + workflowStore = workflowStore.map { case (workflow, WorkflowStoreState.Running) => workflow -> WorkflowStoreState.Aborting case (workflow, state) => workflow -> state - }) + } Future.successful(()) } - override def abort(id: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = { + override def abort(id: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = workflowStore collectFirst { case (workflowIdAndSources, workflowStoreState) if workflowIdAndSources.id == id => (workflowIdAndSources, workflowStoreState) @@ -76,21 +90,24 @@ class InMemoryWorkflowStore extends WorkflowStore { case None => Future.successful(WorkflowStoreAbortResponse.NotFound) } - } override def writeWorkflowHeartbeats(workflowIds: Set[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit ec: ExecutionContext): Future[Int] = { + heartbeatDateTime: OffsetDateTime + )(implicit ec: ExecutionContext): Future[Int] = Future.successful(workflowIds.size) - } - override def switchOnHoldToSubmitted(id: WorkflowId)(implicit ec: ExecutionContext): Future[Unit] = Future.successful(()) + override def switchOnHoldToSubmitted(id: WorkflowId)(implicit ec: ExecutionContext): Future[Unit] = + Future.successful(()) - override def findWorkflowsWithAbortRequested(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[WorkflowId]] = Future.successful(List.empty) + override def findWorkflowsWithAbortRequested(cromwellId: String)(implicit + ec: ExecutionContext + ): Future[Iterable[WorkflowId]] = Future.successful(List.empty) - override def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[WorkflowId]] = Future.successful(workflowStore.keys.map(_.id)) + override def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[WorkflowId]] = + Future.successful(workflowStore.keys.map(_.id)) - override def deleteFromStore(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Int] = Future.successful(0) + override def deleteFromStore(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Int] = + Future.successful(0) } final case class WorkflowIdAndSources(id: WorkflowId, sources: WorkflowSourceFilesCollection) diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStore.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStore.scala index b36cb008933..2243ff32573 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStore.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStore.scala @@ -13,7 +13,14 @@ import cromwell.database.sql.{MetadataSqlDatabase, WorkflowStoreSqlDatabase} 
import cromwell.database.sql.tables.WorkflowStoreEntry import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreAbortResponse.WorkflowStoreAbortResponse import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreState.WorkflowStoreState -import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.{DuplicateWorkflowIdsRequested, NotInOnHoldStateException, WorkflowIdsAlreadyInUseException, WorkflowStoreAbortResponse, WorkflowStoreState, WorkflowSubmissionResponse} +import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.{ + DuplicateWorkflowIdsRequested, + NotInOnHoldStateException, + WorkflowIdsAlreadyInUseException, + WorkflowStoreAbortResponse, + WorkflowStoreState, + WorkflowSubmissionResponse +} import eu.timepit.refined.api.Refined import eu.timepit.refined.collection._ @@ -23,17 +30,17 @@ import scala.concurrent.{ExecutionContext, Future} object SqlWorkflowStore { case class WorkflowSubmissionResponse(state: WorkflowStoreState, id: WorkflowId) - case class DuplicateWorkflowIdsRequested(workflowIds: Seq[WorkflowId]) extends - Exception (s"Requested workflow IDs are duplicated: ${workflowIds.mkString(", ")}") + case class DuplicateWorkflowIdsRequested(workflowIds: Seq[WorkflowId]) + extends Exception(s"Requested workflow IDs are duplicated: ${workflowIds.mkString(", ")}") - case class WorkflowIdsAlreadyInUseException(workflowIds: Seq[WorkflowId]) extends - Exception (s"Requested workflow IDs are already in use: ${workflowIds.mkString(", ")}") + case class WorkflowIdsAlreadyInUseException(workflowIds: Seq[WorkflowId]) + extends Exception(s"Requested workflow IDs are already in use: ${workflowIds.mkString(", ")}") - case class NotInOnHoldStateException(workflowId: WorkflowId) extends - Exception( - s"Couldn't change status of workflow $workflowId to " + - "'Submitted' because the workflow is not in 'On Hold' state" - ) + case class NotInOnHoldStateException(workflowId: WorkflowId) + extends Exception( + s"Couldn't change status of workflow $workflowId to " + + "'Submitted' because the workflow is not in 'On Hold' state" + ) object WorkflowStoreState extends Enumeration { type WorkflowStoreState = Value @@ -51,12 +58,14 @@ object SqlWorkflowStore { } } -case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDatabase: MetadataSqlDatabase) extends WorkflowStore { +case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDatabase: MetadataSqlDatabase) + extends WorkflowStore { + /** This is currently hardcoded to success but used to do stuff, left in place for now as a useful * startup initialization hook. 
*/ override def initialize(implicit ec: ExecutionContext): Future[Unit] = Future.successful(()) - override def abort(id: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = { + override def abort(id: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = sqlDatabase.deleteOrUpdateWorkflowToState( workflowExecutionUuid = id.toString, workflowStateToDelete1 = WorkflowStoreState.OnHold.toString, @@ -70,33 +79,34 @@ case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDa case None => WorkflowStoreAbortResponse.NotFound } - } - override def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[WorkflowId]] = { + override def findWorkflows(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[WorkflowId]] = sqlDatabase.findWorkflows(cromwellId) map { _ map WorkflowId.fromString } - } - override def findWorkflowsWithAbortRequested(cromwellId: String)(implicit ec: ExecutionContext): Future[Iterable[WorkflowId]] = { + override def findWorkflowsWithAbortRequested(cromwellId: String)(implicit + ec: ExecutionContext + ): Future[Iterable[WorkflowId]] = sqlDatabase.findWorkflowsWithAbortRequested(cromwellId) map { _ map WorkflowId.fromString } - } - override def abortAllRunning()(implicit ec: ExecutionContext): Future[Unit] = { + override def abortAllRunning()(implicit ec: ExecutionContext): Future[Unit] = sqlDatabase.setStateToState(WorkflowStoreState.Running.toString, WorkflowStoreState.Aborting.toString) - } - override def stats(implicit ec: ExecutionContext): Future[Map[WorkflowStoreState, Int]] = { + override def stats(implicit ec: ExecutionContext): Future[Map[WorkflowStoreState, Int]] = sqlDatabase.workflowStateCounts.map { - _ map { - case (key, value) => WorkflowStoreState.withName(key) -> value + _ map { case (key, value) => + WorkflowStoreState.withName(key) -> value } } - } /** * Retrieves up to n workflows which have not already been pulled into the engine and sets their pickedUp * flag to true */ - override def fetchStartableWorkflows(n: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String])(implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = { + override def fetchStartableWorkflows(n: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = { import cats.syntax.traverse._ import common.validation.Validation._ sqlDatabase.fetchWorkflowsInState( @@ -115,38 +125,44 @@ case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDa } override def writeWorkflowHeartbeats(workflowIds: Set[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit ec: ExecutionContext): Future[Int] = { - val sortedWorkflowIds = workflowIds.toList sortBy(_._2) map (_._1.toString) + heartbeatDateTime: OffsetDateTime + )(implicit ec: ExecutionContext): Future[Int] = { + val sortedWorkflowIds = workflowIds.toList sortBy (_._2) map (_._1.toString) sqlDatabase.writeWorkflowHeartbeats(sortedWorkflowIds, heartbeatDateTime.toSystemTimestamp) } - def workflowAlreadyExists(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Boolean] = { - Future.sequence(Seq( - sqlDatabase.checkWhetherWorkflowExists(workflowId.id.toString), - metadataSqlDatabase.getWorkflowStatus(workflowId.id.toString).map(_.nonEmpty) - )).map(_.exists(_ == true)) - } - - def findPreexistingWorkflowIds(workflowIds: Seq[WorkflowId])(implicit ec: 
ExecutionContext): Future[Seq[WorkflowId]] = { - Future.sequence(workflowIds.map(wfid => { - workflowAlreadyExists(wfid).map { - case true => Option(wfid) - case false => None - } - })).map { _.collect { case Some(existingId) => existingId } } - } + def workflowAlreadyExists(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Boolean] = + Future + .sequence( + Seq( + sqlDatabase.checkWhetherWorkflowExists(workflowId.id.toString), + metadataSqlDatabase.getWorkflowStatus(workflowId.id.toString).map(_.nonEmpty) + ) + ) + .map(_.exists(_ == true)) + + def findPreexistingWorkflowIds(workflowIds: Seq[WorkflowId])(implicit ec: ExecutionContext): Future[Seq[WorkflowId]] = + Future + .sequence(workflowIds.map { wfid => + workflowAlreadyExists(wfid).map { + case true => Option(wfid) + case false => None + } + }) + .map(_.collect { case Some(existingId) => existingId }) /** * Adds the requested WorkflowSourceFiles to the store and returns a WorkflowId for each one (in order) * for tracking purposes. */ - override def add(sources: NonEmptyList[WorkflowSourceFilesCollection])(implicit ec: ExecutionContext): Future[NonEmptyList[WorkflowSubmissionResponse]] = { + override def add( + sources: NonEmptyList[WorkflowSourceFilesCollection] + )(implicit ec: ExecutionContext): Future[NonEmptyList[WorkflowSubmissionResponse]] = { val requestedWorkflowIds = sources.map(_.requestedWorkflowId).collect { case Some(id) => id } val duplicatedIds = requestedWorkflowIds.diff(requestedWorkflowIds.toSet.toSeq) - if(duplicatedIds.nonEmpty) { + if (duplicatedIds.nonEmpty) { Future.failed(DuplicateWorkflowIdsRequested(duplicatedIds)) } else { findPreexistingWorkflowIds(requestedWorkflowIds) flatMap { preexistingIds => @@ -168,7 +184,7 @@ case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDa } } - override def switchOnHoldToSubmitted(id: WorkflowId)(implicit ec: ExecutionContext): Future[Unit] = { + override def switchOnHoldToSubmitted(id: WorkflowId)(implicit ec: ExecutionContext): Future[Unit] = for { updated <- sqlDatabase.updateWorkflowState( id.toString, @@ -177,19 +193,17 @@ case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDa ) _ <- if (updated == 0) Future.failed(NotInOnHoldStateException(id)) else Future.successful(()) } yield () - } - override def deleteFromStore(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Int] = { + override def deleteFromStore(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Int] = sqlDatabase.removeWorkflowStoreEntry(workflowId.toString) - } private def fromWorkflowStoreEntry(workflowStoreEntry: WorkflowStoreEntry): ErrorOr[WorkflowToStart] = { - val workflowOptionsValidation: ErrorOr[WorkflowOptions] = WorkflowOptions.fromJsonString(workflowStoreEntry.workflowOptions.toRawString).toErrorOr + val workflowOptionsValidation: ErrorOr[WorkflowOptions] = + WorkflowOptions.fromJsonString(workflowStoreEntry.workflowOptions.toRawString).toErrorOr val startableStateValidation = workflowStoreStateToStartableState(workflowStoreEntry) (startableStateValidation, workflowOptionsValidation) mapN { (startableState, workflowOptions) => - val id = WorkflowId.fromString(workflowStoreEntry.workflowExecutionUuid) val sources = WorkflowSourceFilesCollection( @@ -207,7 +221,8 @@ case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDa requestedWorkflowId = Option(id) ) - val hogGroup: HogGroup = workflowStoreEntry.hogGroup.map(HogGroup(_)).getOrElse(HogGroup.decide(workflowOptions, id)) + val 
hogGroup: HogGroup = + workflowStoreEntry.hogGroup.map(HogGroup(_)).getOrElse(HogGroup.decide(workflowOptions, id)) WorkflowToStart( id = id, @@ -219,16 +234,15 @@ case class SqlWorkflowStore(sqlDatabase: WorkflowStoreSqlDatabase, metadataSqlDa } } - private def workflowSubmissionState(workflowSourceFiles: WorkflowSourceFilesCollection) = { + private def workflowSubmissionState(workflowSourceFiles: WorkflowSourceFilesCollection) = if (workflowSourceFiles.workflowOnHold) WorkflowStoreState.OnHold else WorkflowStoreState.Submitted - } private def toWorkflowStoreEntry(workflowSourceFiles: WorkflowSourceFilesCollection): WorkflowStoreEntry = { import eu.timepit.refined._ - val nonEmptyJsonString: String Refined NonEmpty = refineMV[NonEmpty]("{}") + val nonEmptyJsonString: String Refined NonEmpty = refineMV[NonEmpty]("{}") val actualWorkflowState = workflowSubmissionState(workflowSourceFiles) diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfig.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfig.scala index d27d6d5b1f6..99d35c8a02d 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfig.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfig.scala @@ -25,13 +25,13 @@ import scala.concurrent.duration._ * @param writeBatchSize The maximum size of a write batch. * @param writeThreshold The threshold of heartbeat writes above which load is considered high. */ -case class WorkflowHeartbeatConfig( - cromwellId: String, - heartbeatInterval: FiniteDuration, - ttl: FiniteDuration, - failureShutdownDuration: FiniteDuration, - writeBatchSize: Int, - writeThreshold: Int) { +case class WorkflowHeartbeatConfig(cromwellId: String, + heartbeatInterval: FiniteDuration, + ttl: FiniteDuration, + failureShutdownDuration: FiniteDuration, + writeBatchSize: Int, + writeThreshold: Int +) { override def toString: String = this.asInstanceOf[WorkflowHeartbeatConfig].asJson.spaces2 } @@ -41,14 +41,12 @@ object WorkflowHeartbeatConfig { // compiler flag settings then promote to an error. 
// NOTE: This is a different encoding than circe's finiteDurationEncoder: https://github.com/circe/circe/pull/978 - private[engine] implicit lazy val encodeFiniteDuration: Encoder[FiniteDuration] = { + implicit private[engine] lazy val encodeFiniteDuration: Encoder[FiniteDuration] = Encoder.encodeString.contramap(_.toString) - } - private[engine] implicit lazy val encodeWorkflowHeartbeatConfig: Encoder[WorkflowHeartbeatConfig] = deriveEncoder + implicit private[engine] lazy val encodeWorkflowHeartbeatConfig: Encoder[WorkflowHeartbeatConfig] = deriveEncoder - def apply(config: Config): WorkflowHeartbeatConfig = { + def apply(config: Config): WorkflowHeartbeatConfig = validate(config).toTry("Errors parsing WorkflowHeartbeatConfig").get - } private def validate(config: Config): ErrorOr[WorkflowHeartbeatConfig] = { val randomSuffix = config diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStore.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStore.scala index 375a24db8b3..8ca4d5314ef 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStore.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStore.scala @@ -32,17 +32,21 @@ trait WorkflowStore { * Adds the requested WorkflowSourceFiles to the store and returns a WorkflowId for each one (in order) * for tracking purposes. */ - def add(sources: NonEmptyList[WorkflowSourceFilesCollection])(implicit ec: ExecutionContext): Future[NonEmptyList[WorkflowSubmissionResponse]] + def add(sources: NonEmptyList[WorkflowSourceFilesCollection])(implicit + ec: ExecutionContext + ): Future[NonEmptyList[WorkflowSubmissionResponse]] /** * Retrieves up to n workflows which have not already been pulled into the engine and sets their pickedUp * flag to true */ - def fetchStartableWorkflows(n: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String])(implicit ec: ExecutionContext): Future[List[WorkflowToStart]] + def fetchStartableWorkflows(n: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String])( + implicit ec: ExecutionContext + ): Future[List[WorkflowToStart]] - def writeWorkflowHeartbeats(workflowIds: Set[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit ec: ExecutionContext): Future[Int] + def writeWorkflowHeartbeats(workflowIds: Set[(WorkflowId, OffsetDateTime)], heartbeatDateTime: OffsetDateTime)( + implicit ec: ExecutionContext + ): Future[Int] def switchOnHoldToSubmitted(id: WorkflowId)(implicit ec: ExecutionContext): Future[Unit] diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreAccess.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreAccess.scala index 2086cd34256..16ad4ee669c 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreAccess.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreAccess.scala @@ -24,17 +24,20 @@ import scala.concurrent.{ExecutionContext, Future} */ sealed trait WorkflowStoreAccess { def writeWorkflowHeartbeats(workflowIds: NonEmptyVector[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] + heartbeatDateTime: OffsetDateTime + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] - def fetchStartableWorkflows(maxWorkflows: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: 
Set[String]) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[List[WorkflowToStart]] + def fetchStartableWorkflows(maxWorkflows: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[List[WorkflowToStart]] - def abort(workflowId: WorkflowId) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[WorkflowStoreAbortResponse] + def abort( + workflowId: WorkflowId + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[WorkflowStoreAbortResponse] - def deleteFromStore(workflowId: WorkflowId) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] + def deleteFromStore(workflowId: WorkflowId)(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] } @@ -45,23 +48,26 @@ sealed trait WorkflowStoreAccess { case class UncoordinatedWorkflowStoreAccess(store: WorkflowStore) extends WorkflowStoreAccess { override def writeWorkflowHeartbeats(workflowIds: NonEmptyVector[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = { + heartbeatDateTime: OffsetDateTime + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = store.writeWorkflowHeartbeats(workflowIds.toVector.toSet, heartbeatDateTime) - } - override def fetchStartableWorkflows(maxWorkflows: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String]) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[List[WorkflowToStart]] = { + override def fetchStartableWorkflows(maxWorkflows: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[List[WorkflowToStart]] = store.fetchStartableWorkflows(maxWorkflows, cromwellId, heartbeatTtl, excludedGroups) - } - override def deleteFromStore(workflowId: WorkflowId)(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = { + override def deleteFromStore( + workflowId: WorkflowId + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = store.deleteFromStore(workflowId) - } - override def abort(workflowId: WorkflowId)(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = { + override def abort( + workflowId: WorkflowId + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = store.abort(workflowId) - } } /** @@ -72,31 +78,42 @@ case class CoordinatedWorkflowStoreAccess(coordinatedWorkflowStoreAccessActor: A implicit val timeout = Timeout(WorkflowStoreCoordinatedAccessActor.Timeout) override def writeWorkflowHeartbeats(workflowIds: NonEmptyVector[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = { - withRetryForTransactionRollback( - () => coordinatedWorkflowStoreAccessActor.ask(WorkflowStoreCoordinatedAccessActor.WriteHeartbeats(workflowIds, heartbeatDateTime)).mapTo[Int] + heartbeatDateTime: OffsetDateTime + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = + withRetryForTransactionRollback(() => + coordinatedWorkflowStoreAccessActor + .ask(WorkflowStoreCoordinatedAccessActor.WriteHeartbeats(workflowIds, heartbeatDateTime)) + .mapTo[Int] ) - } - override def fetchStartableWorkflows(maxWorkflows: Int, cromwellId: String, heartbeatTtl: FiniteDuration, 
excludedGroups: Set[String]) - (implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[List[WorkflowToStart]] = { - val message = WorkflowStoreCoordinatedAccessActor.FetchStartableWorkflows(maxWorkflows, cromwellId, heartbeatTtl, excludedGroups) - withRetryForTransactionRollback( - () => coordinatedWorkflowStoreAccessActor.ask(message).mapTo[List[WorkflowToStart]] + override def fetchStartableWorkflows(maxWorkflows: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[List[WorkflowToStart]] = { + val message = WorkflowStoreCoordinatedAccessActor.FetchStartableWorkflows(maxWorkflows, + cromwellId, + heartbeatTtl, + excludedGroups ) + withRetryForTransactionRollback(() => coordinatedWorkflowStoreAccessActor.ask(message).mapTo[List[WorkflowToStart]]) } - override def deleteFromStore(workflowId: WorkflowId)(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = { - withRetryForTransactionRollback( - () => coordinatedWorkflowStoreAccessActor.ask(WorkflowStoreCoordinatedAccessActor.DeleteFromStore(workflowId)).mapTo[Int] + override def deleteFromStore( + workflowId: WorkflowId + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[Int] = + withRetryForTransactionRollback(() => + coordinatedWorkflowStoreAccessActor + .ask(WorkflowStoreCoordinatedAccessActor.DeleteFromStore(workflowId)) + .mapTo[Int] ) - } - override def abort(workflowId: WorkflowId)(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = { - withRetryForTransactionRollback( - () => coordinatedWorkflowStoreAccessActor.ask(WorkflowStoreCoordinatedAccessActor.Abort(workflowId)).mapTo[WorkflowStoreAbortResponse] + override def abort( + workflowId: WorkflowId + )(implicit actorSystem: ActorSystem, ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = + withRetryForTransactionRollback(() => + coordinatedWorkflowStoreAccessActor + .ask(WorkflowStoreCoordinatedAccessActor.Abort(workflowId)) + .mapTo[WorkflowStoreAbortResponse] ) - } } - diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala index 4bb31db4a6d..2d309f369c2 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala @@ -11,23 +11,23 @@ import cromwell.engine.CromwellTerminator import cromwell.util.GracefulShutdownHelper import cromwell.util.GracefulShutdownHelper.ShutdownCommand -final case class WorkflowStoreActor private( - workflowStore: WorkflowStore, +final case class WorkflowStoreActor private (workflowStore: WorkflowStore, workflowStoreAccess: WorkflowStoreAccess, serviceRegistryActor: ActorRef, terminator: CromwellTerminator, abortAllJobsOnTerminate: Boolean, - workflowHeartbeatConfig: WorkflowHeartbeatConfig) - extends Actor with ActorLogging with GracefulShutdownHelper { + workflowHeartbeatConfig: WorkflowHeartbeatConfig +) extends Actor + with ActorLogging + with GracefulShutdownHelper { import WorkflowStoreActor._ implicit val ec = context.dispatcher lazy val workflowStoreSubmitActor: ActorRef = context.actorOf( - WorkflowStoreSubmitActor.props( - workflowStoreDatabase = workflowStore, - serviceRegistryActor = serviceRegistryActor), - "WorkflowStoreSubmitActor") + WorkflowStoreSubmitActor.props(workflowStoreDatabase 
= workflowStore, serviceRegistryActor = serviceRegistryActor), + "WorkflowStoreSubmitActor" + ) lazy val workflowStoreEngineActor: ActorRef = context.actorOf( WorkflowStoreEngineActor.props( @@ -35,19 +35,24 @@ final case class WorkflowStoreActor private( workflowStoreAccess = workflowStoreAccess, serviceRegistryActor = serviceRegistryActor, abortAllJobsOnTerminate = abortAllJobsOnTerminate, - workflowHeartbeatConfig = workflowHeartbeatConfig), - "WorkflowStoreEngineActor") + workflowHeartbeatConfig = workflowHeartbeatConfig + ), + "WorkflowStoreEngineActor" + ) lazy val workflowStoreHeartbeatWriteActor: ActorRef = context.actorOf( WorkflowStoreHeartbeatWriteActor.props( workflowStoreAccess = workflowStoreAccess, workflowHeartbeatConfig = workflowHeartbeatConfig, terminator = terminator, - serviceRegistryActor = serviceRegistryActor), - "WorkflowStoreHeartbeatWriteActor") + serviceRegistryActor = serviceRegistryActor + ), + "WorkflowStoreHeartbeatWriteActor" + ) override def receive = { - case ShutdownCommand => waitForActorsAndShutdown(NonEmptyList.of(workflowStoreSubmitActor, workflowStoreEngineActor)) + case ShutdownCommand => + waitForActorsAndShutdown(NonEmptyList.of(workflowStoreSubmitActor, workflowStoreEngineActor)) case cmd: WorkflowStoreActorSubmitCommand => workflowStoreSubmitActor forward cmd case cmd: WorkflowStoreActorEngineCommand => workflowStoreEngineActor forward cmd case cmd: WorkflowStoreWriteHeartbeatCommand => workflowStoreHeartbeatWriteActor forward cmd @@ -75,26 +80,32 @@ object WorkflowStoreActor { sealed trait WorkflowStoreActorSubmitCommand final case class SubmitWorkflow(source: WorkflowSourceFilesCollection) extends WorkflowStoreActorSubmitCommand - final case class BatchSubmitWorkflows(sources: NonEmptyList[WorkflowSourceFilesCollection]) extends WorkflowStoreActorSubmitCommand + final case class BatchSubmitWorkflows(sources: NonEmptyList[WorkflowSourceFilesCollection]) + extends WorkflowStoreActorSubmitCommand final case object GetWorkflowStoreStats - case class WorkflowStoreWriteHeartbeatCommand(workflowId: WorkflowId, submissionTime: OffsetDateTime, heartbeatTime: OffsetDateTime = OffsetDateTime.now()) + case class WorkflowStoreWriteHeartbeatCommand(workflowId: WorkflowId, + submissionTime: OffsetDateTime, + heartbeatTime: OffsetDateTime = OffsetDateTime.now() + ) def props( - workflowStoreDatabase: WorkflowStore, - workflowStoreAccess: WorkflowStoreAccess, - serviceRegistryActor: ActorRef, - terminator: CromwellTerminator, - abortAllJobsOnTerminate: Boolean, - workflowHeartbeatConfig: WorkflowHeartbeatConfig - ) = { - Props(WorkflowStoreActor( - workflowStore = workflowStoreDatabase, - workflowStoreAccess = workflowStoreAccess, - serviceRegistryActor = serviceRegistryActor, - terminator = terminator, - abortAllJobsOnTerminate = abortAllJobsOnTerminate, - workflowHeartbeatConfig = workflowHeartbeatConfig)).withDispatcher(EngineDispatcher) - } + workflowStoreDatabase: WorkflowStore, + workflowStoreAccess: WorkflowStoreAccess, + serviceRegistryActor: ActorRef, + terminator: CromwellTerminator, + abortAllJobsOnTerminate: Boolean, + workflowHeartbeatConfig: WorkflowHeartbeatConfig + ) = + Props( + WorkflowStoreActor( + workflowStore = workflowStoreDatabase, + workflowStoreAccess = workflowStoreAccess, + serviceRegistryActor = serviceRegistryActor, + terminator = terminator, + abortAllJobsOnTerminate = abortAllJobsOnTerminate, + workflowHeartbeatConfig = workflowHeartbeatConfig + ) + ).withDispatcher(EngineDispatcher) } diff --git 
a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActor.scala index 3d36a5a8062..2adbfe25da5 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActor.scala @@ -13,7 +13,6 @@ import scala.concurrent.{Await, ExecutionContext, Future} import scala.language.postfixOps import scala.util.{Failure, Success, Try} - /** * Serializes access to the workflow store for workflow store writers that acquire locks to multiple rows inside a single * transaction and otherwise are prone to deadlock. @@ -44,12 +43,18 @@ class WorkflowStoreCoordinatedAccessActor(workflowStore: WorkflowStore) extends object WorkflowStoreCoordinatedAccessActor { final case class WriteHeartbeats(workflowIds: NonEmptyVector[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - final case class FetchStartableWorkflows(count: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String]) + heartbeatDateTime: OffsetDateTime + ) + final case class FetchStartableWorkflows(count: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + ) final case class DeleteFromStore(workflowId: WorkflowId) final case class Abort(workflowId: WorkflowId) val Timeout = 1 minute - def props(workflowStore: WorkflowStore): Props = Props(new WorkflowStoreCoordinatedAccessActor(workflowStore)).withDispatcher(Dispatcher.IoDispatcher) + def props(workflowStore: WorkflowStore): Props = + Props(new WorkflowStoreCoordinatedAccessActor(workflowStore)).withDispatcher(Dispatcher.IoDispatcher) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala index 44ecc09f1c8..a2fec62eda9 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala @@ -5,7 +5,7 @@ import cats.data.NonEmptyList import cromwell.core.Dispatcher._ import cromwell.core.WorkflowProcessingEvents.DescriptionEventValue.PickedUp import cromwell.core._ -import cromwell.core.abort.{WorkflowAbortFailureResponse, WorkflowAbortRequestedResponse, WorkflowAbortedResponse} +import cromwell.core.abort.{WorkflowAbortedResponse, WorkflowAbortFailureResponse, WorkflowAbortRequestedResponse} import cromwell.engine.instrumentation.WorkflowInstrumentation import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException import cromwell.engine.workflow.{WorkflowMetadataHelper, WorkflowProcessingEventPublishing} @@ -20,12 +20,17 @@ import org.apache.commons.lang3.exception.ExceptionUtils import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} -final case class WorkflowStoreEngineActor private(store: WorkflowStore, - workflowStoreAccess: WorkflowStoreAccess, - serviceRegistryActor: ActorRef, - abortAllJobsOnTerminate: Boolean, - workflowHeartbeatConfig: WorkflowHeartbeatConfig) - extends LoggingFSM[WorkflowStoreActorState, WorkflowStoreActorData] with ActorLogging with WorkflowInstrumentation with CromwellInstrumentationScheduler with WorkflowMetadataHelper with Timers { +final case class WorkflowStoreEngineActor private 
(store: WorkflowStore, + workflowStoreAccess: WorkflowStoreAccess, + serviceRegistryActor: ActorRef, + abortAllJobsOnTerminate: Boolean, + workflowHeartbeatConfig: WorkflowHeartbeatConfig +) extends LoggingFSM[WorkflowStoreActorState, WorkflowStoreActorData] + with ActorLogging + with WorkflowInstrumentation + with CromwellInstrumentationScheduler + with WorkflowMetadataHelper + with Timers { implicit val actorSystem: ActorSystem = context.system implicit val ec: ExecutionContext = context.dispatcher @@ -63,12 +68,11 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, stay() using stateData.withPendingCommand(x, sender()) } - when(Idle) { - case Event(cmd: WorkflowStoreActorEngineCommand, _) => - if (stateData.currentOperation.nonEmpty || stateData.pendingOperations.nonEmpty) { - log.error("Non-empty WorkflowStoreActorData when in Idle state: {}", stateData) - } - startNewWork(cmd, sender(), stateData.withCurrentCommand(cmd, sender())) + when(Idle) { case Event(cmd: WorkflowStoreActorEngineCommand, _) => + if (stateData.currentOperation.nonEmpty || stateData.pendingOperations.nonEmpty) { + log.error("Non-empty WorkflowStoreActorData when in Idle state: {}", stateData) + } + startNewWork(cmd, sender(), stateData.withCurrentCommand(cmd, sender())) } when(Working) { @@ -93,12 +97,14 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, stay() } - onTransition { - case fromState -> toState => - log.debug("WorkflowStore moving from {} (using {}) to {} (using {})", fromState, stateData, toState, nextStateData) + onTransition { case fromState -> toState => + log.debug("WorkflowStore moving from {} (using {}) to {} (using {})", fromState, stateData, toState, nextStateData) } - private def startNewWork(command: WorkflowStoreActorEngineCommand, sndr: ActorRef, nextData: WorkflowStoreActorData) = { + private def startNewWork(command: WorkflowStoreActorEngineCommand, + sndr: ActorRef, + nextData: WorkflowStoreActorData + ) = { val work: Future[Any] = command match { case FetchRunnableWorkflows(count, excludedGroups) => newWorkflowMessage(count, excludedGroups) map { response => @@ -113,7 +119,11 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, ) workflowsIds foreach { w => - WorkflowProcessingEventPublishing.publish(w, workflowHeartbeatConfig.cromwellId, PickedUp, serviceRegistryActor) + WorkflowProcessingEventPublishing.publish(w, + workflowHeartbeatConfig.cromwellId, + PickedUp, + serviceRegistryActor + ) } case NoNewWorkflowsToStart => log.debug("No workflows fetched by {}", workflowHeartbeatConfig.cromwellId) @@ -122,10 +132,10 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, sndr ! response } case FindWorkflowsWithAbortRequested(cromwellId) => - store.findWorkflowsWithAbortRequested(cromwellId) map { - ids => sndr ! FindWorkflowsWithAbortRequestedSuccess(ids) - } recover { - case t => sndr ! FindWorkflowsWithAbortRequestedFailure(t) + store.findWorkflowsWithAbortRequested(cromwellId) map { ids => + sndr ! FindWorkflowsWithAbortRequestedSuccess(ids) + } recover { case t => + sndr ! FindWorkflowsWithAbortRequestedFailure(t) } case AbortWorkflowCommand(id) => workflowStoreAccess.abort(id) map { workflowStoreAbortResponse => @@ -139,14 +149,16 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, pushCurrentStateToMetadataService(id, WorkflowAborting) sndr ! WorkflowAbortRequestedResponse(id) case WorkflowStoreAbortResponse.NotFound => - sndr ! 
WorkflowAbortFailureResponse(id, new WorkflowNotFoundException(s"Couldn't abort $id because no workflow with that ID is in progress")) - } recover { - case t => - val message = s"Unable to update workflow store to abort $id" - log.error(t, message) - // A generic exception type like RuntimeException will produce a 500 at the API layer, which seems appropriate - // given we don't know much about what went wrong here. `t.getMessage` so the cause propagates to the client. - sndr ! WorkflowAbortFailureResponse(id, new RuntimeException(s"$message: ${t.getMessage}", t)) + sndr ! WorkflowAbortFailureResponse( + id, + new WorkflowNotFoundException(s"Couldn't abort $id because no workflow with that ID is in progress") + ) + } recover { case t => + val message = s"Unable to update workflow store to abort $id" + log.error(t, message) + // A generic exception type like RuntimeException will produce a 500 at the API layer, which seems appropriate + // given we don't know much about what went wrong here. `t.getMessage` so the cause propagates to the client. + sndr ! WorkflowAbortFailureResponse(id, new RuntimeException(s"$message: ${t.getMessage}", t)) } case AbortAllRunningWorkflowsCommandAndStop => store.abortAllRunning() map { _ => @@ -158,11 +170,10 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, sndr ! WorkflowOnHoldToSubmittedSuccess(id) pushCurrentStateToMetadataService(id, WorkflowSubmitted) log.info(s"Status changed to 'Submitted' for $id") - } recover { - case t => - val message = s"Couldn't change the status to 'Submitted' from 'On Hold' for workflow $id" - log.error(message) - sndr ! WorkflowOnHoldToSubmittedFailure(id, t) + } recover { case t => + val message = s"Couldn't change the status to 'Submitted' from 'On Hold' for workflow $id" + log.error(message) + sndr ! WorkflowOnHoldToSubmittedFailure(id, t) } case oops => log.error("Unexpected type of start work command: {}", oops.getClass.getSimpleName) @@ -172,64 +183,81 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, goto(Working) using nextData } - private def addWorkCompletionHooks[A](command: WorkflowStoreActorEngineCommand, work: Future[A]) = { + private def addWorkCompletionHooks[A](command: WorkflowStoreActorEngineCommand, work: Future[A]) = work.onComplete { case Success(_) => self ! WorkDone case Failure(t) => - log.error("Error occurred during {}: {} because {}", command.getClass.getSimpleName, t.toString, ExceptionUtils.getStackTrace(t)) + log.error("Error occurred during {}: {} because {}", + command.getClass.getSimpleName, + t.toString, + ExceptionUtils.getStackTrace(t) + ) self ! 
WorkDone } - } /** * Fetches at most n workflows, and builds the correct response message based on if there were any workflows or not */ - private def newWorkflowMessage(maxWorkflows: Int, excludedGroups: Set[String]): Future[WorkflowStoreEngineActorResponse] = { - def fetchStartableWorkflowsIfNeeded = { + private def newWorkflowMessage(maxWorkflows: Int, + excludedGroups: Set[String] + ): Future[WorkflowStoreEngineActorResponse] = { + def fetchStartableWorkflowsIfNeeded = if (maxWorkflows > 0) { - workflowStoreAccess.fetchStartableWorkflows(maxWorkflows, workflowHeartbeatConfig.cromwellId, workflowHeartbeatConfig.ttl, excludedGroups) + workflowStoreAccess.fetchStartableWorkflows(maxWorkflows, + workflowHeartbeatConfig.cromwellId, + workflowHeartbeatConfig.ttl, + excludedGroups + ) } else { Future.successful(List.empty[WorkflowToStart]) } - } fetchStartableWorkflowsIfNeeded map { case x :: xs => NewWorkflowsToStart(NonEmptyList.of(x, xs: _*)) case _ => NoNewWorkflowsToStart - } recover { - case e => - // Log the error but return a successful Future so as not to hang future workflow store polls. - log.error(e, "Error trying to fetch new workflows") - NoNewWorkflowsToStart + } recover { case e => + // Log the error but return a successful Future so as not to hang future workflow store polls. + log.error(e, "Error trying to fetch new workflows") + NoNewWorkflowsToStart } } } object WorkflowStoreEngineActor { def props( - workflowStore: WorkflowStore, - workflowStoreAccess: WorkflowStoreAccess, - serviceRegistryActor: ActorRef, - abortAllJobsOnTerminate: Boolean, - workflowHeartbeatConfig: WorkflowHeartbeatConfig - ) = { - Props(WorkflowStoreEngineActor(workflowStore, workflowStoreAccess, serviceRegistryActor, abortAllJobsOnTerminate, workflowHeartbeatConfig)).withDispatcher(EngineDispatcher) - } + workflowStore: WorkflowStore, + workflowStoreAccess: WorkflowStoreAccess, + serviceRegistryActor: ActorRef, + abortAllJobsOnTerminate: Boolean, + workflowHeartbeatConfig: WorkflowHeartbeatConfig + ) = + Props( + WorkflowStoreEngineActor(workflowStore, + workflowStoreAccess, + serviceRegistryActor, + abortAllJobsOnTerminate, + workflowHeartbeatConfig + ) + ).withDispatcher(EngineDispatcher) sealed trait WorkflowStoreEngineActorResponse case object NoNewWorkflowsToStart extends WorkflowStoreEngineActorResponse - final case class NewWorkflowsToStart(workflows: NonEmptyList[WorkflowToStart]) extends WorkflowStoreEngineActorResponse + final case class NewWorkflowsToStart(workflows: NonEmptyList[WorkflowToStart]) + extends WorkflowStoreEngineActorResponse final case class WorkflowStoreActorCommandWithSender(command: WorkflowStoreActorEngineCommand, sender: ActorRef) - final case class WorkflowStoreActorData(currentOperation: Option[WorkflowStoreActorCommandWithSender], pendingOperations: List[WorkflowStoreActorCommandWithSender]) { - def withCurrentCommand(command: WorkflowStoreActorEngineCommand, sender: ActorRef) = this.copy(currentOperation = Option(WorkflowStoreActorCommandWithSender(command, sender))) - def withPendingCommand(newCommand: WorkflowStoreActorEngineCommand, sender: ActorRef) = this.copy(pendingOperations = this.pendingOperations :+ WorkflowStoreActorCommandWithSender(newCommand, sender)) - def pop = { + final case class WorkflowStoreActorData(currentOperation: Option[WorkflowStoreActorCommandWithSender], + pendingOperations: List[WorkflowStoreActorCommandWithSender] + ) { + def withCurrentCommand(command: WorkflowStoreActorEngineCommand, sender: ActorRef) = + this.copy(currentOperation = 
Option(WorkflowStoreActorCommandWithSender(command, sender))) + def withPendingCommand(newCommand: WorkflowStoreActorEngineCommand, sender: ActorRef) = + this.copy(pendingOperations = this.pendingOperations :+ WorkflowStoreActorCommandWithSender(newCommand, sender)) + def pop = if (pendingOperations.isEmpty) { WorkflowStoreActorData(None, List.empty) } else { WorkflowStoreActorData(Option(pendingOperations.head), pendingOperations.tail) } - } } sealed trait WorkflowStoreActorState @@ -239,5 +267,6 @@ object WorkflowStoreEngineActor { sealed trait WorkflowOnHoldToSubmittedResponse case class WorkflowOnHoldToSubmittedSuccess(workflowId: WorkflowId) extends WorkflowOnHoldToSubmittedResponse - case class WorkflowOnHoldToSubmittedFailure(workflowId: WorkflowId, failure: Throwable) extends WorkflowOnHoldToSubmittedResponse + case class WorkflowOnHoldToSubmittedFailure(workflowId: WorkflowId, failure: Throwable) + extends WorkflowOnHoldToSubmittedResponse } diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActor.scala index ee88627cdfe..2cefd06ad7f 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActor.scala @@ -1,6 +1,6 @@ package cromwell.engine.workflow.workflowstore -import java.time.{OffsetDateTime, Duration => JDuration} +import java.time.{Duration => JDuration, OffsetDateTime} import java.util.concurrent.TimeUnit import akka.actor.{ActorRef, ActorSystem, CoordinatedShutdown, Props} @@ -20,11 +20,10 @@ import scala.util.{Failure, Success, Try} case class WorkflowStoreHeartbeatWriteActor(workflowStoreAccess: WorkflowStoreAccess, workflowHeartbeatConfig: WorkflowHeartbeatConfig, terminator: CromwellTerminator, - override val serviceRegistryActor: ActorRef) - - extends EnhancedBatchActor[WorkflowStoreWriteHeartbeatCommand]( - flushRate = workflowHeartbeatConfig.heartbeatInterval, - batchSize = workflowHeartbeatConfig.writeBatchSize) { + override val serviceRegistryActor: ActorRef +) extends EnhancedBatchActor[WorkflowStoreWriteHeartbeatCommand](flushRate = workflowHeartbeatConfig.heartbeatInterval, + batchSize = workflowHeartbeatConfig.writeBatchSize + ) { implicit val actorSystem: ActorSystem = context.system @@ -32,7 +31,7 @@ case class WorkflowStoreHeartbeatWriteActor(workflowStoreAccess: WorkflowStoreAc private val failureShutdownDuration = workflowHeartbeatConfig.failureShutdownDuration - //noinspection ActorMutableStateInspection + // noinspection ActorMutableStateInspection private var lastSuccessOption: Option[OffsetDateTime] = None /** @@ -40,50 +39,52 @@ case class WorkflowStoreHeartbeatWriteActor(workflowStoreAccess: WorkflowStoreAc * * @return the number of elements processed */ - override protected def process(data: NonEmptyVector[WorkflowStoreWriteHeartbeatCommand]): Future[Int] = instrumentedProcess { - val now = OffsetDateTime.now() - - val warnDuration = (failureShutdownDuration + workflowHeartbeatConfig.heartbeatInterval) / 2 - val warnThreshold = warnDuration.toNanos - val errorThreshold = failureShutdownDuration.toNanos - - // Traverse these heartbeats looking for staleness of warning or error severity. 
- val (warningIds, errorIds) = data.foldLeft((Seq.empty[WorkflowId], Seq.empty[WorkflowId])) { case ((w, e), h) => - val staleness = JDuration.between(h.heartbeatTime, now).toNanos - - // w = warning ids, e = error ids, h = heartbeat datum - if (staleness > errorThreshold) (w, e :+ h.workflowId) - else if (staleness > warnThreshold) (w :+ h.workflowId, e) - else (w, e) - } + override protected def process(data: NonEmptyVector[WorkflowStoreWriteHeartbeatCommand]): Future[Int] = + instrumentedProcess { + val now = OffsetDateTime.now() + + val warnDuration = (failureShutdownDuration + workflowHeartbeatConfig.heartbeatInterval) / 2 + val warnThreshold = warnDuration.toNanos + val errorThreshold = failureShutdownDuration.toNanos + + // Traverse these heartbeats looking for staleness of warning or error severity. + val (warningIds, errorIds) = data.foldLeft((Seq.empty[WorkflowId], Seq.empty[WorkflowId])) { case ((w, e), h) => + val staleness = JDuration.between(h.heartbeatTime, now).toNanos + + // w = warning ids, e = error ids, h = heartbeat datum + if (staleness > errorThreshold) (w, e :+ h.workflowId) + else if (staleness > warnThreshold) (w :+ h.workflowId, e) + else (w, e) + } - if (warningIds.nonEmpty) { - log.warning( - "Found {} stale workflow heartbeats (more than {} old): {}", - warningIds.size.toString, - warnDuration.toString(), - warningIds.mkString(", ") - ) - } + if (warningIds.nonEmpty) { + log.warning( + "Found {} stale workflow heartbeats (more than {} old): {}", + warningIds.size.toString, + warnDuration.toString(), + warningIds.mkString(", ") + ) + } - if (errorIds.isEmpty) { - val processFuture = workflowStoreAccess.writeWorkflowHeartbeats(data.map { h => (h.workflowId, h.submissionTime) }, now) - processFuture transform { - // Track the `Try`, and then return the original `Try`. Similar to `andThen` but doesn't swallow exceptions. - _ <| trackRepeatedFailures(now, data.length) + if (errorIds.isEmpty) { + val processFuture = + workflowStoreAccess.writeWorkflowHeartbeats(data.map(h => (h.workflowId, h.submissionTime)), now) + processFuture transform { + // Track the `Try`, and then return the original `Try`. Similar to `andThen` but doesn't swallow exceptions. + _ <| trackRepeatedFailures(now, data.length) + } + } else { + log.error( + "Shutting down Cromwell instance {} as {} stale workflow heartbeats (more than {} old) were found: {}", + workflowHeartbeatConfig.cromwellId, + errorIds.size.toString, + failureShutdownDuration.toString(), + errorIds.mkString(", ") + ) + terminator.beginCromwellShutdown(WorkflowStoreHeartbeatWriteActor.Shutdown) + Future.successful(0) } - } else { - log.error( - "Shutting down Cromwell instance {} as {} stale workflow heartbeats (more than {} old) were found: {}", - workflowHeartbeatConfig.cromwellId, - errorIds.size.toString, - failureShutdownDuration.toString(), - errorIds.mkString(", ") - ) - terminator.beginCromwellShutdown(WorkflowStoreHeartbeatWriteActor.Shutdown) - Future.successful(0) } - } override def receive: Receive = enhancedReceive.orElse(super.receive) override protected def weightFunction(command: WorkflowStoreWriteHeartbeatCommand) = 1 @@ -100,7 +101,7 @@ case class WorkflowStoreHeartbeatWriteActor(workflowStoreAccess: WorkflowStoreAc We are expecting the underlying FSM to ensure that the call to this method does NOT occur in parallel, waiting for the call to `process` to complete. 
*/ - private def trackRepeatedFailures(heartbeatDateTime: OffsetDateTime, workflowCount: Int)(processTry: Try[Int]): Unit = { + private def trackRepeatedFailures(heartbeatDateTime: OffsetDateTime, workflowCount: Int)(processTry: Try[Int]): Unit = processTry match { case Success(_) => lastSuccessOption = Option(heartbeatDateTime) @@ -118,15 +119,17 @@ case class WorkflowStoreHeartbeatWriteActor(workflowStoreAccess: WorkflowStoreAc if (failureJDuration.toNanos >= failureShutdownDuration.toNanos) { val failureUnits = failureShutdownDuration.unit val failureLength = FiniteDuration(failureJDuration.toNanos, TimeUnit.NANOSECONDS).toUnit(failureUnits) - log.error(String.format( - "Shutting down %s as at least %d heartbeat write errors have occurred between %s and %s (%s %s)", - workflowHeartbeatConfig.cromwellId, - Integer.valueOf(workflowCount), - lastSuccess, - now, - failureLength.toString, - failureUnits.toString.toLowerCase - )) + log.error( + String.format( + "Shutting down %s as at least %d heartbeat write errors have occurred between %s and %s (%s %s)", + workflowHeartbeatConfig.cromwellId, + Integer.valueOf(workflowCount), + lastSuccess, + now, + failureLength.toString, + failureUnits.toString.toLowerCase + ) + ) terminator.beginCromwellShutdown(WorkflowStoreHeartbeatWriteActor.Shutdown) } () @@ -135,7 +138,6 @@ case class WorkflowStoreHeartbeatWriteActor(workflowStoreAccess: WorkflowStoreAc terminator.beginCromwellShutdown(WorkflowStoreHeartbeatWriteActor.Shutdown) () } - } } @@ -143,16 +145,17 @@ object WorkflowStoreHeartbeatWriteActor { object Shutdown extends CoordinatedShutdown.Reason def props( - workflowStoreAccess: WorkflowStoreAccess, - workflowHeartbeatConfig: WorkflowHeartbeatConfig, - terminator: CromwellTerminator, - serviceRegistryActor: ActorRef - ): Props = + workflowStoreAccess: WorkflowStoreAccess, + workflowHeartbeatConfig: WorkflowHeartbeatConfig, + terminator: CromwellTerminator, + serviceRegistryActor: ActorRef + ): Props = Props( WorkflowStoreHeartbeatWriteActor( workflowStoreAccess = workflowStoreAccess, workflowHeartbeatConfig = workflowHeartbeatConfig, terminator = terminator, serviceRegistryActor = serviceRegistryActor - )).withDispatcher(EngineDispatcher) + ) + ).withDispatcher(EngineDispatcher) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreSubmitActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreSubmitActor.scala index 5ba36f8befe..6920e1a40be 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreSubmitActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreSubmitActor.scala @@ -14,7 +14,11 @@ import cromwell.engine.workflow.WorkflowProcessingEventPublishing._ import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreState.WorkflowStoreState import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.{WorkflowStoreState, WorkflowSubmissionResponse} import cromwell.engine.workflow.workflowstore.WorkflowStoreActor._ -import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.{WorkflowSubmitFailed, WorkflowSubmittedToStore, WorkflowsBatchSubmittedToStore} +import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.{ + WorkflowsBatchSubmittedToStore, + WorkflowSubmitFailed, + WorkflowSubmittedToStore +} import cromwell.services.metadata.MetadataService.PutMetadataAction import cromwell.services.metadata.{MetadataEvent, MetadataKey, MetadataValue} import spray.json._ @@ 
-22,8 +26,12 @@ import spray.json._ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} -final case class WorkflowStoreSubmitActor(store: WorkflowStore, serviceRegistryActor: ActorRef) extends Actor - with ActorLogging with WorkflowMetadataHelper with MonitoringCompanionHelper with WorkflowInstrumentation { +final case class WorkflowStoreSubmitActor(store: WorkflowStore, serviceRegistryActor: ActorRef) + extends Actor + with ActorLogging + with WorkflowMetadataHelper + with MonitoringCompanionHelper + with WorkflowInstrumentation { implicit val ec: ExecutionContext = context.dispatcher val workflowStoreReceive: Receive = { @@ -80,32 +88,32 @@ final case class WorkflowStoreSubmitActor(store: WorkflowStore, serviceRegistryA removeWork() } } - + override def receive = workflowStoreReceive.orElse(monitoringReceive) - private def convertDatabaseStateToApiState(workflowStoreState: WorkflowStoreState): WorkflowState ={ + private def convertDatabaseStateToApiState(workflowStoreState: WorkflowStoreState): WorkflowState = workflowStoreState match { case WorkflowStoreState.Submitted => WorkflowSubmitted case WorkflowStoreState.OnHold => WorkflowOnHold case WorkflowStoreState.Aborting => WorkflowAborting case WorkflowStoreState.Running => WorkflowRunning } - } - private def storeWorkflowSources(sources: NonEmptyList[WorkflowSourceFilesCollection]): Future[NonEmptyList[WorkflowSubmissionResponse]] = { + private def storeWorkflowSources( + sources: NonEmptyList[WorkflowSourceFilesCollection] + ): Future[NonEmptyList[WorkflowSubmissionResponse]] = for { workflowSubmissionResponses <- store.add(sources) } yield workflowSubmissionResponses - } - private def convertJsonToLabelsMap(json: String): Map[String, String] = { + private def convertJsonToLabelsMap(json: String): Map[String, String] = json.parseJson match { - case JsObject(inputs) => inputs.collect({ - case (key, JsString(value)) => key -> value - }) + case JsObject(inputs) => + inputs.collect { case (key, JsString(value)) => + key -> value + } case _ => Map.empty } - } /** * Runs processing on workflow source files before they are stored. 
@@ -114,61 +122,97 @@ final case class WorkflowStoreSubmitActor(store: WorkflowStore, serviceRegistryA * @param source Original workflow source * @return Attempted updated workflow source */ - private def processSource(processOptions: WorkflowOptions => WorkflowOptions) - (source: WorkflowSourceFilesCollection): WorkflowSourceFilesCollection = { - + private def processSource(processOptions: WorkflowOptions => WorkflowOptions)( + source: WorkflowSourceFilesCollection + ): WorkflowSourceFilesCollection = source.setOptions(processOptions(source.workflowOptions)) - } /** * Takes the workflow id and sends it over to the metadata service w/ default empty values for inputs/outputs */ - private def registerSubmission( - id: WorkflowId, - originalSourceFiles: WorkflowSourceFilesCollection): Unit = { + private def registerSubmission(id: WorkflowId, originalSourceFiles: WorkflowSourceFilesCollection): Unit = { // Increment the workflow submitted count incrementWorkflowState(WorkflowSubmitted) - val actualWorkflowState = if(originalSourceFiles.workflowOnHold) - WorkflowOnHold - else - WorkflowSubmitted + val actualWorkflowState = + if (originalSourceFiles.workflowOnHold) + WorkflowOnHold + else + WorkflowSubmitted val sourceFiles = processSource(_.clearEncryptedValues)(originalSourceFiles) - val submissionEvents: List[MetadataEvent] = List( - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionTime), MetadataValue(OffsetDateTime.now)), - MetadataEvent.empty(MetadataKey(id, None, WorkflowMetadataKeys.Inputs)), - MetadataEvent.empty(MetadataKey(id, None, WorkflowMetadataKeys.Outputs)), - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.Status), MetadataValue(actualWorkflowState)), - - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Workflow), MetadataValue(sourceFiles.workflowSource.orNull)), - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_WorkflowUrl), MetadataValue(sourceFiles.workflowUrl.orNull)), - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Root), MetadataValue(sourceFiles.workflowRoot.orNull)), - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Inputs), MetadataValue(sourceFiles.inputsJson)), - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Options), MetadataValue(sourceFiles.workflowOptions.asPrettyJson)), - MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Labels), MetadataValue(sourceFiles.labelsJson)) - ) - - // Don't publish metadata for either workflow type or workflow type version if not defined. 
- val workflowTypeAndVersionEvents: List[Option[MetadataEvent]] = List( - sourceFiles.workflowType map { wt => MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_WorkflowType), MetadataValue(wt)) }, - sourceFiles.workflowTypeVersion map { wtv => MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_WorkflowTypeVersion), MetadataValue(wtv)) } + val submissionEvents: List[MetadataEvent] = List( + MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.SubmissionTime), MetadataValue(OffsetDateTime.now)), + MetadataEvent.empty(MetadataKey(id, None, WorkflowMetadataKeys.Inputs)), + MetadataEvent.empty(MetadataKey(id, None, WorkflowMetadataKeys.Outputs)), + MetadataEvent(MetadataKey(id, None, WorkflowMetadataKeys.Status), MetadataValue(actualWorkflowState)), + MetadataEvent( + MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Workflow), + MetadataValue(sourceFiles.workflowSource.orNull) + ), + MetadataEvent( + MetadataKey(id, + None, + WorkflowMetadataKeys.SubmissionSection, + WorkflowMetadataKeys.SubmissionSection_WorkflowUrl + ), + MetadataValue(sourceFiles.workflowUrl.orNull) + ), + MetadataEvent( + MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Root), + MetadataValue(sourceFiles.workflowRoot.orNull) + ), + MetadataEvent( + MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Inputs), + MetadataValue(sourceFiles.inputsJson) + ), + MetadataEvent( + MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Options), + MetadataValue(sourceFiles.workflowOptions.asPrettyJson) + ), + MetadataEvent( + MetadataKey(id, None, WorkflowMetadataKeys.SubmissionSection, WorkflowMetadataKeys.SubmissionSection_Labels), + MetadataValue(sourceFiles.labelsJson) ) + ) + + // Don't publish metadata for either workflow type or workflow type version if not defined. + val workflowTypeAndVersionEvents: List[Option[MetadataEvent]] = List( + sourceFiles.workflowType map { wt => + MetadataEvent(MetadataKey(id, + None, + WorkflowMetadataKeys.SubmissionSection, + WorkflowMetadataKeys.SubmissionSection_WorkflowType + ), + MetadataValue(wt) + ) + }, + sourceFiles.workflowTypeVersion map { wtv => + MetadataEvent(MetadataKey(id, + None, + WorkflowMetadataKeys.SubmissionSection, + WorkflowMetadataKeys.SubmissionSection_WorkflowTypeVersion + ), + MetadataValue(wtv) + ) + } + ) - serviceRegistryActor ! PutMetadataAction(submissionEvents ++ workflowTypeAndVersionEvents.flatten) - () + serviceRegistryActor ! 
PutMetadataAction(submissionEvents ++ workflowTypeAndVersionEvents.flatten) + () } } object WorkflowStoreSubmitActor { - def props(workflowStoreDatabase: WorkflowStore, serviceRegistryActor: ActorRef) = { + def props(workflowStoreDatabase: WorkflowStore, serviceRegistryActor: ActorRef) = Props(WorkflowStoreSubmitActor(workflowStoreDatabase, serviceRegistryActor)).withDispatcher(ApiDispatcher) - } sealed trait WorkflowStoreSubmitActorResponse - final case class WorkflowSubmittedToStore(workflowId: WorkflowId, state: WorkflowState) extends WorkflowStoreSubmitActorResponse - final case class WorkflowsBatchSubmittedToStore(workflowIds: NonEmptyList[WorkflowId], state: WorkflowState) extends WorkflowStoreSubmitActorResponse + final case class WorkflowSubmittedToStore(workflowId: WorkflowId, state: WorkflowState) + extends WorkflowStoreSubmitActorResponse + final case class WorkflowsBatchSubmittedToStore(workflowIds: NonEmptyList[WorkflowId], state: WorkflowState) + extends WorkflowStoreSubmitActorResponse final case class WorkflowSubmitFailed(throwable: Throwable) extends WorkflowStoreSubmitActorResponse } diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/workflowstore_.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/workflowstore_.scala index 382b963670d..c8e9a19ac78 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/workflowstore_.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/workflowstore_.scala @@ -27,4 +27,5 @@ final case class WorkflowToStart(id: WorkflowId, submissionTime: OffsetDateTime, sources: WorkflowSourceFilesCollection, state: StartableState, - hogGroup: HogGroup) extends HasWorkflowIdAndSources + hogGroup: HogGroup +) extends HasWorkflowIdAndSources diff --git a/engine/src/main/scala/cromwell/jobstore/JobResultJsonFormatter.scala b/engine/src/main/scala/cromwell/jobstore/JobResultJsonFormatter.scala index 204998d115d..92c8dd407cb 100644 --- a/engine/src/main/scala/cromwell/jobstore/JobResultJsonFormatter.scala +++ b/engine/src/main/scala/cromwell/jobstore/JobResultJsonFormatter.scala @@ -17,8 +17,12 @@ object JobResultJsonFormatter extends DefaultJsonProtocol { } implicit object CallOutputsFormat extends RootJsonFormat[CallOutputs] { - def write(value: CallOutputs) = value.outputs.map({case (port, v) => port.identifier.fullyQualifiedName.value -> v }).toJson - def read(value: JsValue): CallOutputs = throw new UnsupportedOperationException("Cannot deserialize outputs to output ports") + def write(value: CallOutputs) = value.outputs.map { case (port, v) => + port.identifier.fullyQualifiedName.value -> v + }.toJson + def read(value: JsValue): CallOutputs = throw new UnsupportedOperationException( + "Cannot deserialize outputs to output ports" + ) } implicit val JobResultSuccessFormat = jsonFormat2(JobResultSuccess) diff --git a/engine/src/main/scala/cromwell/jobstore/JobStore.scala b/engine/src/main/scala/cromwell/jobstore/JobStore.scala index abc5e8765b4..a98928686fe 100644 --- a/engine/src/main/scala/cromwell/jobstore/JobStore.scala +++ b/engine/src/main/scala/cromwell/jobstore/JobStore.scala @@ -7,8 +7,12 @@ import wom.graph.GraphNodePort.OutputPort import scala.concurrent.{ExecutionContext, Future} trait JobStore { - def writeToDatabase(workflowCompletions: Seq[WorkflowCompletion], jobCompletions: Seq[JobCompletion], batchSize: Int)(implicit ec: ExecutionContext): Future[Unit] - def readJobResult(jobStoreKey: JobStoreKey, taskOutputs: Seq[OutputPort])(implicit ec: ExecutionContext): 
Future[Option[JobResult]] + def writeToDatabase(workflowCompletions: Seq[WorkflowCompletion], jobCompletions: Seq[JobCompletion], batchSize: Int)( + implicit ec: ExecutionContext + ): Future[Unit] + def readJobResult(jobStoreKey: JobStoreKey, taskOutputs: Seq[OutputPort])(implicit + ec: ExecutionContext + ): Future[Option[JobResult]] } object JobStore { diff --git a/engine/src/main/scala/cromwell/jobstore/JobStoreActor.scala b/engine/src/main/scala/cromwell/jobstore/JobStoreActor.scala index 6e14f6b8682..fd86080bc0d 100644 --- a/engine/src/main/scala/cromwell/jobstore/JobStoreActor.scala +++ b/engine/src/main/scala/cromwell/jobstore/JobStoreActor.scala @@ -12,14 +12,25 @@ import wom.graph.GraphNodePort.OutputPort import scala.concurrent.duration._ import scala.language.postfixOps + /** * Joins the service registry API to the JobStoreReaderActor and JobStoreWriterActor. * * This level of indirection is a tiny bit awkward but allows the database to be injected. */ -class JobStoreActor(jobStore: JobStore, dbBatchSize: Int, dbFlushRate: FiniteDuration, registryActor: ActorRef, workflowStoreAccess: WorkflowStoreAccess) extends Actor with ActorLogging with GracefulShutdownHelper { +class JobStoreActor(jobStore: JobStore, + dbBatchSize: Int, + dbFlushRate: FiniteDuration, + registryActor: ActorRef, + workflowStoreAccess: WorkflowStoreAccess +) extends Actor + with ActorLogging + with GracefulShutdownHelper { import JobStoreActor._ - val jobStoreWriterActor = context.actorOf(JobStoreWriterActor.props(jobStore, dbBatchSize, dbFlushRate, registryActor, workflowStoreAccess), "JobStoreWriterActor") + val jobStoreWriterActor = context.actorOf( + JobStoreWriterActor.props(jobStore, dbBatchSize, dbFlushRate, registryActor, workflowStoreAccess), + "JobStoreWriterActor" + ) val jobStoreReaderActor = context.actorOf(JobStoreReaderActor.props(jobStore, registryActor), "JobStoreReaderActor") override def receive: Receive = { @@ -47,16 +58,19 @@ object JobStoreActor { case class JobStoreWriteFailure(reason: Throwable) extends JobStoreWriterResponse sealed trait JobStoreReaderCommand extends JobStoreCommand + /** * Message to query the JobStoreReaderActor, asks whether the specified job has already been completed. */ case class QueryJobCompletion(jobKey: JobStoreKey, taskOutputs: Seq[OutputPort]) extends JobStoreReaderCommand sealed trait JobStoreReaderResponse + /** * Message which indicates that a job has already completed, and contains the results of the job */ case class JobComplete(jobResult: JobResult) extends JobStoreReaderResponse + /** * Indicates that the job has not been completed yet. Makes no statement about whether the job is * running versus unstarted or (maybe?) doesn't even exist! 
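(For context: the reader protocol documented above is a plain tell/reply exchange. Below is a minimal sketch of a hypothetical consumer of these messages; the `ResultConsumer` actor is illustrative only and is not part of this patch.)

import akka.actor.{Actor, ActorLogging, ActorRef}
import cromwell.jobstore.JobStoreActor.{JobComplete, JobStoreReadFailure, QueryJobCompletion}

// Hypothetical consumer: forwards a completion query to the job store and
// handles the two reply types documented above.
class ResultConsumer(jobStoreActor: ActorRef) extends Actor with ActorLogging {
  override def receive: Receive = {
    case query: QueryJobCompletion => jobStoreActor ! query
    case JobComplete(jobResult) => log.info("Job previously completed: {}", jobResult)
    case JobStoreReadFailure(reason) => log.error(reason, "Job store read failed")
  }
}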
@@ -65,7 +79,9 @@ object JobStoreActor { case class JobStoreReadFailure(reason: Throwable) extends JobStoreReaderResponse - def props(database: JobStore, registryActor: ActorRef, workflowStoreAccess: WorkflowStoreAccess) = Props(new JobStoreActor(database, dbBatchSize, dbFlushRate, registryActor, workflowStoreAccess)).withDispatcher(EngineDispatcher) + def props(database: JobStore, registryActor: ActorRef, workflowStoreAccess: WorkflowStoreAccess) = Props( + new JobStoreActor(database, dbBatchSize, dbFlushRate, registryActor, workflowStoreAccess) + ).withDispatcher(EngineDispatcher) val dbFlushRate = 1 second diff --git a/engine/src/main/scala/cromwell/jobstore/JobStoreReaderActor.scala b/engine/src/main/scala/cromwell/jobstore/JobStoreReaderActor.scala index ba8d1665aa3..341c757fdb8 100644 --- a/engine/src/main/scala/cromwell/jobstore/JobStoreReaderActor.scala +++ b/engine/src/main/scala/cromwell/jobstore/JobStoreReaderActor.scala @@ -12,11 +12,13 @@ import cromwell.services.EnhancedThrottlerActor import scala.util.{Failure, Success} object JobStoreReaderActor { - def props(database: JobStore, registryActor: ActorRef) = Props(new JobStoreReaderActor(database, registryActor, LoadConfig.JobStoreReadThreshold)).withDispatcher(EngineDispatcher) + def props(database: JobStore, registryActor: ActorRef) = Props( + new JobStoreReaderActor(database, registryActor, LoadConfig.JobStoreReadThreshold) + ).withDispatcher(EngineDispatcher) } class JobStoreReaderActor(database: JobStore, override val serviceRegistryActor: ActorRef, override val threshold: Int) - extends EnhancedThrottlerActor[CommandAndReplyTo[QueryJobCompletion]] + extends EnhancedThrottlerActor[CommandAndReplyTo[QueryJobCompletion]] with ActorLogging { override def processHead(head: CommandAndReplyTo[QueryJobCompletion]) = instrumentedProcess { @@ -35,7 +37,7 @@ class JobStoreReaderActor(database: JobStore, override val serviceRegistryActor: override def receive = enhancedReceive.orElse(super.receive) override protected def instrumentationPath = NonEmptyList.of("store", "read") override protected def instrumentationPrefix = InstrumentationPrefixes.JobPrefix - override def commandToData(snd: ActorRef) = { - case query: QueryJobCompletion => CommandAndReplyTo(query, sender()) + override def commandToData(snd: ActorRef) = { case query: QueryJobCompletion => + CommandAndReplyTo(query, sender()) } } diff --git a/engine/src/main/scala/cromwell/jobstore/JobStoreWriterActor.scala b/engine/src/main/scala/cromwell/jobstore/JobStoreWriterActor.scala index f00693d1daf..ea9113bd101 100644 --- a/engine/src/main/scala/cromwell/jobstore/JobStoreWriterActor.scala +++ b/engine/src/main/scala/cromwell/jobstore/JobStoreWriterActor.scala @@ -16,50 +16,51 @@ import scala.concurrent.duration._ import scala.language.postfixOps import scala.util.{Failure, Success} - case class JobStoreWriterActor(jsd: JobStore, override val batchSize: Int, override val flushRate: FiniteDuration, serviceRegistryActor: ActorRef, threshold: Int, workflowStoreAccess: WorkflowStoreAccess - ) - extends EnhancedBatchActor[CommandAndReplyTo[JobStoreWriterCommand]](flushRate, batchSize) { +) extends EnhancedBatchActor[CommandAndReplyTo[JobStoreWriterCommand]](flushRate, batchSize) { - override protected def process(nonEmptyData: NonEmptyVector[CommandAndReplyTo[JobStoreWriterCommand]]): Future[Int] = instrumentedProcess { - implicit val actorSystem: ActorSystem = context.system + override protected def process(nonEmptyData: NonEmptyVector[CommandAndReplyTo[JobStoreWriterCommand]]): 
Future[Int] = + instrumentedProcess { + implicit val actorSystem: ActorSystem = context.system - val data = nonEmptyData.toVector - log.debug("Flushing {} job store commands to the DB", data.length) - val completions = data.collect({ case CommandAndReplyTo(c: JobStoreWriterCommand, _) => c.completion }) + val data = nonEmptyData.toVector + log.debug("Flushing {} job store commands to the DB", data.length) + val completions = data.collect { case CommandAndReplyTo(c: JobStoreWriterCommand, _) => c.completion } - if (completions.nonEmpty) { - val workflowCompletions = completions collect { case w: WorkflowCompletion => w } - val completedWorkflowIds = workflowCompletions map { _.workflowId } toSet - // Filter job completions that also have a corresponding workflow completion; these would just be - // immediately deleted anyway. - val jobCompletions = completions.toList collect { case j: JobCompletion if !completedWorkflowIds.contains(j.key.workflowId) => j } - val jobStoreAction: Future[Unit] = jsd.writeToDatabase(workflowCompletions, jobCompletions, batchSize) - val workflowStoreAction: Future[List[Int]] = Future.sequence { - completedWorkflowIds.map(workflowStoreAccess.deleteFromStore(_)).toList - } + if (completions.nonEmpty) { + val workflowCompletions = completions collect { case w: WorkflowCompletion => w } + val completedWorkflowIds = workflowCompletions map { _.workflowId } toSet + // Filter job completions that also have a corresponding workflow completion; these would just be + // immediately deleted anyway. + val jobCompletions = completions.toList collect { + case j: JobCompletion if !completedWorkflowIds.contains(j.key.workflowId) => j + } + val jobStoreAction: Future[Unit] = jsd.writeToDatabase(workflowCompletions, jobCompletions, batchSize) + val workflowStoreAction: Future[List[Int]] = Future.sequence { + completedWorkflowIds.map(workflowStoreAccess.deleteFromStore(_)).toList + } - val combinedAction: Future[Unit] = for { - _ <- jobStoreAction - _ <- workflowStoreAction - } yield () + val combinedAction: Future[Unit] = for { + _ <- jobStoreAction + _ <- workflowStoreAction + } yield () - combinedAction onComplete { - case Success(_) => - data foreach { case CommandAndReplyTo(c: JobStoreWriterCommand, r) => r ! JobStoreWriteSuccess(c) } - case Failure(error) => - log.error(error, "Failed to write job store entries to database") - data foreach { case CommandAndReplyTo(_, r) => r ! JobStoreWriteFailure(error) } - } + combinedAction onComplete { + case Success(_) => + data foreach { case CommandAndReplyTo(c: JobStoreWriterCommand, r) => r ! JobStoreWriteSuccess(c) } + case Failure(error) => + log.error(error, "Failed to write job store entries to database") + data foreach { case CommandAndReplyTo(_, r) => r ! 
JobStoreWriteFailure(error) } + } - combinedAction.map(_ => 1) - } else Future.successful(0) - } + combinedAction.map(_ => 1) + } else Future.successful(0) + } // EnhancedBatchActor overrides override def receive = enhancedReceive.orElse(super.receive) @@ -76,7 +77,15 @@ object JobStoreWriterActor { dbBatchSize: Int, dbFlushRate: FiniteDuration, registryActor: ActorRef, - workflowStoreAccess: WorkflowStoreAccess): Props = { - Props(new JobStoreWriterActor(jobStoreDatabase, dbBatchSize, dbFlushRate, registryActor, LoadConfig.JobStoreWriteThreshold, workflowStoreAccess)).withDispatcher(EngineDispatcher) - } + workflowStoreAccess: WorkflowStoreAccess + ): Props = + Props( + new JobStoreWriterActor(jobStoreDatabase, + dbBatchSize, + dbFlushRate, + registryActor, + LoadConfig.JobStoreWriteThreshold, + workflowStoreAccess + ) + ).withDispatcher(EngineDispatcher) } diff --git a/engine/src/main/scala/cromwell/jobstore/SqlJobStore.scala b/engine/src/main/scala/cromwell/jobstore/SqlJobStore.scala index baeaa79221d..b6f28a8a24d 100644 --- a/engine/src/main/scala/cromwell/jobstore/SqlJobStore.scala +++ b/engine/src/main/scala/cromwell/jobstore/SqlJobStore.scala @@ -19,7 +19,10 @@ import scala.concurrent.{ExecutionContext, Future} class SqlJobStore(sqlDatabase: EngineSqlDatabase) extends JobStore { val log = LoggerFactory.getLogger(classOf[SqlJobStore]) - override def writeToDatabase(workflowCompletions: Seq[WorkflowCompletion], jobCompletions: Seq[JobCompletion], batchSize: Int)(implicit ec: ExecutionContext): Future[Unit] = { + override def writeToDatabase(workflowCompletions: Seq[WorkflowCompletion], + jobCompletions: Seq[JobCompletion], + batchSize: Int + )(implicit ec: ExecutionContext): Future[Unit] = { val completedWorkflowIds = workflowCompletions.toList.map(_.workflowId.toString) for { _ <- sqlDatabase.addJobStores(jobCompletions map toDatabase, batchSize) @@ -28,23 +31,24 @@ class SqlJobStore(sqlDatabase: EngineSqlDatabase) extends JobStore { } yield () } - private def toDatabase(jobCompletion: JobCompletion): JobStoreJoin = { + private def toDatabase(jobCompletion: JobCompletion): JobStoreJoin = jobCompletion match { case JobCompletion(key, JobResultSuccess(returnCode, jobOutputs)) => - val entry = JobStoreEntry( - key.workflowId.toString, - key.callFqn, - key.index.fromIndex, - key.attempt, - jobSuccessful = true, - returnCode, - None, - None) + val entry = JobStoreEntry(key.workflowId.toString, + key.callFqn, + key.index.fromIndex, + key.attempt, + jobSuccessful = true, + returnCode, + None, + None + ) val jobStoreResultSimpletons = - jobOutputs.outputs.simplify.map { - womValueSimpleton => JobStoreSimpletonEntry( - womValueSimpleton.simpletonKey, womValueSimpleton.simpletonValue.valueString.toClobOption, - womValueSimpleton.simpletonValue.womType.stableName) + jobOutputs.outputs.simplify.map { womValueSimpleton => + JobStoreSimpletonEntry(womValueSimpleton.simpletonKey, + womValueSimpleton.simpletonValue.valueString.toClobOption, + womValueSimpleton.simpletonValue.womType.stableName + ) } JobStoreJoin(entry, jobStoreResultSimpletons.toSeq) case JobCompletion(key, JobResultFailure(returnCode, throwable, retryable)) => @@ -56,14 +60,19 @@ class SqlJobStore(sqlDatabase: EngineSqlDatabase) extends JobStore { jobSuccessful = false, returnCode, Option(throwable.getMessage).toClobOption, - Option(retryable)) + Option(retryable) + ) JobStoreJoin(entry, Seq.empty) } - } - override def readJobResult(jobStoreKey: JobStoreKey, taskOutputs: Seq[OutputPort])(implicit ec: ExecutionContext): 
Future[Option[JobResult]] = { - sqlDatabase.queryJobStores(jobStoreKey.workflowId.toString, jobStoreKey.callFqn, jobStoreKey.index.fromIndex, - jobStoreKey.attempt) map { + override def readJobResult(jobStoreKey: JobStoreKey, taskOutputs: Seq[OutputPort])(implicit + ec: ExecutionContext + ): Future[Option[JobResult]] = + sqlDatabase.queryJobStores(jobStoreKey.workflowId.toString, + jobStoreKey.callFqn, + jobStoreKey.index.fromIndex, + jobStoreKey.attempt + ) map { _ map { case JobStoreJoin(entry, simpletonEntries) => entry match { case JobStoreEntry(_, _, _, _, true, returnCode, None, None, _) => @@ -72,12 +81,12 @@ class SqlJobStore(sqlDatabase: EngineSqlDatabase) extends JobStore { JobResultSuccess(returnCode, jobOutputs) case JobStoreEntry(_, _, _, _, false, returnCode, Some(_), Some(retryable), _) => JobResultFailure(returnCode, - JobAlreadyFailedInJobStore(jobStoreKey.tag, entry.exceptionMessage.toRawString), - retryable) + JobAlreadyFailedInJobStore(jobStoreKey.tag, entry.exceptionMessage.toRawString), + retryable + ) case bad => throw new Exception(s"Invalid contents of JobStore table: $bad") } } } - } } diff --git a/engine/src/main/scala/cromwell/jobstore/jobstore_.scala b/engine/src/main/scala/cromwell/jobstore/jobstore_.scala index 886de33ebdb..9bcd641330e 100644 --- a/engine/src/main/scala/cromwell/jobstore/jobstore_.scala +++ b/engine/src/main/scala/cromwell/jobstore/jobstore_.scala @@ -1,7 +1,12 @@ package cromwell.jobstore import cromwell.backend.BackendJobDescriptorKey -import cromwell.backend.BackendJobExecutionActor.{FetchedFromJobStore, JobFailedNonRetryableResponse, JobFailedRetryableResponse, JobSucceededResponse} +import cromwell.backend.BackendJobExecutionActor.{ + FetchedFromJobStore, + JobFailedNonRetryableResponse, + JobFailedRetryableResponse, + JobSucceededResponse +} import cromwell.core.{CallOutputs, WorkflowId} case class JobStoreKey(workflowId: WorkflowId, callFqn: String, index: Option[Int], attempt: Int) { @@ -13,7 +18,15 @@ sealed trait JobResult { def toBackendJobResponse(key: BackendJobDescriptorKey) = this match { // Always puts `None` for `dockerImageUsed` for a successfully completed job on restart. This shouldn't be a // problem since `saveJobCompletionToJobStore` in EJEA will already have sent this to metadata. 
- case JobResultSuccess(returnCode, jobOutputs) => JobSucceededResponse(key, returnCode, jobOutputs, None, Seq.empty, None, resultGenerationMode = FetchedFromJobStore) + case JobResultSuccess(returnCode, jobOutputs) => + JobSucceededResponse(key, + returnCode, + jobOutputs, + None, + Seq.empty, + None, + resultGenerationMode = FetchedFromJobStore + ) case JobResultFailure(returnCode, reason, false) => JobFailedNonRetryableResponse(key, reason, returnCode) case JobResultFailure(returnCode, reason, true) => JobFailedRetryableResponse(key, reason, returnCode) } diff --git a/engine/src/main/scala/cromwell/jobstore/package.scala b/engine/src/main/scala/cromwell/jobstore/package.scala index 1e4dbaecbbb..6d5bdb834e1 100644 --- a/engine/src/main/scala/cromwell/jobstore/package.scala +++ b/engine/src/main/scala/cromwell/jobstore/package.scala @@ -4,6 +4,7 @@ import cromwell.core.{JobKey, WorkflowId} package object jobstore { implicit class EnhancedJobKey(val jobKey: JobKey) extends AnyVal { - def toJobStoreKey(workflowId: WorkflowId): JobStoreKey = JobStoreKey(workflowId, jobKey.node.fullyQualifiedName, jobKey.index, jobKey.attempt) + def toJobStoreKey(workflowId: WorkflowId): JobStoreKey = + JobStoreKey(workflowId, jobKey.node.fullyQualifiedName, jobKey.index, jobKey.attempt) } } diff --git a/engine/src/main/scala/cromwell/logging/TerminalLayout.scala b/engine/src/main/scala/cromwell/logging/TerminalLayout.scala index 8871341f976..6bb67e5a1af 100644 --- a/engine/src/main/scala/cromwell/logging/TerminalLayout.scala +++ b/engine/src/main/scala/cromwell/logging/TerminalLayout.scala @@ -20,15 +20,14 @@ object TerminalLayout { } implicit class ColorString(msg: String) { - def colorizeUuids: String = { - "UUID\\((.*?)\\)".r.findAllMatchIn(msg).foldLeft(msg) { - case (l, r) => - val color = if (Option(System.getProperty("RAINBOW_UUID")).isDefined) - Math.abs(17 * r.group(1).substring(0,8).map(_.toInt).product) % 209 + 22 + def colorizeUuids: String = + "UUID\\((.*?)\\)".r.findAllMatchIn(msg).foldLeft(msg) { case (l, r) => + val color = + if (Option(System.getProperty("RAINBOW_UUID")).isDefined) + Math.abs(17 * r.group(1).substring(0, 8).map(_.toInt).product) % 209 + 22 else 2 - l.replace(r.group(0), TerminalUtil.highlight(color, r.group(1))) + l.replace(r.group(0), TerminalUtil.highlight(color, r.group(1))) } - } def colorizeCommand: String = msg.replaceAll("`([^`]*?)`", TerminalUtil.highlight(5, "$1")) } } diff --git a/engine/src/main/scala/cromwell/server/CromwellAkkaLogFilter.scala b/engine/src/main/scala/cromwell/server/CromwellAkkaLogFilter.scala index 31dc6408b77..9789b354f37 100644 --- a/engine/src/main/scala/cromwell/server/CromwellAkkaLogFilter.scala +++ b/engine/src/main/scala/cromwell/server/CromwellAkkaLogFilter.scala @@ -4,17 +4,19 @@ import akka.actor.ActorSystem import akka.event.EventStream import akka.event.slf4j.Slf4jLoggingFilter -class CromwellAkkaLogFilter(settings: ActorSystem.Settings, eventStream: EventStream) extends Slf4jLoggingFilter(settings, eventStream) { - override def isErrorEnabled(logClass: Class[_], logSource: String) = { +class CromwellAkkaLogFilter(settings: ActorSystem.Settings, eventStream: EventStream) + extends Slf4jLoggingFilter(settings, eventStream) { + override def isErrorEnabled(logClass: Class[_], logSource: String) = /* * This might filter out too much but it's the finest granularity we have here - * The goal is to not log the - * "Outgoing request stream error akka.stream.AbruptTerminationException: + * The goal is to not log the + * "Outgoing request 
stream error akka.stream.AbruptTerminationException: * Processor actor [Actor[akka://cromwell-system/user/StreamSupervisor-1/flow-6-0-mergePreferred#1200284127]] terminated abruptly" * type of message - * + * * See https://github.com/akka/akka-http/issues/907 and https://github.com/akka/akka/issues/18747 */ - super.isErrorEnabled(logClass, logSource) && !(logSource.startsWith("akka.actor.ActorSystemImpl") && CromwellShutdown.shutdownInProgress()) - } + super.isErrorEnabled(logClass, logSource) && !(logSource.startsWith( + "akka.actor.ActorSystemImpl" + ) && CromwellShutdown.shutdownInProgress()) } diff --git a/engine/src/main/scala/cromwell/server/CromwellDeadLetterListener.scala b/engine/src/main/scala/cromwell/server/CromwellDeadLetterListener.scala index 5cea81c37d2..c7af7991b6c 100644 --- a/engine/src/main/scala/cromwell/server/CromwellDeadLetterListener.scala +++ b/engine/src/main/scala/cromwell/server/CromwellDeadLetterListener.scala @@ -6,14 +6,15 @@ import cats.Show import cats.syntax.show._ class CromwellDeadLetterListener extends DeadLetterListener with ActorLogging { - implicit val showActor: Show[ActorRef] = Show.show(actor => - s"Actor of path ${actor.path} toString ${actor.toString()}" - ) - + implicit val showActor: Show[ActorRef] = + Show.show(actor => s"Actor of path ${actor.path} toString ${actor.toString()}") + def shutdownReceive: Receive = { // This silences the dead letter messages when Cromwell is shutting down case DeadLetter(msg, from, to) if CromwellShutdown.shutdownInProgress() => - log.debug(s"Got a dead letter during Cromwell shutdown. Sent by\n${from.show}\nto ${to.show}\n consisting of message: $msg\n ") + log.debug( + s"Got a dead letter during Cromwell shutdown. Sent by\n${from.show}\nto ${to.show}\n consisting of message: $msg\n " + ) } override def receive = shutdownReceive.orElse(super.receive) } diff --git a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala index c37d6466fcc..b4cdbef735b 100644 --- a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala +++ b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala @@ -20,7 +20,11 @@ import cromwell.engine.io.{IoActor, IoActorProxy} import cromwell.engine.workflow.WorkflowManagerActor import cromwell.engine.workflow.WorkflowManagerActor.AbortAllWorkflowsCommand import cromwell.engine.workflow.lifecycle.execution.callcaching.{CallCache, CallCacheReadActor, CallCacheWriteActor} -import cromwell.engine.workflow.lifecycle.finalization.{CopyWorkflowLogsActor, WorkflowCallbackActor, WorkflowCallbackConfig} +import cromwell.engine.workflow.lifecycle.finalization.{ + CopyWorkflowLogsActor, + WorkflowCallbackActor, + WorkflowCallbackConfig +} import cromwell.engine.workflow.tokens.{DynamicRateLimiter, JobTokenDispenserActor} import cromwell.engine.workflow.workflowstore.AbortRequestScanningActor.AbortConfig import cromwell.engine.workflow.workflowstore._ @@ -54,9 +58,11 @@ abstract class CromwellRootActor(terminator: CromwellTerminator, gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean, val serverMode: Boolean, - protected val config: Config) - (implicit materializer: ActorMaterializer) - extends Actor with ActorLogging with GracefulShutdownHelper { + protected val config: Config +)(implicit materializer: ActorMaterializer) + extends Actor + with ActorLogging + with GracefulShutdownHelper { import CromwellRootActor._ @@ -72,9 +78,11 @@ abstract class CromwellRootActor(terminator: CromwellTerminator, lazy val 
systemConfig = config.getConfig("system") lazy val serviceRegistryActor: ActorRef = context.actorOf(ServiceRegistryActor.props(config), "ServiceRegistryActor") - lazy val numberOfWorkflowLogCopyWorkers = systemConfig.as[Option[Int]]("number-of-workflow-log-copy-workers").getOrElse(DefaultNumberOfWorkflowLogCopyWorkers) + lazy val numberOfWorkflowLogCopyWorkers = + systemConfig.as[Option[Int]]("number-of-workflow-log-copy-workers").getOrElse(DefaultNumberOfWorkflowLogCopyWorkers) - lazy val workflowStore: WorkflowStore = SqlWorkflowStore(EngineServicesStore.engineDatabaseInterface, MetadataServicesStore.metadataDatabaseInterface) + lazy val workflowStore: WorkflowStore = + SqlWorkflowStore(EngineServicesStore.engineDatabaseInterface, MetadataServicesStore.metadataDatabaseInterface) val workflowStoreAccess: WorkflowStoreAccess = { val coordinatedWorkflowStoreAccess = config.as[Option[Boolean]]("system.coordinated-workflow-store-access") @@ -90,41 +98,49 @@ abstract class CromwellRootActor(terminator: CromwellTerminator, } lazy val workflowStoreActor = - context.actorOf(WorkflowStoreActor.props( - workflowStoreDatabase = workflowStore, - workflowStoreAccess = workflowStoreAccess, - serviceRegistryActor = serviceRegistryActor, - terminator = terminator, - abortAllJobsOnTerminate = abortJobsOnTerminate, - workflowHeartbeatConfig = workflowHeartbeatConfig), - "WorkflowStoreActor") + context.actorOf( + WorkflowStoreActor.props( + workflowStoreDatabase = workflowStore, + workflowStoreAccess = workflowStoreAccess, + serviceRegistryActor = serviceRegistryActor, + terminator = terminator, + abortAllJobsOnTerminate = abortJobsOnTerminate, + workflowHeartbeatConfig = workflowHeartbeatConfig + ), + "WorkflowStoreActor" + ) lazy val jobStore: JobStore = new SqlJobStore(EngineServicesStore.engineDatabaseInterface) - lazy val jobStoreActor: ActorRef = context.actorOf(JobStoreActor.props(jobStore, serviceRegistryActor, workflowStoreAccess), "JobStoreActor") + lazy val jobStoreActor: ActorRef = + context.actorOf(JobStoreActor.props(jobStore, serviceRegistryActor, workflowStoreAccess), "JobStoreActor") lazy val subWorkflowStore: SubWorkflowStore = new SqlSubWorkflowStore(EngineServicesStore.engineDatabaseInterface) - lazy val subWorkflowStoreActor: ActorRef = context.actorOf(SubWorkflowStoreActor.props(subWorkflowStore), "SubWorkflowStoreActor") + lazy val subWorkflowStoreActor: ActorRef = + context.actorOf(SubWorkflowStoreActor.props(subWorkflowStore), "SubWorkflowStoreActor") lazy val ioConfig: IoConfig = config.as[IoConfig] - lazy val ioActor: ActorRef = context.actorOf( - IoActor.props( - ioConfig = ioConfig, - serviceRegistryActor = serviceRegistryActor, - applicationName = GoogleConfiguration(config).applicationName), - "IoActor") + lazy val ioActor: ActorRef = context.actorOf(IoActor.props(ioConfig = ioConfig, + serviceRegistryActor = serviceRegistryActor, + applicationName = + GoogleConfiguration(config).applicationName + ), + "IoActor" + ) lazy val ioActorProxy: ActorRef = context.actorOf(IoActorProxy.props(ioActor), "IoProxy") // Register the IoActor with the service registry: serviceRegistryActor ! 
IoActorRef(ioActorProxy) - lazy val workflowLogCopyRouter: ActorRef = context.actorOf(RoundRobinPool(numberOfWorkflowLogCopyWorkers) - .withSupervisorStrategy(CopyWorkflowLogsActor.strategy) - .props(CopyWorkflowLogsActor.props(serviceRegistryActor, ioActor)), - "WorkflowLogCopyRouter") + lazy val workflowLogCopyRouter: ActorRef = context.actorOf( + RoundRobinPool(numberOfWorkflowLogCopyWorkers) + .withSupervisorStrategy(CopyWorkflowLogsActor.strategy) + .props(CopyWorkflowLogsActor.props(serviceRegistryActor, ioActor)), + "WorkflowLogCopyRouter" + ) private val workflowCallbackConfig = WorkflowCallbackConfig(config.getConfig("workflow-state-callback")) - lazy val workflowCallbackActor: Option[ActorRef] = { + lazy val workflowCallbackActor: Option[ActorRef] = if (workflowCallbackConfig.enabled) { val props = WorkflowCallbackActor.props( serviceRegistryActor, @@ -132,21 +148,25 @@ abstract class CromwellRootActor(terminator: CromwellTerminator, ) Option(context.actorOf(props, "WorkflowCallbackActor")) } else None - } - //Call-caching config validation + // Call-caching config validation lazy val callCachingConfig = config.getConfig("call-caching") lazy val callCachingEnabled = callCachingConfig.getBoolean("enabled") lazy val callInvalidateBadCacheResults = callCachingConfig.getBoolean("invalidate-bad-cache-results") lazy val callCache: CallCache = new CallCache(EngineServicesStore.engineDatabaseInterface) - lazy val numberOfCacheReadWorkers = config.getConfig("system").as[Option[Int]]("number-of-cache-read-workers").getOrElse(DefaultNumberOfCacheReadWorkers) + lazy val numberOfCacheReadWorkers = config + .getConfig("system") + .as[Option[Int]]("number-of-cache-read-workers") + .getOrElse(DefaultNumberOfCacheReadWorkers) lazy val callCacheReadActor = context.actorOf(RoundRobinPool(numberOfCacheReadWorkers) - .props(CallCacheReadActor.props(callCache, serviceRegistryActor)), - "CallCacheReadActor") + .props(CallCacheReadActor.props(callCache, serviceRegistryActor)), + "CallCacheReadActor" + ) - lazy val callCacheWriteActor = context.actorOf(CallCacheWriteActor.props(callCache, serviceRegistryActor), "CallCacheWriteActor") + lazy val callCacheWriteActor = + context.actorOf(CallCacheWriteActor.props(callCache, serviceRegistryActor), "CallCacheWriteActor") // Docker Actor lazy val ioEc = context.system.dispatchers.lookup(Dispatcher.IoDispatcher) @@ -160,22 +180,49 @@ abstract class CromwellRootActor(terminator: CromwellTerminator, case DockerRemoteLookup => DockerInfoActor.remoteRegistriesFromConfig(DockerConfiguration.dockerHashLookupConfig) } - lazy val dockerHashActor = context.actorOf(DockerInfoActor.props(dockerFlows, dockerActorQueueSize, - dockerConf.cacheEntryTtl, dockerConf.cacheSize), "DockerHashActor") + lazy val dockerHashActor = context.actorOf( + DockerInfoActor.props(dockerFlows, dockerActorQueueSize, dockerConf.cacheEntryTtl, dockerConf.cacheSize), + "DockerHashActor" + ) lazy val backendSingletons = CromwellBackends.instance.get.backendLifecycleActorFactories map { - case (name, factory) => name -> (factory.backendSingletonActorProps(serviceRegistryActor) map { context.actorOf(_, s"$name-Singleton") }) + case (name, factory) => + name -> (factory.backendSingletonActorProps(serviceRegistryActor) map { context.actorOf(_, s"$name-Singleton") }) } lazy val backendSingletonCollection = BackendSingletonCollection(backendSingletons) - lazy val jobRestartCheckRate: DynamicRateLimiter.Rate = DynamicRateLimiter.Rate(systemConfig.as[Int]("job-restart-check-rate-control.jobs"), 
systemConfig.as[FiniteDuration]("job-restart-check-rate-control.per")) - lazy val jobExecutionRate: DynamicRateLimiter.Rate = DynamicRateLimiter.Rate(systemConfig.as[Int]("job-rate-control.jobs"), systemConfig.as[FiniteDuration]("job-rate-control.per")) - - lazy val restartCheckTokenLogInterval: Option[FiniteDuration] = systemConfig.as[Option[Int]]("job-restart-check-rate-control.token-log-interval-seconds").map(_.seconds) - lazy val executionTokenLogInterval: Option[FiniteDuration] = systemConfig.as[Option[Int]]("hog-safety.token-log-interval-seconds").map(_.seconds) - - lazy val jobRestartCheckTokenDispenserActor: ActorRef = context.actorOf(JobTokenDispenserActor.props(serviceRegistryActor, jobRestartCheckRate, restartCheckTokenLogInterval, "restart checking", "CheckingRestart"), "JobRestartCheckTokenDispenser") - lazy val jobExecutionTokenDispenserActor: ActorRef = context.actorOf(JobTokenDispenserActor.props(serviceRegistryActor, jobExecutionRate, executionTokenLogInterval, "execution", ExecutionStatus.Running.toString), "JobExecutionTokenDispenser") + lazy val jobRestartCheckRate: DynamicRateLimiter.Rate = DynamicRateLimiter.Rate( + systemConfig.as[Int]("job-restart-check-rate-control.jobs"), + systemConfig.as[FiniteDuration]("job-restart-check-rate-control.per") + ) + lazy val jobExecutionRate: DynamicRateLimiter.Rate = DynamicRateLimiter.Rate( + systemConfig.as[Int]("job-rate-control.jobs"), + systemConfig.as[FiniteDuration]("job-rate-control.per") + ) + + lazy val restartCheckTokenLogInterval: Option[FiniteDuration] = + systemConfig.as[Option[Int]]("job-restart-check-rate-control.token-log-interval-seconds").map(_.seconds) + lazy val executionTokenLogInterval: Option[FiniteDuration] = + systemConfig.as[Option[Int]]("hog-safety.token-log-interval-seconds").map(_.seconds) + + lazy val jobRestartCheckTokenDispenserActor: ActorRef = context.actorOf( + JobTokenDispenserActor.props(serviceRegistryActor, + jobRestartCheckRate, + restartCheckTokenLogInterval, + "restart checking", + "CheckingRestart" + ), + "JobRestartCheckTokenDispenser" + ) + lazy val jobExecutionTokenDispenserActor: ActorRef = context.actorOf( + JobTokenDispenserActor.props(serviceRegistryActor, + jobExecutionRate, + executionTokenLogInterval, + "execution", + ExecutionStatus.Running.toString + ), + "JobExecutionTokenDispenser" + ) lazy val workflowManagerActor = context.actorOf( WorkflowManagerActor.props( @@ -196,19 +243,24 @@ abstract class CromwellRootActor(terminator: CromwellTerminator, jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, backendSingletonCollection = backendSingletonCollection, serverMode = serverMode, - workflowHeartbeatConfig = workflowHeartbeatConfig), - "WorkflowManagerActor") + workflowHeartbeatConfig = workflowHeartbeatConfig + ), + "WorkflowManagerActor" + ) val abortRequestScanningActor = { val abortConfigBlock = config.as[Option[Config]]("system.abort") - val abortCacheConfig = CacheConfig.config(caching = abortConfigBlock.flatMap { _.as[Option[Config]]("cache") }, + val abortCacheConfig = CacheConfig.config(caching = abortConfigBlock.flatMap(_.as[Option[Config]]("cache")), defaultConcurrency = 1, defaultSize = 100000L, - defaultTtl = 20 minutes) + defaultTtl = 20 minutes + ) val abortConfig = AbortConfig( - scanFrequency = abortConfigBlock.flatMap { _.as[Option[FiniteDuration]]("scan-frequency") } getOrElse (30 seconds), + scanFrequency = abortConfigBlock.flatMap { + _.as[Option[FiniteDuration]]("scan-frequency") + } getOrElse (30 seconds), cacheConfig = abortCacheConfig ) 
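(Aside: the rate values reformatted above come from `system.job-restart-check-rate-control` and `system.job-rate-control`, each pairing a `jobs` count with a `per` duration. The following is a minimal sketch of reading such a pair with plain Typesafe Config, shown only to illustrate the expected config shape; Cromwell itself uses Ficus' `as[...]` syntax as in the diff, and the `RateConfigSketch` object is hypothetical.)

import java.util.concurrent.TimeUnit
import scala.concurrent.duration.FiniteDuration
import com.typesafe.config.{Config, ConfigFactory}

object RateConfigSketch {
  // Read a "jobs per duration" pair such as system.job-rate-control.{jobs,per}.
  def readRate(config: Config, path: String): (Int, FiniteDuration) = {
    val jobs = config.getInt(s"$path.jobs")
    // Config#getDuration understands HOCON durations like "10 seconds".
    val per = FiniteDuration(config.getDuration(s"$path.per").toNanos, TimeUnit.NANOSECONDS)
    (jobs, per)
  }

  def main(args: Array[String]): Unit = {
    val config = ConfigFactory.parseString("system.job-rate-control { jobs = 50, per = 10 seconds }")
    println(readRate(config, "system.job-rate-control")) // (50, 10 seconds as nanos)
  }
}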
@@ -262,8 +314,8 @@ abstract class CromwellRootActor(terminator: CromwellTerminator, } } - override def receive = { - case message => logger.error(s"Unknown message received by CromwellRootActor: $message") + override def receive = { case message => + logger.error(s"Unknown message received by CromwellRootActor: $message") } /** diff --git a/engine/src/main/scala/cromwell/server/CromwellServer.scala b/engine/src/main/scala/cromwell/server/CromwellServer.scala index 76f784875fc..cc2a1d409e9 100644 --- a/engine/src/main/scala/cromwell/server/CromwellServer.scala +++ b/engine/src/main/scala/cromwell/server/CromwellServer.scala @@ -20,26 +20,29 @@ object CromwellServer { def run(gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean)(cromwellSystem: CromwellSystem): Future[Any] = { implicit val actorSystem = cromwellSystem.actorSystem implicit val materializer = cromwellSystem.materializer - actorSystem.actorOf(CromwellServerActor.props(cromwellSystem, gracefulShutdown, abortJobsOnTerminate), "cromwell-service") + actorSystem.actorOf(CromwellServerActor.props(cromwellSystem, gracefulShutdown, abortJobsOnTerminate), + "cromwell-service" + ) actorSystem.whenTerminated } } -class CromwellServerActor(cromwellSystem: CromwellSystem, gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean)(override implicit val materializer: ActorMaterializer) - extends CromwellRootActor( - terminator = cromwellSystem, - gracefulShutdown = gracefulShutdown, - abortJobsOnTerminate = abortJobsOnTerminate, - serverMode = true, - config = cromwellSystem.config - ) +class CromwellServerActor(cromwellSystem: CromwellSystem, gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean)( + implicit override val materializer: ActorMaterializer +) extends CromwellRootActor( + terminator = cromwellSystem, + gracefulShutdown = gracefulShutdown, + abortJobsOnTerminate = abortJobsOnTerminate, + serverMode = true, + config = cromwellSystem.config + ) with CromwellApiService with CromwellInstrumentationActor with WesRouteSupport with SwaggerService with ActorLogging { implicit val actorSystem = context.system - override implicit val ec = context.dispatcher + implicit override val ec = context.dispatcher override def actorRefFactory: ActorContext = context val webserviceConf = cromwellSystem.config.getConfig("webservice") @@ -71,7 +74,7 @@ class CromwellServerActor(cromwellSystem: CromwellSystem, gracefulShutdown: Bool If/when CromwellServer behaves like a better async citizen, we may be less paranoid about our async log messages not appearing due to the actor system shutdown. For now, synchronously print to the stderr so that the user has some idea of why the server failed to start up. 
- */ + */ Console.err.println(s"Binding failed interface $interface port $port") e.printStackTrace(Console.err) cromwellSystem.shutdownActorSystem() @@ -85,7 +88,9 @@ class CromwellServerActor(cromwellSystem: CromwellSystem, gracefulShutdown: Bool } object CromwellServerActor { - def props(cromwellSystem: CromwellSystem, gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean)(implicit materializer: ActorMaterializer): Props = { - Props(new CromwellServerActor(cromwellSystem, gracefulShutdown, abortJobsOnTerminate)).withDispatcher(EngineDispatcher) - } + def props(cromwellSystem: CromwellSystem, gracefulShutdown: Boolean, abortJobsOnTerminate: Boolean)(implicit + materializer: ActorMaterializer + ): Props = + Props(new CromwellServerActor(cromwellSystem, gracefulShutdown, abortJobsOnTerminate)) + .withDispatcher(EngineDispatcher) } diff --git a/engine/src/main/scala/cromwell/server/CromwellShutdown.scala b/engine/src/main/scala/cromwell/server/CromwellShutdown.scala index ce204249b1e..991d8dfabb6 100644 --- a/engine/src/main/scala/cromwell/server/CromwellShutdown.scala +++ b/engine/src/main/scala/cromwell/server/CromwellShutdown.scala @@ -59,17 +59,16 @@ object CromwellShutdown extends GracefulStopSupport { /** * Register a task to unbind from the port during the ServiceUnbind phase. */ - def registerUnbindTask(actorSystem: ActorSystem, serverBinding: Future[Http.ServerBinding]) = { + def registerUnbindTask(actorSystem: ActorSystem, serverBinding: Future[Http.ServerBinding]) = instance(actorSystem).addTask(CoordinatedShutdown.PhaseServiceUnbind, "UnbindingServerPort") { () => // At this point it's still safe to schedule work on the actor system's dispatcher implicit val ec = actorSystem.dispatcher for { binding <- serverBinding _ <- binding.unbind() - _ = logger.info("Http server unbound.") + _ = logger.info("Http server unbound.") } yield Done } - } /** * Register tasks on the coordinated shutdown instance allowing a controlled, ordered shutdown process @@ -77,29 +76,28 @@ object CromwellShutdown extends GracefulStopSupport { * Calling this method will add a JVM shutdown hook. 
*/ def registerShutdownTasks( - cromwellId: String, - abortJobsOnTerminate: Boolean, - actorSystem: ActorSystem, - workflowManagerActor: ActorRef, - logCopyRouter: ActorRef, - workflowCallbackActor: Option[ActorRef], - jobTokenDispenser: ActorRef, - jobStoreActor: ActorRef, - workflowStoreActor: ActorRef, - subWorkflowStoreActor: ActorRef, - callCacheWriteActor: ActorRef, - ioActor: ActorRef, - dockerHashActor: ActorRef, - serviceRegistryActor: ActorRef, - materializer: ActorMaterializer - ): Unit = { + cromwellId: String, + abortJobsOnTerminate: Boolean, + actorSystem: ActorSystem, + workflowManagerActor: ActorRef, + logCopyRouter: ActorRef, + workflowCallbackActor: Option[ActorRef], + jobTokenDispenser: ActorRef, + jobStoreActor: ActorRef, + workflowStoreActor: ActorRef, + subWorkflowStoreActor: ActorRef, + callCacheWriteActor: ActorRef, + ioActor: ActorRef, + dockerHashActor: ActorRef, + serviceRegistryActor: ActorRef, + materializer: ActorMaterializer + ): Unit = { val coordinatedShutdown = this.instance(actorSystem) - def shutdownActor(actor: ActorRef, - phase: String, - message: AnyRef, - customTimeout: Option[FiniteDuration] = None)(implicit executionContext: ExecutionContext) = { + def shutdownActor(actor: ActorRef, phase: String, message: AnyRef, customTimeout: Option[FiniteDuration] = None)( + implicit executionContext: ExecutionContext + ) = coordinatedShutdown.addTask(phase, s"stop${actor.path.name.capitalize}") { () => val timeout = coordinatedShutdown.timeout(phase) logger.info(s"Shutting down ${actor.path.name} - Timeout = ${timeout.toSeconds} seconds") @@ -115,7 +113,6 @@ object CromwellShutdown extends GracefulStopSupport { action map { _ => Done } } - } implicit val ec = actorSystem.dispatcher @@ -131,61 +128,70 @@ object CromwellShutdown extends GracefulStopSupport { } /* 2) The socket is unbound from the port in the CoordinatedShutdown.PhaseServiceUnbind - * See cromwell.engine.server.CromwellServer + * See cromwell.engine.server.CromwellServer */ /* 3) Finish processing all requests: - * - Release any WorkflowStore entries held by this Cromwell instance. - * - Publish workflow processing event metadata for the released workflows. - * - Stop the WorkflowStore: The port is not bound anymore so we can't have new submissions. - * Process what's left in the message queue and stop. - * Note that it's possible that some submissions are still asynchronously being prepared at the - * akka http API layer (CromwellApiService) to be sent to the WorkflowStore. - * Those submissions might be lost if the WorkflowStore shuts itself down when it's finished processing its current work. - * In that case the "ask" over in the CromwellAPIService will fail with a AskTimeoutException and should be handled appropriately. - * This process still ensures that no submission can make it to the database without a response being sent back to the client. - * - * - Stop WorkflowManagerActor: We've already stopped starting new workflows but the Running workflows are still - * going. This is tricky because all the actor hierarchy under the WMA can be in a variety of state combinations. - * Specifically there is an asynchronous gap in several cases between emission of messages towards engine level - * actors (job store, cache store, etc...) and emission of messages towards the metadata service for the same logical - * event (e.g: job complete). 
- * The current behavior upon restart however is to re-play the graph, skipping execution of completed jobs (determined by
- * engine job store) but still re-submitting all related metadata events. This is likely sub-optimal, but is used here
- * to simply stop the WMA (which will trigger all its descendants to be stopped recursively) without more coordination.
- * Indeed even if the actor is stopped in between the above mentioned gap, metadata will be re-submitted anyway on restart,
- * even for completed jobs.
- *
- * - Stop the LogCopyRouter: it can generate metadata events and must therefore be stopped before the service registry.
- * Wrap the ShutdownCommand in a Broadcast message so the router forwards it to all its routees
- * Use the ShutdownCommand because a PoisonPill could stop the routees in the middle of "transaction"
- * with the IoActor. The routees handle the ShutdownCommand properly and shutdown only when they have
- * no outstanding requests to the IoActor. When all routees are dead the router automatically stops itself.
- *
- * - Stop the job token dispenser: stop it before stopping WMA and its EJEA descendants because
- * the dispenser is watching all EJEAs and would be flooded by Terminated messages otherwise
- */
+ * - Release any WorkflowStore entries held by this Cromwell instance.
+ * - Publish workflow processing event metadata for the released workflows.
+ * - Stop the WorkflowStore: The port is not bound anymore so we can't have new submissions.
+ * Process what's left in the message queue and stop.
+ * Note that it's possible that some submissions are still asynchronously being prepared at the
+ * akka http API layer (CromwellApiService) to be sent to the WorkflowStore.
+ * Those submissions might be lost if the WorkflowStore shuts itself down when it's finished processing its current work.
+ * In that case the "ask" over in the CromwellAPIService will fail with an AskTimeoutException and should be handled appropriately.
+ * This process still ensures that no submission can make it to the database without a response being sent back to the client.
+ *
+ * - Stop WorkflowManagerActor: We've already stopped starting new workflows but the Running workflows are still
+ * going. This is tricky because all the actor hierarchy under the WMA can be in a variety of state combinations.
+ * Specifically there is an asynchronous gap in several cases between emission of messages towards engine level
+ * actors (job store, cache store, etc...) and emission of messages towards the metadata service for the same logical
+ * event (e.g.: job complete).
+ * The current behavior upon restart however is to re-play the graph, skipping execution of completed jobs (determined by
+ * engine job store) but still re-submitting all related metadata events. This is likely sub-optimal, but is used here
+ * to simply stop the WMA (which will trigger all its descendants to be stopped recursively) without more coordination.
+ * Indeed even if the actor is stopped in between the above mentioned gap, metadata will be re-submitted anyway on restart,
+ * even for completed jobs.
+ *
+ * - Stop the LogCopyRouter: it can generate metadata events and must therefore be stopped before the service registry.
+ * Wrap the ShutdownCommand in a Broadcast message so the router forwards it to all its routees.
+ * Use the ShutdownCommand because a PoisonPill could stop the routees in the middle of a "transaction"
+ * with the IoActor. The routees handle the ShutdownCommand properly and shut down only when they have
+ * no outstanding requests to the IoActor. When all routees are dead the router automatically stops itself.
+ *
+ * - Stop the job token dispenser: stop it before stopping WMA and its EJEA descendants because
+ * the dispenser is watching all EJEAs and would be flooded by Terminated messages otherwise.
+ */
 coordinatedShutdown.addTask(CoordinatedShutdown.PhaseServiceRequestsDone, "releaseWorkflowStoreEntries") { () =>
-      EngineServicesStore.engineDatabaseInterface.releaseWorkflowStoreEntries(cromwellId).map(count => {
-        logger.info("{} workflows released by {}", count, cromwellId)
-      }).as(Done)
+      EngineServicesStore.engineDatabaseInterface
+        .releaseWorkflowStoreEntries(cromwellId)
+        .map(count => logger.info("{} workflows released by {}", count, cromwellId))
+        .as(Done)
 }
-    coordinatedShutdown.addTask(CoordinatedShutdown.PhaseServiceRequestsDone, "publishMetadataForReleasedWorkflowStoreEntries") { () =>
-      EngineServicesStore.engineDatabaseInterface.findWorkflows(cromwellId).map(ids =>
-        ids foreach { id =>
-          WorkflowProcessingEventPublishing.publish(WorkflowId.fromString(id), cromwellId, Released, serviceRegistryActor)
-        }
-      ).as(Done)
+    coordinatedShutdown.addTask(CoordinatedShutdown.PhaseServiceRequestsDone,
+                                "publishMetadataForReleasedWorkflowStoreEntries"
+    ) { () =>
+      EngineServicesStore.engineDatabaseInterface
+        .findWorkflows(cromwellId)
+        .map(ids =>
+          ids foreach { id =>
+            WorkflowProcessingEventPublishing
+              .publish(WorkflowId.fromString(id), cromwellId, Released, serviceRegistryActor)
+          }
+        )
+        .as(Done)
 }
 shutdownActor(workflowStoreActor, CoordinatedShutdown.PhaseServiceRequestsDone, ShutdownCommand)
 shutdownActor(logCopyRouter, CoordinatedShutdown.PhaseServiceRequestsDone, Broadcast(ShutdownCommand))
-    workflowCallbackActor.foreach(wca => shutdownActor(wca, CoordinatedShutdown.PhaseServiceRequestsDone, Broadcast(ShutdownCommand)))
+    workflowCallbackActor.foreach(wca =>
+      shutdownActor(wca, CoordinatedShutdown.PhaseServiceRequestsDone, Broadcast(ShutdownCommand))
+    )
 shutdownActor(jobTokenDispenser, CoordinatedShutdown.PhaseServiceRequestsDone, ShutdownCommand)
-
+
 /*
-     * Aborting is only a special case of shutdown. Instead of sending a PoisonPill, send a AbortAllWorkflowsCommand
-     * Also attach this task to a special shutdown phase allowing for a longer timeout.
+     * Aborting is only a special case of shutdown. Instead of sending a PoisonPill, send an AbortAllWorkflowsCommand
+     * Also attach this task to a special shutdown phase allowing for a longer timeout.
 */
 if (abortJobsOnTerminate) {
 val abortTimeout = coordinatedShutdown.timeout(PhaseAbortAllWorkflows)
@@ -197,10 +203,10 @@ object CromwellShutdown extends GracefulStopSupport {
 }

 /* 4) Shutdown connection pools
-     * This will close all akka http opened connection pools tied to the actor system.
-     * The pools stop accepting new work but are given a chance to execute the work submitted prior to the shutdown call.
-     * When this future returns, all outstanding connections to client will be terminated.
-     * Note that this also includes connection pools like the one used to lookup docker hashes.
+     * This will close all akka http opened connection pools tied to the actor system.
+     * The pools stop accepting new work but are given a chance to execute the work submitted prior to the shutdown call.
+     * When this future returns, all outstanding connections to client will be terminated.
+ * Note that this also includes connection pools like the one used to look up docker hashes. */ coordinatedShutdown.addTask(CoordinatedShutdown.PhaseServiceStop, "TerminatingConnections") { () => Http(actorSystem).shutdownAllConnectionPools() as { @@ -210,14 +216,20 @@ object CromwellShutdown extends GracefulStopSupport { } /* 5) Stop system level actors that require writing to the database or I/O - * - SubWorkflowStoreActor - * - JobStoreActor - * - CallCacheWriteActor - * - ServiceRegistryActor - * - DockerHashActor - * - IoActor - */ - List(subWorkflowStoreActor, jobStoreActor, callCacheWriteActor, serviceRegistryActor, dockerHashActor, ioActor) foreach { + * - SubWorkflowStoreActor + * - JobStoreActor + * - CallCacheWriteActor + * - ServiceRegistryActor + * - DockerHashActor + * - IoActor + */ + List(subWorkflowStoreActor, + jobStoreActor, + callCacheWriteActor, + serviceRegistryActor, + dockerHashActor, + ioActor + ) foreach { shutdownActor(_, PhaseStopIoActivity, ShutdownCommand) } @@ -236,12 +248,13 @@ object CromwellShutdown extends GracefulStopSupport { // 7) Close out the backend used for WDL HTTP import resolution // http://sttp.readthedocs.io/en/latest/backends/start_stop.html - coordinatedShutdown.addTask(CoordinatedShutdown.PhaseBeforeActorSystemTerminate, "wdlHttpImportResolverBackend") { () => - Future { - HttpResolver.closeBackendIfNecessary() - logger.info("WDL HTTP import resolver closed") - Done - } + coordinatedShutdown.addTask(CoordinatedShutdown.PhaseBeforeActorSystemTerminate, "wdlHttpImportResolverBackend") { + () => + Future { + HttpResolver.closeBackendIfNecessary() + logger.info("WDL HTTP import resolver closed") + Done + } } } } diff --git a/engine/src/main/scala/cromwell/server/CromwellSystem.scala b/engine/src/main/scala/cromwell/server/CromwellSystem.scala index 1a6a709c8f3..ec9b95999d0 100644 --- a/engine/src/main/scala/cromwell/server/CromwellSystem.scala +++ b/engine/src/main/scala/cromwell/server/CromwellSystem.scala @@ -40,26 +40,23 @@ trait CromwellSystem extends CromwellTerminator { implicit final lazy val actorSystem = newActorSystem() implicit final lazy val materializer = ActorMaterializer() - implicit private final lazy val ec = actorSystem.dispatcher + implicit final private lazy val ec = actorSystem.dispatcher - override def beginCromwellShutdown(reason: CoordinatedShutdown.Reason): Future[Done] = { + override def beginCromwellShutdown(reason: CoordinatedShutdown.Reason): Future[Done] = CromwellShutdown.instance(actorSystem).run(reason) - } - def shutdownActorSystem(): Future[Terminated] = { + def shutdownActorSystem(): Future[Terminated] = // If the actor system is already terminated it's already too late for a clean shutdown // Note: This does not protect against starting 2 shutdowns concurrently if (!actorSystem.whenTerminated.isCompleted) { Http().shutdownAllConnectionPools() flatMap { _ => shutdownMaterializerAndActorSystem() - } recoverWith { - case _ => - // we still want to shutdown the materializer and actor system if shutdownAllConnectionPools failed - shutdownMaterializerAndActorSystem() + } recoverWith { case _ => + // we still want to shut down the materializer and actor system if shutdownAllConnectionPools failed + shutdownMaterializerAndActorSystem() } } else actorSystem.whenTerminated - } - + private def shutdownMaterializerAndActorSystem() = { materializer.shutdown() actorSystem.terminate() diff --git a/engine/src/main/scala/cromwell/subworkflowstore/EmptySubWorkflowStoreActor.scala 
b/engine/src/main/scala/cromwell/subworkflowstore/EmptySubWorkflowStoreActor.scala index cbbae2410c3..82e7a733817 100644 --- a/engine/src/main/scala/cromwell/subworkflowstore/EmptySubWorkflowStoreActor.scala +++ b/engine/src/main/scala/cromwell/subworkflowstore/EmptySubWorkflowStoreActor.scala @@ -7,7 +7,7 @@ import cromwell.util.GracefulShutdownHelper.ShutdownCommand class EmptySubWorkflowStoreActor extends Actor with ActorLogging { override def receive: Receive = { - case register: RegisterSubWorkflow => sender() ! SubWorkflowStoreRegisterSuccess(register) + case register: RegisterSubWorkflow => sender() ! SubWorkflowStoreRegisterSuccess(register) case query: QuerySubWorkflow => sender() ! SubWorkflowNotFound(query) case _: WorkflowComplete => // No-op! case ShutdownCommand => context stop self diff --git a/engine/src/main/scala/cromwell/subworkflowstore/SqlSubWorkflowStore.scala b/engine/src/main/scala/cromwell/subworkflowstore/SqlSubWorkflowStore.scala index 64f21275ff4..acf760c8143 100644 --- a/engine/src/main/scala/cromwell/subworkflowstore/SqlSubWorkflowStore.scala +++ b/engine/src/main/scala/cromwell/subworkflowstore/SqlSubWorkflowStore.scala @@ -10,7 +10,8 @@ class SqlSubWorkflowStore(subWorkflowStoreSqlDatabase: SubWorkflowStoreSqlDataba callFullyQualifiedName: String, jobIndex: Int, jobAttempt: Int, - subWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Unit] = { + subWorkflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Unit] = subWorkflowStoreSqlDatabase.addSubWorkflowStoreEntry( rootWorkflowExecutionUuid, parentWorkflowExecutionUuid, @@ -19,13 +20,16 @@ class SqlSubWorkflowStore(subWorkflowStoreSqlDatabase: SubWorkflowStoreSqlDataba jobAttempt, subWorkflowExecutionUuid ) - } - override def querySubWorkflowStore(parentWorkflowExecutionUuid: String, callFqn: String, jobIndex: Int, jobAttempt: Int)(implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]] = { + override def querySubWorkflowStore(parentWorkflowExecutionUuid: String, + callFqn: String, + jobIndex: Int, + jobAttempt: Int + )(implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]] = subWorkflowStoreSqlDatabase.querySubWorkflowStore(parentWorkflowExecutionUuid, callFqn, jobIndex, jobAttempt) - } - override def removeSubWorkflowStoreEntries(parentWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Int] = { + override def removeSubWorkflowStoreEntries(parentWorkflowExecutionUuid: String)(implicit + ec: ExecutionContext + ): Future[Int] = subWorkflowStoreSqlDatabase.removeSubWorkflowStoreEntries(parentWorkflowExecutionUuid) - } } diff --git a/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStore.scala b/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStore.scala index 8ad92fa9bae..ef303be8e0e 100644 --- a/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStore.scala +++ b/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStore.scala @@ -10,10 +10,12 @@ trait SubWorkflowStore { callFullyQualifiedName: String, jobIndex: Int, jobAttempt: Int, - subWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Unit] + subWorkflowExecutionUuid: String + )(implicit ec: ExecutionContext): Future[Unit] - def querySubWorkflowStore(parentWorkflowExecutionUuid: String, callFqn: String, jobIndex: Int, jobAttempt: Int) - (implicit ec: ExecutionContext): Future[Option[SubWorkflowStoreEntry]] + def querySubWorkflowStore(parentWorkflowExecutionUuid: String, callFqn: String, jobIndex: Int, jobAttempt: Int)( + 
implicit ec: ExecutionContext + ): Future[Option[SubWorkflowStoreEntry]] def removeSubWorkflowStoreEntries(parentWorkflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Int] } diff --git a/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStoreActor.scala b/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStoreActor.scala index 22ec0476220..a3540c3497a 100644 --- a/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStoreActor.scala +++ b/engine/src/main/scala/cromwell/subworkflowstore/SubWorkflowStoreActor.scala @@ -11,7 +11,7 @@ import scala.concurrent.ExecutionContext import scala.util.{Failure, Success} class SubWorkflowStoreActor(database: SubWorkflowStore) extends Actor with ActorLogging with MonitoringCompanionHelper { - + implicit val ec: ExecutionContext = context.dispatcher val subWorkflowStoreReceive: Receive = { @@ -19,9 +19,9 @@ class SubWorkflowStoreActor(database: SubWorkflowStore) extends Actor with Actor case query: QuerySubWorkflow => querySubWorkflow(sender(), query) case complete: WorkflowComplete => workflowComplete(sender(), complete) } - + override def receive = subWorkflowStoreReceive.orElse(monitoringReceive) - + private def registerSubWorkflow(replyTo: ActorRef, command: RegisterSubWorkflow) = { addWork() database.addSubWorkflowStoreEntry( @@ -31,9 +31,9 @@ class SubWorkflowStoreActor(database: SubWorkflowStore) extends Actor with Actor command.jobKey.index.fromIndex, command.jobKey.attempt, command.subWorkflowExecutionUuid.toString - ) onComplete { + ) onComplete { case Success(_) => - replyTo ! SubWorkflowStoreRegisterSuccess(command) + replyTo ! SubWorkflowStoreRegisterSuccess(command) removeWork() case Failure(ex) => replyTo ! SubWorkflowStoreFailure(command, ex) @@ -43,7 +43,11 @@ class SubWorkflowStoreActor(database: SubWorkflowStore) extends Actor with Actor private def querySubWorkflow(replyTo: ActorRef, command: QuerySubWorkflow) = { val jobKey = command.jobKey - database.querySubWorkflowStore(command.parentWorkflowExecutionUuid.toString, jobKey.node.fullyQualifiedName, jobKey.index.fromIndex, jobKey.attempt) onComplete { + database.querySubWorkflowStore(command.parentWorkflowExecutionUuid.toString, + jobKey.node.fullyQualifiedName, + jobKey.index.fromIndex, + jobKey.attempt + ) onComplete { case Success(Some(result)) => replyTo ! SubWorkflowFound(result) case Success(None) => replyTo ! SubWorkflowNotFound(command) case Failure(ex) => replyTo ! SubWorkflowStoreFailure(command, ex) @@ -54,27 +58,33 @@ class SubWorkflowStoreActor(database: SubWorkflowStore) extends Actor with Actor addWork() database.removeSubWorkflowStoreEntries(command.workflowExecutionUuid.toString) onComplete { case Success(_) => removeWork() - case Failure(ex) => + case Failure(ex) => replyTo ! 
SubWorkflowStoreFailure(command, ex) removeWork() } } - + } object SubWorkflowStoreActor { sealed trait SubWorkflowStoreActorCommand - case class RegisterSubWorkflow(rootWorkflowExecutionUuid: WorkflowId, parentWorkflowExecutionUuid: WorkflowId, jobKey: JobKey, subWorkflowExecutionUuid: WorkflowId) extends SubWorkflowStoreActorCommand - case class QuerySubWorkflow(parentWorkflowExecutionUuid: WorkflowId, jobKey: JobKey) extends SubWorkflowStoreActorCommand + case class RegisterSubWorkflow(rootWorkflowExecutionUuid: WorkflowId, + parentWorkflowExecutionUuid: WorkflowId, + jobKey: JobKey, + subWorkflowExecutionUuid: WorkflowId + ) extends SubWorkflowStoreActorCommand + case class QuerySubWorkflow(parentWorkflowExecutionUuid: WorkflowId, jobKey: JobKey) + extends SubWorkflowStoreActorCommand case class WorkflowComplete(workflowExecutionUuid: WorkflowId) extends SubWorkflowStoreActorCommand sealed trait SubWorkflowStoreActorResponse case class SubWorkflowStoreRegisterSuccess(command: RegisterSubWorkflow) extends SubWorkflowStoreActorResponse case class SubWorkflowFound(subWorkflowStoreEntry: SubWorkflowStoreEntry) extends SubWorkflowStoreActorResponse case class SubWorkflowNotFound(command: QuerySubWorkflow) extends SubWorkflowStoreActorResponse - - case class SubWorkflowStoreFailure(command: SubWorkflowStoreActorCommand, failure: Throwable) extends SubWorkflowStoreActorResponse - + + case class SubWorkflowStoreFailure(command: SubWorkflowStoreActorCommand, failure: Throwable) + extends SubWorkflowStoreActorResponse + def props(database: SubWorkflowStore) = Props( new SubWorkflowStoreActor(database) ).withDispatcher(EngineDispatcher) diff --git a/engine/src/main/scala/cromwell/webservice/ApiDataModels.scala b/engine/src/main/scala/cromwell/webservice/ApiDataModels.scala index 61bd2ee7d01..ceb4c1b67e7 100644 --- a/engine/src/main/scala/cromwell/webservice/ApiDataModels.scala +++ b/engine/src/main/scala/cromwell/webservice/ApiDataModels.scala @@ -6,7 +6,6 @@ import spray.json._ import wdl.draft2.model.FullyQualifiedName import wom.values.WomValue - case class WorkflowStatusResponse(id: String, status: String) case class WorkflowSubmitResponse(id: String, status: String) @@ -21,16 +20,16 @@ case class WorkflowMetadataQueryParameters(outputs: Boolean = true, timings: Boo object APIResponse { - private def constructFailureResponse(status: String, ex: Throwable) = { + private def constructFailureResponse(status: String, ex: Throwable) = ex match { case exceptionWithErrors: MessageAggregation => - FailureResponse( - status, - exceptionWithErrors.exceptionContext, - Option(exceptionWithErrors.errorMessages.toVector)) - case e: Throwable => FailureResponse(status, e.getMessage, Option(e.getCause).map(c => Vector(ExceptionUtils.getMessage(c)))) + FailureResponse(status, + exceptionWithErrors.exceptionContext, + Option(exceptionWithErrors.errorMessages.toVector) + ) + case e: Throwable => + FailureResponse(status, e.getMessage, Option(e.getCause).map(c => Vector(ExceptionUtils.getMessage(c)))) } - } /** When the data submitted in the request is incorrect. 
*/ def fail(ex: Throwable) = constructFailureResponse("fail", ex) diff --git a/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala b/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala index ce123db4d20..6aa5edc94fb 100644 --- a/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala +++ b/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala @@ -13,7 +13,9 @@ import scala.concurrent.duration._ * Because of the vagaries of timing, etc this is intended to give a rough idea of what's going on instead of * being a ground truth. */ -final case class EngineStatsActor(workflowActors: List[ActorRef], replyTo: ActorRef, timeout: FiniteDuration) extends Actor with ActorLogging { +final case class EngineStatsActor(workflowActors: List[ActorRef], replyTo: ActorRef, timeout: FiniteDuration) + extends Actor + with ActorLogging { implicit val ec = context.dispatcher private var jobCounts = Map.empty[ActorRef, Int] @@ -23,7 +25,7 @@ final case class EngineStatsActor(workflowActors: List[ActorRef], replyTo: Actor * Because of sub workflows there is currently no reliable way to know if we received responses from all running WEAs. * For now, we always wait for the timeout duration before responding to give a chance to all WEAs to respond (even nested ones). * This could be improved by having WEAs wait for their sub WEAs before sending back the response. - */ + */ val scheduledMsg = context.system.scheduler.scheduleOnce(timeout, self, ShutItDown) if (workflowActors.isEmpty) reportStats() @@ -47,9 +49,8 @@ final case class EngineStatsActor(workflowActors: List[ActorRef], replyTo: Actor object EngineStatsActor { import scala.language.postfixOps - def props(workflowActors: List[ActorRef], replyTo: ActorRef, timeout: FiniteDuration = MaxTimeToWait) = { + def props(workflowActors: List[ActorRef], replyTo: ActorRef, timeout: FiniteDuration = MaxTimeToWait) = Props(EngineStatsActor(workflowActors, replyTo, timeout)).withDispatcher(Dispatcher.ApiDispatcher) - } sealed abstract class EngineStatsActorMessage private case object ShutItDown extends EngineStatsActorMessage diff --git a/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala b/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala index ca85c8de694..9d317152ba9 100644 --- a/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala +++ b/engine/src/main/scala/cromwell/webservice/LabelsManagerActor.scala @@ -11,7 +11,8 @@ import spray.json.{DefaultJsonProtocol, JsObject, JsString} object LabelsManagerActor { - def props(serviceRegistryActor: ActorRef): Props = Props(new LabelsManagerActor(serviceRegistryActor)).withDispatcher(Dispatcher.ApiDispatcher) + def props(serviceRegistryActor: ActorRef): Props = + Props(new LabelsManagerActor(serviceRegistryActor)).withDispatcher(Dispatcher.ApiDispatcher) final case class LabelsData(workflowId: WorkflowId, labels: Labels) @@ -49,7 +50,7 @@ class LabelsManagerActor(serviceRegistryActor: ActorRef) extends Actor with Acto /* Ask the metadata store for the current set of labels, so we can return the full label set to the user. At this point in the actor lifecycle, wfId has already been filled out so the .get is safe - */ + */ serviceRegistryActor ! GetLabels(wfId.get) case SuccessfulMetadataJsonResponse(_, jsObject) => /* @@ -64,7 +65,7 @@ class LabelsManagerActor(serviceRegistryActor: ActorRef) extends Actor with Acto the return packet, this is a likely cause. 
At this point in the actor lifecycle, newLabels will have been filled in so the .get is safe - */ + */ def replaceOrAddLabel(originalJson: JsObject, label: Label): JsObject = { val labels = originalJson.fields.get("labels").map(_.asJsObject.fields).getOrElse(Map.empty) @@ -83,7 +84,9 @@ class LabelsManagerActor(serviceRegistryActor: ActorRef) extends Actor with Acto At this point in the actor lifecycle, wfId has already been filled out so the .get is safe */ - target ! FailedLabelsManagerResponse(new RuntimeException(s"Unable to update labels for ${wfId.get} due to ${f.reason.getMessage}")) + target ! FailedLabelsManagerResponse( + new RuntimeException(s"Unable to update labels for ${wfId.get} due to ${f.reason.getMessage}") + ) context stop self } } diff --git a/engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala b/engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala index 9862dc01151..22adbe6e0f8 100644 --- a/engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala +++ b/engine/src/main/scala/cromwell/webservice/PartialWorkflowSources.scala @@ -36,7 +36,8 @@ final case class PartialWorkflowSources(workflowSource: Option[WorkflowSource] = zippedImports: Option[Array[Byte]] = None, warnings: Seq[String] = List.empty, workflowOnHold: Boolean, - requestedWorkflowIds: Vector[WorkflowId]) + requestedWorkflowIds: Vector[WorkflowId] +) object PartialWorkflowSources { val log = LoggerFactory.getLogger(classOf[PartialWorkflowSources]) @@ -56,15 +57,29 @@ object PartialWorkflowSources { val workflowOnHoldKey = "workflowOnHold" val RequestedWorkflowIdKey = "requestedWorkflowId" - val allKeys = List(WdlSourceKey, WorkflowUrlKey, WorkflowRootKey, WorkflowSourceKey, WorkflowTypeKey, WorkflowTypeVersionKey, WorkflowInputsKey, - WorkflowOptionsKey, labelsKey, WdlDependenciesKey, WorkflowDependenciesKey, workflowOnHoldKey, RequestedWorkflowIdKey) + val allKeys = List( + WdlSourceKey, + WorkflowUrlKey, + WorkflowRootKey, + WorkflowSourceKey, + WorkflowTypeKey, + WorkflowTypeVersionKey, + WorkflowInputsKey, + WorkflowOptionsKey, + labelsKey, + WdlDependenciesKey, + WorkflowDependenciesKey, + workflowOnHoldKey, + RequestedWorkflowIdKey + ) val allPrefixes = List(WorkflowInputsAuxPrefix) val MaxWorkflowUrlLength = 2000 def fromSubmitRoute(formData: Map[String, ByteString], - allowNoInputs: Boolean): Try[Seq[WorkflowSourceFilesCollection]] = { + allowNoInputs: Boolean + ): Try[Seq[WorkflowSourceFilesCollection]] = { import cats.syntax.apply._ import cats.syntax.traverse._ import cats.syntax.validated._ @@ -78,7 +93,7 @@ object PartialWorkflowSources { val unrecognized: ErrorOr[Unit] = formData.keySet .filterNot(name => allKeys.contains(name) || allPrefixes.exists(name.startsWith)) .toList - .map(name => s"Unexpected body part name: $name") match { + .map(name => s"Unexpected body part name: $name") match { case Nil => ().validNel case head :: tail => NonEmptyList.of(head, tail: _*).invalid } @@ -88,17 +103,17 @@ object PartialWorkflowSources { val workflowSource = getStringValue(WorkflowSourceKey) val workflowUrl = getStringValue(WorkflowUrlKey) - def deprecationWarning(out: String, in: String)(actual: String): String = { + def deprecationWarning(out: String, in: String)(actual: String): String = if (actual == out) { val warning = Array( s"The '$out' parameter name has been deprecated in favor of '$in'.", s"Support for '$out' will be removed from future versions of Cromwell.", - s"Please switch to using '$in' in future submissions.").mkString(" ") + s"Please 
switch to using '$in' in future submissions." + ).mkString(" ") log.warn(warning) warning } else "" - } val wdlSourceDeprecationWarning: String => String = deprecationWarning(out = WdlSourceKey, in = WorkflowSourceKey) val wdlSourceWarning = wdlSource.as(WdlSourceKey) map wdlSourceDeprecationWarning @@ -110,7 +125,8 @@ object PartialWorkflowSources { case (Some(_), Some(_), None) => s"$WdlSourceKey and $WorkflowSourceKey can't both be supplied".invalidNel case (None, Some(_), Some(_)) => s"$WorkflowSourceKey and $WorkflowUrlKey can't both be supplied".invalidNel case (Some(_), None, Some(_)) => s"$WdlSourceKey and $WorkflowUrlKey can't both be supplied".invalidNel - case (Some(_), Some(_), Some(_)) => s"$WdlSourceKey, $WorkflowSourceKey and $WorkflowUrlKey all 3 can't be supplied".invalidNel + case (Some(_), Some(_), Some(_)) => + s"$WdlSourceKey, $WorkflowSourceKey and $WorkflowUrlKey all 3 can't be supplied".invalidNel case (None, None, None) => s"$WorkflowSourceKey or $WorkflowUrlKey needs to be supplied".invalidNel } @@ -124,17 +140,20 @@ object PartialWorkflowSources { case None => Vector.empty.validNel } - val workflowInputsAux: ErrorOr[Map[Int, String]] = formData.toList.flatTraverse[ErrorOr, (Int, String)]({ - case (name, value) if name.startsWith(WorkflowInputsAuxPrefix) => - Try(name.stripPrefix(WorkflowInputsAuxPrefix).toInt).toErrorOr.map(index => List((index, value.utf8String))) - case _ => List.empty.validNel - }).map(_.toMap) + val workflowInputsAux: ErrorOr[Map[Int, String]] = formData.toList + .flatTraverse[ErrorOr, (Int, String)] { + case (name, value) if name.startsWith(WorkflowInputsAuxPrefix) => + Try(name.stripPrefix(WorkflowInputsAuxPrefix).toInt).toErrorOr.map(index => List((index, value.utf8String))) + case _ => List.empty.validNel + } + .map(_.toMap) // dependencies val wdlDependencies = getArrayValue(WdlDependenciesKey) val workflowDependencies = getArrayValue(WorkflowDependenciesKey) - val wdlDependenciesDeprecationWarning: String => String = deprecationWarning(out = "wdlDependencies", in = "workflowDependencies") + val wdlDependenciesDeprecationWarning: String => String = + deprecationWarning(out = "wdlDependencies", in = "workflowDependencies") val wdlDependenciesWarning = wdlDependencies.as(WdlDependenciesKey) map wdlDependenciesDeprecationWarning val workflowDependenciesFinal: ErrorOr[Option[Array[Byte]]] = (wdlDependencies, workflowDependencies) match { @@ -146,15 +165,22 @@ object PartialWorkflowSources { val onHold: ErrorOr[Boolean] = getBooleanValue(workflowOnHoldKey).getOrElse(false.validNel) - (unrecognized, workflowSourceFinal, requestedIds, workflowInputs, workflowInputsAux, workflowDependenciesFinal, onHold) mapN { - case (_, source, ids, inputs, aux, dep, onHoldActual) => PartialWorkflowSources( + (unrecognized, + workflowSourceFinal, + requestedIds, + workflowInputs, + workflowInputsAux, + workflowDependenciesFinal, + onHold + ) mapN { case (_, source, ids, inputs, aux, dep, onHoldActual) => + PartialWorkflowSources( workflowSource = source, workflowUrl = workflowUrl, workflowRoot = getStringValue(WorkflowRootKey), workflowType = getStringValue(WorkflowTypeKey), workflowTypeVersion = getStringValue(WorkflowTypeVersionKey), workflowInputs = inputs, - workflowInputsAux= aux, + workflowInputsAux = aux, workflowOptions = getStringValue(WorkflowOptionsKey), customLabels = getStringValue(labelsKey), zippedImports = dep, @@ -171,20 +197,28 @@ object PartialWorkflowSources { } } - private def arrayTypeElementValidation[A](data: String, 
interpretElementFunction: _root_.io.circe.Json => ErrorOr[A]) = { + private def arrayTypeElementValidation[A](data: String, + interpretElementFunction: _root_.io.circe.Json => ErrorOr[A] + ) = { import cats.syntax.validated._ val parseInputsTry = Try { YamlUtils.parse(data) match { // If it's an array, treat each element as an individual input object, otherwise simply toString the whole thing - case Right(json) => json.asArray.map(_.traverse(interpretElementFunction)).getOrElse(interpretElementFunction(json).map(Vector(_))).validNel - case Left(error) => s"Input file is not a valid yaml or json. Inputs data: '$data'. Error: ${ExceptionUtils.getMessage(error)}.".invalidNel + case Right(json) => + json.asArray + .map(_.traverse(interpretElementFunction)) + .getOrElse(interpretElementFunction(json).map(Vector(_))) + .validNel + case Left(error) => + s"Input file is not a valid yaml or json. Inputs data: '$data'. Error: ${ExceptionUtils.getMessage(error)}.".invalidNel } } parseInputsTry match { case Success(v) => v.flatten - case Failure(error) => s"Input file is not a valid yaml or json. Inputs data: '$data'. Error: ${ExceptionUtils.getMessage(error)}.".invalidNel + case Failure(error) => + s"Input file is not a valid yaml or json. Inputs data: '$data'. Error: ${ExceptionUtils.getMessage(error)}.".invalidNel } } @@ -203,17 +237,17 @@ object PartialWorkflowSources { import _root_.io.circe.Printer import cats.syntax.validated._ - def interpretEachElement(json: Json): ErrorOr[WorkflowId] = { + def interpretEachElement(json: Json): ErrorOr[WorkflowId] = if (json.isString) { Try(WorkflowId.fromString(json.asString.get)).toErrorOrWithContext("parse requested workflow ID as UUID") } else s"Requested workflow IDs must be strings but got: '${json.printWith(Printer.noSpaces)}'".invalidNel - } arrayTypeElementValidation(data, interpretEachElement) } private def partialSourcesToSourceCollections(partialSources: ErrorOr[PartialWorkflowSources], - allowNoInputs: Boolean): ErrorOr[Seq[WorkflowSourceFilesCollection]] = { + allowNoInputs: Boolean + ): ErrorOr[Seq[WorkflowSourceFilesCollection]] = { case class RequestedIdAndInputs(requestedId: Option[WorkflowId], inputs: WorkflowJson) def validateInputsAndRequestedIds(pws: PartialWorkflowSources): ErrorOr[Seq[RequestedIdAndInputs]] = { @@ -221,7 +255,9 @@ object PartialWorkflowSources { case (true, true) => Vector("{}").validNel case (true, false) => "No inputs were provided".invalidNel case _ => - val sortedInputAuxes = pws.workflowInputsAux.toSeq.sortBy { case (index, _) => index } map { case(_, inputJson) => Option(inputJson) } + val sortedInputAuxes = pws.workflowInputsAux.toSeq.sortBy { case (index, _) => index } map { + case (_, inputJson) => Option(inputJson) + } pws.workflowInputs.toList.traverse[ErrorOr, String] { workflowInputSet: WorkflowJson => mergeMaps(Seq(Option(workflowInputSet)) ++ sortedInputAuxes).map(_.toString) } @@ -231,7 +267,10 @@ object PartialWorkflowSources { if (pws.requestedWorkflowIds.isEmpty) { (workflowInputs map { i => RequestedIdAndInputs(None, i) }).validNel } else if (pws.requestedWorkflowIds.size == workflowInputs.size) { - (pws.requestedWorkflowIds.zip(workflowInputs).map { case (id, inputs) => RequestedIdAndInputs(Option(id), inputs) }).validNel + pws.requestedWorkflowIds + .zip(workflowInputs) + .map { case (id, inputs) => RequestedIdAndInputs(Option(id), inputs) } + .validNel } else { s"Mismatch between requested IDs count (${pws.requestedWorkflowIds.size}) and workflow inputs counts (${workflowInputs.size})".invalidNel 
} @@ -239,18 +278,23 @@ object PartialWorkflowSources { } def validateOptions(options: Option[WorkflowOptionsJson]): ErrorOr[WorkflowOptions] = - WorkflowOptions.fromJsonString(options.getOrElse("{}")).toErrorOr leftMap { _ map { i => s"Invalid workflow options provided: $i" } } + WorkflowOptions.fromJsonString(options.getOrElse("{}")).toErrorOr leftMap { + _ map { i => s"Invalid workflow options provided: $i" } + } - def validateLabels(labels: WorkflowJson) : ErrorOr[WorkflowJson] = { + def validateLabels(labels: WorkflowJson): ErrorOr[WorkflowJson] = { - def validateKeyValuePair(key: String, value: String): ErrorOr[Unit] = (Label.validateLabelKey(key), Label.validateLabelValue(value)).tupled.void + def validateKeyValuePair(key: String, value: String): ErrorOr[Unit] = + (Label.validateLabelKey(key), Label.validateLabelValue(value)).tupled.void - def validateLabelRestrictions(inputs: Map[String, JsValue]): ErrorOr[Unit] = { - inputs.toList.traverse[ErrorOr, Unit]({ - case (key, JsString(s)) => validateKeyValuePair(key, s) - case (key, other) => s"Invalid label $key: $other : Labels must be strings. ${Label.LabelExpectationsMessage}".invalidNel - }).void - } + def validateLabelRestrictions(inputs: Map[String, JsValue]): ErrorOr[Unit] = + inputs.toList + .traverse[ErrorOr, Unit] { + case (key, JsString(s)) => validateKeyValuePair(key, s) + case (key, other) => + s"Invalid label $key: $other : Labels must be strings. ${Label.LabelExpectationsMessage}".invalidNel + } + .void Try(labels.parseJson) match { case Success(JsObject(inputs)) => validateLabelRestrictions(inputs).map(_ => labels) @@ -262,11 +306,12 @@ object PartialWorkflowSources { partialSources match { case Valid(partialSource) => (validateInputsAndRequestedIds(partialSource), - validateOptions(partialSource.workflowOptions), - validateLabels(partialSource.customLabels.getOrElse("{}")), - partialSource.workflowUrl.traverse(validateWorkflowUrl)) mapN { - case (wfInputsAndIds, wfOptions, workflowLabels, wfUrl) => - wfInputsAndIds.map { case RequestedIdAndInputs(id, inputsJson) => WorkflowSourceFilesCollection( + validateOptions(partialSource.workflowOptions), + validateLabels(partialSource.customLabels.getOrElse("{}")), + partialSource.workflowUrl.traverse(validateWorkflowUrl) + ) mapN { case (wfInputsAndIds, wfOptions, workflowLabels, wfUrl) => + wfInputsAndIds.map { case RequestedIdAndInputs(id, inputsJson) => + WorkflowSourceFilesCollection( workflowSource = partialSource.workflowSource, workflowUrl = wfUrl, workflowRoot = partialSource.workflowRoot, @@ -278,7 +323,9 @@ object PartialWorkflowSources { importsFile = partialSource.zippedImports, warnings = partialSource.warnings, workflowOnHold = partialSource.workflowOnHold, - requestedWorkflowId = id) } + requestedWorkflowId = id + ) + } } case Invalid(err) => err.invalid } @@ -294,27 +341,28 @@ object PartialWorkflowSources { } def validateWorkflowUrl(workflowUrl: String): ErrorOr[WorkflowUrl] = { - def convertStringToUrl(workflowUrl: String): ErrorOr[WorkflowUrl] = { + def convertStringToUrl(workflowUrl: String): ErrorOr[WorkflowUrl] = Try(new URL(workflowUrl)) match { case Success(_) => workflowUrl.validNel case Failure(e) => s"Error while validating workflow url: ${e.getMessage}".invalidNel } - } val len = workflowUrl.length - if (len > MaxWorkflowUrlLength) s"Invalid workflow url: url has length $len, longer than the maximum allowed $MaxWorkflowUrlLength characters".invalidNel + if (len > MaxWorkflowUrlLength) + s"Invalid workflow url: url has length $len, longer than the 
maximum allowed $MaxWorkflowUrlLength characters".invalidNel else convertStringToUrl(workflowUrl) } - private def toMap(someInput: Option[String]): ErrorOr[Map[String, JsValue]] = { + private def toMap(someInput: Option[String]): ErrorOr[Map[String, JsValue]] = someInput match { case Some(input: String) => - Try(input.parseJson).toErrorOrWithContext(s"parse input: '$input', which is not a valid json. Please check for syntactical errors.") flatMap { + Try(input.parseJson).toErrorOrWithContext( + s"parse input: '$input', which is not a valid json. Please check for syntactical errors." + ) flatMap { case JsObject(inputMap) => inputMap.validNel - case j: JsValue => s"Submitted input '$input' of type ${j.getClass.getSimpleName} is not a valid JSON object.".invalidNel + case j: JsValue => + s"Submitted input '$input' of type ${j.getClass.getSimpleName} is not a valid JSON object.".invalidNel } case None => Map.empty[String, JsValue].validNel } - } } - diff --git a/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala b/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala index d63ab3bcb51..dd1d88d5f5d 100644 --- a/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala +++ b/engine/src/main/scala/cromwell/webservice/SwaggerUiHttpService.scala @@ -41,7 +41,7 @@ trait SwaggerUiHttpService { * * @return Route serving the swagger UI. */ - final def swaggerUiRoute: Route = { + final def swaggerUiRoute: Route = pathEndOrSingleSlash { get { serveIndex @@ -61,13 +61,13 @@ trait SwaggerUiHttpService { // the subject of the CVE linked below while preserving any fragment identifiers to scroll to the right spot in // the Swagger UI. // https://github.com/swagger-api/swagger-ui/security/advisories/GHSA-qrmm-w75w-3wpx - (path("swagger" / "index.html") | path ("swagger")) { + (path("swagger" / "index.html") | path("swagger")) { get { redirect("/", StatusCodes.MovedPermanently) } } - } } + /** * An extension of HttpService to serve up a resource containing the swagger api as yaml or json. The resource * directory and path on the classpath must match the path for route. The resource can be any file type supported by the @@ -75,9 +75,8 @@ trait SwaggerUiHttpService { */ trait SwaggerResourceHttpService { - def getBasePathOverride(): Option[String] = { + def getBasePathOverride(): Option[String] = Option(System.getenv("SWAGGER_BASE_PATH")) - } /** * @return The directory for the resource under the classpath, and in the url @@ -104,18 +103,21 @@ trait SwaggerResourceHttpService { */ final def swaggerResourceRoute: Route = { // Serve Cromwell API docs from either `/swagger/cromwell.yaml` or just `cromwell.yaml`. 
- val swaggerDocsDirective = path(separateOnSlashes(swaggerDocsPath)) | path(s"$swaggerServiceName.$swaggerResourceType") + val swaggerDocsDirective = + path(separateOnSlashes(swaggerDocsPath)) | path(s"$swaggerServiceName.$swaggerResourceType") - def injectBasePath(basePath: Option[String])(response: HttpResponse): HttpResponse = { + def injectBasePath(basePath: Option[String])(response: HttpResponse): HttpResponse = basePath match { case _ if response.status != StatusCodes.OK => response case None => response - case Some(base_path) => response.mapEntity { entity => - val swapperFlow: Flow[ByteString, ByteString, Any] = Flow[ByteString].map(byteString => ByteString.apply(byteString.utf8String.replace("#basePath: ...", "basePath: " + base_path))) - entity.transformDataBytes(swapperFlow) - } + case Some(base_path) => + response.mapEntity { entity => + val swapperFlow: Flow[ByteString, ByteString, Any] = Flow[ByteString].map(byteString => + ByteString.apply(byteString.utf8String.replace("#basePath: ...", "basePath: " + base_path)) + ) + entity.transformDataBytes(swapperFlow) + } } - } val route = get { swaggerDocsDirective { @@ -135,6 +137,7 @@ trait SwaggerResourceHttpService { * Extends the SwaggerUiHttpService and SwaggerResourceHttpService to serve up both. */ trait SwaggerUiResourceHttpService extends SwaggerUiHttpService with SwaggerResourceHttpService { + /** * @return A route that redirects to the swagger UI and returns the swagger resource. */ diff --git a/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala b/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala index 7483b300f2b..ab5f26caa37 100644 --- a/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala +++ b/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala @@ -18,28 +18,35 @@ trait WebServiceUtils { type MaterializedFormData = Map[String, ByteString] - def materializeFormData(formData: Multipart.FormData)(implicit timeout: Timeout, materializer: Materializer, executionContext: ExecutionContext): Future[MaterializedFormData] = { - formData.parts.mapAsync[(String, ByteString)](1) { - bodyPart => bodyPart.toStrict(timeout.duration)(materializer).map(strict => bodyPart.name -> strict.entity.data)(executionContext) - }.runFold(Map.empty[String, ByteString])((map, tuple) => map + tuple)(materializer) - } + def materializeFormData(formData: Multipart.FormData)(implicit + timeout: Timeout, + materializer: Materializer, + executionContext: ExecutionContext + ): Future[MaterializedFormData] = + formData.parts + .mapAsync[(String, ByteString)](1) { bodyPart => + bodyPart + .toStrict(timeout.duration)(materializer) + .map(strict => bodyPart.name -> strict.entity.data)(executionContext) + } + .runFold(Map.empty[String, ByteString])((map, tuple) => map + tuple)(materializer) /** * Completes a response of a Product, probably a case class, using an implicit marshaller, probably a json encoder. */ - def completeResponse[A <: Product](statusCode: StatusCode, value: A, warnings: Seq[String]) - (implicit mt: ToEntityMarshaller[A]): Route = { + def completeResponse[A <: Product](statusCode: StatusCode, value: A, warnings: Seq[String])(implicit + mt: ToEntityMarshaller[A] + ): Route = complete((statusCode, warningHeaders(warnings), value)) - } // 2.13 added this, not sure why the baseline version was compiling actually. /** * Completes a response of a List of Product (probably a case class), using an implicit marshaller, probably a json encoder. 
*/ - def completeResponse[A <: Product](statusCode: StatusCode, values: List[A], warnings: Seq[String]) - (implicit mt: ToEntityMarshaller[List[A]]): Route = { + def completeResponse[A <: Product](statusCode: StatusCode, values: List[A], warnings: Seq[String])(implicit + mt: ToEntityMarshaller[List[A]] + ): Route = complete((statusCode, warningHeaders(warnings), values)) - } /** * Completes a response of string with the supplied content type. @@ -52,11 +59,11 @@ trait WebServiceUtils { def completeResponse(statusCode: StatusCode, contentType: ContentType.NonBinary, value: String, - warnings: Seq[String]): Route = { + warnings: Seq[String] + ): Route = complete((statusCode, warningHeaders(warnings), HttpEntity(contentType, value))) - } - def warningHeaders(warnings: Seq[String]): List[HttpHeader] = { + def warningHeaders(warnings: Seq[String]): List[HttpHeader] = warnings.toList map { warning => /* Need a quoted string. @@ -65,12 +72,11 @@ trait WebServiceUtils { Using a poor version of ~~#! https://github.com/akka/akka-http/blob/v10.0.9/akka-http-core/src/main/scala/akka/http/impl/util/Rendering.scala#L206 */ - val quotedString = "\"" + warning.replaceAll("\"","\\\\\"").replaceAll("[\\r\\n]+", " ").trim + "\"" + val quotedString = "\"" + warning.replaceAll("\"", "\\\\\"").replaceAll("[\\r\\n]+", " ").trim + "\"" // https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.46 RawHeader("Warning", s"299 cromwell/$cromwellVersion $quotedString") } - } } object WebServiceUtils extends WebServiceUtils { @@ -79,17 +85,14 @@ object WebServiceUtils extends WebServiceUtils { // A: There are customers who rely on the pretty printing to display the error directly in a terminal or GUI. // AEN 2018-12-05 implicit class EnhancedThrowable(val e: Throwable) extends AnyVal { - def failRequest(statusCode: StatusCode, warnings: Seq[String] = Vector.empty): Route = { + def failRequest(statusCode: StatusCode, warnings: Seq[String] = Vector.empty): Route = completeResponse(statusCode, ContentTypes.`application/json`, prettyPrint(APIResponse.fail(e)), warnings) - } - def errorRequest(statusCode: StatusCode, warnings: Seq[String] = Vector.empty): Route = { + def errorRequest(statusCode: StatusCode, warnings: Seq[String] = Vector.empty): Route = completeResponse(statusCode, ContentTypes.`application/json`, prettyPrint(APIResponse.error(e)), warnings) - } } - private def prettyPrint(failureResponse: FailureResponse): String ={ + private def prettyPrint(failureResponse: FailureResponse): String = // .asJson cannot live inside a value class like `EnhancedThrowable`, hence the object method failureResponse.asJson.printWith(Printer.spaces2.copy(dropNullValues = true, colonLeft = "")) - } } diff --git a/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala b/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala index 0bb398f67af..b504818c1a4 100644 --- a/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala +++ b/engine/src/main/scala/cromwell/webservice/WorkflowJsonSupport.scala @@ -13,7 +13,7 @@ import cromwell.services.metadata.MetadataArchiveStatus import cromwell.services.metadata.MetadataService._ import cromwell.util.JsonFormatting.WomValueJsonFormatter._ import cromwell.webservice.routes.CromwellApiService.BackendResponse -import spray.json.{DefaultJsonProtocol, JsString, JsValue, JsonFormat, RootJsonFormat} +import spray.json.{DefaultJsonProtocol, JsonFormat, JsString, JsValue, RootJsonFormat} object WorkflowJsonSupport extends DefaultJsonProtocol { implicit val 
workflowStatusResponseProtocol = jsonFormat2(WorkflowStatusResponse) @@ -25,18 +25,24 @@ object WorkflowJsonSupport extends DefaultJsonProtocol { implicit val BackendResponseFormat = jsonFormat2(BackendResponse) implicit val callAttempt = jsonFormat2(CallAttempt) - implicit val workflowOptionsFormatter: JsonFormat[WorkflowOptions] = new JsonFormat[WorkflowOptions] { + implicit val workflowOptionsFormatter: JsonFormat[WorkflowOptions] = new JsonFormat[WorkflowOptions] { override def read(json: JsValue): WorkflowOptions = json match { case str: JsString => WorkflowOptions.fromJsonString(str.value).get - case other => throw new UnsupportedOperationException(s"Cannot use ${other.getClass.getSimpleName} value. Expected a workflow options String") + case other => + throw new UnsupportedOperationException( + s"Cannot use ${other.getClass.getSimpleName} value. Expected a workflow options String" + ) } override def write(obj: WorkflowOptions): JsValue = JsString(obj.asPrettyJson) } - implicit val workflowIdFormatter: JsonFormat[WorkflowId] = new JsonFormat[WorkflowId] { + implicit val workflowIdFormatter: JsonFormat[WorkflowId] = new JsonFormat[WorkflowId] { override def read(json: JsValue): WorkflowId = json match { case str: JsString => WorkflowId.fromString(str.value) - case other => throw new UnsupportedOperationException(s"Cannot use ${other.getClass.getSimpleName} value. Expected a workflow ID String") + case other => + throw new UnsupportedOperationException( + s"Cannot use ${other.getClass.getSimpleName} value. Expected a workflow ID String" + ) } override def write(obj: WorkflowId): JsValue = JsString(obj.id.toString) } @@ -58,7 +64,7 @@ object WorkflowJsonSupport extends DefaultJsonProtocol { // By default the formatter for JsValues prints them out ADT-style. // In the case of SuccessResponses, we just want raw JsValues to be included in our output verbatim. 
- private implicit val identityJsValueFormatter = new RootJsonFormat[JsValue] { + implicit private val identityJsValueFormatter = new RootJsonFormat[JsValue] { override def read(json: JsValue): JsValue = json override def write(obj: JsValue): JsValue = obj } diff --git a/engine/src/main/scala/cromwell/webservice/routes/CromwellApiService.scala b/engine/src/main/scala/cromwell/webservice/routes/CromwellApiService.scala index a1c4f023135..d75eed4ccd8 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/CromwellApiService.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/CromwellApiService.scala @@ -10,7 +10,7 @@ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.ContentTypes._ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{ExceptionHandler, Route} -import akka.pattern.{AskTimeoutException, ask} +import akka.pattern.{ask, AskTimeoutException} import akka.stream.ActorMaterializer import akka.util.Timeout import cats.data.NonEmptyList @@ -23,7 +23,12 @@ import cromwell.core.{path => _, _} import cromwell.engine.backend.BackendConfiguration import cromwell.engine.instrumentation.HttpInstrumentation import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffActor.{CachedCallNotFoundException, CallCacheDiffActorResponse, FailedCallCacheDiffResponse, SuccessfulCallCacheDiffResponse} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffActor.{ + CachedCallNotFoundException, + CallCacheDiffActorResponse, + FailedCallCacheDiffResponse, + SuccessfulCallCacheDiffResponse +} import cromwell.engine.workflow.lifecycle.execution.callcaching.{CallCacheDiffActor, CallCacheDiffQueryParameter} import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.NotInOnHoldStateException import cromwell.engine.workflow.workflowstore.{WorkflowStoreActor, WorkflowStoreEngineActor, WorkflowStoreSubmitActor} @@ -42,7 +47,11 @@ import scala.concurrent.{ExecutionContext, Future, TimeoutException} import scala.io.Source import scala.util.{Failure, Success, Try} -trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport with WomtoolRouteSupport with WebServiceUtils { +trait CromwellApiService + extends HttpInstrumentation + with MetadataRouteSupport + with WomtoolRouteSupport + with WebServiceUtils { import CromwellApiService._ implicit def actorRefFactory: ActorRefFactory @@ -60,11 +69,14 @@ trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport w val engineRoutes = concat( path("engine" / Segment / "stats") { _ => get { - completeResponse(StatusCodes.Forbidden, APIResponse.fail(new RuntimeException("The /stats endpoint is currently disabled.")), warnings = Seq.empty) + completeResponse(StatusCodes.Forbidden, + APIResponse.fail(new RuntimeException("The /stats endpoint is currently disabled.")), + warnings = Seq.empty + ) } }, path("engine" / Segment / "version") { _ => - get { complete(versionResponse) } + get(complete(versionResponse)) }, path("engine" / Segment / "status") { _ => onComplete(serviceRegistryActor.ask(GetCurrentStatus).mapTo[StatusCheckResponse]) { @@ -72,14 +84,15 @@ trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport w val httpCode = if (status.ok) StatusCodes.OK else StatusCodes.InternalServerError complete(ToResponseMarshallable((httpCode, status.systems))) case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) - case 
Failure(_) => new RuntimeException("Unable to gather engine status").failRequest(StatusCodes.InternalServerError) + case Failure(_) => + new RuntimeException("Unable to gather engine status").failRequest(StatusCodes.InternalServerError) } } ) val workflowRoutes = path("workflows" / Segment / "backends") { _ => - get { instrumentRequest { complete(ToResponseMarshallable(backendResponse)) } } + get(instrumentRequest(complete(ToResponseMarshallable(backendResponse)))) } ~ path("workflows" / Segment / "callcaching" / "diff") { _ => parameterSeq { parameters => @@ -87,11 +100,15 @@ trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport w instrumentRequest { CallCacheDiffQueryParameter.fromParameters(parameters) match { case Valid(queryParameter) => - val diffActor = actorRefFactory.actorOf(CallCacheDiffActor.props(serviceRegistryActor), "CallCacheDiffActor-" + UUID.randomUUID()) + val diffActor = actorRefFactory.actorOf(CallCacheDiffActor.props(serviceRegistryActor), + "CallCacheDiffActor-" + UUID.randomUUID() + ) onComplete(diffActor.ask(queryParameter).mapTo[CallCacheDiffActorResponse]) { case Success(r: SuccessfulCallCacheDiffResponse) => complete(r) - case Success(r: FailedCallCacheDiffResponse) => r.reason.errorRequest(StatusCodes.InternalServerError) - case Failure(_: AskTimeoutException) if CromwellShutdown.shutdownInProgress() => serviceShuttingDownResponse + case Success(r: FailedCallCacheDiffResponse) => + r.reason.errorRequest(StatusCodes.InternalServerError) + case Failure(_: AskTimeoutException) if CromwellShutdown.shutdownInProgress() => + serviceShuttingDownResponse case Failure(e: CachedCallNotFoundException) => e.errorRequest(StatusCodes.NotFound) case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) @@ -142,13 +159,21 @@ trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport w path("workflows" / Segment / Segment / "releaseHold") { (_, possibleWorkflowId) => post { instrumentRequest { - val response = validateWorkflowIdInMetadata(possibleWorkflowId, serviceRegistryActor) flatMap { workflowId => - workflowStoreActor.ask(WorkflowStoreActor.WorkflowOnHoldToSubmittedCommand(workflowId)).mapTo[WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedResponse] + val response = validateWorkflowIdInMetadata(possibleWorkflowId, serviceRegistryActor) flatMap { + workflowId => + workflowStoreActor + .ask(WorkflowStoreActor.WorkflowOnHoldToSubmittedCommand(workflowId)) + .mapTo[WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedResponse] } - onComplete(response){ - case Success(WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedFailure(_, e: NotInOnHoldStateException)) => e.errorRequest(StatusCodes.Forbidden) - case Success(WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedFailure(_, e)) => e.errorRequest(StatusCodes.InternalServerError) - case Success(r: WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedSuccess) => completeResponse(StatusCodes.OK, toResponse(r.workflowId, WorkflowSubmitted), Seq.empty) + onComplete(response) { + case Success( + WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedFailure(_, e: NotInOnHoldStateException) + ) => + e.errorRequest(StatusCodes.Forbidden) + case Success(WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedFailure(_, e)) => + e.errorRequest(StatusCodes.InternalServerError) + case Success(r: WorkflowStoreEngineActor.WorkflowOnHoldToSubmittedSuccess) => + completeResponse(StatusCodes.OK, toResponse(r.workflowId, 
WorkflowSubmitted), Seq.empty) case Failure(e: UnrecognizedWorkflowException) => e.failRequest(StatusCodes.NotFound) case Failure(e: InvalidWorkflowException) => e.failRequest(StatusCodes.BadRequest) case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) @@ -157,44 +182,49 @@ trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport w } } ~ metadataRoutes - private def metadataLookupForTimingRoute(workflowId: WorkflowId): Future[MetadataJsonResponse] = { val includeKeys = NonEmptyList.of("start", "end", "executionStatus", "executionEvents", "subWorkflowMetadata") - val readMetadataRequest = (w: WorkflowId) => GetSingleWorkflowMetadataAction(w, Option(includeKeys), None, expandSubWorkflows = true) + val readMetadataRequest = (w: WorkflowId) => + GetSingleWorkflowMetadataAction(w, Option(includeKeys), None, expandSubWorkflows = true) serviceRegistryActor.ask(readMetadataRequest(workflowId)).mapTo[MetadataJsonResponse] } - private def completeTimingRouteResponse(metadataResponse: Future[MetadataJsonResponse]) = { + private def completeTimingRouteResponse(metadataResponse: Future[MetadataJsonResponse]) = onComplete(metadataResponse) { case Success(r: SuccessfulMetadataJsonResponse) => - Try(Source.fromResource("workflowTimings/workflowTimings.html").mkString) match { case Success(wfTimingsContent) => - val response = HttpResponse(entity = wfTimingsContent.replace("\"{{REPLACE_THIS_WITH_METADATA}}\"", r.responseJson.toString)) + val response = HttpResponse(entity = + wfTimingsContent.replace("\"{{REPLACE_THIS_WITH_METADATA}}\"", r.responseJson.toString) + ) complete(response.withEntity(response.entity.withContentType(`text/html(UTF-8)`))) - case Failure(e) => completeResponse(StatusCodes.InternalServerError, APIResponse.fail(new RuntimeException("Error while loading workflowTimings.html", e)), Seq.empty) + case Failure(e) => + completeResponse(StatusCodes.InternalServerError, + APIResponse.fail(new RuntimeException("Error while loading workflowTimings.html", e)), + Seq.empty + ) } case Success(r: FailedMetadataJsonResponse) => r.reason.errorRequest(StatusCodes.InternalServerError) case Failure(_: AskTimeoutException) if CromwellShutdown.shutdownInProgress() => serviceShuttingDownResponse case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) case Failure(e) => e.failRequest(StatusCodes.InternalServerError) } - } - private def toResponse(workflowId: WorkflowId, workflowState: WorkflowState): WorkflowSubmitResponse = { + private def toResponse(workflowId: WorkflowId, workflowState: WorkflowState): WorkflowSubmitResponse = WorkflowSubmitResponse(workflowId.toString, workflowState.toString) - } private def submitRequest(formData: Multipart.FormData, isSingleSubmission: Boolean): Route = { - def getWorkflowState(workflowOnHold: Boolean): WorkflowState = { + def getWorkflowState(workflowOnHold: Boolean): WorkflowState = if (workflowOnHold) WorkflowOnHold else WorkflowSubmitted - } - def askSubmit(command: WorkflowStoreActor.WorkflowStoreActorSubmitCommand, warnings: Seq[String], workflowState: WorkflowState): Route = { + def askSubmit(command: WorkflowStoreActor.WorkflowStoreActorSubmitCommand, + warnings: Seq[String], + workflowState: WorkflowState + ): Route = // NOTE: Do not blindly copy the akka-http -to- ask-actor pattern below without knowing the pros and cons. 
onComplete(workflowStoreActor.ask(command).mapTo[WorkflowStoreSubmitActor.WorkflowStoreSubmitActorResponse]) { case Success(w) => @@ -210,14 +240,16 @@ trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport w case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) case Failure(e) => e.failRequest(StatusCodes.InternalServerError, warnings) } - } onComplete(materializeFormData(formData)) { case Success(data) => PartialWorkflowSources.fromSubmitRoute(data, allowNoInputs = isSingleSubmission) match { case Success(workflowSourceFiles) if isSingleSubmission && workflowSourceFiles.size == 1 => val warnings = workflowSourceFiles.flatMap(_.warnings) - askSubmit(WorkflowStoreActor.SubmitWorkflow(workflowSourceFiles.head), warnings, getWorkflowState(workflowSourceFiles.head.workflowOnHold)) + askSubmit(WorkflowStoreActor.SubmitWorkflow(workflowSourceFiles.head), + warnings, + getWorkflowState(workflowSourceFiles.head.workflowOnHold) + ) // Catches the case where someone has gone through the single submission endpoint w/ more than one workflow case Success(workflowSourceFiles) if isSingleSubmission => val warnings = workflowSourceFiles.flatMap(_.warnings) @@ -227,7 +259,9 @@ trait CromwellApiService extends HttpInstrumentation with MetadataRouteSupport w val warnings = workflowSourceFiles.flatMap(_.warnings) askSubmit( WorkflowStoreActor.BatchSubmitWorkflows(NonEmptyList.fromListUnsafe(workflowSourceFiles.toList)), - warnings, getWorkflowState(workflowSourceFiles.head.workflowOnHold)) + warnings, + getWorkflowState(workflowSourceFiles.head.workflowOnHold) + ) case Failure(t) => t.failRequest(StatusCodes.BadRequest) } case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) @@ -248,12 +282,13 @@ object CromwellApiService { workflowStoreActor: ActorRef, workflowManagerActor: ActorRef, successHandler: PartialFunction[SuccessfulAbortResponse, Route] = standardAbortSuccessHandler, - errorHandler: PartialFunction[Throwable, Route] = standardAbortErrorHandler) - (implicit timeout: Timeout): Route = { + errorHandler: PartialFunction[Throwable, Route] = standardAbortErrorHandler + )(implicit timeout: Timeout): Route = handleExceptions(ExceptionHandler(errorHandler)) { Try(WorkflowId.fromString(possibleWorkflowId)) match { case Success(workflowId) => - val response = workflowStoreActor.ask(WorkflowStoreActor.AbortWorkflowCommand(workflowId)).mapTo[AbortResponse] + val response = + workflowStoreActor.ask(WorkflowStoreActor.AbortWorkflowCommand(workflowId)).mapTo[AbortResponse] onComplete(response) { case Success(x: SuccessfulAbortResponse) => successHandler(x) case Success(x: WorkflowAbortFailureResponse) => throw x.failure @@ -262,14 +297,15 @@ object CromwellApiService { case Failure(_) => throw InvalidWorkflowException(possibleWorkflowId) } } - } /** * The abort success handler for typical cases, i.e. cromwell's API. 
*/ private def standardAbortSuccessHandler: PartialFunction[SuccessfulAbortResponse, Route] = { - case WorkflowAbortedResponse(id) => complete(ToResponseMarshallable(WorkflowAbortResponse(id.toString, WorkflowAborted.toString))) - case WorkflowAbortRequestedResponse(id) => complete(ToResponseMarshallable(WorkflowAbortResponse(id.toString, WorkflowAborting.toString))) + case WorkflowAbortedResponse(id) => + complete(ToResponseMarshallable(WorkflowAbortResponse(id.toString, WorkflowAborted.toString))) + case WorkflowAbortRequestedResponse(id) => + complete(ToResponseMarshallable(WorkflowAbortResponse(id.toString, WorkflowAborting.toString))) } /** @@ -283,9 +319,10 @@ object CromwellApiService { case e: Exception => e.errorRequest(StatusCodes.InternalServerError) } - def validateWorkflowIdInMetadata(possibleWorkflowId: String, - serviceRegistryActor: ActorRef) - (implicit timeout: Timeout, executor: ExecutionContext): Future[WorkflowId] = { + def validateWorkflowIdInMetadata(possibleWorkflowId: String, serviceRegistryActor: ActorRef)(implicit + timeout: Timeout, + executor: ExecutionContext + ): Future[WorkflowId] = Try(WorkflowId.fromString(possibleWorkflowId)) match { case Success(w) => serviceRegistryActor.ask(ValidateWorkflowIdInMetadata(w)).mapTo[WorkflowValidationResponse] flatMap { @@ -295,11 +332,11 @@ object CromwellApiService { } case Failure(_) => Future.failed(InvalidWorkflowException(possibleWorkflowId)) } - } - def validateWorkflowIdInMetadataSummaries(possibleWorkflowId: String, - serviceRegistryActor: ActorRef) - (implicit timeout: Timeout, executor: ExecutionContext): Future[WorkflowId] = { + def validateWorkflowIdInMetadataSummaries(possibleWorkflowId: String, serviceRegistryActor: ActorRef)(implicit + timeout: Timeout, + executor: ExecutionContext + ): Future[WorkflowId] = Try(WorkflowId.fromString(possibleWorkflowId)) match { case Success(w) => serviceRegistryActor.ask(ValidateWorkflowIdInMetadataSummaries(w)).mapTo[WorkflowValidationResponse] map { @@ -309,17 +346,20 @@ object CromwellApiService { } case Failure(_) => Future.failed(InvalidWorkflowException(possibleWorkflowId)) } - } final case class BackendResponse(supportedBackends: List[String], defaultBackend: String) final case class UnrecognizedWorkflowException(id: WorkflowId) extends Exception(s"Unrecognized workflow ID: $id") - final case class InvalidWorkflowException(possibleWorkflowId: String) extends Exception(s"Invalid workflow ID: '$possibleWorkflowId'.") + final case class InvalidWorkflowException(possibleWorkflowId: String) + extends Exception(s"Invalid workflow ID: '$possibleWorkflowId'.") val cromwellVersion = VersionUtil.getVersion("cromwell-engine") val swaggerUiVersion = VersionUtil.getVersion("swagger-ui", VersionUtil.sbtDependencyVersion("swaggerUi")) - val backendResponse = BackendResponse(BackendConfiguration.AllBackendEntries.map(_.name).sorted, BackendConfiguration.DefaultBackendEntry.name) + val backendResponse = BackendResponse(BackendConfiguration.AllBackendEntries.map(_.name).sorted, + BackendConfiguration.DefaultBackendEntry.name + ) val versionResponse = JsObject(Map("cromwell" -> cromwellVersion.toJson)) - val serviceShuttingDownResponse = new Exception("Cromwell service is shutting down.").failRequest(StatusCodes.ServiceUnavailable) + val serviceShuttingDownResponse = + new Exception("Cromwell service is shutting down.").failRequest(StatusCodes.ServiceUnavailable) } diff --git a/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala 
b/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala index b84ff59d5a3..4ec1d0babc7 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala @@ -1,6 +1,6 @@ package cromwell.webservice.routes -import java.time.{OffsetDateTime, Duration => JDuration} +import java.time.{Duration => JDuration, OffsetDateTime} import java.util.concurrent.TimeUnit import akka.actor.{ActorRef, ActorRefFactory} @@ -9,14 +9,14 @@ import akka.http.scaladsl.marshalling.ToResponseMarshallable import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import akka.pattern.{AskTimeoutException, ask} +import akka.pattern.{ask, AskTimeoutException} import akka.util.Timeout import cats.data.NonEmptyList import cats.data.Validated.{Invalid, Valid} import cromwell.core.Dispatcher.ApiDispatcher import cromwell.core.instrumentation.InstrumentationPrefixes.ServicesPrefix import cromwell.core.labels.Labels -import cromwell.core.{WorkflowId, WorkflowMetadataKeys, path => _} +import cromwell.core.{path => _, WorkflowId, WorkflowMetadataKeys} import cromwell.engine.instrumentation.HttpInstrumentation import cromwell.server.CromwellShutdown import cromwell.services._ @@ -38,7 +38,6 @@ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future, TimeoutException} import scala.util.{Failure, Success} - trait MetadataRouteSupport extends HttpInstrumentation { implicit def actorRefFactory: ActorRefFactory implicit val ec: ExecutionContext @@ -83,15 +82,21 @@ trait MetadataRouteSupport extends HttpInstrumentation { encodeResponse { path("workflows" / Segment / Segment / "metadata") { (_, possibleWorkflowId) => instrumentRequest { - parameters((Symbol("includeKey").*, Symbol("excludeKey").*, Symbol("expandSubWorkflows").as[Boolean].?)) { (includeKeys, excludeKeys, expandSubWorkflowsOption) => - val includeKeysOption = NonEmptyList.fromList(includeKeys.toList) - val excludeKeysOption = NonEmptyList.fromList(excludeKeys.toList) - val expandSubWorkflows = expandSubWorkflowsOption.getOrElse(false) + parameters((Symbol("includeKey").*, Symbol("excludeKey").*, Symbol("expandSubWorkflows").as[Boolean].?)) { + (includeKeys, excludeKeys, expandSubWorkflowsOption) => + val includeKeysOption = NonEmptyList.fromList(includeKeys.toList) + val excludeKeysOption = NonEmptyList.fromList(excludeKeys.toList) + val expandSubWorkflows = expandSubWorkflowsOption.getOrElse(false) - metadataLookup( - possibleWorkflowId, - (w: WorkflowId) => GetSingleWorkflowMetadataAction(w, includeKeysOption, excludeKeysOption, expandSubWorkflows), - serviceRegistryActor) + metadataLookup(possibleWorkflowId, + (w: WorkflowId) => + GetSingleWorkflowMetadataAction(w, + includeKeysOption, + excludeKeysOption, + expandSubWorkflows + ), + serviceRegistryActor + ) } } } @@ -107,7 +112,8 @@ trait MetadataRouteSupport extends HttpInstrumentation { entity(as[Map[String, String]]) { parameterMap => instrumentRequest { Labels.validateMapOfLabels(parameterMap) match { - case Valid(labels) => patchLabelsRequest(possibleWorkflowId, labels, serviceRegistryActor, actorRefFactory) + case Valid(labels) => + patchLabelsRequest(possibleWorkflowId, labels, serviceRegistryActor, actorRefFactory) case Invalid(e) => val iae = new IllegalArgumentException(e.toList.mkString(",")) iae.failRequest(StatusCodes.BadRequest) @@ -141,41 +147,42 @@ object MetadataRouteSupport { 
private def processMetadataArchivedResponse(workflowId: WorkflowId, archiveStatus: MetadataArchiveStatus, endTime: Option[OffsetDateTime], - additionalMsg: String = ""): JsObject = { + additionalMsg: String = "" + ): JsObject = { val baseMessage = "Cromwell has archived this workflow's metadata according to the lifecycle policy." val timeSinceMessage = endTime map { timestamp => val duration = FiniteDuration(JDuration.between(timestamp, OffsetDateTime.now()).toMillis, TimeUnit.MILLISECONDS) s" The workflow completed at $timestamp, which was ${duration} ago." - } getOrElse("") - val additionalDetails = if (archiveStatus == MetadataArchiveStatus.ArchivedAndDeleted) - " It is available in the archive bucket, or via a support request in the case of a managed instance." - else "" + } getOrElse "" + val additionalDetails = + if (archiveStatus == MetadataArchiveStatus.ArchivedAndDeleted) + " It is available in the archive bucket, or via a support request in the case of a managed instance." + else "" - JsObject(Map( - WorkflowMetadataKeys.Id -> JsString(workflowId.toString), - WorkflowMetadataKeys.MetadataArchiveStatus -> JsString(archiveStatus.toString), - WorkflowMetadataKeys.Message -> JsString(baseMessage + timeSinceMessage + additionalDetails + additionalMsg) - )) + JsObject( + Map( + WorkflowMetadataKeys.Id -> JsString(workflowId.toString), + WorkflowMetadataKeys.MetadataArchiveStatus -> JsString(archiveStatus.toString), + WorkflowMetadataKeys.Message -> JsString(baseMessage + timeSinceMessage + additionalDetails + additionalMsg) + ) + ) } def metadataLookup(possibleWorkflowId: String, request: WorkflowId => BuildMetadataJsonAction, - serviceRegistryActor: ActorRef) - (implicit timeout: Timeout, - ec: ExecutionContext): Route = { + serviceRegistryActor: ActorRef + )(implicit timeout: Timeout, ec: ExecutionContext): Route = completeMetadataBuilderResponse(metadataBuilderActorRequest(possibleWorkflowId, request, serviceRegistryActor)) - } - def queryMetadata(parameters: Seq[(String, String)], - serviceRegistryActor: ActorRef)(implicit timeout: Timeout): Route = { + def queryMetadata(parameters: Seq[(String, String)], serviceRegistryActor: ActorRef)(implicit + timeout: Timeout + ): Route = completeMetadataQueryResponse(metadataQueryRequest(parameters, serviceRegistryActor)) - } def metadataBuilderActorRequest(possibleWorkflowId: String, request: WorkflowId => BuildMetadataJsonAction, - serviceRegistryActor: ActorRef) - (implicit timeout: Timeout, - ec: ExecutionContext): Future[MetadataJsonResponse] = { + serviceRegistryActor: ActorRef + )(implicit timeout: Timeout, ec: ExecutionContext): Future[MetadataJsonResponse] = { def recordHistoricalMetadataLookupMetrics(endTime: Option[OffsetDateTime]): Unit = { val timeSinceEndTime = endTime match { @@ -183,8 +190,11 @@ object MetadataRouteSupport { case None => 0.seconds } - val lagInstrumentationPath = MetadataServiceActor.MetadataInstrumentationPrefix :+ "archiver" :+ "historical_metadata_lookup" :+ "lag" - val lagMessage = InstrumentationServiceMessage(CromwellTiming(CromwellBucket(ServicesPrefix.toList, lagInstrumentationPath), timeSinceEndTime)) + val lagInstrumentationPath = + MetadataServiceActor.MetadataInstrumentationPrefix :+ "archiver" :+ "historical_metadata_lookup" :+ "lag" + val lagMessage = InstrumentationServiceMessage( + CromwellTiming(CromwellBucket(ServicesPrefix.toList, lagInstrumentationPath), timeSinceEndTime) + ) serviceRegistryActor ! 
lagMessage val interestingDayMarks = (5.to(55, step = 5) ++ 0.to(4)).map(d => (d.days, s"${d}_day_old")) @@ -192,29 +202,39 @@ object MetadataRouteSupport { (interestingDayMarks ++ interestingMonthMarks) foreach { case (timeSpan, metricName) if timeSinceEndTime >= timeSpan => - val oldMetadataCounterPath = MetadataServiceActor.MetadataInstrumentationPrefix :+ "archiver" :+ "historical_metadata_lookup" :+ "lookup_age_counts" :+ metricName - val oldMetadataCounterMessage = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(ServicesPrefix.toList, oldMetadataCounterPath))) + val oldMetadataCounterPath = + MetadataServiceActor.MetadataInstrumentationPrefix :+ "archiver" :+ "historical_metadata_lookup" :+ "lookup_age_counts" :+ metricName + val oldMetadataCounterMessage = InstrumentationServiceMessage( + CromwellIncrement(CromwellBucket(ServicesPrefix.toList, oldMetadataCounterPath)) + ) serviceRegistryActor ! oldMetadataCounterMessage case _ => // Do nothing } } def checkIfMetadataDeletedAndRespond(id: WorkflowId, - metadataRequest: BuildWorkflowMetadataJsonWithOverridableSourceAction): Future[MetadataJsonResponse] = { - serviceRegistryActor.ask(FetchWorkflowMetadataArchiveStatusAndEndTime(id)).mapTo[FetchWorkflowArchiveStatusAndEndTimeResponse] flatMap { + metadataRequest: BuildWorkflowMetadataJsonWithOverridableSourceAction + ): Future[MetadataJsonResponse] = + serviceRegistryActor + .ask(FetchWorkflowMetadataArchiveStatusAndEndTime(id)) + .mapTo[FetchWorkflowArchiveStatusAndEndTimeResponse] flatMap { case WorkflowMetadataArchivedStatusAndEndTime(archiveStatus, endTime) => recordHistoricalMetadataLookupMetrics(endTime) - if (archiveStatus.isDeleted) Future.successful(SuccessfulMetadataJsonResponse(metadataRequest, processMetadataArchivedResponse(id, archiveStatus, endTime))) + if (archiveStatus.isDeleted) + Future.successful( + SuccessfulMetadataJsonResponse(metadataRequest, + processMetadataArchivedResponse(id, archiveStatus, endTime) + ) + ) else serviceRegistryActor.ask(request(id)).mapTo[MetadataJsonResponse] case FailedToGetArchiveStatusAndEndTime(e) => Future.failed(e) } - } validateWorkflowIdInMetadata(possibleWorkflowId, serviceRegistryActor) flatMap { id => /* for requests made to one of /metadata, /logs or /outputs endpoints, perform an additional check to see if metadata for the workflow has been archived and deleted or not (as they interact with metadata table) - */ + */ request(id) match { case m: BuildWorkflowMetadataJsonWithOverridableSourceAction => checkIfMetadataDeletedAndRespond(id, m) case _ => serviceRegistryActor.ask(request(id)).mapTo[MetadataJsonResponse] @@ -222,8 +242,9 @@ object MetadataRouteSupport { } } - def completeMetadataBuilderResponse(response: Future[MetadataJsonResponse]): Route = { - onComplete(response) { case Success(r: SuccessfulMetadataJsonResponse) => complete(r.responseJson) + def completeMetadataBuilderResponse(response: Future[MetadataJsonResponse]): Route = + onComplete(response) { + case Success(r: SuccessfulMetadataJsonResponse) => complete(r.responseJson) case Success(r: FailedMetadataJsonResponse) => r.reason.errorRequest(StatusCodes.InternalServerError) case Failure(_: AskTimeoutException) if CromwellShutdown.shutdownInProgress() => serviceShuttingDownResponse case Failure(e: UnrecognizedWorkflowException) => e.failRequest(StatusCodes.NotFound) @@ -231,12 +252,11 @@ object MetadataRouteSupport { case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) case Failure(e) => 
e.errorRequest(StatusCodes.InternalServerError) } - } - def metadataQueryRequest(parameters: Seq[(String, String)], - serviceRegistryActor: ActorRef)(implicit timeout: Timeout): Future[MetadataQueryResponse] = { + def metadataQueryRequest(parameters: Seq[(String, String)], serviceRegistryActor: ActorRef)(implicit + timeout: Timeout + ): Future[MetadataQueryResponse] = serviceRegistryActor.ask(QueryForWorkflowsMatchingParameters(parameters)).mapTo[MetadataQueryResponse] - } def completeMetadataQueryResponse(response: Future[MetadataQueryResponse]): Route = { import cromwell.webservice.WorkflowJsonSupport.workflowQueryResponse @@ -250,41 +270,49 @@ object MetadataRouteSupport { } } - def completePatchLabelsResponse(response: Future[LabelsManagerActorResponse]): Route = { + def completePatchLabelsResponse(response: Future[LabelsManagerActorResponse]): Route = onComplete(response) { case Success(r: BuiltLabelsManagerResponse) => complete(r.response) - case Success(r: WorkflowArchivedLabelsManagerResponse) => completeResponse(StatusCodes.BadRequest, r.response, Seq.empty) + case Success(r: WorkflowArchivedLabelsManagerResponse) => + completeResponse(StatusCodes.BadRequest, r.response, Seq.empty) case Success(e: FailedLabelsManagerResponse) => e.reason.failRequest(StatusCodes.InternalServerError) case Failure(e: UnrecognizedWorkflowException) => e.failRequest(StatusCodes.NotFound) case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) case Failure(e) => e.errorRequest(StatusCodes.InternalServerError) } - } def patchLabelsRequest(possibleWorkflowId: String, labels: Labels, serviceRegistryActor: ActorRef, - actorRefFactory: ActorRefFactory) - (implicit timeout: Timeout, ec: ExecutionContext): Route = { + actorRefFactory: ActorRefFactory + )(implicit timeout: Timeout, ec: ExecutionContext): Route = { - def checkIfMetadataArchivedAndRespond(id: WorkflowId, archiveStatusResponse: FetchWorkflowArchiveStatusAndEndTimeResponse): Future[LabelsManagerActorResponse] = { + def checkIfMetadataArchivedAndRespond(id: WorkflowId, + archiveStatusResponse: FetchWorkflowArchiveStatusAndEndTimeResponse + ): Future[LabelsManagerActorResponse] = archiveStatusResponse match { case WorkflowMetadataArchivedStatusAndEndTime(archiveStatus, endTime) => if (archiveStatus.isArchived) { - val message = " As a result, new labels can't be added or existing labels can't be updated for this workflow." - Future.successful(WorkflowArchivedLabelsManagerResponse(processMetadataArchivedResponse(id, archiveStatus, endTime, message))) - } - else { - val lma = actorRefFactory.actorOf(LabelsManagerActor.props(serviceRegistryActor).withDispatcher(ApiDispatcher)) + val message = + " As a result, new labels can't be added or existing labels can't be updated for this workflow." 
+ Future.successful( + WorkflowArchivedLabelsManagerResponse( + processMetadataArchivedResponse(id, archiveStatus, endTime, message) + ) + ) + } else { + val lma = + actorRefFactory.actorOf(LabelsManagerActor.props(serviceRegistryActor).withDispatcher(ApiDispatcher)) lma.ask(LabelsAddition(LabelsData(id, labels))).mapTo[LabelsManagerActorResponse] } case FailedToGetArchiveStatusAndEndTime(e) => Future.failed(e) } - } val response = for { id <- validateWorkflowIdInMetadataSummaries(possibleWorkflowId, serviceRegistryActor) - archiveStatusResponse <- serviceRegistryActor.ask(FetchWorkflowMetadataArchiveStatusAndEndTime(id)).mapTo[FetchWorkflowArchiveStatusAndEndTimeResponse] + archiveStatusResponse <- serviceRegistryActor + .ask(FetchWorkflowMetadataArchiveStatusAndEndTime(id)) + .mapTo[FetchWorkflowArchiveStatusAndEndTimeResponse] response <- checkIfMetadataArchivedAndRespond(id, archiveStatusResponse) } yield response diff --git a/engine/src/main/scala/cromwell/webservice/routes/WomtoolRouteSupport.scala b/engine/src/main/scala/cromwell/webservice/routes/WomtoolRouteSupport.scala index 0690c11dcb5..dec26a63dbf 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/WomtoolRouteSupport.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/WomtoolRouteSupport.scala @@ -8,7 +8,12 @@ import akka.pattern.ask import akka.stream.ActorMaterializer import akka.util.Timeout import cromwell.core.{WorkflowOptions, WorkflowSourceFilesCollection} -import cromwell.services.womtool.WomtoolServiceMessages.{DescribeFailure, DescribeRequest, DescribeResult, DescribeSuccess} +import cromwell.services.womtool.WomtoolServiceMessages.{ + DescribeFailure, + DescribeRequest, + DescribeResult, + DescribeSuccess +} import cromwell.webservice.WebServiceUtils import cromwell.webservice.WebServiceUtils.EnhancedThrowable diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/CromwellMetadata.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/CromwellMetadata.scala index d8c38de8dc9..0ff96489169 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/CromwellMetadata.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/CromwellMetadata.scala @@ -8,7 +8,7 @@ final case class CromwellSubmittedFiles(workflow: Option[String], options: Option[String], inputs: Option[String], labels: Option[String] - ) +) final case class CromwellCallsMetadata(shardIndex: Option[Int], commandLine: Option[String], @@ -17,7 +17,7 @@ final case class CromwellCallsMetadata(shardIndex: Option[Int], end: Option[String], stdout: Option[String], stderr: Option[String] - ) +) final case class CromwellMetadata(workflowName: Option[String], id: String, @@ -27,7 +27,7 @@ final case class CromwellMetadata(workflowName: Option[String], submittedFiles: CromwellSubmittedFiles, outputs: Option[JsObject], calls: Option[Map[String, Seq[CromwellCallsMetadata]]] - ) { +) { import CromwellMetadata._ def wesRunLog: WesRunLog = { @@ -35,7 +35,8 @@ final case class CromwellMetadata(workflowName: Option[String], val workflowTags = submittedFiles.labels.map(JsonParser(_).asJsObject) val workflowEngineParams = submittedFiles.options.map(JsonParser(_).asJsObject) - val workflowRequest = WesRunRequest(workflow_params = workflowParams, + val workflowRequest = WesRunRequest( + workflow_params = workflowParams, workflow_type = submittedFiles.workflowType.getOrElse("None supplied"), workflow_type_version = submittedFiles.workflowTypeVersion.getOrElse("None supplied"), tags = workflowTags, @@ -44,12 +45,12 @@ final case 
class CromwellMetadata(workflowName: Option[String], ) val workflowLogData = WesLog(name = workflowName, - cmd = None, - start_time = start, - end_time = end, - stdout = None, - stderr = None, - exit_code = None + cmd = None, + start_time = start, + end_time = end, + stdout = None, + stderr = None, + exit_code = None ) val taskLogs = for { @@ -74,7 +75,9 @@ object CromwellMetadata { import spray.json.DefaultJsonProtocol._ implicit val cromwellCallsMetadataFormat: JsonFormat[CromwellCallsMetadata] = jsonFormat7(CromwellCallsMetadata.apply) - implicit val cromwellSubmittedFilesFormat: JsonFormat[CromwellSubmittedFiles] = jsonFormat6(CromwellSubmittedFiles.apply) + implicit val cromwellSubmittedFilesFormat: JsonFormat[CromwellSubmittedFiles] = jsonFormat6( + CromwellSubmittedFiles.apply + ) implicit val cromwellMetadataFormat: JsonFormat[CromwellMetadata] = jsonFormat8(CromwellMetadata.apply) def fromJson(json: String): CromwellMetadata = { diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/RunListResponse.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/RunListResponse.scala index b9bea188061..11ca85082ac 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/RunListResponse.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/RunListResponse.scala @@ -8,13 +8,11 @@ import cromwell.webservice.routes.wes.WesState.fromStatusString case class RunListResponse(runs: List[WesRunStatus], next_page_token: String) object RunListResponse { - def fromMetadataQueryResponse(response: MetadataService.MetadataQueryResponse): WesResponse = { - - response match { + def fromMetadataQueryResponse(response: MetadataService.MetadataQueryResponse): WesResponse = + response match { case w: WorkflowQuerySuccess => val runs = w.response.results.toList.map(x => WesRunStatus(x.id, fromStatusString(x.status))) WesResponseRunList(runs) case f: WorkflowQueryFailure => WesErrorResponse(f.reason.getMessage, StatusCodes.BadRequest.intValue) } - } } diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/ServiceInfo.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/ServiceInfo.scala index 02710a5c7e1..c6a3a8401cd 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/ServiceInfo.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/ServiceInfo.scala @@ -49,9 +49,12 @@ object ServiceInfo { /** * Generate any runtime level information and create a response to the client */ - def toWesResponse(workflowStoreActor: ActorRef)(implicit ec: ExecutionContext, timeout: Timeout): Future[WesStatusInfoResponse] = { + def toWesResponse( + workflowStoreActor: ActorRef + )(implicit ec: ExecutionContext, timeout: Timeout): Future[WesStatusInfoResponse] = workflowStats(workflowStoreActor).map(stats => - WesStatusInfoResponse(WorkflowTypeVersion, + WesStatusInfoResponse( + WorkflowTypeVersion, SupportedWesVersions, SupportedFilesystemProtocols, WorkflowEngineVerisons, @@ -59,18 +62,20 @@ object ServiceInfo { stats, AuthInstructionsUrl.toString, ContactInfoUrl.toString, - Tags) + Tags + ) ) - } /** * Retrieve a map from state to count for all represented non-terminal workflow states */ - private def workflowStats(workflowStoreActor: ActorRef)(implicit ec: ExecutionContext, timeout: Timeout): Future[Map[WesState, Int]] = { - workflowStoreActor.ask(GetWorkflowStoreStats) + private def workflowStats( + workflowStoreActor: ActorRef + )(implicit ec: ExecutionContext, timeout: Timeout): Future[Map[WesState, Int]] = + workflowStoreActor + 
.ask(GetWorkflowStoreStats) .mapTo[Map[WorkflowState, Int]] .map(m => m.map(e => WesState.fromCromwellStatus(e._1) -> e._2)) // Convert WorkflowState -> WesState - } } /* diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/WesResponse.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/WesResponse.scala index 58313cb5946..9d2ca2a5e82 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/WesResponse.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/WesResponse.scala @@ -17,22 +17,22 @@ final case class WesRunLog(run_id: String, run_log: Option[WesLog], task_logs: Option[List[WesLog]], outputs: Option[JsObject] - ) extends WesResponse +) extends WesResponse object WesRunLog { def fromJson(json: String): WesRunLog = CromwellMetadata.fromJson(json).wesRunLog } - final case class WesStatusInfoResponse(workflow_type_version: Map[String, Iterable[String]], - supported_wes_versions: Iterable[String], - supported_filesystem_protocols: Iterable[String], - workflow_engine_versions: Map[String, String], - default_workflow_engine_parameters: Iterable[DefaultWorkflowEngineParameter], - system_state_counts: Map[WesState, Int], - auth_instructions_url: String, - contact_info_url: String, - tags: Map[String, String]) extends WesResponse + supported_wes_versions: Iterable[String], + supported_filesystem_protocols: Iterable[String], + workflow_engine_versions: Map[String, String], + default_workflow_engine_parameters: Iterable[DefaultWorkflowEngineParameter], + system_state_counts: Map[WesState, Int], + auth_instructions_url: String, + contact_info_url: String, + tags: Map[String, String] +) extends WesResponse object WesResponseJsonSupport extends SprayJsonSupport with DefaultJsonProtocol { import WesStateJsonSupport._ @@ -51,7 +51,7 @@ object WesResponseJsonSupport extends SprayJsonSupport with DefaultJsonProtocol implicit object WesResponseFormat extends RootJsonFormat[WesResponse] { import spray.json._ - def write(r: WesResponse) = { + def write(r: WesResponse) = r match { case r: WesRunId => r.toJson case s: WesRunStatus => s.toJson @@ -61,8 +61,9 @@ object WesResponseJsonSupport extends SprayJsonSupport with DefaultJsonProtocol case m: WesResponseWorkflowMetadata => m.toJson case w: WesRunLog => w.toJson } - } - def read(value: JsValue) = throw new UnsupportedOperationException("Reading WesResponse objects from JSON is not supported") + def read(value: JsValue) = throw new UnsupportedOperationException( + "Reading WesResponse objects from JSON is not supported" + ) } } diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/WesRouteSupport.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/WesRouteSupport.scala index 41973bb8981..bd5977ae853 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/WesRouteSupport.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/WesRouteSupport.scala @@ -5,7 +5,7 @@ import akka.http.scaladsl.model.{Multipart, StatusCode, StatusCodes} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.directives.RouteDirectives.complete import akka.http.scaladsl.server.{Directive1, Route} -import akka.pattern.{AskTimeoutException, ask} +import akka.pattern.{ask, AskTimeoutException} import akka.stream.ActorMaterializer import akka.util.Timeout import cats.data.NonEmptyList @@ -16,12 +16,18 @@ import cromwell.engine.instrumentation.HttpInstrumentation import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException import 
cromwell.engine.workflow.workflowstore.{WorkflowStoreActor, WorkflowStoreSubmitActor} import cromwell.server.CromwellShutdown -import cromwell.services.metadata.MetadataService.{BuildMetadataJsonAction, GetSingleWorkflowMetadataAction, GetStatus, MetadataServiceResponse, StatusLookupFailed} +import cromwell.services.metadata.MetadataService.{ + BuildMetadataJsonAction, + GetSingleWorkflowMetadataAction, + GetStatus, + MetadataServiceResponse, + StatusLookupFailed +} import cromwell.services.{FailedMetadataJsonResponse, SuccessfulMetadataJsonResponse} import cromwell.webservice.PartialWorkflowSources -import cromwell.webservice.WebServiceUtils.{EnhancedThrowable, completeResponse, materializeFormData} +import cromwell.webservice.WebServiceUtils.{completeResponse, materializeFormData, EnhancedThrowable} import cromwell.webservice.routes.CromwellApiService -import cromwell.webservice.routes.CromwellApiService.{UnrecognizedWorkflowException, validateWorkflowIdInMetadata} +import cromwell.webservice.routes.CromwellApiService.{validateWorkflowIdInMetadata, UnrecognizedWorkflowException} import cromwell.webservice.routes.MetadataRouteSupport.{metadataBuilderActorRequest, metadataQueryRequest} import cromwell.webservice.routes.wes.WesResponseJsonSupport._ import cromwell.webservice.routes.wes.WesRouteSupport.{respondWithWesError, _} @@ -31,8 +37,6 @@ import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ExecutionContext, Future, TimeoutException} import scala.util.{Failure, Success} - - trait WesRouteSupport extends HttpInstrumentation { val serviceRegistryActor: ActorRef @@ -55,7 +59,7 @@ trait WesRouteSupport extends HttpInstrumentation { - It'd require a fairly substantial refactor of the MetadataBuilderActor to be more general - It's expected that for now the usage of these endpoints will not be extensive, so the protections of the regulator should not be necessary - */ + */ val wesRoutes: Route = instrumentRequest { concat( @@ -74,19 +78,25 @@ trait WesRouteSupport extends HttpInstrumentation { } ~ post { extractSubmission() { submission => - wesSubmitRequest(submission.entity, - isSingleSubmission = true) + wesSubmitRequest(submission.entity, isSingleSubmission = true) } } }, path("runs" / Segment) { workflowId => get { // this is what it was like in code found in the project… it perhaps isn’t ideal but doesn’t seem to hurt, so leaving it like this for now. 
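              // Aside (illustrative sketch, not part of this patch): the action built just
              // below controls what metadata ends up in the WES run log, based on the
              // GetSingleWorkflowMetadataAction signature used throughout this diff. Passing
              // None for both key filters requests the full metadata document, while a
              // non-empty include list narrows it; the "outputs"-only variant here is an
              // illustrative assumption, not something this endpoint currently does.
              def fullRunLogAction(w: WorkflowId): BuildMetadataJsonAction =
                GetSingleWorkflowMetadataAction(w, None, None, expandSubWorkflows = false)
              def outputsOnlyAction(w: WorkflowId): BuildMetadataJsonAction =
                GetSingleWorkflowMetadataAction(w,
                                                NonEmptyList.fromList(List("outputs")),
                                                None,
                                                expandSubWorkflows = false
                )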
- completeCromwellResponse(runLog(workflowId, (w: WorkflowId) => GetSingleWorkflowMetadataAction(w, None, None, expandSubWorkflows = false), serviceRegistryActor)) + completeCromwellResponse( + runLog(workflowId, + (w: WorkflowId) => GetSingleWorkflowMetadataAction(w, None, None, expandSubWorkflows = false), + serviceRegistryActor + ) + ) } }, path("runs" / Segment / "status") { possibleWorkflowId => - val response = validateWorkflowIdInMetadata(possibleWorkflowId, serviceRegistryActor).flatMap(w => serviceRegistryActor.ask(GetStatus(w)).mapTo[MetadataServiceResponse]) + val response = validateWorkflowIdInMetadata(possibleWorkflowId, serviceRegistryActor).flatMap(w => + serviceRegistryActor.ask(GetStatus(w)).mapTo[MetadataServiceResponse] + ) // WES can also return a 401 or a 403 but that requires user auth knowledge which Cromwell doesn't currently have onComplete(response) { case Success(SuccessfulMetadataJsonResponse(_, jsObject)) => @@ -104,10 +114,11 @@ trait WesRouteSupport extends HttpInstrumentation { path("runs" / Segment / "cancel") { possibleWorkflowId => post { CromwellApiService.abortWorkflow(possibleWorkflowId, - workflowStoreActor, - workflowManagerActor, - successHandler = WesAbortSuccessHandler, - errorHandler = WesAbortErrorHandler) + workflowStoreActor, + workflowManagerActor, + successHandler = WesAbortSuccessHandler, + errorHandler = WesAbortErrorHandler + ) } } ) @@ -115,22 +126,22 @@ trait WesRouteSupport extends HttpInstrumentation { ) } - def toWesResponse(workflowId: WorkflowId, workflowState: WorkflowState): WesRunStatus = { + def toWesResponse(workflowId: WorkflowId, workflowState: WorkflowState): WesRunStatus = WesRunStatus(workflowId.toString, WesState.fromCromwellStatus(workflowState)) - } - def toWesResponseId(workflowId: WorkflowId): WesRunId ={ + def toWesResponseId(workflowId: WorkflowId): WesRunId = WesRunId(workflowId.toString) - } def wesSubmitRequest(formData: Multipart.FormData, isSingleSubmission: Boolean): Route = { - def getWorkflowState(workflowOnHold: Boolean): WorkflowState = { + def getWorkflowState(workflowOnHold: Boolean): WorkflowState = if (workflowOnHold) WorkflowOnHold else WorkflowSubmitted - } - def sendToWorkflowStore(command: WorkflowStoreActor.WorkflowStoreActorSubmitCommand, warnings: Seq[String], workflowState: WorkflowState): Route = { + def sendToWorkflowStore(command: WorkflowStoreActor.WorkflowStoreActorSubmitCommand, + warnings: Seq[String], + workflowState: WorkflowState + ): Route = // NOTE: Do not blindly copy the akka-http -to- ask-actor pattern below without knowing the pros and cons. 
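      // Aside (illustrative sketch, not part of this patch): the generic shape of the
      // akka-http -to- ask-actor pattern the NOTE above warns about. ask spawns a
      // temporary actor per request and fails the future with AskTimeoutException
      // once the implicit Timeout elapses, which is why the handler below
      // special-cases AskTimeoutException during shutdown. This helper is
      // hypothetical; only the calls it makes appear in the surrounding code.
      def askThenComplete[A](actor: ActorRef, message: Any)(handler: A => Route)(implicit
        timeout: Timeout,
        tag: scala.reflect.ClassTag[A]
      ): Route =
        onComplete(actor.ask(message).mapTo[A]) {
          case Success(response) => handler(response)
          case Failure(_: AskTimeoutException) if CromwellShutdown.shutdownInProgress() =>
            respondWithWesError("Cromwell service is shutting down", StatusCodes.InternalServerError)
          case Failure(e) => e.failRequest(StatusCodes.InternalServerError)
        }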
onComplete(workflowStoreActor.ask(command).mapTo[WorkflowStoreSubmitActor.WorkflowStoreSubmitActorResponse]) { case Success(w) => @@ -142,18 +153,21 @@ trait WesRouteSupport extends HttpInstrumentation { case WorkflowStoreSubmitActor.WorkflowSubmitFailed(throwable) => respondWithWesError(throwable.getLocalizedMessage, StatusCodes.BadRequest) } - case Failure(_: AskTimeoutException) if CromwellShutdown.shutdownInProgress() => respondWithWesError("Cromwell service is shutting down", StatusCodes.InternalServerError) + case Failure(_: AskTimeoutException) if CromwellShutdown.shutdownInProgress() => + respondWithWesError("Cromwell service is shutting down", StatusCodes.InternalServerError) case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) case Failure(e) => e.failRequest(StatusCodes.InternalServerError, warnings) } - } onComplete(materializeFormData(formData)) { case Success(data) => PartialWorkflowSources.fromSubmitRoute(data, allowNoInputs = isSingleSubmission) match { case Success(workflowSourceFiles) if isSingleSubmission && workflowSourceFiles.size == 1 => val warnings = workflowSourceFiles.flatMap(_.warnings) - sendToWorkflowStore(WorkflowStoreActor.SubmitWorkflow(workflowSourceFiles.head), warnings, getWorkflowState(workflowSourceFiles.head.workflowOnHold)) + sendToWorkflowStore(WorkflowStoreActor.SubmitWorkflow(workflowSourceFiles.head), + warnings, + getWorkflowState(workflowSourceFiles.head.workflowOnHold) + ) // Catches the case where someone has gone through the single submission endpoint w/ more than one workflow case Success(workflowSourceFiles) if isSingleSubmission => val warnings = workflowSourceFiles.flatMap(_.warnings) @@ -163,7 +177,9 @@ trait WesRouteSupport extends HttpInstrumentation { val warnings = workflowSourceFiles.flatMap(_.warnings) sendToWorkflowStore( WorkflowStoreActor.BatchSubmitWorkflows(NonEmptyList.fromListUnsafe(workflowSourceFiles.toList)), - warnings, getWorkflowState(workflowSourceFiles.head.workflowOnHold)) + warnings, + getWorkflowState(workflowSourceFiles.head.workflowOnHold) + ) case Failure(t) => t.failRequest(StatusCodes.BadRequest) } case Failure(e: TimeoutException) => e.failRequest(StatusCodes.ServiceUnavailable) @@ -172,64 +188,67 @@ trait WesRouteSupport extends HttpInstrumentation { } } - - object WesRouteSupport { import WesResponseJsonSupport._ - implicit lazy val duration: FiniteDuration = ConfigFactory.load().as[FiniteDuration]("akka.http.server.request-timeout") + implicit lazy val duration: FiniteDuration = + ConfigFactory.load().as[FiniteDuration]("akka.http.server.request-timeout") implicit lazy val timeout: Timeout = duration import scala.concurrent.ExecutionContext.Implicits.global val NotFoundError = WesErrorResponse("The requested workflow run wasn't found", StatusCodes.NotFound.intValue) - def WesAbortSuccessHandler: PartialFunction[SuccessfulAbortResponse, Route] = { - case response => complete(WesRunId(response.workflowId.toString)) + def WesAbortSuccessHandler: PartialFunction[SuccessfulAbortResponse, Route] = { case response => + complete(WesRunId(response.workflowId.toString)) } def WesAbortErrorHandler: PartialFunction[Throwable, Route] = { // There are also some auth situations which should be handled, but at the moment Cromwell doesn't allow for those case e: IllegalStateException => respondWithWesError(e.getLocalizedMessage, StatusCodes.Forbidden) case e: WorkflowNotFoundException => respondWithWesError(e.getLocalizedMessage, StatusCodes.NotFound) - case _: AskTimeoutException if 
CromwellShutdown.shutdownInProgress() => respondWithWesError("Cromwell service is shutting down", StatusCodes.InternalServerError) + case _: AskTimeoutException if CromwellShutdown.shutdownInProgress() => + respondWithWesError("Cromwell service is shutting down", StatusCodes.InternalServerError) case e: Exception => respondWithWesError(e.getLocalizedMessage, StatusCodes.InternalServerError) } - private def respondWithWesError(errorMsg: String, status: StatusCode): Route = { + private def respondWithWesError(errorMsg: String, status: StatusCode): Route = complete((status, WesErrorResponse(errorMsg, status.intValue))) - } - def extractSubmission(): Directive1[WesSubmission] = { - formFields(( - "workflow_params".?, - "workflow_type".?, - "workflow_type_version".?, - "tags".?, - "workflow_engine_parameters".?, - "workflow_url".?, - "workflow_attachment".as[String].* - )).as(WesSubmission) - } + def extractSubmission(): Directive1[WesSubmission] = + formFields( + ( + "workflow_params".?, + "workflow_type".?, + "workflow_type_version".?, + "tags".?, + "workflow_engine_parameters".?, + "workflow_url".?, + "workflow_attachment".as[String].* + ) + ).as(WesSubmission) - def completeCromwellResponse(future: => Future[WesResponse]): Route = { + def completeCromwellResponse(future: => Future[WesResponse]): Route = onComplete(future) { case Success(response: WesResponse) => complete(response) case Failure(e) => complete(WesErrorResponse(e.getMessage, StatusCodes.InternalServerError.intValue)) } - } - def listRuns(pageSize: Option[Int], pageToken: Option[String], serviceRegistryActor: ActorRef): Future[WesResponse] = { + def listRuns(pageSize: Option[Int], pageToken: Option[String], serviceRegistryActor: ActorRef): Future[WesResponse] = // FIXME: to handle - page_size, page_token // FIXME: How to handle next_page_token in response? 
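  // Aside (hypothetical sketch, not part of this patch): one way the FIXME above
  // could be addressed. It assumes Cromwell's workflow query accepts "pageSize"
  // and "page" parameters and reuses the raw page number as the WES page token;
  // both parameter names and the token encoding are assumptions for illustration.
  def listRunsPaged(pageSize: Option[Int],
                    pageToken: Option[String],
                    serviceRegistryActor: ActorRef
  ): Future[WesResponse] = {
    val paging = pageSize.map(n => "pageSize" -> n.toString).toList ++
      pageToken.map(t => "page" -> t).toList
    metadataQueryRequest(paging, serviceRegistryActor).map(RunListResponse.fromMetadataQueryResponse)
  }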
- metadataQueryRequest(Seq.empty[(String, String)], serviceRegistryActor).map(RunListResponse.fromMetadataQueryResponse) - } + metadataQueryRequest(Seq.empty[(String, String)], serviceRegistryActor) + .map(RunListResponse.fromMetadataQueryResponse) - def runLog(workflowId: String, request: WorkflowId => BuildMetadataJsonAction, serviceRegistryActor: ActorRef): Future[WesResponse] = { + def runLog(workflowId: String, + request: WorkflowId => BuildMetadataJsonAction, + serviceRegistryActor: ActorRef + ): Future[WesResponse] = { val metadataJsonResponse = metadataBuilderActorRequest(workflowId, request, serviceRegistryActor) metadataJsonResponse.map { case SuccessfulMetadataJsonResponse(_, responseJson) => WesRunLog.fromJson(responseJson.toString()) - case FailedMetadataJsonResponse(_, reason) => WesErrorResponse(reason.getMessage, StatusCodes.InternalServerError.intValue) + case FailedMetadataJsonResponse(_, reason) => + WesErrorResponse(reason.getMessage, StatusCodes.InternalServerError.intValue) } } -} \ No newline at end of file +} diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/WesRunLog.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/WesRunLog.scala index c882ebf4f8b..240fce01341 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/WesRunLog.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/WesRunLog.scala @@ -2,7 +2,6 @@ package cromwell.webservice.routes.wes import spray.json.JsObject - final case class WesLog(name: Option[String], cmd: Option[Seq[String]], start_time: Option[String], @@ -10,7 +9,7 @@ final case class WesLog(name: Option[String], stdout: Option[String], stderr: Option[String], exit_code: Option[Int] - ) +) final case class WesRunRequest(workflow_params: Option[JsObject], workflow_type: String, @@ -18,4 +17,4 @@ final case class WesRunRequest(workflow_params: Option[JsObject], tags: Option[JsObject], workflow_engine_parameters: Option[JsObject], workflow_url: Option[String] - ) +) diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/WesState.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/WesState.scala index f7feee727b8..58cf69e6a08 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/WesState.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/WesState.scala @@ -6,24 +6,23 @@ import spray.json.{DefaultJsonProtocol, JsObject, JsString, JsValue, RootJsonFor object WesState { sealed trait WesState extends Product with Serializable { val name: String } - case object Unknown extends WesState { override val name = "UNKNOWN"} - case object Queued extends WesState { override val name = "QUEUED"} - case object Initializing extends WesState { override val name = "INITIALIZING"} - case object Running extends WesState { override val name = "RUNNING"} - case object Paused extends WesState { override val name = "PAUSED"} - case object Complete extends WesState { override val name = "COMPLETE"} - case object ExecutorError extends WesState { override val name = "EXECUTOR_ERROR"} - case object SystemError extends WesState { override val name = "SYSTEM_ERROR"} - case object Canceled extends WesState { override val name = "CANCELED"} - case object Canceling extends WesState { override val name = "CANCELING"} + case object Unknown extends WesState { override val name = "UNKNOWN" } + case object Queued extends WesState { override val name = "QUEUED" } + case object Initializing extends WesState { override val name = "INITIALIZING" } + case object Running extends WesState { 
override val name = "RUNNING" } + case object Paused extends WesState { override val name = "PAUSED" } + case object Complete extends WesState { override val name = "COMPLETE" } + case object ExecutorError extends WesState { override val name = "EXECUTOR_ERROR" } + case object SystemError extends WesState { override val name = "SYSTEM_ERROR" } + case object Canceled extends WesState { override val name = "CANCELED" } + case object Canceling extends WesState { override val name = "CANCELING" } - def fromStatusString(status: Option[String]): WesState = { + def fromStatusString(status: Option[String]): WesState = status match { case Some(status) => fromCromwellStatus(WorkflowState.withName(status)) case None => Unknown } - } - def fromCromwellStatus(cromwellStatus: WorkflowState): WesState = { + def fromCromwellStatus(cromwellStatus: WorkflowState): WesState = cromwellStatus match { case WorkflowOnHold => Paused case WorkflowSubmitted => Queued @@ -34,18 +33,24 @@ object WesState { case WorkflowFailed => ExecutorError case _ => Unknown } - } def fromCromwellStatusJson(jsonResponse: JsObject): WesState = { - val statusString = jsonResponse.fields.get("status").collect { - case str: JsString => str.value - }.getOrElse(throw new IllegalArgumentException(s"Could not coerce Cromwell status response ${jsonResponse.compactPrint} into a valid WES status")) + val statusString = jsonResponse.fields + .get("status") + .collect { case str: JsString => + str.value + } + .getOrElse( + throw new IllegalArgumentException( + s"Could not coerce Cromwell status response ${jsonResponse.compactPrint} into a valid WES status" + ) + ) fromCromwellStatus(WorkflowState.withName(statusString)) } - def fromString(status: String): WesState = { + def fromString(status: String): WesState = status match { case Unknown.name => Unknown case Queued.name => Queued @@ -59,7 +64,6 @@ object WesState { case Canceling.name => Canceling case doh => throw new IllegalArgumentException(s"Invalid status attempting to be coerced to WesState: $doh") } - } } object WesStateJsonSupport extends SprayJsonSupport with DefaultJsonProtocol { @@ -70,8 +74,8 @@ object WesStateJsonSupport extends SprayJsonSupport with DefaultJsonProtocol { def read(json: JsValue): WesState = json match { - case JsString(string) => WesState.fromString(string) - case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a WesState") - } + case JsString(string) => WesState.fromString(string) + case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a WesState") + } } } diff --git a/engine/src/main/scala/cromwell/webservice/routes/wes/WesSubmission.scala b/engine/src/main/scala/cromwell/webservice/routes/wes/WesSubmission.scala index 1db2cd801b8..a3de8bbb24a 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/wes/WesSubmission.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/wes/WesSubmission.scala @@ -10,7 +10,7 @@ final case class WesSubmission(workflowParams: Option[String], workflowEngineParameters: Option[String], workflowUrl: Option[String], workflowAttachment: Iterable[String] - ) { +) { val entity: Multipart.FormData = { /* FIXME: @@ -27,15 +27,29 @@ final case class WesSubmission(workflowParams: Option[String], Content-Disposition headers on each of these files which can be used to describe directory structure and such for relative import resolution */ - val sourcePart = workflowAttachment.headOption map { a => Multipart.FormData.BodyPart(WorkflowSourceKey, 
HttpEntity(MediaTypes.`application/json`, a)) } - - val urlPart = workflowUrl map { u => Multipart.FormData.BodyPart(WorkflowUrlKey, HttpEntity(MediaTypes.`application/json`, u)) } - - val typePart = workflowType map { w => Multipart.FormData.BodyPart(WorkflowTypeKey, HttpEntity(MediaTypes.`application/json`, w)) } - val typeVersionPart = workflowTypeVersion map { v => Multipart.FormData.BodyPart(WorkflowTypeVersionKey, HttpEntity(MediaTypes.`application/json`, v)) } - val inputsPart = workflowParams map { p => Multipart.FormData.BodyPart(WorkflowInputsKey, HttpEntity(MediaTypes.`application/json`, p)) } - val optionsPart = workflowEngineParameters map { o => Multipart.FormData.BodyPart(WorkflowOptionsKey, HttpEntity(MediaTypes.`application/json`, o)) } - val labelsPart = tags map { t => Multipart.FormData.BodyPart(labelsKey, HttpEntity(MediaTypes.`application/json`, t)) } + val sourcePart = workflowAttachment.headOption map { a => + Multipart.FormData.BodyPart(WorkflowSourceKey, HttpEntity(MediaTypes.`application/json`, a)) + } + + val urlPart = workflowUrl map { u => + Multipart.FormData.BodyPart(WorkflowUrlKey, HttpEntity(MediaTypes.`application/json`, u)) + } + + val typePart = workflowType map { w => + Multipart.FormData.BodyPart(WorkflowTypeKey, HttpEntity(MediaTypes.`application/json`, w)) + } + val typeVersionPart = workflowTypeVersion map { v => + Multipart.FormData.BodyPart(WorkflowTypeVersionKey, HttpEntity(MediaTypes.`application/json`, v)) + } + val inputsPart = workflowParams map { p => + Multipart.FormData.BodyPart(WorkflowInputsKey, HttpEntity(MediaTypes.`application/json`, p)) + } + val optionsPart = workflowEngineParameters map { o => + Multipart.FormData.BodyPart(WorkflowOptionsKey, HttpEntity(MediaTypes.`application/json`, o)) + } + val labelsPart = tags map { t => + Multipart.FormData.BodyPart(labelsKey, HttpEntity(MediaTypes.`application/json`, t)) + } val parts = List(sourcePart, urlPart, typePart, typeVersionPart, inputsPart, optionsPart, labelsPart).flatten diff --git a/engine/src/main/scala/cromwell/webservice/webservice_.scala b/engine/src/main/scala/cromwell/webservice/webservice_.scala index d68ba0bdb1f..08b57cfc378 100644 --- a/engine/src/main/scala/cromwell/webservice/webservice_.scala +++ b/engine/src/main/scala/cromwell/webservice/webservice_.scala @@ -35,5 +35,5 @@ object Patterns { \. # Literal dot. (\d+) # Captured shard digits. )? # End outer optional noncapturing group for shard. - """.trim.r // The trim is necessary as (?x) must be at the beginning of the regex. + """.trim.r // The trim is necessary as (?x) must be at the beginning of the regex. 
} diff --git a/engine/src/test/scala/cromwell/MetadataWatchActor.scala b/engine/src/test/scala/cromwell/MetadataWatchActor.scala index 2c43ff74463..e9c7710d79f 100644 --- a/engine/src/test/scala/cromwell/MetadataWatchActor.scala +++ b/engine/src/test/scala/cromwell/MetadataWatchActor.scala @@ -18,7 +18,7 @@ final case class MetadataWatchActor(promise: Promise[Unit], matchers: Matcher*) var unsatisfiedMatchers = matchers def tryMatchingEvents(events: Iterable[MetadataEvent]) = { - unsatisfiedMatchers = unsatisfiedMatchers.filterNot { m => m.matches(events) } + unsatisfiedMatchers = unsatisfiedMatchers.filterNot(m => m.matches(events)) if (unsatisfiedMatchers.isEmpty) { promise.trySuccess(()) () @@ -41,7 +41,8 @@ final case class MetadataWatchActor(promise: Promise[Unit], matchers: Matcher*) object MetadataWatchActor { - def props(promise: Promise[Unit], matchers: Matcher*): Props = Props(MetadataWatchActor(promise, matchers: _*)).withDispatcher(EngineDispatcher) + def props(promise: Promise[Unit], matchers: Matcher*): Props = + Props(MetadataWatchActor(promise, matchers: _*)).withDispatcher(EngineDispatcher) trait Matcher { private var _fullEventList: List[MetadataEvent] = List.empty @@ -57,7 +58,8 @@ object MetadataWatchActor { def checkMetadataValueContains(key: String, actual: MetadataValue, expected: String): Boolean = { val result = actual.value.contains(expected) - if (!result) addNearMissInfo(s"Key $key had unexpected value.\nActual value: ${actual.value}\n\nDid not contain: $expected") + if (!result) + addNearMissInfo(s"Key $key had unexpected value.\nActual value: ${actual.value}\n\nDid not contain: $expected") result } } @@ -67,20 +69,28 @@ object MetadataWatchActor { case None => false } - final case class JobKeyMetadataKeyAndValueContainStringMatcher(jobKeyCheck: Option[MetadataJobKey] => Boolean, key: String, value: String) extends Matcher { - def _matches(events: Iterable[MetadataEvent]): Boolean = { - events.exists(e => e.key.key.contains(key) && jobKeyCheck(e.key.jobKey) && e.value.exists { v => v.valueType == MetadataString && checkMetadataValueContains(e.key.key, v, value) }) - } + final case class JobKeyMetadataKeyAndValueContainStringMatcher(jobKeyCheck: Option[MetadataJobKey] => Boolean, + key: String, + value: String + ) extends Matcher { + def _matches(events: Iterable[MetadataEvent]): Boolean = + events.exists(e => + e.key.key.contains(key) && jobKeyCheck(e.key.jobKey) && e.value.exists { v => + v.valueType == MetadataString && checkMetadataValueContains(e.key.key, v, value) + } + ) } abstract class KeyMatchesRegexAndValueContainsStringMatcher(keyTemplate: String, value: String) extends Matcher { val templateRegex = keyTemplate.r - def _matches(events: Iterable[MetadataEvent]): Boolean = { - events.exists(e => templateRegex.findFirstIn(e.key.key).isDefined && - e.value.exists { v => checkMetadataValueContains(e.key.key, v, value) }) - } + def _matches(events: Iterable[MetadataEvent]): Boolean = + events.exists(e => + templateRegex.findFirstIn(e.key.key).isDefined && + e.value.exists(v => checkMetadataValueContains(e.key.key, v, value)) + ) } val failurePattern = """failures\[\d*\].*\:message""" - final case class FailureMatcher(value: String) extends KeyMatchesRegexAndValueContainsStringMatcher(failurePattern, value) { } + final case class FailureMatcher(value: String) + extends KeyMatchesRegexAndValueContainsStringMatcher(failurePattern, value) {} } diff --git a/engine/src/test/scala/cromwell/engine/MockCromwellTerminator.scala 
b/engine/src/test/scala/cromwell/engine/MockCromwellTerminator.scala index 1ef165ce14b..24b21541a1a 100644 --- a/engine/src/test/scala/cromwell/engine/MockCromwellTerminator.scala +++ b/engine/src/test/scala/cromwell/engine/MockCromwellTerminator.scala @@ -6,7 +6,6 @@ import akka.actor.CoordinatedShutdown import scala.concurrent.Future object MockCromwellTerminator extends CromwellTerminator { - override def beginCromwellShutdown(notUsed: CoordinatedShutdown.Reason): Future[Done] = { + override def beginCromwellShutdown(notUsed: CoordinatedShutdown.Reason): Future[Done] = Future.successful(Done) - } } diff --git a/engine/src/test/scala/cromwell/engine/backend/mock/DefaultBackendJobExecutionActor.scala b/engine/src/test/scala/cromwell/engine/backend/mock/DefaultBackendJobExecutionActor.scala index f31e4d72002..6fc7ed0fbd5 100644 --- a/engine/src/test/scala/cromwell/engine/backend/mock/DefaultBackendJobExecutionActor.scala +++ b/engine/src/test/scala/cromwell/engine/backend/mock/DefaultBackendJobExecutionActor.scala @@ -10,13 +10,26 @@ import wom.graph.CommandCallNode import scala.concurrent.{ExecutionContext, Future} object DefaultBackendJobExecutionActor { - def props(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor) = Props(DefaultBackendJobExecutionActor(jobDescriptor, configurationDescriptor)) + def props(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor) = Props( + DefaultBackendJobExecutionActor(jobDescriptor, configurationDescriptor) + ) } -case class DefaultBackendJobExecutionActor(override val jobDescriptor: BackendJobDescriptor, override val configurationDescriptor: BackendConfigurationDescriptor) extends BackendJobExecutionActor { - override def execute: Future[BackendJobExecutionResponse] = { - Future.successful(JobSucceededResponse(jobDescriptor.key, Some(0), CallOutputs((jobDescriptor.taskCall.outputPorts map taskOutputToJobOutput).toMap), None, Seq.empty, dockerImageUsed = None, resultGenerationMode = RunOnBackend)) - } +case class DefaultBackendJobExecutionActor(override val jobDescriptor: BackendJobDescriptor, + override val configurationDescriptor: BackendConfigurationDescriptor +) extends BackendJobExecutionActor { + override def execute: Future[BackendJobExecutionResponse] = + Future.successful( + JobSucceededResponse( + jobDescriptor.key, + Some(0), + CallOutputs((jobDescriptor.taskCall.outputPorts map taskOutputToJobOutput).toMap), + None, + Seq.empty, + dockerImageUsed = None, + resultGenerationMode = RunOnBackend + ) + ) override def recover = execute @@ -24,25 +37,26 @@ case class DefaultBackendJobExecutionActor(override val jobDescriptor: BackendJo } class DefaultBackendLifecycleActorFactory(val name: String, val configurationDescriptor: BackendConfigurationDescriptor) - extends BackendLifecycleActorFactory { + extends BackendLifecycleActorFactory { override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], serviceRegistryActor: ActorRef, - restarting: Boolean): Option[Props] = None + restarting: Boolean + ): Option[Props] = None override def jobExecutionActorProps(jobDescriptor: BackendJobDescriptor, initializationData: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActor: Option[ActorRef]): Props = { + backendSingletonActor: Option[ActorRef] + ): Props = DefaultBackendJobExecutionActor.props(jobDescriptor, configurationDescriptor) - } 
override def expressionLanguageFunctions(workflowDescriptor: BackendWorkflowDescriptor, jobKey: BackendJobDescriptorKey, initializationData: Option[BackendInitializationData], ioActorProxy: ActorRef, - ec: ExecutionContext): IoFunctionSet = NoIoFunctionSet + ec: ExecutionContext + ): IoFunctionSet = NoIoFunctionSet } - diff --git a/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendJobExecutionActor.scala b/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendJobExecutionActor.scala index 6bc7b04adce..69685d58176 100644 --- a/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendJobExecutionActor.scala +++ b/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendJobExecutionActor.scala @@ -2,26 +2,39 @@ package cromwell.engine.backend.mock import akka.actor.Props import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptor, BackendJobExecutionActor} -import cromwell.backend.BackendJobExecutionActor.{BackendJobExecutionResponse, JobFailedNonRetryableResponse, JobFailedRetryableResponse} +import cromwell.backend.BackendJobExecutionActor.{ + BackendJobExecutionResponse, + JobFailedNonRetryableResponse, + JobFailedRetryableResponse +} import scala.concurrent.Future object RetryableBackendJobExecutionActor { - def props(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor) = Props(RetryableBackendJobExecutionActor(jobDescriptor, configurationDescriptor)) + def props(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor) = Props( + RetryableBackendJobExecutionActor(jobDescriptor, configurationDescriptor) + ) } -final case class RetryableBackendJobExecutionActor(override val jobDescriptor: BackendJobDescriptor, override val configurationDescriptor: BackendConfigurationDescriptor) extends BackendJobExecutionActor { +final case class RetryableBackendJobExecutionActor(override val jobDescriptor: BackendJobDescriptor, + override val configurationDescriptor: BackendConfigurationDescriptor +) extends BackendJobExecutionActor { val attempts = 3 - override def execute: Future[BackendJobExecutionResponse] = { + override def execute: Future[BackendJobExecutionResponse] = if (jobDescriptor.key.attempt < attempts) { - Future.successful(JobFailedRetryableResponse(jobDescriptor.key, new RuntimeException("An apparent transient Exception!"), None)) - } - else { - Future.successful(JobFailedNonRetryableResponse(jobDescriptor.key, new RuntimeException("A permanent Exception! Yikes, what a pickle!"), None)) + Future.successful( + JobFailedRetryableResponse(jobDescriptor.key, new RuntimeException("An apparent transient Exception!"), None) + ) + } else { + Future.successful( + JobFailedNonRetryableResponse(jobDescriptor.key, + new RuntimeException("A permanent Exception! 
Yikes, what a pickle!"), + None + ) + ) } - } override def recover = execute diff --git a/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendLifecycleActorFactory.scala b/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendLifecycleActorFactory.scala index 3ee7afc8767..23ea949f9d6 100644 --- a/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendLifecycleActorFactory.scala +++ b/engine/src/test/scala/cromwell/engine/backend/mock/RetryableBackendLifecycleActorFactory.scala @@ -8,25 +8,27 @@ import wom.graph.CommandCallNode import scala.concurrent.ExecutionContext class RetryableBackendLifecycleActorFactory(val name: String, - val configurationDescriptor: BackendConfigurationDescriptor) - extends BackendLifecycleActorFactory { + val configurationDescriptor: BackendConfigurationDescriptor +) extends BackendLifecycleActorFactory { override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], serviceRegistryActor: ActorRef, - restarting: Boolean): Option[Props] = None + restarting: Boolean + ): Option[Props] = None override def jobExecutionActorProps(jobDescriptor: BackendJobDescriptor, initializationData: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActor: Option[ActorRef]): Props = { + backendSingletonActor: Option[ActorRef] + ): Props = RetryableBackendJobExecutionActor.props(jobDescriptor, configurationDescriptor) - } override def expressionLanguageFunctions(workflowDescriptor: BackendWorkflowDescriptor, jobKey: BackendJobDescriptorKey, initializationData: Option[BackendInitializationData], ioActorProxy: ActorRef, - ec: ExecutionContext): IoFunctionSet = NoIoFunctionSet + ec: ExecutionContext + ): IoFunctionSet = NoIoFunctionSet } diff --git a/engine/src/test/scala/cromwell/engine/backend/mock/package.scala b/engine/src/test/scala/cromwell/engine/backend/mock/package.scala index 4de47444c64..ef75c0df703 100644 --- a/engine/src/test/scala/cromwell/engine/backend/mock/package.scala +++ b/engine/src/test/scala/cromwell/engine/backend/mock/package.scala @@ -17,6 +17,7 @@ package object mock { case WomSingleFileType => WomSingleFile("/root/of/all/evil") case WomArrayType(memberType) => WomArray(WomArrayType(memberType), List(sampleValue(memberType))) case WomObjectType => WomObject(Map("a" -> WomString("1"), "b" -> WomString("2"))) - case WomMapType(keyType, valueType) => WomMap(WomMapType(keyType, valueType), Map(sampleValue(keyType) -> sampleValue(valueType))) + case WomMapType(keyType, valueType) => + WomMap(WomMapType(keyType, valueType), Map(sampleValue(keyType) -> sampleValue(valueType))) } } diff --git a/engine/src/test/scala/cromwell/engine/io/IoActorProxyGcsBatchSpec.scala b/engine/src/test/scala/cromwell/engine/io/IoActorProxyGcsBatchSpec.scala index 6c69fe98af6..57b85dd2a3b 100644 --- a/engine/src/test/scala/cromwell/engine/io/IoActorProxyGcsBatchSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/IoActorProxyGcsBatchSpec.scala @@ -21,7 +21,12 @@ import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext} import scala.language.postfixOps -class IoActorProxyGcsBatchSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender with Eventually { +class IoActorProxyGcsBatchSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with ImplicitSender + with Eventually { behavior of "IoActor [GCS Batch]" implicit val ec: ExecutionContext = 
system.dispatcher @@ -74,10 +79,11 @@ class IoActorProxyGcsBatchSpec extends TestKitSuite with AnyFlatSpecLike with Ma dst: GcsPath, directory: GcsPath, testActorName: String, - serviceRegistryActorName: String) = { + serviceRegistryActorName: String + ) = { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe(serviceRegistryActorName).ref, "cromwell test"), - name = testActorName, + name = testActorName ) val copyCommand = GcsBatchCopyCommand.forPaths(src, dst).get @@ -102,20 +108,24 @@ class IoActorProxyGcsBatchSpec extends TestKitSuite with AnyFlatSpecLike with Ma received1.size shouldBe 5 received1 forall { _.isInstanceOf[IoSuccess[_]] } shouldBe true - received1 collect { - case IoSuccess(_: GcsBatchSizeCommand, fileSize: Long) => fileSize shouldBe 5 + received1 collect { case IoSuccess(_: GcsBatchSizeCommand, fileSize: Long) => + fileSize shouldBe 5 } - received1 collect { - case IoSuccess(_: GcsBatchCrc32Command, hash: String) => hash shouldBe "mnG7TA==" + received1 collect { case IoSuccess(_: GcsBatchCrc32Command, hash: String) => + hash shouldBe "mnG7TA==" } received1 collect { - case IoSuccess(command: GcsBatchIsDirectoryCommand, isDirectory: Boolean) if command.file.pathAsString == directory.pathAsString => isDirectory shouldBe true + case IoSuccess(command: GcsBatchIsDirectoryCommand, isDirectory: Boolean) + if command.file.pathAsString == directory.pathAsString => + isDirectory shouldBe true } received1 collect { - case IoSuccess(command: GcsBatchIsDirectoryCommand, isDirectory: Boolean) if command.file.pathAsString == src.pathAsString => isDirectory shouldBe false + case IoSuccess(command: GcsBatchIsDirectoryCommand, isDirectory: Boolean) + if command.file.pathAsString == src.pathAsString => + isDirectory shouldBe false } testActor ! 
deleteSrcCommand @@ -136,7 +146,7 @@ class IoActorProxyGcsBatchSpec extends TestKitSuite with AnyFlatSpecLike with Ma dst = dst, directory = directory, testActorName = "testActor-batch", - serviceRegistryActorName = "serviceRegistryActor-batch", + serviceRegistryActorName = "serviceRegistryActor-batch" ) } @@ -146,14 +156,14 @@ class IoActorProxyGcsBatchSpec extends TestKitSuite with AnyFlatSpecLike with Ma dst = dstRequesterPays, directory = directoryRequesterPays, testActorName = "testActor-batch-rp", - serviceRegistryActorName = "serviceRegistryActor-batch-rp", + serviceRegistryActorName = "serviceRegistryActor-batch-rp" ) } it should "copy files across GCS storage classes" taggedAs IntegrationTest in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActor").ref, "cromwell test"), - name = "testActor", + name = "testActor" ) val copyCommand = GcsBatchCopyCommand.forPaths(srcRegional, dstMultiRegional).get diff --git a/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala b/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala index e6209ff5958..880e9028282 100644 --- a/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala @@ -37,7 +37,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "copy a file" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorCopy").ref, "cromwell test"), - name = "testActorCopy", + name = "testActorCopy" ) val src = DefaultPathBuilder.createTempFile() @@ -59,7 +59,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "write to a file" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorWrite").ref, "cromwell test"), - name = "testActorWrite", + name = "testActorWrite" ) val src = DefaultPathBuilder.createTempFile() @@ -79,7 +79,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "delete a file" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorDelete").ref, "cromwell test"), - name = "testActorDelete", + name = "testActorDelete" ) val src = DefaultPathBuilder.createTempFile() @@ -98,7 +98,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "read a file" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorRead").ref, "cromwell test"), - name = "testActorRead", + name = "testActorRead" ) val src = DefaultPathBuilder.createTempFile() @@ -120,7 +120,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "read only the first bytes of file" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorFirstBytes").ref, "cromwell test"), - name = "testActorFirstBytes", + name = "testActorFirstBytes" ) val src = DefaultPathBuilder.createTempFile() @@ -142,7 +142,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "read the file if it's under the byte limit" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorByteLimit").ref, "cromwell test"), - name = "testActorByteLimit", + name = "testActorByteLimit" ) val src = DefaultPathBuilder.createTempFile() @@ -164,7 +164,7 @@ class IoActorSpec extends 
TestKitSuite with AnyFlatSpecLike with Matchers with I it should "fail if the file is larger than the read limit" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorReadLimit").ref, "cromwell test"), - name = "testActorReadLimit", + name = "testActorReadLimit" ) val src = DefaultPathBuilder.createTempFile() @@ -174,8 +174,10 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I testActor ! readCommand expectMsgPF(5 seconds) { - case _: IoSuccess[_] => fail("Command should have failed because the read limit was < file size and failOnOverflow was true") - case response: IoFailure[_] => response.failure.getMessage shouldBe s"[Attempted 1 time(s)] - IOException: Could not read from ${src.pathAsString}: File ${src.pathAsString} is larger than requested maximum of 2 Bytes." + case _: IoSuccess[_] => + fail("Command should have failed because the read limit was < file size and failOnOverflow was true") + case response: IoFailure[_] => + response.failure.getMessage shouldBe s"[Attempted 1 time(s)] - IOException: Could not read from ${src.pathAsString}: File ${src.pathAsString} is larger than requested maximum of 2 Bytes." } src.delete() @@ -184,7 +186,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "return a file size" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorSize").ref, "cromwell test"), - name = "testActorSize", + name = "testActorSize" ) val src = DefaultPathBuilder.createTempFile() @@ -206,7 +208,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "return a file md5 hash (local)" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorHash").ref, "cromwell test"), - name = "testActorHash", + name = "testActorHash" ) val src = DefaultPathBuilder.createTempFile() @@ -228,7 +230,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I it should "touch a file (local)" in { val testActor = TestActorRef( factory = new IoActor(IoActorConfig, TestProbe("serviceRegistryActorTouch").ref, "cromwell test"), - name = "testActorTouch", + name = "testActorTouch" ) val src = DefaultPathBuilder.createTempFile() @@ -259,25 +261,45 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I new SocketException(), new SocketTimeoutException(), new IOException("text Error getting access token for service account some other text"), - - new IOException("Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 500 Internal Server Error\nBackend Error"), - new IOException("Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 500 Internal Server Error Backend Error"), - - new IOException("Could not read from gs://broad-epi-cromwell/workflows/ChipSeq/ce6a5671-baf6-4734-a32b-abf3d9138e9b/call-epitope_classifier/memory_retry_rc: 503 Service Unavailable\nBackend Error"), - new IOException("Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 503 Service Unavailable Backend Error"), - - new IOException("Could not read from gs://mccarroll-mocha/cromwell/cromwell-executions/mocha/86d47e9a-5745-4ec0-b4eb-0164f073e5f4/call-idat2gtc/shard-73/rc: 504 Gateway Timeout\nGET 
https://storage.googleapis.com/download/storage/v1/b/mccarroll-mocha/o/cromwell%2Fcromwell-executions%2Fmocha%2F86d47e9a-5745-4ec0-b4eb-0164f073e5f4%2Fcall-idat2gtc%2Fshard-73%2Frc?alt=media"), + new IOException( + "Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 500 Internal Server Error\nBackend Error" + ), + new IOException( + "Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 500 Internal Server Error Backend Error" + ), + new IOException( + "Could not read from gs://broad-epi-cromwell/workflows/ChipSeq/ce6a5671-baf6-4734-a32b-abf3d9138e9b/call-epitope_classifier/memory_retry_rc: 503 Service Unavailable\nBackend Error" + ), + new IOException( + "Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 503 Service Unavailable Backend Error" + ), + new IOException( + "Could not read from gs://mccarroll-mocha/cromwell/cromwell-executions/mocha/86d47e9a-5745-4ec0-b4eb-0164f073e5f4/call-idat2gtc/shard-73/rc: 504 Gateway Timeout\nGET https://storage.googleapis.com/download/storage/v1/b/mccarroll-mocha/o/cromwell%2Fcromwell-executions%2Fmocha%2F86d47e9a-5745-4ec0-b4eb-0164f073e5f4%2Fcall-idat2gtc%2Fshard-73%2Frc?alt=media" + ), // Prove that `isRetryable` successfully recurses to unwrap the lowest-level Throwable - new IOException(new Throwable("Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 500 Internal Server Error Backend Error")), - new IOException(new Throwable("Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 503 Service Unavailable Backend Error")), - - new IOException("Some other text. Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 503 Service Unavailable"), - new IOException("Some other text. Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 504 Gateway Timeout"), + new IOException( + new Throwable( + "Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 500 Internal Server Error Backend Error" + ) + ), + new IOException( + new Throwable( + "Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 503 Service Unavailable Backend Error" + ) + ), + new IOException( + "Some other text. Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 503 Service Unavailable" + ), + new IOException( + "Some other text. 
Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-4688/rc: 504 Gateway Timeout" + ) ) - retryables foreach { e => withClue(e) { - RetryableRequestSupport.isRetryable(e) shouldBe true } + retryables foreach { e => + withClue(e) { + RetryableRequestSupport.isRetryable(e) shouldBe true + } } } @@ -288,7 +310,9 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I new IOException("502 HTTP Status Code"), new Exception("502 HTTP Status Code"), new Exception("5xx HTTP Status Code"), - new IOException("Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-500/rc: 404 File Not Found") + new IOException( + "Could not read from gs://fc-secure-/JointGenotyping//call-HardFilterAndMakeSitesOnlyVcf/shard-500/rc: 404 File Not Found" + ) ) nonRetryables foreach { RetryableRequestSupport.isRetryable(_) shouldBe false } diff --git a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchCommandContextSpec.scala b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchCommandContextSpec.scala index a7bf613efa1..ac19b4d8ad1 100644 --- a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchCommandContextSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchCommandContextSpec.scala @@ -13,7 +13,11 @@ import org.scalatest.matchers.should.Matchers import scala.util.{Failure, Success} class GcsBatchCommandContextSpec - extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Eventually with BeforeAndAfter { + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with Eventually + with BeforeAndAfter { behavior of "GcsBatchCommandContext" it should "handle exceptions in success handlers" in { @@ -34,7 +38,10 @@ class GcsBatchCommandContextSpec exceptionSpewingCommandContext.promise.future.value.get match { case Success(oops) => fail(s"Should not have produced a success: $oops") - case Failure(error) => error.getMessage should be("Error processing IO response in onSuccessCallback: Ill behaved code that throws in mapGoogleResponse") + case Failure(error) => + error.getMessage should be( + "Error processing IO response in onSuccessCallback: Ill behaved code that throws in mapGoogleResponse" + ) } } @@ -48,7 +55,7 @@ class GcsBatchCommandContextSpec exceptionSpewingCommandContext.promise.isCompleted should be(false) // Simulate a failure response from an underlying IO operation: - exceptionSpewingCommandContext.callback.onFailure(new GoogleJsonError { }, new HttpHeaders()) + exceptionSpewingCommandContext.callback.onFailure(new GoogleJsonError {}, new HttpHeaders()) eventually { exceptionSpewingCommandContext.promise.isCompleted should be(true) @@ -56,7 +63,10 @@ class GcsBatchCommandContextSpec exceptionSpewingCommandContext.promise.future.value.get match { case Success(oops) => fail(s"Should not have produced a success: $oops") - case Failure(error) => error.getMessage should be("Error processing IO response in onFailureCallback: Ill behaved code that throws in onFailure") + case Failure(error) => + error.getMessage should be( + "Error processing IO response in onFailureCallback: Ill behaved code that throws in onFailure" + ) } } @@ -78,7 +88,10 @@ class GcsBatchCommandContextSpec errorReturningCommandContext.promise.future.value.get match { case Success(oops) => fail(s"Should not have produced a success: $oops") - case Failure(error) => error.getMessage should be("Unexpected result in successful Google API call:\nWell behaved code that returns an error in mapGoogleResponse") 
+ case Failure(error) => + error.getMessage should be( + "Unexpected result in successful Google API call:\nWell behaved code that returns an error in mapGoogleResponse" + ) } } } diff --git a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala index c133e7618c0..9b0a99752fe 100644 --- a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala @@ -17,14 +17,20 @@ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContextExecutor, Future} import scala.language.postfixOps -class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTimeoutSpec with Matchers - with PrivateMethodTester with MockSugar { +class GcsBatchFlowSpec + extends TestKitSuite + with AnyFlatSpecLike + with CromwellTimeoutSpec + with Matchers + with PrivateMethodTester + with MockSugar { private val NoopOnRetry: IoCommandContext[_] => Throwable => Unit = _ => _ => () private val NoopOnBackpressure: Option[Double] => Unit = _ => () "GcsBatchFlow" should "know what read forbidden bucket failures look like" in { - val ErrorTemplate = "foo@bar.iam.gserviceaccount.com does not have storage.objects.%s access to %s/three_step/f0000000-baaa-f000-baaa-f00000000000/call-foo/foo.log" + val ErrorTemplate = + "foo@bar.iam.gserviceaccount.com does not have storage.objects.%s access to %s/three_step/f0000000-baaa-f000-baaa-f00000000000/call-foo/foo.log" val UnreadableBucketName = "unreadable-bucket" val objectReadOperationNames = Set( @@ -41,7 +47,9 @@ class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTi } yield new StorageException(code, String.format(ErrorTemplate, op, UnreadableBucketName)) } // Can't flatMap this since any Nones would just be squashed out. - storageExceptions(objectReadOperationNames) map (_.getMessage) map GcsBatchFlow.getReadForbiddenBucket map { _.get } shouldBe Set(UnreadableBucketName) + storageExceptions(objectReadOperationNames) map (_.getMessage) map GcsBatchFlow.getReadForbiddenBucket map { + _.get + } shouldBe Set(UnreadableBucketName) // A sampling of write operations, not an exhaustive list. 
val objectWriteOperationNames = Set( @@ -49,9 +57,13 @@ class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTi "delete", "insert" ) - storageExceptions(objectWriteOperationNames) map (_.getMessage) flatMap GcsBatchFlow.getReadForbiddenBucket shouldBe Set.empty + storageExceptions( + objectWriteOperationNames + ) map (_.getMessage) flatMap GcsBatchFlow.getReadForbiddenBucket shouldBe Set.empty - Set(new RuntimeException("random exception")) map (_.getMessage) flatMap GcsBatchFlow.getReadForbiddenBucket shouldBe Set.empty + Set( + new RuntimeException("random exception") + ) map (_.getMessage) flatMap GcsBatchFlow.getReadForbiddenBucket shouldBe Set.empty } "GcsBatchFlow" should "not throw unhandled exception and kill the thread when trying to recover from unretryable exception with null error message" in { @@ -63,19 +75,23 @@ class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTi onRetry = NoopOnRetry, onBackpressure = NoopOnBackpressure, applicationName = "testAppName", - backpressureStaleness = 5 seconds) + backpressureStaleness = 5 seconds + ) val mockGcsPath = GcsPath( nioPath = CloudStorageFileSystem.forBucket("bucket").getPath("test"), apiStorage = mock[com.google.api.services.storage.Storage], cloudStorage = mock[com.google.cloud.storage.Storage], - projectId = "GcsBatchFlowSpec-project", + projectId = "GcsBatchFlowSpec-project" ) - val gcsBatchCommandContext = GcsBatchCommandContext(GcsBatchCrc32Command.forPath(mockGcsPath).get, TestProbe().ref, 5) - val recoverCommandPrivateMethod = PrivateMethod[PartialFunction[Throwable, Future[GcsBatchResponse[_]]]](Symbol("recoverCommand")) + val gcsBatchCommandContext = + GcsBatchCommandContext(GcsBatchCrc32Command.forPath(mockGcsPath).get, TestProbe().ref, 5) + val recoverCommandPrivateMethod = + PrivateMethod[PartialFunction[Throwable, Future[GcsBatchResponse[_]]]](Symbol("recoverCommand")) val partialFuncAcceptingThrowable = gcsBatchFlow invokePrivate recoverCommandPrivateMethod(gcsBatchCommandContext) - val futureRes = partialFuncAcceptingThrowable(new NullPointerException(null)) // no unhandled exceptions should be thrown here + val futureRes = + partialFuncAcceptingThrowable(new NullPointerException(null)) // no unhandled exceptions should be thrown here futureRes.isCompleted shouldBe true } } diff --git a/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala b/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala index 41c121da0cc..5859684e756 100644 --- a/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala @@ -38,12 +38,12 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with private val NoopOnRetry: IoCommandContext[_] => Throwable => Unit = _ => _ => () private val NoopOnBackpressure: Option[Double] => Unit = _ => () - private val flow = new NioFlow( - parallelism = 1, - onRetryCallback = NoopOnRetry, - onBackpressure = NoopOnBackpressure, - numberOfAttempts = 3, - commandBackpressureStaleness = 5 seconds)(system).flow + private val flow = new NioFlow(parallelism = 1, + onRetryCallback = NoopOnRetry, + onBackpressure = NoopOnBackpressure, + numberOfAttempts = 3, + commandBackpressureStaleness = 5 seconds + )(system).flow implicit val materializer: ActorMaterializer = ActorMaterializer() private val replyTo = mock[ActorRef] @@ -106,7 +106,7 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with val stream = 
testSource.via(flow).toMat(readSink)(Keep.right) stream.run() map { case (IoFailure(_, EnhancedCromwellIoException(_, receivedException)), _) => - receivedException.getMessage should include ("UnknownHost") + receivedException.getMessage should include("UnknownHost") case (ack, _) => fail(s"size should have failed with UnknownHost but didn't:\n$ack\n\n") } } @@ -120,7 +120,7 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with val stream = testSource.via(flow).toMat(readSink)(Keep.right) stream.run() map { case (IoFailure(_, EnhancedCromwellIoException(_, receivedException)), _) => - receivedException.getMessage should include ("Couldn't fetch size") + receivedException.getMessage should include("Couldn't fetch size") case (ack, _) => fail(s"size should have failed but didn't:\n$ack\n\n") } } @@ -135,7 +135,8 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with val stream = testSource.via(flow).toMat(readSink)(Keep.right) stream.run() map { - case (success: IoSuccess[_], _) => assert(success.result.asInstanceOf[String] == "5d41402abc4b2a76b9719d911017c592") + case (success: IoSuccess[_], _) => + assert(success.result.asInstanceOf[String] == "5d41402abc4b2a76b9719d911017c592") case _ => fail("hash returned an unexpected message") } } @@ -177,16 +178,19 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with it should "fail if DrsPath hash doesn't match checksum" in { val testPath = mock[DrsPath] when(testPath.limitFileContent(any[Option[Int]], any[Boolean])(any[ExecutionContext])).thenReturn("hello".getBytes) - when(testPath.getFileHash).thenReturn(FileHash(HashType.Crc32c, "boom")) // correct Base64-encoded crc32c checksum is "9a71bb4c" + when(testPath.getFileHash).thenReturn( + FileHash(HashType.Crc32c, "boom") + ) // correct Base64-encoded crc32c checksum is "9a71bb4c" - val context = DefaultCommandContext(contentAsStringCommand(testPath, Option(100), failOnOverflow = true).get, replyTo) + val context = + DefaultCommandContext(contentAsStringCommand(testPath, Option(100), failOnOverflow = true).get, replyTo) val testSource = Source.single(context) val stream = testSource.via(flow).toMat(readSink)(Keep.right) stream.run() map { case (IoFailure(_, EnhancedCromwellIoException(_, receivedException)), _) => - receivedException.getMessage should include ("Failed checksum") + receivedException.getMessage should include("Failed checksum") case (ack, _) => fail(s"read returned an unexpected message:\n$ack\n\n") } } @@ -202,7 +206,8 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with .thenReturn(FileHash(HashType.Crc32c, "boom")) .thenReturn(FileHash(HashType.Crc32c, "9a71bb4c")) - val context = DefaultCommandContext(contentAsStringCommand(testPath, Option(100), failOnOverflow = true).get, replyTo) + val context = + DefaultCommandContext(contentAsStringCommand(testPath, Option(100), failOnOverflow = true).get, replyTo) val testSource = Source.single(context) val stream = testSource.via(flow).toMat(readSink)(Keep.right) @@ -225,7 +230,8 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with when(testPath.md5HexString) .thenReturn(Success(None)) - val context = DefaultCommandContext(contentAsStringCommand(testPath, Option(100), failOnOverflow = true).get, replyTo) + val context = + DefaultCommandContext(contentAsStringCommand(testPath, Option(100), failOnOverflow = true).get, replyTo) val testSource = Source.single(context) val stream = 
testSource.via(flow).toMat(readSink)(Keep.right) @@ -290,7 +296,7 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with it should "delete a Nio path with swallowIoExceptions true" in { val testPath = DefaultPathBuilder.build("/this/does/not/exist").get - //noinspection RedundantDefaultArgument + // noinspection RedundantDefaultArgument val context = DefaultCommandContext(deleteCommand(testPath, swallowIoExceptions = true).get, replyTo) val testSource = Source.single(context) @@ -328,21 +334,20 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with val testSource = Source.single(context) - val customFlow = new NioFlow( - parallelism = 1, - onRetryCallback = NoopOnRetry, - onBackpressure = NoopOnBackpressure, - numberOfAttempts = 3, - commandBackpressureStaleness = 5 seconds)(system) { + val customFlow = new NioFlow(parallelism = 1, + onRetryCallback = NoopOnRetry, + onBackpressure = NoopOnBackpressure, + numberOfAttempts = 3, + commandBackpressureStaleness = 5 seconds + )(system) { private var tries = 0 - override def handleSingleCommand(ioSingleCommand: IoCommand[_]): IO[IoSuccess[_]] = { + override def handleSingleCommand(ioSingleCommand: IoCommand[_]): IO[IoSuccess[_]] = IO { tries += 1 if (tries < 3) throw new StorageException(500, "message") else IoSuccess(ioSingleCommand, "content") } - } }.flow val stream = testSource.via(customFlow).toMat(readSink)(Keep.right) diff --git a/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala index 1d2652b83f1..667861801a4 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala @@ -9,9 +9,14 @@ import cromwell.core.retry.SimpleExponentialBackoff import cromwell.core.{TestKitSuite, WorkflowId} import cromwell.database.slick.EngineSlickDatabase import cromwell.database.sql.tables.DockerHashStoreEntry -import cromwell.docker.DockerInfoActor.{DockerInfoFailedResponse, DockerInfoSuccessResponse, DockerInformation} +import cromwell.docker.DockerInfoActor.{DockerInfoFailedResponse, DockerInformation, DockerInfoSuccessResponse} import cromwell.docker.{DockerHashResult, DockerImageIdentifier, DockerImageIdentifierWithoutHash, DockerInfoRequest} -import cromwell.engine.workflow.WorkflowDockerLookupActor.{DockerHashActorTimeout, Running, WorkflowDockerLookupFailure, WorkflowDockerTerminalFailure} +import cromwell.engine.workflow.WorkflowDockerLookupActor.{ + DockerHashActorTimeout, + Running, + WorkflowDockerLookupFailure, + WorkflowDockerTerminalFailure +} import cromwell.engine.workflow.WorkflowDockerLookupActorSpec._ import cromwell.engine.workflow.workflowstore.{StartableState, Submitted} import cromwell.services.EngineServicesStore @@ -25,9 +30,8 @@ import scala.concurrent.{ExecutionContext, Future} import scala.language.postfixOps import scala.util.control.NoStackTrace - class WorkflowDockerLookupActorSpec - extends TestKitSuite + extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender @@ -55,11 +59,11 @@ class WorkflowDockerLookupActorSpec } it should "wait and resubmit the docker request when it gets a backpressure message" in { - val backoff = SimpleExponentialBackoff(2.seconds, 10.minutes, 2D) + val backoff = SimpleExponentialBackoff(2.seconds, 10.minutes, 2d) val lookupActor = TestActorRef( Props(new 
TestWorkflowDockerLookupActor(workflowId, dockerHashingActor.ref, Submitted, backoff)), - dockerSendingActor.ref, + dockerSendingActor.ref ) lookupActor.tell(LatestRequest, dockerSendingActor.ref) @@ -75,7 +79,8 @@ class WorkflowDockerLookupActorSpec Future.successful(()) } - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false, db)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false, db)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) // The WorkflowDockerLookupActor should not have the hash for this tag yet and will need to query the dockerHashingActor. @@ -94,7 +99,8 @@ class WorkflowDockerLookupActorSpec } it should "soldier on after docker hashing actor timeouts" in { - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) lookupActor.tell(OlderRequest, dockerSendingActor.ref) @@ -131,7 +137,13 @@ class WorkflowDockerLookupActorSpec // BA-6495 it should "not fail and enter terminal state when response for certain image id from DockerHashingActor arrived after the self-imposed timeout" in { - val lookupActor = TestFSMRef(new WorkflowDockerLookupActor(workflowId, dockerHashingActor.ref, isRestart = false, EngineServicesStore.engineDatabaseInterface)) + val lookupActor = TestFSMRef( + new WorkflowDockerLookupActor(workflowId, + dockerHashingActor.ref, + isRestart = false, + EngineServicesStore.engineDatabaseInterface + ) + ) lookupActor.tell(LatestRequest, dockerSendingActor.ref) @@ -142,7 +154,8 @@ class WorkflowDockerLookupActorSpec // WorkflowDockerLookupActor actually sends DockerHashActorTimeout to itself lookupActor.tell(timeout, lookupActor) - val failedRequest: WorkflowDockerLookupFailure = dockerSendingActor.receiveOne(2 seconds).asInstanceOf[WorkflowDockerLookupFailure] + val failedRequest: WorkflowDockerLookupFailure = + dockerSendingActor.receiveOne(2 seconds).asInstanceOf[WorkflowDockerLookupFailure] failedRequest.request shouldBe LatestRequest lookupActor.tell(LatestRequest, dockerSendingActor.ref) @@ -160,7 +173,8 @@ class WorkflowDockerLookupActorSpec } it should "respond appropriately to docker hash lookup failures" in { - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) lookupActor.tell(OlderRequest, dockerSendingActor.ref) @@ -176,7 +190,9 @@ class WorkflowDockerLookupActorSpec val mixedResponses = results collect { case msg: DockerInfoSuccessResponse => msg // Scoop out the request here since we can't match the exception on the whole message. 
- case msg: WorkflowDockerLookupFailure if msg.reason.getMessage == "Failed to get docker hash for ubuntu:older Lookup failed" => msg.request + case msg: WorkflowDockerLookupFailure + if msg.reason.getMessage == "Failed to get docker hash for ubuntu:older Lookup failed" => + msg.request } Set(LatestSuccessResponse, OlderRequest) should equal(mixedResponses) @@ -190,11 +206,11 @@ class WorkflowDockerLookupActorSpec it should "reuse previously looked up hashes following a restart" in { val db = dbWithQuery { - Future.successful( - Seq(LatestStoreEntry(workflowId), OlderStoreEntry(workflowId))) + Future.successful(Seq(LatestStoreEntry(workflowId), OlderStoreEntry(workflowId))) } - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = true, db)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = true, db)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) lookupActor.tell(OlderRequest, dockerSendingActor.ref) @@ -209,7 +225,8 @@ class WorkflowDockerLookupActorSpec it should "not try to look up hashes if not restarting" in { val db = dbWithWrite(Future.successful(())) - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false, db)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false, db)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) lookupActor.tell(OlderRequest, dockerSendingActor.ref) @@ -231,7 +248,8 @@ class WorkflowDockerLookupActorSpec if (numWrites == 1) Future.failed(new RuntimeException("Fake exception from a test.")) else Future.successful(()) } - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false, db)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = false, db)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) // The WorkflowDockerLookupActor should not have the hash for this tag yet and will need to query the dockerHashingActor. @@ -256,7 +274,8 @@ class WorkflowDockerLookupActorSpec Future.failed(new Exception("Don't worry this is just a dummy failure in a test") with NoStackTrace) } - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = true, db)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = true, db)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) dockerHashingActor.expectNoMessage() @@ -267,14 +286,17 @@ class WorkflowDockerLookupActorSpec it should "emit a terminal failure message if unable to parse hashes read from the database on restart" in { val db = dbWithQuery { numReads = numReads + 1 - Future.successful(Seq( - DockerHashStoreEntry(workflowId.toString, Latest, "md5:AAAAA", None), - // missing the "algorithm:" preceding the hash value so this should fail parsing. - DockerHashStoreEntry(workflowId.toString, Older, "BBBBB", None) - )) + Future.successful( + Seq( + DockerHashStoreEntry(workflowId.toString, Latest, "md5:AAAAA", None), + // missing the "algorithm:" preceding the hash value so this should fail parsing. 
+ DockerHashStoreEntry(workflowId.toString, Older, "BBBBB", None) + ) + ) } - val lookupActor = TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = true, db)) + val lookupActor = + TestActorRef(WorkflowDockerLookupActor.props(workflowId, dockerHashingActor.ref, isRestart = true, db)) lookupActor.tell(LatestRequest, dockerSendingActor.ref) dockerHashingActor.expectNoMessage() @@ -282,25 +304,26 @@ class WorkflowDockerLookupActorSpec numReads should equal(1) } - def dbWithWrite(writeFn: => Future[Unit]): EngineSlickDatabase = { + def dbWithWrite(writeFn: => Future[Unit]): EngineSlickDatabase = databaseInterface(write = _ => writeFn) - } - def dbWithQuery(queryFn: => Future[Seq[DockerHashStoreEntry]]): EngineSlickDatabase = { + def dbWithQuery(queryFn: => Future[Seq[DockerHashStoreEntry]]): EngineSlickDatabase = databaseInterface(query = _ => queryFn) - } def databaseInterface(query: String => Future[Seq[DockerHashStoreEntry]] = abjectFailure, - write: DockerHashStoreEntry => Future[Unit] = abjectFailure): EngineSlickDatabase = { + write: DockerHashStoreEntry => Future[Unit] = abjectFailure + ): EngineSlickDatabase = new EngineSlickDatabase(DatabaseConfig) { - override def queryDockerHashStoreEntries(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Seq[DockerHashStoreEntry]] = query(workflowExecutionUuid) + override def queryDockerHashStoreEntries(workflowExecutionUuid: String)(implicit + ec: ExecutionContext + ): Future[Seq[DockerHashStoreEntry]] = query(workflowExecutionUuid) - override def addDockerHashStoreEntry(dockerHashStoreEntry: DockerHashStoreEntry)(implicit ec: ExecutionContext): Future[Unit] = write(dockerHashStoreEntry) + override def addDockerHashStoreEntry(dockerHashStoreEntry: DockerHashStoreEntry)(implicit + ec: ExecutionContext + ): Future[Unit] = write(dockerHashStoreEntry) }.initialized(EngineServicesStore.EngineLiquibaseSettings) - } } - object WorkflowDockerLookupActorSpec { val Latest = "ubuntu:latest" val Older = "ubuntu:older" @@ -313,8 +336,10 @@ object WorkflowDockerLookupActorSpec { val LatestRequest: DockerInfoRequest = DockerInfoRequest(LatestImageId) val OlderRequest: DockerInfoRequest = DockerInfoRequest(OlderImageId) - def LatestStoreEntry(workflowId: WorkflowId): DockerHashStoreEntry = DockerHashStoreEntry(workflowId.toString, Latest, "md5:AAAAAAAA", None) - def OlderStoreEntry(workflowId: WorkflowId): DockerHashStoreEntry = DockerHashStoreEntry(workflowId.toString, Older, "md5:BBBBBBBB", None) + def LatestStoreEntry(workflowId: WorkflowId): DockerHashStoreEntry = + DockerHashStoreEntry(workflowId.toString, Latest, "md5:AAAAAAAA", None) + def OlderStoreEntry(workflowId: WorkflowId): DockerHashStoreEntry = + DockerHashStoreEntry(workflowId.toString, Older, "md5:BBBBBBBB", None) val LatestSuccessResponse: DockerInfoSuccessResponse = DockerInfoSuccessResponse(DockerInformation(DockerHashResult("md5", "AAAAAAAA"), None), LatestRequest) @@ -325,12 +350,15 @@ object WorkflowDockerLookupActorSpec { def abjectFailure[A, B]: A => Future[B] = _ => Future.failed(new RuntimeException("Should not be called!")) - class TestWorkflowDockerLookupActor(workflowId: WorkflowId, dockerHashingActor: ActorRef, startState: StartableState, backoff: Backoff) - extends WorkflowDockerLookupActor( - workflowId, - dockerHashingActor, - startState.restarted, - EngineServicesStore.engineDatabaseInterface) { + class TestWorkflowDockerLookupActor(workflowId: WorkflowId, + dockerHashingActor: ActorRef, + startState: StartableState, 
+ backoff: Backoff + ) extends WorkflowDockerLookupActor(workflowId, + dockerHashingActor, + startState.restarted, + EngineServicesStore.engineDatabaseInterface + ) { override protected def initialBackoff(): Backoff = backoff } } diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCachingConfigSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCachingConfigSpec.scala index 095721569e7..b5a46729b24 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCachingConfigSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCachingConfigSpec.scala @@ -8,26 +8,28 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.util.{Failure, Success, Try} -class ValidatingCachingConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { +class ValidatingCachingConfigSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { - it should "run config tests" in { - val cases = Table[String, Any]( - ("config" , "exceptionMessage" ), - ("enabled = not-a-boolean", "String: 1: enabled has type STRING rather than BOOLEAN" ), - ("enabled = true" , true ), - ("enabled = false" , false ), - ("enabled = 1" , "String: 1: enabled has type NUMBER rather than BOOLEAN" ), - ("" , "String: 1: No configuration setting found for key 'enabled'" ) - ) + it should "run config tests" in { + val cases = Table[String, Any]( + ("config", "exceptionMessage"), + ("enabled = not-a-boolean", "String: 1: enabled has type STRING rather than BOOLEAN"), + ("enabled = true", true), + ("enabled = false", false), + ("enabled = 1", "String: 1: enabled has type NUMBER rather than BOOLEAN"), + ("", "String: 1: No configuration setting found for key 'enabled'") + ) - forEvery(cases) { (config, expected) => - val rootConfig = ConfigFactory.parseString(config) - Try(rootConfig.getBoolean("enabled")) match { - case Success(what) => what shouldBe a [java.lang.Boolean] - case Failure(exception) => exception.getMessage should be (expected) - } + forEvery(cases) { (config, expected) => + val rootConfig = ConfigFactory.parseString(config) + Try(rootConfig.getBoolean("enabled")) match { + case Success(what) => what shouldBe a[java.lang.Boolean] + case Failure(exception) => exception.getMessage should be(expected) } } } - - +} diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCallCachingModeSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCallCachingModeSpec.scala index e7dc1aeaa4f..f2a4aafa6c7 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCallCachingModeSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/ValidatingCallCachingModeSpec.scala @@ -12,7 +12,11 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.util.{Success, Try} -class ValidatingCallCachingModeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { +class ValidatingCallCachingModeSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { def makeOptions(writeOpt: Option[Boolean], readOpt: Option[Boolean]) = { val writeValue = writeOpt map { v => s""""write_to_cache": $v""" } @@ -29,54 +33,54 @@ class ValidatingCallCachingModeSpec extends AnyFlatSpec with CromwellTimeoutSpec val allCombinations = (for { writeOption <- options readOption <- options - } yield 
(makeOptions(writeOption, readOption))).toSet + } yield makeOptions(writeOption, readOption)).toSet // writeCache is ON when config is ON and write_to_cache is None or true val writeCacheOnCombinations = (for { writeOption <- options if writeOption.isEmpty || writeOption.get readOption <- options - } yield (makeOptions(writeOption, readOption))).toSet + } yield makeOptions(writeOption, readOption)).toSet // readCache is ON when config is ON and read_from_cache is None or true val readCacheOnCombinations = (for { writeOption <- options readOption <- options if readOption.isEmpty || readOption.get - } yield (makeOptions(writeOption, readOption))).toSet + } yield makeOptions(writeOption, readOption)).toSet val writeCacheOffCombinations = allCombinations -- writeCacheOnCombinations val readCacheOffCombinations = allCombinations -- readCacheOnCombinations - validateCallCachingMode( - "write cache on options", - writeCacheOnCombinations, - callCachingEnabled, - invalidBadCaсheResults) { _.writeToCache should be(true) } - validateCallCachingMode( - "read cache on options", - readCacheOnCombinations, - callCachingEnabled, - invalidBadCaсheResults) { _.readFromCache should be(true) } - validateCallCachingMode( - "write cache off options", - writeCacheOffCombinations, - callCachingEnabled, - invalidBadCaсheResults) { _.writeToCache should be(false) } - validateCallCachingMode( - "read cache off options", - readCacheOffCombinations, - callCachingEnabled, - invalidBadCaсheResults) { _.readFromCache should be(false) } + validateCallCachingMode("write cache on options", + writeCacheOnCombinations, + callCachingEnabled, + invalidBadCaсheResults + )(_.writeToCache should be(true)) + validateCallCachingMode("read cache on options", readCacheOnCombinations, callCachingEnabled, invalidBadCaсheResults)( + _.readFromCache should be(true) + ) + validateCallCachingMode("write cache off options", + writeCacheOffCombinations, + callCachingEnabled, + invalidBadCaсheResults + )(_.writeToCache should be(false)) + validateCallCachingMode("read cache off options", + readCacheOffCombinations, + callCachingEnabled, + invalidBadCaсheResults + )(_.readFromCache should be(false)) private def validateCallCachingMode(testName: String, wfOptions: Set[Try[WorkflowOptions]], callCachingEnabled: Boolean, - invalidBadCacheResults: Boolean) - (verificationFunction: CallCachingMode => Assertion): Unit = { + invalidBadCacheResults: Boolean + )(verificationFunction: CallCachingMode => Assertion): Unit = it should s"correctly identify $testName" in { - wfOptions foreach { + wfOptions foreach { case Success(wfOptions) => MaterializeWorkflowDescriptorActor.validateCallCachingMode(wfOptions, - callCachingEnabled, invalidBadCacheResults) match { + callCachingEnabled, + invalidBadCacheResults + ) match { case Valid(activity) => verificationFunction(activity) case Invalid(errors) => val errorsList = errors.toList.mkString(", ") @@ -85,5 +89,4 @@ class ValidatingCallCachingModeSpec extends AnyFlatSpec with CromwellTimeoutSpec case x => fail(s"Unexpected test tuple: $x") } } - } } diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActorSpec.scala index 1d117e0a8bc..d1759afef9d 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActorSpec.scala @@ 
-12,7 +12,10 @@ import cromwell.core.path.Path import cromwell.core.path.PathFactory.PathBuilders import cromwell.core.retry.SimpleExponentialBackoff import cromwell.engine.io.IoAttempts.EnhancedCromwellIoException -import cromwell.engine.workflow.lifecycle.deletion.DeleteWorkflowFilesActor.{StartWorkflowFilesDeletion, WaitingForIoResponses} +import cromwell.engine.workflow.lifecycle.deletion.DeleteWorkflowFilesActor.{ + StartWorkflowFilesDeletion, + WaitingForIoResponses +} import cromwell.filesystems.gcs.batch.{GcsBatchCommandBuilder, GcsBatchDeleteCommand} import cromwell.filesystems.gcs.{GcsPath, GcsPathBuilder, MockGcsPathBuilder} import cromwell.services.metadata.MetadataService.PutMetadataAction @@ -31,10 +34,7 @@ import scala.concurrent.duration._ import scala.util.control.NoStackTrace import scala.util.{Failure, Try} -class DeleteWorkflowFilesActorSpec extends TestKitSuite - with AnyFlatSpecLike - with Matchers - with BeforeAndAfter { +class DeleteWorkflowFilesActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BeforeAndAfter { val mockPathBuilder: GcsPathBuilder = MockGcsPathBuilder.instance val mockPathBuilders = List(mockPathBuilder) @@ -50,7 +50,10 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite var rootWorkflowRoots: Set[Path] = _ var allOutputs: CallOutputs = _ var finalOutputs: CallOutputs = _ - var testDeleteWorkflowFilesActor: TestFSMRef[DeleteWorkflowFilesActor.DeleteWorkflowFilesActorState, DeleteWorkflowFilesActor.DeleteWorkflowFilesActorStateData, MockDeleteWorkflowFilesActor] = _ + var testDeleteWorkflowFilesActor: TestFSMRef[DeleteWorkflowFilesActor.DeleteWorkflowFilesActorState, + DeleteWorkflowFilesActor.DeleteWorkflowFilesActorStateData, + MockDeleteWorkflowFilesActor + ] = _ var gcsFilePath: GcsPath = _ before { @@ -60,44 +63,103 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite rootWorkflowRoots = Set[Path](mockPathBuilder.build(rootWorkflowExecutionDir).get) testProbe = TestProbe(s"test-probe-$rootWorkflowId") - allOutputs = CallOutputs(Map( - GraphNodeOutputPort(WomIdentifier(LocalName("main_output"),FullyQualifiedName("main_workflow.main_output")), WomSingleFileType, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt"), - ExpressionBasedOutputPort(WomIdentifier(LocalName("first_task.first_task_output_2"),FullyQualifiedName("first_sub_workflow.first_task.first_task_output_2")), WomSingleFileType, null, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt"), - GraphNodeOutputPort(WomIdentifier(LocalName("first_output_file"),FullyQualifiedName("first_sub_workflow.first_output_file")), WomSingleFileType, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt"), - ExpressionBasedOutputPort(WomIdentifier(LocalName("first_task.first_task_output_1"),FullyQualifiedName("first_sub_workflow.first_task.first_task_output_1")), WomSingleFileType, null, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt"), - GraphNodeOutputPort(WomIdentifier(LocalName("second_output_file"),FullyQualifiedName("first_sub_workflow.second_output_file")), WomSingleFileType, null) - -> 
WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt") - )) - finalOutputs = CallOutputs(Map( - GraphNodeOutputPort(WomIdentifier(LocalName("main_output"),FullyQualifiedName("main_workflow.main_output")), WomSingleFileType, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt") - )) - - testDeleteWorkflowFilesActor = TestFSMRef(new MockDeleteWorkflowFilesActor(rootWorkflowId, emptyWorkflowIdSet, rootWorkflowRoots, finalOutputs, allOutputs, mockPathBuilders, serviceRegistryActor.ref, ioActor.ref)) + allOutputs = CallOutputs( + Map( + GraphNodeOutputPort(WomIdentifier(LocalName("main_output"), FullyQualifiedName("main_workflow.main_output")), + WomSingleFileType, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ), + ExpressionBasedOutputPort( + WomIdentifier(LocalName("first_task.first_task_output_2"), + FullyQualifiedName("first_sub_workflow.first_task.first_task_output_2") + ), + WomSingleFileType, + null, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt" + ), + GraphNodeOutputPort(WomIdentifier(LocalName("first_output_file"), + FullyQualifiedName("first_sub_workflow.first_output_file") + ), + WomSingleFileType, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ), + ExpressionBasedOutputPort( + WomIdentifier(LocalName("first_task.first_task_output_1"), + FullyQualifiedName("first_sub_workflow.first_task.first_task_output_1") + ), + WomSingleFileType, + null, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ), + GraphNodeOutputPort(WomIdentifier(LocalName("second_output_file"), + FullyQualifiedName("first_sub_workflow.second_output_file") + ), + WomSingleFileType, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt" + ) + ) + ) + finalOutputs = CallOutputs( + Map( + GraphNodeOutputPort(WomIdentifier(LocalName("main_output"), FullyQualifiedName("main_workflow.main_output")), + WomSingleFileType, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt" + ) + ) + ) + + testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor(rootWorkflowId, + emptyWorkflowIdSet, + rootWorkflowRoots, + finalOutputs, + allOutputs, + mockPathBuilders, + serviceRegistryActor.ref, + ioActor.ref + ) + ) testProbe watch testDeleteWorkflowFilesActor - gcsFilePath = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt").get + gcsFilePath = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ) + .get } it should "follow the expected golden-path lifecycle" in { 
testDeleteWorkflowFilesActor ! StartWorkflowFilesDeletion - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) } - ioActor.expectMsgPF(10.seconds) { - case cmd: IoDeleteCommand => cmd.file shouldBe gcsFilePath + ioActor.expectMsgPF(10.seconds) { case cmd: IoDeleteCommand => + cmd.file shouldBe gcsFilePath } eventually { @@ -107,13 +169,12 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite testDeleteWorkflowFilesActor ! IoSuccess(GcsBatchDeleteCommand.forPath(gcsFilePath, swallowIOExceptions = false).get, ()) - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Succeeded) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Succeeded) } testProbe.expectTerminated(testDeleteWorkflowFilesActor, 10.seconds) @@ -123,19 +184,17 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite testDeleteWorkflowFilesActor ! StartWorkflowFilesDeletion - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) } val expectedDeleteCommand = GcsBatchDeleteCommand.forPath(gcsFilePath, swallowIOExceptions = false).get - ioActor.expectMsgPF(10.seconds) { - case `expectedDeleteCommand` => // woohoo! + ioActor.expectMsgPF(10.seconds) { case `expectedDeleteCommand` => // woohoo! 
} eventually { @@ -153,13 +212,12 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite ioActor.send(testDeleteWorkflowFilesActor, IoSuccess(expectedDeleteCommand, ())) - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Succeeded) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Succeeded) } testProbe.expectTerminated(testDeleteWorkflowFilesActor, 10.seconds) @@ -169,17 +227,16 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite testDeleteWorkflowFilesActor ! StartWorkflowFilesDeletion - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) } - ioActor.expectMsgPF(10.seconds) { - case cmd: IoDeleteCommand => cmd.file shouldBe gcsFilePath + ioActor.expectMsgPF(10.seconds) { case cmd: IoDeleteCommand => + cmd.file shouldBe gcsFilePath } eventually { @@ -189,63 +246,59 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite testDeleteWorkflowFilesActor ! IoFailure( command = GcsBatchDeleteCommand.forPath(gcsFilePath, swallowIOExceptions = false).get, - failure = new Exception(s"Something is fishy!"), + failure = new Exception(s"Something is fishy!") ) - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Failed) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Failed) } testProbe.expectTerminated(testDeleteWorkflowFilesActor, 10.seconds) } - it should "send success when the failure is FileNotFound" in { testDeleteWorkflowFilesActor ! 
StartWorkflowFilesDeletion - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) } - ioActor.expectMsgPF(10.seconds) { - case cmd: IoDeleteCommand => cmd.file shouldBe gcsFilePath + ioActor.expectMsgPF(10.seconds) { case cmd: IoDeleteCommand => + cmd.file shouldBe gcsFilePath } eventually { testDeleteWorkflowFilesActor.stateName shouldBe WaitingForIoResponses } - val fileNotFoundException = EnhancedCromwellIoException(s"File not found", new FileNotFoundException(gcsFilePath.pathAsString)) + val fileNotFoundException = + EnhancedCromwellIoException(s"File not found", new FileNotFoundException(gcsFilePath.pathAsString)) testDeleteWorkflowFilesActor ! IoFailure( command = GcsBatchDeleteCommand.forPath(gcsFilePath, swallowIOExceptions = false).get, - failure = fileNotFoundException, + failure = fileNotFoundException ) - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Succeeded) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(Succeeded) } testProbe.expectTerminated(testDeleteWorkflowFilesActor, 10.seconds) } - it should "remove any non-file intermediate outputs" in { val expectedIntermediateFiles = Set(gcsFilePath) @@ -258,16 +311,33 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite actualIntermediateFiles shouldBe expectedIntermediateFiles } - it should "delete all file outputs if there are no final outputs" in { finalOutputs = CallOutputs.empty - val gcsFilePath1: Path = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt").get - val gcsFilePath2: Path = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt").get + val gcsFilePath1: Path = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ) + .get + val gcsFilePath2: Path = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt" + ) + .get val expectedIntermediateFiles = Set(gcsFilePath1, gcsFilePath2) - testDeleteWorkflowFilesActor = TestFSMRef(new MockDeleteWorkflowFilesActor(rootWorkflowId, emptyWorkflowIdSet, rootWorkflowRoots, finalOutputs, allOutputs, mockPathBuilders, serviceRegistryActor.ref, 
ioActor.ref)) + testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor(rootWorkflowId, + emptyWorkflowIdSet, + rootWorkflowRoots, + finalOutputs, + allOutputs, + mockPathBuilders, + serviceRegistryActor.ref, + ioActor.ref + ) + ) val actualIntermediateFiles = testDeleteWorkflowFilesActor.underlyingActor.gatherIntermediateOutputFiles( allOutputs.outputs.values.toSet, @@ -280,14 +350,15 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite it should "send failure when delete command creation is unsuccessful for a file" in { val partialIoCommandBuilder = new PartialIoCommandBuilder { - override def deleteCommand: PartialFunction[(Path, Boolean), Try[IoDeleteCommand]] = { - case _ => Failure(new Exception("everything's fine, I am an expected delete fail") with NoStackTrace) + override def deleteCommand: PartialFunction[(Path, Boolean), Try[IoDeleteCommand]] = { case _ => + Failure(new Exception("everything's fine, I am an expected delete fail") with NoStackTrace) } } val ioCommandBuilder = new IoCommandBuilder(List(partialIoCommandBuilder)) - testDeleteWorkflowFilesActor = - TestFSMRef(new MockDeleteWorkflowFilesActor(rootWorkflowId, + testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor( + rootWorkflowId, rootAndSubworkflowIds = emptyWorkflowIdSet, rootWorkflowRoots = rootWorkflowRoots, workflowFinalOutputs = finalOutputs, @@ -295,19 +366,19 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite pathBuilders = mockPathBuilders, serviceRegistryActor = serviceRegistryActor.ref, ioActor = ioActor.ref, - gcsCommandBuilder = ioCommandBuilder, - )) + gcsCommandBuilder = ioCommandBuilder + ) + ) testProbe.watch(testDeleteWorkflowFilesActor) testDeleteWorkflowFilesActor ! StartWorkflowFilesDeletion - serviceRegistryActor.expectMsgPF(10.seconds) { - case m: PutMetadataAction => - val event = m.events.head - m.events.size shouldBe 1 - event.key.workflowId shouldBe rootWorkflowId - event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus - event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) + serviceRegistryActor.expectMsgPF(10.seconds) { case m: PutMetadataAction => + val event = m.events.head + m.events.size shouldBe 1 + event.key.workflowId shouldBe rootWorkflowId + event.key.key shouldBe WorkflowMetadataKeys.FileDeletionStatus + event.value.get.value shouldBe FileDeletionStatus.toDatabaseValue(InProgress) } testProbe.expectTerminated(testDeleteWorkflowFilesActor, 10.seconds) @@ -316,14 +387,40 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite it should "terminate if root workflow has no intermediate outputs to delete" in { - finalOutputs = CallOutputs(Map( - GraphNodeOutputPort(WomIdentifier(LocalName("main_output_1"),FullyQualifiedName("main_workflow.main_output_1")), WomSingleFileType, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt") , - GraphNodeOutputPort(WomIdentifier(LocalName("main_output_2"),FullyQualifiedName("main_workflow.main_output_2")), WomSingleFileType, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt") - )) + finalOutputs = CallOutputs( + Map( + GraphNodeOutputPort(WomIdentifier(LocalName("main_output_1"), + FullyQualifiedName("main_workflow.main_output_1") + ), + WomSingleFileType, + null + ) + -> WomSingleFile( + 
s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ), + GraphNodeOutputPort(WomIdentifier(LocalName("main_output_2"), + FullyQualifiedName("main_workflow.main_output_2") + ), + WomSingleFileType, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file2.txt" + ) + ) + ) - val testDeleteWorkflowFilesActor = TestFSMRef(new MockDeleteWorkflowFilesActor(rootWorkflowId, emptyWorkflowIdSet, rootWorkflowRoots, finalOutputs, allOutputs, mockPathBuilders, serviceRegistryActor.ref, ioActor.ref)) + val testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor(rootWorkflowId, + emptyWorkflowIdSet, + rootWorkflowRoots, + finalOutputs, + allOutputs, + mockPathBuilders, + serviceRegistryActor.ref, + ioActor.ref + ) + ) testProbe watch testDeleteWorkflowFilesActor @@ -332,21 +429,56 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite testProbe.expectTerminated(testDeleteWorkflowFilesActor, 10.seconds) } - it should "remove values that are file names in form of string" in { - allOutputs = allOutputs.copy(outputs = allOutputs.outputs ++ Map( - GraphNodeOutputPort(WomIdentifier(LocalName("file_with_file_path_string_output"),FullyQualifiedName("first_sub_workflow.file_with_file_path_string_output")), WomStringType, null) - -> WomString(s"gs://my_bucket/non_existent_file.txt"), - ExpressionBasedOutputPort(WomIdentifier(LocalName("first_task.first_task_file_with_file_path_output"),FullyQualifiedName("first_sub_workflow.first_task.first_task_file_with_file_path_output")), WomSingleFileType, null, null) - -> WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/file_with_file_path.txt") - )) + allOutputs = allOutputs.copy(outputs = + allOutputs.outputs ++ Map( + GraphNodeOutputPort( + WomIdentifier(LocalName("file_with_file_path_string_output"), + FullyQualifiedName("first_sub_workflow.file_with_file_path_string_output") + ), + WomStringType, + null + ) + -> WomString(s"gs://my_bucket/non_existent_file.txt"), + ExpressionBasedOutputPort( + WomIdentifier( + LocalName("first_task.first_task_file_with_file_path_output"), + FullyQualifiedName("first_sub_workflow.first_task.first_task_file_with_file_path_output") + ), + WomSingleFileType, + null, + null + ) + -> WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/file_with_file_path.txt" + ) + ) + ) - val gcsFilePath1: Path = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt").get - val gcsFilePath2: Path = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/file_with_file_path.txt").get + val gcsFilePath1: Path = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ) + .get + val gcsFilePath2: Path = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/file_with_file_path.txt" + ) + .get val expectedIntermediateFiles = Set(gcsFilePath1, gcsFilePath2) - val testDeleteWorkflowFilesActor = TestFSMRef(new 
MockDeleteWorkflowFilesActor(rootWorkflowId, emptyWorkflowIdSet, rootWorkflowRoots, finalOutputs, allOutputs, mockPathBuilders, serviceRegistryActor.ref, ioActor.ref)) + val testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor(rootWorkflowId, + emptyWorkflowIdSet, + rootWorkflowRoots, + finalOutputs, + allOutputs, + mockPathBuilders, + serviceRegistryActor.ref, + ioActor.ref + ) + ) val actualIntermediateFiles = testDeleteWorkflowFilesActor.underlyingActor.gatherIntermediateOutputFiles( allOutputs.outputs.values.toSet, @@ -356,25 +488,76 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite actualIntermediateFiles shouldBe expectedIntermediateFiles } - it should "identify and gather glob files" in { - allOutputs = allOutputs.copy(outputs = allOutputs.outputs ++ Map( - GraphNodeOutputPort(WomIdentifier(LocalName("glob_output"),FullyQualifiedName("first_sub_workflow.glob_output")), WomMaybeEmptyArrayType(WomSingleFileType), null) - -> WomArray(Seq(WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file1.txt"), - WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file2.txt"))), - ExpressionBasedOutputPort(WomIdentifier(LocalName("first_task.first_task_glob"),FullyQualifiedName("first_sub_workflow.first_task.first_task_glob")), WomMaybeEmptyArrayType(WomSingleFileType), null, null) - -> WomArray(Seq(WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file1.txt"), - WomSingleFile(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file2.txt"))) - )) + allOutputs = allOutputs.copy(outputs = + allOutputs.outputs ++ Map( + GraphNodeOutputPort(WomIdentifier(LocalName("glob_output"), + FullyQualifiedName("first_sub_workflow.glob_output") + ), + WomMaybeEmptyArrayType(WomSingleFileType), + null + ) + -> WomArray( + Seq( + WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file1.txt" + ), + WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file2.txt" + ) + ) + ), + ExpressionBasedOutputPort( + WomIdentifier(LocalName("first_task.first_task_glob"), + FullyQualifiedName("first_sub_workflow.first_task.first_task_glob") + ), + WomMaybeEmptyArrayType(WomSingleFileType), + null, + null + ) + -> WomArray( + Seq( + WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file1.txt" + ), + WomSingleFile( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file2.txt" + ) + ) + ) + ) + ) - val gcsFilePath1: Path = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt").get - val gcsGlobFilePath1: Path = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file1.txt").get - val 
gcsGlobFilePath2: Path = mockPathBuilder.build(s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file2.txt").get + val gcsFilePath1: Path = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/intermediate_file1.txt" + ) + .get + val gcsGlobFilePath1: Path = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file1.txt" + ) + .get + val gcsGlobFilePath2: Path = mockPathBuilder + .build( + s"$rootWorkflowExecutionDir/call-first_sub_workflow/firstSubWf.first_sub_workflow/$subworkflowId/call-first_task/glob-random_id/intermediate_file2.txt" + ) + .get val expectedIntermediateFiles = Set(gcsFilePath1, gcsGlobFilePath1, gcsGlobFilePath2) - val testDeleteWorkflowFilesActor = TestFSMRef(new MockDeleteWorkflowFilesActor(rootWorkflowId, emptyWorkflowIdSet, rootWorkflowRoots, finalOutputs, allOutputs, mockPathBuilders, serviceRegistryActor.ref, ioActor.ref)) + val testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor(rootWorkflowId, + emptyWorkflowIdSet, + rootWorkflowRoots, + finalOutputs, + allOutputs, + mockPathBuilders, + serviceRegistryActor.ref, + ioActor.ref + ) + ) val actualIntermediateFiles = testDeleteWorkflowFilesActor.underlyingActor.gatherIntermediateOutputFiles( allOutputs.outputs.values.toSet, @@ -384,7 +567,6 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite actualIntermediateFiles shouldBe expectedIntermediateFiles } - it should "sanity check in multiple rootWorkflowRoots" in { val expectedIntermediateFiles = Set(gcsFilePath) @@ -393,7 +575,17 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite mockPathBuilder.build(s"gs://my_bucket/main_workflow/yyyy").get, mockPathBuilder.build(rootWorkflowExecutionDir).get ) - val testDeleteWorkflowFilesActor = TestFSMRef(new MockDeleteWorkflowFilesActor(rootWorkflowId, emptyWorkflowIdSet, rootWorkflowRoots, finalOutputs, allOutputs, mockPathBuilders, serviceRegistryActor.ref, ioActor.ref)) + val testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor(rootWorkflowId, + emptyWorkflowIdSet, + rootWorkflowRoots, + finalOutputs, + allOutputs, + mockPathBuilders, + serviceRegistryActor.ref, + ioActor.ref + ) + ) val actualIntermediateFiles = testDeleteWorkflowFilesActor.underlyingActor.gatherIntermediateOutputFiles( allOutputs.outputs.values.toSet, @@ -403,10 +595,19 @@ class DeleteWorkflowFilesActorSpec extends TestKitSuite actualIntermediateFiles shouldBe expectedIntermediateFiles } - it should "not delete outputs if they are outside the root workflow execution directory" in { val rootWorkflowRoots = Set[Path](mockPathBuilder.build(s"gs://my_bucket/main_workflow/xxxx").get) - val testDeleteWorkflowFilesActor = TestFSMRef(new MockDeleteWorkflowFilesActor(rootWorkflowId, emptyWorkflowIdSet, rootWorkflowRoots, finalOutputs, allOutputs, mockPathBuilders, serviceRegistryActor.ref, ioActor.ref)) + val testDeleteWorkflowFilesActor = TestFSMRef( + new MockDeleteWorkflowFilesActor(rootWorkflowId, + emptyWorkflowIdSet, + rootWorkflowRoots, + finalOutputs, + allOutputs, + mockPathBuilders, + serviceRegistryActor.ref, + ioActor.ref + ) + ) val actualIntermediateFiles = testDeleteWorkflowFilesActor.underlyingActor.gatherIntermediateOutputFiles( allOutputs.outputs.values.toSet, @@ -417,9 +618,6 @@ class 
DeleteWorkflowFilesActorSpec extends TestKitSuite
   }
 }
 
-
-
-
 class MockDeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId,
                                    rootAndSubworkflowIds: Set[WorkflowId],
                                    rootWorkflowRoots: Set[Path],
@@ -428,19 +626,18 @@ class MockDeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId,
                                    pathBuilders: PathBuilders,
                                    serviceRegistryActor: ActorRef,
                                    ioActor: ActorRef,
-                                   gcsCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder,
-  ) extends
-  DeleteWorkflowFilesActor(
-    rootWorkflowId,
-    rootAndSubworkflowIds,
-    rootWorkflowRoots,
-    workflowFinalOutputs.outputs.values.toSet,
-    workflowAllOutputs.outputs.values.toSet,
-    pathBuilders,
-    serviceRegistryActor,
-    ioActor,
-    gcsCommandBuilder,
-  ) {
+                                   gcsCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder
+) extends DeleteWorkflowFilesActor(
+      rootWorkflowId,
+      rootAndSubworkflowIds,
+      rootWorkflowRoots,
+      workflowFinalOutputs.outputs.values.toSet,
+      workflowAllOutputs.outputs.values.toSet,
+      pathBuilders,
+      serviceRegistryActor,
+      ioActor,
+      gcsCommandBuilder
+    ) {
 
   // Override the IO actor backoff for the benefit of the backpressure tests:
-  override def initialBackoff(): Backoff = SimpleExponentialBackoff(100.millis, 1.second, 1.2D)
+  override def initialBackoff(): Backoff = SimpleExponentialBackoff(100.millis, 1.second, 1.2d)
 }
diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala
index 6feb8338bab..797321d7ba9 100644
--- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala
+++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala
@@ -34,28 +34,33 @@ object ExecutionStoreBenchmark extends Bench[Double] with DefaultJsonProtocol {
   val inputJson = Option(SampleWdl.PrepareScatterGatherWdl().rawInputs.toJson.compactPrint)
   val namespace = WdlNamespaceWithWorkflow.load(SampleWdl.PrepareScatterGatherWdl().workflowSource(), Seq.empty).get
-  val graph = namespace.toWomExecutable(inputJson, NoIoFunctionSet, strictValidation = true).getOrElse(throw new Exception("Failed to build womExecutable")).graph
+  val graph = namespace
+    .toWomExecutable(inputJson, NoIoFunctionSet, strictValidation = true)
+    .getOrElse(throw new Exception("Failed to build womExecutable"))
+    .graph
   val prepareCall: CommandCallNode = graph.calls.find(_.localName == "do_prepare").get.asInstanceOf[CommandCallNode]
   val scatterCall: CommandCallNode = graph.allNodes.find(_.localName == "do_scatter").get.asInstanceOf[CommandCallNode]
   val scatter: ScatterNode = graph.scatters.head
 
-  private def makeKey(call: CommandCallNode, executionStatus: ExecutionStatus)(index: Int) = {
+  private def makeKey(call: CommandCallNode, executionStatus: ExecutionStatus)(index: Int) =
     BackendJobDescriptorKey(call, Option(index), 1) -> executionStatus
-  }
 
   // Generates executionStores using the sizes given above
   // Each execution store contains X simulated shards of "prepareCall" in status Done and X simulated shards of "scatterCall" in status NotStarted
   // This provides a good starting point to evaluate the speed of "runnableCalls", as it needs to iterate over all "NotStarted" keys, and for each one
   // look for its upstream keys in status "Done"
-  private def stores(sizes: Gen[Int]): Gen[ActiveExecutionStore] = {
+  private def stores(sizes: Gen[Int]): Gen[ActiveExecutionStore] =
     for {
       size <- sizes
       doneMap = (0 until size map makeKey(prepareCall, 
ExecutionStatus.Done)).toMap - collectorKeys = scatter.outputMapping.map(om => ScatterCollectorKey(om, size, ScatterNode.DefaultScatterCollectionFunction) -> ExecutionStatus.NotStarted).toMap + collectorKeys = scatter.outputMapping + .map(om => + ScatterCollectorKey(om, size, ScatterNode.DefaultScatterCollectionFunction) -> ExecutionStatus.NotStarted + ) + .toMap notStartedMap = (0 until size map makeKey(scatterCall, ExecutionStatus.NotStarted)).toMap ++ collectorKeys finalMap = doneMap ++ notStartedMap } yield ActiveExecutionStore(finalMap.toMap, needsUpdate = true) - } performance of "ExecutionStore" in { // Measures how fast the execution store can find runnable calls with lots of "Done" calls and "NotStarted" calls. diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala index fa90e796d95..ebd76a8e5fb 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala @@ -23,7 +23,11 @@ import cromwell.engine.workflow.lifecycle.execution.keys.SubWorkflowKey import cromwell.engine.workflow.lifecycle.execution.stores.ValueStore import cromwell.engine.workflow.workflowstore.{RestartableRunning, StartableState, Submitted} import cromwell.engine.{ContinueWhilePossible, EngineIoFunctions, EngineWorkflowDescriptor} -import cromwell.services.metadata.MetadataService.{MetadataWriteFailure, MetadataWriteSuccess, PutMetadataActionAndRespond} +import cromwell.services.metadata.MetadataService.{ + MetadataWriteFailure, + MetadataWriteSuccess, + PutMetadataActionAndRespond +} import cromwell.subworkflowstore.SubWorkflowStoreActor.{QuerySubWorkflow, SubWorkflowFound, SubWorkflowNotFound} import cromwell.util.WomMocks import org.scalatest.BeforeAndAfterAll @@ -37,8 +41,13 @@ import scala.concurrent.duration._ import scala.language.postfixOps import scala.util.control.NoStackTrace -class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar - with Eventually with BeforeAndAfterAll { +class SubWorkflowExecutionActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with MockSugar + with Eventually + with BeforeAndAfterAll { behavior of "SubWorkflowExecutionActor" @@ -90,35 +99,40 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi parentProbe = TestProbe() } - private def buildSWEA(startState: StartableState = Submitted) = { - new TestFSMRef[SubWorkflowExecutionActorState, SubWorkflowExecutionActorData, SubWorkflowExecutionActor](system, Props( - new SubWorkflowExecutionActor( - subKey, - parentWorkflowDescriptor, - new EngineIoFunctions(List.empty, new AsyncIo(simpleIoActor, DefaultIoCommandBuilder), system.dispatcher), - Map.empty, - ioActorProbe.ref, - serviceRegistryProbe.ref, - jobStoreProbe.ref, - subWorkflowStoreProbe.ref, - callCacheReadActorProbe.ref, - callCacheWriteActorProbe.ref, - dockerHashActorProbe.ref, - jobRestartCheckTokenDispenserProbe.ref, - jobExecutionTokenDispenserProbe.ref, - BackendSingletonCollection(Map.empty), - AllBackendInitializationData(Map.empty), - startState, - rootConfig, - new AtomicInteger(), - fileHashCacheActor = None, - blacklistCache = None - ) { - override def createSubWorkflowPreparationActor(subWorkflowId: WorkflowId): ActorRef = 
preparationActor.ref - override def createSubWorkflowActor(createSubWorkflowActor: EngineWorkflowDescriptor): ActorRef = - subWorkflowActor.ref - }), parentProbe.ref, s"SubWorkflowExecutionActorSpec-${UUID.randomUUID()}") - } + private def buildSWEA(startState: StartableState = Submitted) = + new TestFSMRef[SubWorkflowExecutionActorState, SubWorkflowExecutionActorData, SubWorkflowExecutionActor]( + system, + Props( + new SubWorkflowExecutionActor( + subKey, + parentWorkflowDescriptor, + new EngineIoFunctions(List.empty, new AsyncIo(simpleIoActor, DefaultIoCommandBuilder), system.dispatcher), + Map.empty, + ioActorProbe.ref, + serviceRegistryProbe.ref, + jobStoreProbe.ref, + subWorkflowStoreProbe.ref, + callCacheReadActorProbe.ref, + callCacheWriteActorProbe.ref, + dockerHashActorProbe.ref, + jobRestartCheckTokenDispenserProbe.ref, + jobExecutionTokenDispenserProbe.ref, + BackendSingletonCollection(Map.empty), + AllBackendInitializationData(Map.empty), + startState, + rootConfig, + new AtomicInteger(), + fileHashCacheActor = None, + blacklistCache = None + ) { + override def createSubWorkflowPreparationActor(subWorkflowId: WorkflowId): ActorRef = preparationActor.ref + override def createSubWorkflowActor(createSubWorkflowActor: EngineWorkflowDescriptor): ActorRef = + subWorkflowActor.ref + } + ), + parentProbe.ref, + s"SubWorkflowExecutionActorSpec-${UUID.randomUUID()}" + ) it should "Check the sub workflow store when restarting" in { val swea = buildSWEA(startState = RestartableRunning) @@ -138,7 +152,16 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi swea.setState(SubWorkflowCheckingStoreState) val subWorkflowUuid = WorkflowId.randomId() - swea ! SubWorkflowFound(SubWorkflowStoreEntry(Option(0), parentWorkflowId.toString, subKey.node.fullyQualifiedName, subKey.index.fromIndex, subKey.attempt, subWorkflowUuid.toString, None)) + swea ! SubWorkflowFound( + SubWorkflowStoreEntry(Option(0), + parentWorkflowId.toString, + subKey.node.fullyQualifiedName, + subKey.index.fromIndex, + subKey.attempt, + subWorkflowUuid.toString, + None + ) + ) parentProbe.expectMsg(RequestValueStore) eventually { @@ -210,8 +233,8 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val preparationFailedMessage: CallPreparationFailed = CallPreparationFailed(subWorkflowKey, throwable) swea ! preparationFailedMessage - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => swea ! MetadataWriteSuccess(events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + swea ! MetadataWriteSuccess(events) } parentProbe.expectMsg(SubWorkflowFailedResponse(subKey, Map.empty, throwable)) deathWatch.expectTerminated(swea, awaitTimeout) @@ -228,8 +251,8 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val outputs: CallOutputs = CallOutputs.empty val workflowSuccessfulMessage = WorkflowExecutionSucceededResponse(jobExecutionMap, Set.empty[WorkflowId], outputs) swea ! workflowSuccessfulMessage - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => swea ! MetadataWriteSuccess(events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + swea ! 
MetadataWriteSuccess(events) } parentProbe.expectMsg(SubWorkflowSucceededResponse(subKey, jobExecutionMap, Set.empty[WorkflowId], outputs)) deathWatch.expectTerminated(swea, awaitTimeout) @@ -246,8 +269,8 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val workflowFailedMessage = WorkflowExecutionFailedResponse(jobExecutionMap, expectedException) swea ! workflowFailedMessage - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => swea ! MetadataWriteSuccess(events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + swea ! MetadataWriteSuccess(events) } parentProbe.expectMsg(SubWorkflowFailedResponse(subKey, jobExecutionMap, expectedException)) deathWatch.expectTerminated(swea, awaitTimeout) @@ -265,33 +288,31 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val outputs: CallOutputs = CallOutputs.empty val workflowSuccessfulMessage = WorkflowExecutionSucceededResponse(jobExecutionMap, Set.empty[WorkflowId], outputs) swea ! workflowSuccessfulMessage - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => swea ! MetadataWriteFailure(expectedException, events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + swea ! MetadataWriteFailure(expectedException, events) } import ManyTimes.intWithTimes 10.times { - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => - events.size should be(1) - events.head.key.key should be("status") - events.head.value.get.value should be("Failed") - swea ! MetadataWriteFailure(expectedException, events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + events.size should be(1) + events.head.key.key should be("status") + events.head.value.get.value should be("Failed") + swea ! MetadataWriteFailure(expectedException, events) } // Check there are no messages going to the parent yet: parentProbe.expectNoMessage(10.millis) } // Now let's say eventually the write does somehow get through. - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => swea ! MetadataWriteSuccess(events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + swea ! MetadataWriteSuccess(events) } // The workflow is now considered failed since lots of metadata is probably lost: - parentProbe.expectMsgPF(awaitTimeout) { - case SubWorkflowFailedResponse(`subKey`, `jobExecutionMap`, reason) => - reason.getMessage should be("Sub workflow execution actor unable to write final state to metadata") - reason.getCause should be(expectedException) + parentProbe.expectMsgPF(awaitTimeout) { case SubWorkflowFailedResponse(`subKey`, `jobExecutionMap`, reason) => + reason.getMessage should be("Sub workflow execution actor unable to write final state to metadata") + reason.getCause should be(expectedException) } deathWatch.expectTerminated(swea, awaitTimeout) } @@ -305,8 +326,8 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val jobExecutionMap: JobExecutionMap = Map.empty val workflowAbortedMessage = WorkflowExecutionAbortedResponse(jobExecutionMap) swea ! 
workflowAbortedMessage - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => swea ! MetadataWriteSuccess(events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + swea ! MetadataWriteSuccess(events) } parentProbe.expectMsg(SubWorkflowAbortedResponse(subKey, jobExecutionMap)) deathWatch.expectTerminated(swea, awaitTimeout) @@ -316,7 +337,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val swea = buildSWEA() swea.setState( SubWorkflowRunningState, - SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), Option(subWorkflowActor.ref)), + SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), Option(subWorkflowActor.ref)) ) deathWatch watch swea @@ -325,8 +346,8 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi swea ! EngineLifecycleActorAbortCommand subWorkflowActor.expectMsg(EngineLifecycleActorAbortCommand) subWorkflowActor.reply(WorkflowExecutionAbortedResponse(jobExecutionMap)) - serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { - case PutMetadataActionAndRespond(events, _, _) => swea ! MetadataWriteSuccess(events) + serviceRegistryProbe.fishForSpecificMessage(awaitTimeout) { case PutMetadataActionAndRespond(events, _, _) => + swea ! MetadataWriteSuccess(events) } parentProbe.expectMsg(SubWorkflowAbortedResponse(subKey, jobExecutionMap)) deathWatch.expectTerminated(swea, awaitTimeout) diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala index 61877de8a1a..094a41c78a5 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActorSpec.scala @@ -15,7 +15,12 @@ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers import spray.json.{JsArray, JsField, JsObject, JsString, JsValue} -class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender with Eventually { +class CallCacheDiffActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with ImplicitSender + with Eventually { behavior of "CallCacheDiffActor" @@ -50,23 +55,37 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc ) val eventsA = List( - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "executionStatus"), MetadataValue("Done")), - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:allowResultReuse"), MetadataValue(true)), - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in only in A"), MetadataValue("hello from A")), - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with same value"), MetadataValue("we are thinking the same thought")), - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with different value"), MetadataValue("I'm the hash for A !")) + MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "executionStatus"), MetadataValue("Done")), + MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:allowResultReuse"), MetadataValue(true)), + MetadataEvent(MetadataKey(workflowIdA, 
metadataJobKeyA, "callCaching:hashes:hash in only in A"), + MetadataValue("hello from A") + ), + MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with same value"), + MetadataValue("we are thinking the same thought") + ), + MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with different value"), + MetadataValue("I'm the hash for A !") + ) ) - val workflowMetadataA: JsObject = MetadataBuilderActor.workflowMetadataResponse(workflowIdA, eventsA, includeCallsIfEmpty = false, Map.empty) + val workflowMetadataA: JsObject = + MetadataBuilderActor.workflowMetadataResponse(workflowIdA, eventsA, includeCallsIfEmpty = false, Map.empty) val responseForA = SuccessfulMetadataJsonResponse(MetadataService.GetMetadataAction(queryA), workflowMetadataA) val eventsB = List( MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "executionStatus"), MetadataValue("Failed")), MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "callCaching:allowResultReuse"), MetadataValue(false)), - MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "callCaching:hashes:hash in only in B"), MetadataValue("hello from B")), - MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "callCaching:hashes:hash in A and B with same value"), MetadataValue("we are thinking the same thought")), - MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "callCaching:hashes:hash in A and B with different value"), MetadataValue("I'm the hash for B !")) + MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "callCaching:hashes:hash in only in B"), + MetadataValue("hello from B") + ), + MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "callCaching:hashes:hash in A and B with same value"), + MetadataValue("we are thinking the same thought") + ), + MetadataEvent(MetadataKey(workflowIdB, metadataJobKeyB, "callCaching:hashes:hash in A and B with different value"), + MetadataValue("I'm the hash for B !") + ) ) - val workflowMetadataB: JsObject = MetadataBuilderActor.workflowMetadataResponse(workflowIdB, eventsB, includeCallsIfEmpty = false, Map.empty) + val workflowMetadataB: JsObject = + MetadataBuilderActor.workflowMetadataResponse(workflowIdB, eventsB, includeCallsIfEmpty = false, Map.empty) val responseForB = SuccessfulMetadataJsonResponse(MetadataService.GetMetadataAction(queryB), workflowMetadataB) it should "send correct queries to MetadataService when receiving a CallCacheDiffRequest" in { @@ -90,7 +109,12 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc actor ! responseForA eventually { - actor.stateData shouldBe CallCacheDiffWithRequest(queryA, queryB, Some(WorkflowMetadataJson(workflowMetadataA)), None, self) + actor.stateData shouldBe CallCacheDiffWithRequest(queryA, + queryB, + Some(WorkflowMetadataJson(workflowMetadataA)), + None, + self + ) actor.stateName shouldBe WaitingForMetadata } @@ -106,7 +130,12 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc actor ! 
responseForB eventually { - actor.stateData shouldBe CallCacheDiffWithRequest(queryA, queryB, None, Some(WorkflowMetadataJson(workflowMetadataB)), self) + actor.stateData shouldBe CallCacheDiffWithRequest(queryA, + queryB, + None, + Some(WorkflowMetadataJson(workflowMetadataB)), + self + ) actor.stateName shouldBe WaitingForMetadata } @@ -118,7 +147,9 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc val actor = TestFSMRef(new CallCacheDiffActor(mockServiceRegistryActor.ref)) watch(actor) - actor.setState(WaitingForMetadata, CallCacheDiffWithRequest(queryA, queryB, None, Some(WorkflowMetadataJson(workflowMetadataB)), self)) + actor.setState(WaitingForMetadata, + CallCacheDiffWithRequest(queryA, queryB, None, Some(WorkflowMetadataJson(workflowMetadataB)), self) + ) actor ! responseForA @@ -131,7 +162,9 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc val actor = TestFSMRef(new CallCacheDiffActor(mockServiceRegistryActor.ref)) watch(actor) - actor.setState(WaitingForMetadata, CallCacheDiffWithRequest(queryA, queryB, Some(WorkflowMetadataJson(workflowMetadataA)), None, self)) + actor.setState(WaitingForMetadata, + CallCacheDiffWithRequest(queryA, queryB, Some(WorkflowMetadataJson(workflowMetadataA)), None, self) + ) actor ! responseForB @@ -194,11 +227,11 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc expectMsgPF() { case r: SuccessfulCallCacheDiffResponse => withClue(s""" - |Expected: - |${correctCallCacheDiff.prettyPrint} - | - |Actual: - |${r.toJson.prettyPrint}""".stripMargin) { + |Expected: + |${correctCallCacheDiff.prettyPrint} + | + |Actual: + |${r.toJson.prettyPrint}""".stripMargin) { r.toJson should be(correctCallCacheDiff) } case other => fail(s"Expected SuccessfulCallCacheDiffResponse but got $other") @@ -219,19 +252,30 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc val eventsAAttempt1 = List( MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "executionStatus"), MetadataValue("Failed")), MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:allowResultReuse"), MetadataValue(false)), - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in only in A"), MetadataValue("ouch!")), - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with same value"), MetadataValue("ouch!")), - MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with different value"), MetadataValue("ouch!")) + MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in only in A"), + MetadataValue("ouch!") + ), + MetadataEvent(MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with same value"), + MetadataValue("ouch!") + ), + MetadataEvent( + MetadataKey(workflowIdA, metadataJobKeyA, "callCaching:hashes:hash in A and B with different value"), + MetadataValue("ouch!") + ) ) // And update the old "eventsA" to represent attempt 2: - val eventsAAttempt2 = eventsA.map(event => event.copy(key = event.key.copy(jobKey = event.key.jobKey.map(_.copy(attempt = 2))))) + val eventsAAttempt2 = + eventsA.map(event => event.copy(key = event.key.copy(jobKey = event.key.jobKey.map(_.copy(attempt = 2))))) val modifiedEventsA = eventsAAttempt1 ++ eventsAAttempt2 - val workflowMetadataA: JsObject = MetadataBuilderActor.workflowMetadataResponse(workflowIdA, modifiedEventsA, includeCallsIfEmpty = false, Map.empty) + val 
workflowMetadataA: JsObject = MetadataBuilderActor.workflowMetadataResponse(workflowIdA, + modifiedEventsA, + includeCallsIfEmpty = false, + Map.empty + ) val responseForA = SuccessfulMetadataJsonResponse(MetadataService.GetMetadataAction(queryA), workflowMetadataA) - actor ! responseForB actor ! responseForA @@ -267,9 +311,8 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc actor ! responseA - expectMsgPF(1 second) { - case FailedCallCacheDiffResponse(e: Throwable) => - e.getMessage shouldBe "Query lookup failed - but it's ok ! this is a test !" + expectMsgPF(1 second) { case FailedCallCacheDiffResponse(e: Throwable) => + e.getMessage shouldBe "Query lookup failed - but it's ok ! this is a test !" } expectTerminated(actor) @@ -278,18 +321,20 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc it should "respond with an appropriate error if calls' hashes are missing" in { testExpectedErrorForModifiedMetadata( metadataFilter = _.key.key.contains("hashes"), - error = s"""Failed to calculate diff for call A and call B: - |Failed to extract relevant metadata for call A (971652a6-139c-4ef3-96b5-aeb611a40dbf / callFqnA:1) (reason 1 of 1): No 'hashes' field found - |Failed to extract relevant metadata for call B (bb85b3ec-e179-4f12-b90f-5191216da598 / callFqnB:-1) (reason 1 of 1): No 'hashes' field found""".stripMargin + error = + s"""Failed to calculate diff for call A and call B: + |Failed to extract relevant metadata for call A (971652a6-139c-4ef3-96b5-aeb611a40dbf / callFqnA:1) (reason 1 of 1): No 'hashes' field found + |Failed to extract relevant metadata for call B (bb85b3ec-e179-4f12-b90f-5191216da598 / callFqnB:-1) (reason 1 of 1): No 'hashes' field found""".stripMargin ) } it should "respond with an appropriate error if both calls are missing" in { testExpectedErrorForModifiedMetadata( metadataFilter = _.key.jobKey.nonEmpty, - error = s"""Failed to calculate diff for call A and call B: - |Failed to extract relevant metadata for call A (971652a6-139c-4ef3-96b5-aeb611a40dbf / callFqnA:1) (reason 1 of 1): No 'calls' field found - |Failed to extract relevant metadata for call B (bb85b3ec-e179-4f12-b90f-5191216da598 / callFqnB:-1) (reason 1 of 1): No 'calls' field found""".stripMargin + error = + s"""Failed to calculate diff for call A and call B: + |Failed to extract relevant metadata for call A (971652a6-139c-4ef3-96b5-aeb611a40dbf / callFqnA:1) (reason 1 of 1): No 'calls' field found + |Failed to extract relevant metadata for call B (bb85b3ec-e179-4f12-b90f-5191216da598 / callFqnB:-1) (reason 1 of 1): No 'calls' field found""".stripMargin ) } @@ -301,7 +346,9 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc def str(s: String): JsString = JsString(s) it should "handle nested JsObjects if field names collide" in { - val objectToParse = obj("hashes" -> obj("subObj" -> obj("field" -> str("fieldValue1"), "subObj" -> obj("field" -> str("fieldValue2"))))) + val objectToParse = obj( + "hashes" -> obj("subObj" -> obj("field" -> str("fieldValue1"), "subObj" -> obj("field" -> str("fieldValue2")))) + ) val res = CallCacheDiffActor.extractHashes(objectToParse).toOption res should be(defined) @@ -324,9 +371,16 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc val actor = TestFSMRef(new CallCacheDiffActor(mockServiceRegistryActor.ref)) watch(actor) - def getModifiedResponse(workflowId: WorkflowId, query: MetadataQuery, events: Seq[MetadataEvent]): 
SuccessfulMetadataJsonResponse = { + def getModifiedResponse(workflowId: WorkflowId, + query: MetadataQuery, + events: Seq[MetadataEvent] + ): SuccessfulMetadataJsonResponse = { val modifiedEvents = events.filterNot(metadataFilter) // filters out any "call" level metadata - val modifiedWorkflowMetadata = MetadataBuilderActor.workflowMetadataResponse(workflowId, modifiedEvents, includeCallsIfEmpty = false, Map.empty) + val modifiedWorkflowMetadata = MetadataBuilderActor.workflowMetadataResponse(workflowId, + modifiedEvents, + includeCallsIfEmpty = false, + Map.empty + ) SuccessfulMetadataJsonResponse(MetadataService.GetMetadataAction(query), modifiedWorkflowMetadata) } @@ -335,8 +389,8 @@ class CallCacheDiffActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc actor ! getModifiedResponse(workflowIdA, queryA, eventsA) actor ! getModifiedResponse(workflowIdB, queryB, eventsB) - expectMsgPF(1 second) { - case FailedCallCacheDiffResponse(e) => e.getMessage shouldBe error + expectMsgPF(1 second) { case FailedCallCacheDiffResponse(e) => + e.getMessage shouldBe error } expectTerminated(actor) } diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorDataSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorDataSpec.scala index 4cb279602c2..13b1a2205de 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorDataSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorDataSpec.scala @@ -5,13 +5,24 @@ import cromwell.backend._ import cromwell.backend.standard.callcaching.StandardFileHashingActor.SingleFileHashRequest import cromwell.core.TestKitSuite import cromwell.core.callcaching._ -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{CallCacheHashingJobActorData, CompleteFileHashingResult, NoFileHashesResult, PartialFileHashingResult} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{ + CallCacheHashingJobActorData, + CompleteFileHashingResult, + NoFileHashesResult, + PartialFileHashingResult +} import org.scalatest.concurrent.Eventually import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -class CallCacheHashingJobActorDataSpec extends TestKitSuite with AnyFlatSpecLike with BackendSpec with Matchers with Eventually with TableDrivenPropertyChecks { +class CallCacheHashingJobActorDataSpec + extends TestKitSuite + with AnyFlatSpecLike + with BackendSpec + with Matchers + with Eventually + with TableDrivenPropertyChecks { behavior of "CallCacheReadingJobActorData" private val fileHash1 = HashResult(HashKey("key"), HashValue("value")) @@ -20,71 +31,107 @@ class CallCacheHashingJobActorDataSpec extends TestKitSuite with AnyFlatSpecLike private val fileHashRequest1 = SingleFileHashRequest(null, fileHash1.hashKey, null, null) private val fileHashRequest2 = SingleFileHashRequest(null, fileHash2.hashKey, null, null) private val fileHashRequest3 = SingleFileHashRequest(null, fileHash3.hashKey, null, null) - + private val testCases = Table( ("dataBefore", "dataAfter", "result"), // No fileHashRequestsRemaining ( CallCacheHashingJobActorData( - List.empty, List.empty, None, 50 + List.empty, + List.empty, + None, + 50 ), CallCacheHashingJobActorData( - List.empty, List(fileHash1), None, 50 + 
List.empty, + List(fileHash1), + None, + 50 ), Option(NoFileHashesResult) ), // Last fileHashRequestsRemaining ( CallCacheHashingJobActorData( - List(List(fileHashRequest1)), List.empty, None, 50 + List(List(fileHashRequest1)), + List.empty, + None, + 50 ), CallCacheHashingJobActorData( - List.empty, List(fileHash1), None, 50 + List.empty, + List(fileHash1), + None, + 50 ), Option(CompleteFileHashingResult(Set(fileHash1), "6A02F950958AEDA3DBBF83FBB306A030")) ), // Last batch and not last value ( CallCacheHashingJobActorData( - List(List(fileHashRequest1, fileHashRequest2)), List.empty, None, 50 + List(List(fileHashRequest1, fileHashRequest2)), + List.empty, + None, + 50 ), CallCacheHashingJobActorData( - List(List(fileHashRequest2)), List(fileHash1), None, 50 + List(List(fileHashRequest2)), + List(fileHash1), + None, + 50 ), None ), // Not last batch but last value of this batch ( CallCacheHashingJobActorData( - List(List(fileHashRequest1), List(fileHashRequest2)), List.empty, None, 50 + List(List(fileHashRequest1), List(fileHashRequest2)), + List.empty, + None, + 50 ), CallCacheHashingJobActorData( - List(List(fileHashRequest2)), List(fileHash1), None, 50 + List(List(fileHashRequest2)), + List(fileHash1), + None, + 50 ), Option(PartialFileHashingResult(NonEmptyList.of(fileHash1))) ), // Not last batch and not last value of this batch ( CallCacheHashingJobActorData( - List(List(fileHashRequest1, fileHashRequest2), List(fileHashRequest3)), List.empty, None, 50 + List(List(fileHashRequest1, fileHashRequest2), List(fileHashRequest3)), + List.empty, + None, + 50 ), CallCacheHashingJobActorData( - List(List(fileHashRequest2), List(fileHashRequest3)), List(fileHash1), None, 50 + List(List(fileHashRequest2), List(fileHashRequest3)), + List(fileHash1), + None, + 50 ), None ), // Makes sure new hash is added at the front of the list ( CallCacheHashingJobActorData( - List(List(fileHashRequest1, fileHashRequest2), List(fileHashRequest3)), List(fileHash2), None, 50 + List(List(fileHashRequest1, fileHashRequest2), List(fileHashRequest3)), + List(fileHash2), + None, + 50 ), CallCacheHashingJobActorData( - List(List(fileHashRequest2), List(fileHashRequest3)), List(fileHash1, fileHash2), None, 50 + List(List(fileHashRequest2), List(fileHashRequest3)), + List(fileHash1, fileHash2), + None, + 50 ), None ) ) - + it should "process new file hashes" in { forAll(testCases) { case (oldData, newData, result) => oldData.withFileHash(fileHash1) shouldBe (newData -> result) diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala index 602d04c145d..b70722c4200 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala @@ -10,7 +10,17 @@ import cromwell.backend._ import cromwell.backend.standard.callcaching.StandardFileHashingActor.{FileHashResponse, SingleFileHashRequest} import cromwell.core.TestKitSuite import cromwell.core.callcaching.{HashingFailedMessage, _} -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{CCHJAFileHashResponse, CallCacheHashingJobActorData, CompleteFileHashingResult, HashingFiles, InitialHashingResult, NextBatchOfFileHashesRequest, NoFileHashesResult, PartialFileHashingResult, 
WaitingForHashFileRequest} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{ + CallCacheHashingJobActorData, + CCHJAFileHashResponse, + CompleteFileHashingResult, + HashingFiles, + InitialHashingResult, + NextBatchOfFileHashesRequest, + NoFileHashesResult, + PartialFileHashingResult, + WaitingForHashFileRequest +} import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.CacheMiss import cromwell.util.WomMocks import org.scalatest.Assertion @@ -24,12 +34,20 @@ import wom.values.{WomInteger, WomSingleFile, WomString, WomValue} import scala.util.control.NoStackTrace -class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike with BackendSpec with Matchers - with Eventually with TableDrivenPropertyChecks with MockSugar { +class CallCacheHashingJobActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with BackendSpec + with Matchers + with Eventually + with TableDrivenPropertyChecks + with MockSugar { behavior of "CallCacheReadingJobActor" def templateJobDescriptor(inputs: Map[LocallyQualifiedName, WomValue] = Map.empty): BackendJobDescriptor = { - val task = WomMocks.mockTaskDefinition("task").copy(commandTemplateBuilder = Function.const(List(StringCommandPart("Do the stuff... now!!")).validNel)) + val task = WomMocks + .mockTaskDefinition("task") + .copy(commandTemplateBuilder = Function.const(List(StringCommandPart("Do the stuff... now!!")).validNel)) val call = WomMocks.mockTaskCall(WomIdentifier("call"), definition = task) val workflowDescriptor = mock[BackendWorkflowDescriptor] val runtimeAttributes = Map( @@ -38,24 +56,34 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit "continueOnReturnCode" -> WomInteger(0), "docker" -> WomString("ubuntu:latest") ) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, BackendJobDescriptorKey(call, None, 1), runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + BackendJobDescriptorKey(call, None, 1), + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) jobDescriptor } it should "die immediately if created without cache read actor and write to cache turned off" in { val parent = TestProbe() - val testActor = TestFSMRef(new CallCacheHashingJobActor( - templateJobDescriptor(), - None, - None, - Set.empty, - "backedName", - Props.empty, - DockerWithHash("ubuntu@sha256:blablablba"), - CallCachingActivity(readWriteMode = ReadCache), - callCachePathPrefixes = None, - fileHashBatchSize = 100 - ), parent.ref) + val testActor = TestFSMRef( + new CallCacheHashingJobActor( + templateJobDescriptor(), + None, + None, + Set.empty, + "backedName", + Props.empty, + DockerWithHash("ubuntu@sha256:blablablba"), + CallCachingActivity(readWriteMode = ReadCache), + callCachePathPrefixes = None, + fileHashBatchSize = 100 + ), + parent.ref + ) watch(testActor) expectTerminated(testActor) parent.expectMsgClass(classOf[InitialHashingResult]) @@ -80,18 +108,21 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit ) val callCacheRead = TestProbe() val jobDescriptor: BackendJobDescriptor = templateJobDescriptor(inputs) - val actorUnderTest = TestFSMRef(new CallCacheHashingJobActor( - jobDescriptor, - Option(callCacheRead.ref), - None, - runtimeAttributeDefinitions, - "backedName", - Props.empty, - DockerWithHash("ubuntu@sha256:blablablba"), - CallCachingActivity(readWriteMode = 
ReadAndWriteCache), - callCachePathPrefixes = None, - fileHashBatchSize = 100 - ), parent.ref) + val actorUnderTest = TestFSMRef( + new CallCacheHashingJobActor( + jobDescriptor, + Option(callCacheRead.ref), + None, + runtimeAttributeDefinitions, + "backedName", + Props.empty, + DockerWithHash("ubuntu@sha256:blablablba"), + CallCachingActivity(readWriteMode = ReadAndWriteCache), + callCachePathPrefixes = None, + fileHashBatchSize = 100 + ), + parent.ref + ) val expectedInitialHashes = Set( // md5 of Do the stuff... now @@ -104,7 +135,9 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit HashResult(HashKey("output count"), HashValue("CFCD208495D565EF66E7DFF9F98764DA")), HashResult(HashKey("runtime attribute", "failOnStderr"), HashValue("N/A")), // md5 of 1 - HashResult(HashKey(checkForHitOrMiss = false, "runtime attribute", "cpu"), HashValue("C4CA4238A0B923820DCC509A6F75849B")), + HashResult(HashKey(checkForHitOrMiss = false, "runtime attribute", "cpu"), + HashValue("C4CA4238A0B923820DCC509A6F75849B") + ), // md5 of 0 HashResult(HashKey("runtime attribute", "continueOnReturnCode"), HashValue("CFCD208495D565EF66E7DFF9F98764DA")), // md5 of "hello" (with quotes) @@ -128,25 +161,32 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit testFileHashingActor: ActorRef, parent: ActorRef, writeToCache: Boolean = true, - addFileHashMockResult: Option[(CallCacheHashingJobActorData, Option[CCHJAFileHashResponse])] = None): TestFSMRef[CallCacheHashingJobActor.CallCacheHashingJobActorState, CallCacheHashingJobActorData, CallCacheHashingJobActor] = { - TestFSMRef(new CallCacheHashingJobActor( - templateJobDescriptor(), - callCacheReader, - None, - Set.empty, - "backend", - Props.empty, - DockerWithHash("ubuntu@256:blablabla"), - CallCachingActivity(readWriteMode = if (writeToCache) ReadAndWriteCache else ReadCache), - callCachePathPrefixes = None, - fileHashBatchSize = 100 - ) { - override def makeFileHashingActor(): ActorRef = testFileHashingActor - override def addFileHash(hashResult: HashResult, data: CallCacheHashingJobActorData): (CallCacheHashingJobActorData, Option[CCHJAFileHashResponse]) = { - addFileHashMockResult.getOrElse(super.addFileHash(hashResult, data)) - } - }, parent) - } + addFileHashMockResult: Option[(CallCacheHashingJobActorData, Option[CCHJAFileHashResponse])] = None + ): TestFSMRef[CallCacheHashingJobActor.CallCacheHashingJobActorState, + CallCacheHashingJobActorData, + CallCacheHashingJobActor + ] = + TestFSMRef( + new CallCacheHashingJobActor( + templateJobDescriptor(), + callCacheReader, + None, + Set.empty, + "backend", + Props.empty, + DockerWithHash("ubuntu@256:blablabla"), + CallCachingActivity(readWriteMode = if (writeToCache) ReadAndWriteCache else ReadCache), + callCachePathPrefixes = None, + fileHashBatchSize = 100 + ) { + override def makeFileHashingActor(): ActorRef = testFileHashingActor + override def addFileHash(hashResult: HashResult, + data: CallCacheHashingJobActorData + ): (CallCacheHashingJobActorData, Option[CCHJAFileHashResponse]) = + addFileHashMockResult.getOrElse(super.addFileHash(hashResult, data)) + }, + parent + ) it should "send hash file requests when receiving a NextBatchOfFileHashesRequest" in { val callCacheReadProbe = TestProbe() @@ -198,7 +238,12 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit // still gives a CCReader when instantiating the actor, but not in the data (above) // This ensures the check is done with the data and not the actor 
attribute, as the data will change if the ccreader dies but the actor attribute // will stay Some(...) - val cchja = makeCCHJA(Option(TestProbe().ref), fileHashingActor.ref, TestProbe().ref, writeToCache = true, Option(newData -> Option(result))) + val cchja = makeCCHJA(Option(TestProbe().ref), + fileHashingActor.ref, + TestProbe().ref, + writeToCache = true, + Option(newData -> Option(result)) + ) watch(cchja) cchja.setState(HashingFiles) @@ -211,19 +256,24 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit List( ("send itself a NextBatchOfFileHashesRequest when a batch is complete and there is no CC Reader", None), - ("send itself a NextBatchOfFileHashesRequest when a batch is complete and there is a CC Reader", Option(TestProbe().ref)) - ) foreach { - case (description, ccReader) => - it should description in selfSendNextBatchRequest(ccReader) + ("send itself a NextBatchOfFileHashesRequest when a batch is complete and there is a CC Reader", + Option(TestProbe().ref) + ) + ) foreach { case (description, ccReader) => + it should description in selfSendNextBatchRequest(ccReader) } it should "send FinalFileHashingResult to parent and CCReader and die" in { val parent = TestProbe() val callCacheReadProbe = TestProbe() - List(CompleteFileHashingResult(Set(mock[HashResult]), "AggregatedFileHash"), NoFileHashesResult) foreach - { result => + List(CompleteFileHashingResult(Set(mock[HashResult]), "AggregatedFileHash"), NoFileHashesResult) foreach { result => val newData = CallCacheHashingJobActorData(List.empty, List.empty, Option(callCacheReadProbe.ref), 50) - val cchja = makeCCHJA(Option(callCacheReadProbe.ref), TestProbe().ref, parent.ref, writeToCache = true, Option(newData -> Option(result))) + val cchja = makeCCHJA(Option(callCacheReadProbe.ref), + TestProbe().ref, + parent.ref, + writeToCache = true, + Option(newData -> Option(result)) + ) parent.expectMsgClass(classOf[InitialHashingResult]) callCacheReadProbe.expectMsgClass(classOf[InitialHashingResult]) @@ -242,8 +292,14 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit it should "wait for next file hash if the batch is not complete yet" in { val callCacheReadProbe = TestProbe() val parent = TestProbe() - val newData: CallCacheHashingJobActorData = CallCacheHashingJobActorData(List.empty, List.empty, Option(callCacheReadProbe.ref), 50) - val cchja = makeCCHJA(Option(callCacheReadProbe.ref), TestProbe().ref, parent.ref, writeToCache = true, Option(newData -> None)) + val newData: CallCacheHashingJobActorData = + CallCacheHashingJobActorData(List.empty, List.empty, Option(callCacheReadProbe.ref), 50) + val cchja = makeCCHJA(Option(callCacheReadProbe.ref), + TestProbe().ref, + parent.ref, + writeToCache = true, + Option(newData -> None) + ) parent.expectMsgClass(classOf[InitialHashingResult]) callCacheReadProbe.expectMsgClass(classOf[InitialHashingResult]) diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala index a39597fa347..78106628ca3 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala @@ -2,10 +2,19 @@ package cromwell.engine.workflow.lifecycle.execution.callcaching import 
akka.testkit.{TestFSMRef, TestProbe} import cromwell.core.TestKitSuite -import cromwell.core.callcaching.{HashKey, HashResult, HashValue, HashingFailedMessage} -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{CompleteFileHashingResult, InitialHashingResult, NextBatchOfFileHashesRequest, NoFileHashesResult} +import cromwell.core.callcaching.{HashingFailedMessage, HashKey, HashResult, HashValue} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{ + CompleteFileHashingResult, + InitialHashingResult, + NextBatchOfFileHashesRequest, + NoFileHashesResult +} import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadActor._ -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor.{CCRJAWithData, WaitingForCacheHitOrMiss, _} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor.{ + CCRJAWithData, + WaitingForCacheHitOrMiss, + _ +} import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{CacheHit, CacheMiss, HashError} import cromwell.services.CallCaching.CallCachingEntryId import org.scalatest.concurrent.Eventually @@ -35,7 +44,9 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None)) - actorUnderTest.setState(WaitingForHashCheck, CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, Set.empty)) + actorUnderTest.setState(WaitingForHashCheck, + CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, Set.empty) + ) callCacheReadProbe.send(actorUnderTest, HasMatchingEntries) callCacheHashingActor.expectMsg(NextBatchOfFileHashesRequest) @@ -49,10 +60,13 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheHashingActor = TestProbe() val parent = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) - actorUnderTest.setState(WaitingForHashCheck, CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, Set.empty)) + actorUnderTest.setState(WaitingForHashCheck, + CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, Set.empty) + ) callCacheReadProbe.send(actorUnderTest, NoMatchingEntries) parent.expectMsg(CacheMiss) @@ -63,19 +77,31 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" val aggregatedFileHash: String = "AggregatedFileHash" - actorUnderTest.setState(WaitingForFileHashes, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) + actorUnderTest.setState(WaitingForFileHashes, + CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty) + ) val fileHashes = Set(HashResult(HashKey("f1"), 
HashValue("h1")), HashResult(HashKey("f2"), HashValue("h2"))) callCacheHashingActor.send(actorUnderTest, CompleteFileHashingResult(fileHashes, aggregatedFileHash)) - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), Set.empty, prefixesHint = None)) + callCacheReadProbe.expectMsg( + CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), + Set.empty, + prefixesHint = None + ) + ) eventually { actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss - actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, Some(aggregatedFileHash), Set.empty) + actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, + aggregatedInitialHash, + Some(aggregatedFileHash), + Set.empty + ) } } @@ -83,13 +109,18 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForFileHashes, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) + actorUnderTest.setState(WaitingForFileHashes, + CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty) + ) callCacheHashingActor.send(actorUnderTest, NoFileHashesResult) - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), Set.empty, prefixesHint = None)) + callCacheReadProbe.expectMsg( + CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), Set.empty, prefixesHint = None) + ) eventually { actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss @@ -102,10 +133,13 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheHashingActor = TestProbe() val parent = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, + CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty) + ) val id: CallCachingEntryId = CallCachingEntryId(8) callCacheReadProbe.send(actorUnderTest, CacheLookupNextHit(id)) @@ -122,11 +156,14 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheHashingActor = TestProbe() val parent = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, + 
CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty) + ) callCacheReadProbe.send(actorUnderTest, CacheLookupNoHit) parent.expectMsg(CacheMiss) @@ -138,14 +175,19 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" val seenCaches: Set[CallCachingEntryId] = Set(CallCachingEntryId(0)) - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, seenCaches)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, + CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, seenCaches) + ) actorUnderTest ! NextHit - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), seenCaches, prefixesHint = None)) + callCacheReadProbe.expectMsg( + CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), seenCaches, prefixesHint = None) + ) actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss } @@ -154,15 +196,24 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" val aggregatedFileHash: String = "AggregatedFileHash" val seenCaches: Set[CallCachingEntryId] = Set(CallCachingEntryId(0)) - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, Option(aggregatedFileHash), seenCaches)) + actorUnderTest.setState( + WaitingForCacheHitOrMiss, + CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, Option(aggregatedFileHash), seenCaches) + ) actorUnderTest ! 
NextHit - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, Option(aggregatedFileHash)), seenCaches, prefixesHint = None)) + callCacheReadProbe.expectMsg( + CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, Option(aggregatedFileHash)), + seenCaches, + prefixesHint = None + ) + ) actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss } @@ -172,11 +223,14 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheHashingActor = TestProbe() val parent = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, + CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty) + ) callCacheHashingActor.send(actorUnderTest, HashingFailedMessage("file", new Exception("Hashing failed"))) parent.expectMsg(CacheMiss) @@ -189,11 +243,14 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheHashingActor = TestProbe() val parent = TestProbe() - val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) + val actorUnderTest = + TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, + CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty) + ) val reason: Exception = new Exception("Lookup failed") callCacheHashingActor.send(actorUnderTest, CacheResultLookupFailure(reason)) diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala index 279ee80681c..d3dcf58e3d3 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala @@ -18,7 +18,11 @@ import org.scalatest.time.{Millis, Seconds, Span} import scala.concurrent.ExecutionContext class CallCachingSlickDatabaseSpec - extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures with BeforeAndAfterAll + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalaFutures + with BeforeAndAfterAll with TableDrivenPropertyChecks { implicit val ec: ExecutionContext = ExecutionContext.global @@ -30,7 +34,7 @@ class CallCachingSlickDatabaseSpec ("description", "prefixOption"), ("without prefixes", None), ("with some prefixes", Option(List("prefix1", "prefix2", "prefix3", "prefix4"))), - ("with thousands of prefixes", Option((1 to 10000).map("prefix" + _).toList)), + ("with thousands of prefixes", Option((1 to 10000).map("prefix" + _).toList)) ) 
DatabaseSystem.All foreach { databaseSystem => @@ -38,14 +42,14 @@ class CallCachingSlickDatabaseSpec val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - lazy val dataAccess = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) + lazy val dataAccess = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) it should "start container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } forAll(allowResultReuseTests) { (description, prefixOption) => - val idA = WorkflowId.randomId().toString val callA = "AwesomeWorkflow.GoodJob" val callCachingEntryA = CallCachingEntry( @@ -85,14 +89,15 @@ class CallCachingSlickDatabaseSpec it should s"honor allowResultReuse $description" taggedAs DbmsTest in { (for { _ <- dataAccess.addCallCaching(Seq( - CallCachingJoin( - callCachingEntryA, - callCachingHashEntriesA, - aggregation, - callCachingSimpletonsA, callCachingDetritusesA - ) - ), - 100 + CallCachingJoin( + callCachingEntryA, + callCachingHashEntriesA, + aggregation, + callCachingSimpletonsA, + callCachingDetritusesA + ) + ), + 100 ) hasBaseAggregation <- dataAccess.hasMatchingCallCachingEntriesForBaseAggregation( "BASE_AGGREGATION", @@ -115,20 +120,16 @@ class CallCachingSlickDatabaseSpec _ = join shouldBe defined getJoin = join.get // We can't compare directly because the ones out from the DB have IDs filled in, so just compare the relevant values - _ = getJoin - .callCachingHashEntries + _ = getJoin.callCachingHashEntries .map(e => (e.hashKey, e.hashValue)) should contain theSameElementsAs callCachingHashEntriesA.map(e => (e.hashKey, e.hashValue)) - _ = getJoin - .callCachingSimpletonEntries + _ = getJoin.callCachingSimpletonEntries .map(e => (e.simpletonKey, e.simpletonValue.map(_.toRawString))) should contain theSameElementsAs callCachingSimpletonsA.map(e => (e.simpletonKey, e.simpletonValue.map(_.toRawString))) - _ = getJoin - .callCachingAggregationEntry + _ = getJoin.callCachingAggregationEntry .map(e => (e.baseAggregation, e.inputFilesAggregation)) shouldBe aggregation.map(e => (e.baseAggregation, e.inputFilesAggregation)) - _ = getJoin - .callCachingDetritusEntries + _ = getJoin.callCachingDetritusEntries .map(e => (e.detritusKey, e.detritusValue.map(_.toRawString))) should contain theSameElementsAs callCachingDetritusesA.map(e => (e.detritusKey, e.detritusValue.map(_.toRawString))) } yield ()).futureValue @@ -141,7 +142,7 @@ class CallCachingSlickDatabaseSpec } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } } diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala index 1d78da0e37b..5e3bff33671 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala @@ -6,7 +6,11 @@ import cats.syntax.validated._ import cromwell.backend._ import cromwell.core._ import cromwell.core.callcaching._ -import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{CompleteFileHashingResult, InitialHashingResult, 
NoFileHashesResult} +import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashingJobActor.{ + CompleteFileHashingResult, + InitialHashingResult, + NoFileHashesResult +} import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor.NextHit import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor._ import cromwell.services.metadata.MetadataService.PutMetadataAction @@ -21,24 +25,42 @@ import wom.graph.WomIdentifier import wom.values.WomValue import common.mock.MockSugar -class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BackendSpec - with MockSugar with TableDrivenPropertyChecks with Eventually { +class EngineJobHashingActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with BackendSpec + with MockSugar + with TableDrivenPropertyChecks + with Eventually { behavior of "EngineJobHashingActor" def templateJobDescriptor(inputs: Map[LocallyQualifiedName, WomValue] = Map.empty): BackendJobDescriptor = { - val task = WomMocks.mockTaskDefinition("hello").copy( - commandTemplateBuilder = Function.const(List(StringCommandPart("Do the stuff... now!!")).validNel) - ) + val task = WomMocks + .mockTaskDefinition("hello") + .copy( + commandTemplateBuilder = Function.const(List(StringCommandPart("Do the stuff... now!!")).validNel) + ) val call = WomMocks.mockTaskCall(WomIdentifier("hello", "workflow.hello")).copy(callable = task) val workflowDescriptor = mock[BackendWorkflowDescriptor] workflowDescriptor.id returns WorkflowId.randomId() - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, BackendJobDescriptorKey(call, None, 1), Map.empty, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + BackendJobDescriptorKey(call, None, 1), + Map.empty, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) jobDescriptor } val serviceRegistryActorProbe: TestProbe = TestProbe() - def makeEJHA(receiver: ActorRef, activity: CallCachingActivity, ccReaderProps: Props = Props.empty): TestActorRef[EngineJobHashingActor] = { + def makeEJHA(receiver: ActorRef, + activity: CallCachingActivity, + ccReaderProps: Props = Props.empty + ): TestActorRef[EngineJobHashingActor] = TestActorRef[EngineJobHashingActor]( EngineJobHashingActorTest.props( receiver, @@ -54,7 +76,6 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M fileHashBatchSize = 100 ) ) - } it should "record initial hashes" in { val receiver = TestProbe() @@ -106,7 +127,9 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M actorUnderTest ! initialResult actorUnderTest ! 
fileResult - receiver.expectMsg(CallCacheHashes(initialHashes, initialAggregatedHash, Option(FileHashes(fileHashes, fileAggregatedHash)))) + receiver.expectMsg( + CallCacheHashes(initialHashes, initialAggregatedHash, Option(FileHashes(fileHashes, fileAggregatedHash))) + ) } it should "forward CacheMiss to receiver" in { @@ -133,8 +156,8 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M val activity = CallCachingActivity(ReadAndWriteCache) val monitorProbe = TestProbe() val ccReadActorProps = Props(new Actor { - override def receive: Receive = { - case NextHit => monitorProbe.ref forward NextHit + override def receive: Receive = { case NextHit => + monitorProbe.ref forward NextHit } }) @@ -188,18 +211,22 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M backendName: String, activity: CallCachingActivity, callCachingEligible: CallCachingEligible, - fileHashBatchSize: Int): Props = Props(new EngineJobHashingActorTest( - receiver = receiver, - serviceRegistryActor = serviceRegistryActor, - jobDescriptor = jobDescriptor, - initializationData = initializationData, - fileHashingActorProps = fileHashingActorProps, - callCacheReadingJobActorProps = callCacheReadingJobActorProps, - runtimeAttributeDefinitions = runtimeAttributeDefinitions, - backendName = backendName, - activity = activity, - callCachingEligible = callCachingEligible, - fileHashBatchSize = fileHashBatchSize)) + fileHashBatchSize: Int + ): Props = Props( + new EngineJobHashingActorTest( + receiver = receiver, + serviceRegistryActor = serviceRegistryActor, + jobDescriptor = jobDescriptor, + initializationData = initializationData, + fileHashingActorProps = fileHashingActorProps, + callCacheReadingJobActorProps = callCacheReadingJobActorProps, + runtimeAttributeDefinitions = runtimeAttributeDefinitions, + backendName = backendName, + activity = activity, + callCachingEligible = callCachingEligible, + fileHashBatchSize = fileHashBatchSize + ) + ) } class EngineJobHashingActorTest(receiver: ActorRef, @@ -212,19 +239,21 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M backendName: String, activity: CallCachingActivity, callCachingEligible: CallCachingEligible, - fileHashBatchSize: Int) extends EngineJobHashingActor( - receiver = receiver, - serviceRegistryActor = serviceRegistryActor, - jobDescriptor = jobDescriptor, - initializationData = initializationData, - fileHashingActorProps = fileHashingActorProps, - callCacheReadingJobActorProps = callCacheReadingJobActorProps, - runtimeAttributeDefinitions = runtimeAttributeDefinitions, - backendNameForCallCachingPurposes = backendName, - activity = activity, - callCachingEligible = callCachingEligible, - callCachePathPrefixes = None, - fileHashBatchSize = fileHashBatchSize) { + fileHashBatchSize: Int + ) extends EngineJobHashingActor( + receiver = receiver, + serviceRegistryActor = serviceRegistryActor, + jobDescriptor = jobDescriptor, + initializationData = initializationData, + fileHashingActorProps = fileHashingActorProps, + callCacheReadingJobActorProps = callCacheReadingJobActorProps, + runtimeAttributeDefinitions = runtimeAttributeDefinitions, + backendNameForCallCachingPurposes = backendName, + activity = activity, + callCachingEligible = callCachingEligible, + callCachePathPrefixes = None, + fileHashBatchSize = fileHashBatchSize + ) { // override preStart to nothing to prevent the creation of the CCHJA. 
// This way it doesn't interfere with the tests and we can manually inject the messages we want
   override def preStart(): Unit = ()
diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala
index 7d0d40fd6f7..5b876319a8e 100644
--- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala
+++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala
@@ -19,7 +19,8 @@ class CallPreparationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc
 
   it should "disallow empty Strings being input as Files" in {
     val callKey = mock[CallKey]
-    val inputExpressionPointer: InputDefinitionPointer = Coproduct[InputDefinitionPointer](WomString("").asWomExpression: WomExpression)
+    val inputExpressionPointer: InputDefinitionPointer =
+      Coproduct[InputDefinitionPointer](WomString("").asWomExpression: WomExpression)
     val inputs: InputDefinitionMappings = List(
       (RequiredInputDefinition("inputVal", WomSingleFileType), inputExpressionPointer)
     )
diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala
index 3744ea9e3cd..23e44e32900 100644
--- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala
+++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala
@@ -4,10 +4,14 @@ import akka.testkit.{ImplicitSender, TestActorRef}
 import cats.syntax.validated._
 import cromwell.core.TestKitSuite
 import cromwell.core.callcaching.{DockerWithHash, FloatingDockerTagWithoutHash}
-import cromwell.docker.DockerInfoActor.{DockerInfoSuccessResponse, DockerInformation, DockerSize}
+import cromwell.docker.DockerInfoActor.{DockerInformation, DockerInfoSuccessResponse, DockerSize}
 import cromwell.docker.{DockerHashResult, DockerImageIdentifier, DockerImageIdentifierWithoutHash, DockerInfoRequest}
 import cromwell.engine.workflow.WorkflowDockerLookupActor.WorkflowDockerLookupFailure
-import cromwell.engine.workflow.lifecycle.execution.job.preparation.CallPreparation.{BackendJobPreparationSucceeded, CallPreparationFailed, Start}
+import cromwell.engine.workflow.lifecycle.execution.job.preparation.CallPreparation.{
+  BackendJobPreparationSucceeded,
+  CallPreparationFailed,
+  Start
+}
 import cromwell.engine.workflow.lifecycle.execution.stores.ValueStore
 import cromwell.services.keyvalue.KeyValueServiceActor.{KvGet, KvKeyLookupFailed, KvPair}
 import org.scalatest.BeforeAndAfter
@@ -24,7 +28,12 @@ import scala.language.postfixOps
 import scala.util.control.NoStackTrace
 
 class JobPreparationActorSpec
-  extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender with BeforeAndAfter with MockSugar {
+    extends TestKitSuite
+    with AnyFlatSpecLike
+    with Matchers
+    with ImplicitSender
+    with BeforeAndAfter
+    with MockSugar {
 
   behavior of "JobPreparationActor"
 
@@ -41,8 +50,8 @@
     val error = "Failed to prepare inputs/attributes - part of test flow"
     val actor = TestActorRef(helper.buildTestJobPreparationActor(null, null, null, error.invalidNel, List.empty), self)
     actor ! Start(ValueStore.empty)
-    expectMsgPF(1.second) {
-      case CallPreparationFailed(_, ex) => ex.getMessage shouldBe "Call input and runtime attributes evaluation failed for JobPreparationSpec_call:\nFailed to prepare inputs/attributes - part of test flow"
+    expectMsgPF(1.second) { case CallPreparationFailed(_, ex) =>
+      ex.getMessage shouldBe "Call input and runtime attributes evaluation failed for JobPreparationSpec_call:\nFailed to prepare inputs/attributes - part of test flow"
     }
     helper.workflowDockerLookupActor.expectNoMessage(100 millis)
   }
@@ -50,11 +59,11 @@
   it should "prepare successfully a job without docker attribute" in {
     val attributes = Map.empty[LocallyQualifiedName, WomValue]
     val inputsAndAttributes = (inputs, attributes).validNel
-    val actor = TestActorRef(helper.buildTestJobPreparationActor(null, null, null, inputsAndAttributes, List.empty), self)
+    val actor =
+      TestActorRef(helper.buildTestJobPreparationActor(null, null, null, inputsAndAttributes, List.empty), self)
     actor ! Start(ValueStore.empty)
-    expectMsgPF(5 seconds) {
-      case success: BackendJobPreparationSucceeded =>
-        success.jobDescriptor.maybeCallCachingEligible.dockerHash shouldBe None
+    expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded =>
+      success.jobDescriptor.maybeCallCachingEligible.dockerHash shouldBe None
     }
     helper.workflowDockerLookupActor.expectNoMessage(1 second)
   }
@@ -65,7 +74,8 @@
       "docker" -> WomString(dockerValue)
     )
     val inputsAndAttributes = (inputs, attributes).validNel
-    val actor = TestActorRef(helper.buildTestJobPreparationActor(null, null, null, inputsAndAttributes, List.empty), self)
+    val actor =
+      TestActorRef(helper.buildTestJobPreparationActor(null, null, null, inputsAndAttributes, List.empty), self)
     actor ! Start(ValueStore.empty)
     helper.workflowDockerLookupActor.expectMsgClass(classOf[DockerInfoRequest])
    actor ! 
DockerInfoSuccessResponse( @@ -75,17 +85,18 @@ class JobPreparationActorSpec ), null ) - expectMsgPF(5 seconds) { - case success: BackendJobPreparationSucceeded => - success.jobDescriptor.runtimeAttributes("docker").valueString shouldBe dockerValue - success.jobDescriptor.maybeCallCachingEligible shouldBe DockerWithHash("ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950") - success.jobDescriptor.dockerSize shouldBe Option(DockerSize(100)) + expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => + success.jobDescriptor.runtimeAttributes("docker").valueString shouldBe dockerValue + success.jobDescriptor.maybeCallCachingEligible shouldBe DockerWithHash( + "ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950" + ) + success.jobDescriptor.dockerSize shouldBe Option(DockerSize(100)) } } it should "lookup any requested key/value prefetches after (not) performing a docker hash lookup" in { val dockerValue = "ubuntu:latest" - val attributes = Map ( + val attributes = Map( "docker" -> WomString(dockerValue) ) val hashResult = DockerHashResult("sha256", "71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950") @@ -96,73 +107,88 @@ class JobPreparationActorSpec val prefetchedVal2 = KvKeyLookupFailed(KvGet(helper.scopedKeyMaker(prefetchedKey2))) val prefetchedValues = Map(prefetchedKey1 -> prefetchedVal1, prefetchedKey2 -> prefetchedVal2) var keysToPrefetch = List(prefetchedKey1, prefetchedKey2) - val actor = TestActorRef(helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List(prefetchedKey1, prefetchedKey2)), self) + val actor = TestActorRef(helper.buildTestJobPreparationActor(1 minute, + 1 minutes, + List.empty, + inputsAndAttributes, + List(prefetchedKey1, prefetchedKey2) + ), + self + ) actor ! 
Start(ValueStore.empty) val req = helper.workflowDockerLookupActor.expectMsgClass(classOf[DockerInfoRequest]) helper.workflowDockerLookupActor.reply(DockerInfoSuccessResponse(DockerInformation(hashResult, None), req)) - def respondFromKv(): Unit = { + def respondFromKv(): Unit = helper.serviceRegistryProbe.expectMsgPF(max = 100 milliseconds) { case KvGet(k) if keysToPrefetch.contains(k.key) => actor.tell(msg = prefetchedValues(k.key), sender = helper.serviceRegistryProbe.ref) keysToPrefetch = keysToPrefetch diff List(k.key) } - } respondFromKv() helper.workflowDockerLookupActor.expectNoMessage(max = 100 milliseconds) respondFromKv() - expectMsgPF(5 seconds) { - case success: BackendJobPreparationSucceeded => - success.jobDescriptor.prefetchedKvStoreEntries should be(Map(prefetchedKey1 -> prefetchedVal1, prefetchedKey2 -> prefetchedVal2)) + expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => + success.jobDescriptor.prefetchedKvStoreEntries should be( + Map(prefetchedKey1 -> prefetchedVal1, prefetchedKey2 -> prefetchedVal2) + ) } } it should "leave the docker attribute as is and provide a DockerWithHash value" in { val dockerValue = "ubuntu:latest" - val attributes = Map ( + val attributes = Map( "docker" -> WomString(dockerValue) ) val hashResult = DockerHashResult("sha256", "71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950") val inputsAndAttributes = (inputs, attributes).validNel val finalValue = "ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950" - val actor = TestActorRef(helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List.empty), self) + val actor = TestActorRef( + helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List.empty), + self + ) actor ! Start(ValueStore.empty) helper.workflowDockerLookupActor.expectMsgClass(classOf[DockerInfoRequest]) helper.workflowDockerLookupActor.reply( DockerInfoSuccessResponse(DockerInformation(hashResult, None), mock[DockerInfoRequest]) ) - expectMsgPF(5 seconds) { - case success: BackendJobPreparationSucceeded => - success.jobDescriptor.runtimeAttributes("docker").valueString shouldBe dockerValue - success.jobDescriptor.maybeCallCachingEligible shouldBe DockerWithHash(finalValue) + expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => + success.jobDescriptor.runtimeAttributes("docker").valueString shouldBe dockerValue + success.jobDescriptor.maybeCallCachingEligible shouldBe DockerWithHash(finalValue) } } it should "not provide a DockerWithHash value if it can't get the docker hash" in { val dockerValue = "ubuntu:latest" - val request = DockerInfoRequest(DockerImageIdentifier.fromString(dockerValue).get.asInstanceOf[DockerImageIdentifierWithoutHash]) - val attributes = Map ( + val request = DockerInfoRequest( + DockerImageIdentifier.fromString(dockerValue).get.asInstanceOf[DockerImageIdentifierWithoutHash] + ) + val attributes = Map( "docker" -> WomString(dockerValue) ) val inputsAndAttributes = (inputs, attributes).validNel - val actor = TestActorRef(helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List.empty), self) + val actor = TestActorRef( + helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List.empty), + self + ) actor ! 
Start(ValueStore.empty) helper.workflowDockerLookupActor.expectMsgClass(classOf[DockerInfoRequest]) - helper.workflowDockerLookupActor.reply(WorkflowDockerLookupFailure( - new Exception("Failed to get docker hash - part of test flow") with NoStackTrace, - request - )) - expectMsgPF(5 seconds) { - case success: BackendJobPreparationSucceeded => - success.jobDescriptor.runtimeAttributes("docker").valueString shouldBe dockerValue - success.jobDescriptor.maybeCallCachingEligible shouldBe FloatingDockerTagWithoutHash("ubuntu:latest") + helper.workflowDockerLookupActor.reply( + WorkflowDockerLookupFailure( + new Exception("Failed to get docker hash - part of test flow") with NoStackTrace, + request + ) + ) + expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => + success.jobDescriptor.runtimeAttributes("docker").valueString shouldBe dockerValue + success.jobDescriptor.maybeCallCachingEligible shouldBe FloatingDockerTagWithoutHash("ubuntu:latest") } } it should "lookup MemoryMultiplier key/value if available and accordingly update runtime attributes" in { - val attributes = Map ( + val attributes = Map( "memory" -> WomString("1.1 GB") ) val inputsAndAttributes = (inputs, attributes).validNel @@ -170,7 +196,10 @@ class JobPreparationActorSpec val prefetchedVal = KvPair(helper.scopedKeyMaker(prefetchedKey), "1.1") val prefetchedValues = Map(prefetchedKey -> prefetchedVal) var keysToPrefetch = List(prefetchedKey) - val actor = TestActorRef(helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List(prefetchedKey)), self) + val actor = TestActorRef( + helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List(prefetchedKey)), + self + ) actor ! Start(ValueStore.empty) helper.serviceRegistryProbe.expectMsgPF(max = 100 milliseconds) { @@ -179,10 +208,11 @@ class JobPreparationActorSpec keysToPrefetch = keysToPrefetch diff List(k.key) } - expectMsgPF(5 seconds) { - case success: BackendJobPreparationSucceeded => - success.jobDescriptor.prefetchedKvStoreEntries should be(Map(prefetchedKey -> prefetchedVal)) - success.jobDescriptor.runtimeAttributes(RuntimeAttributesKeys.MemoryKey) shouldBe WomString("1.2100000000000002 GB") + expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => + success.jobDescriptor.prefetchedKvStoreEntries should be(Map(prefetchedKey -> prefetchedVal)) + success.jobDescriptor.runtimeAttributes(RuntimeAttributesKeys.MemoryKey) shouldBe WomString( + "1.2100000000000002 GB" + ) } } @@ -190,7 +220,7 @@ class JobPreparationActorSpec val prefetchedKey = "MemoryMultiplier" val retryFactor = 1.1 val taskMemory = 1.0 - val attributes = Map ("memory" -> WomString(s"$taskMemory GB")) + val attributes = Map("memory" -> WomString(s"$taskMemory GB")) val inputsAndAttributes = (inputs, attributes).validNel var previousMultiplier = 1.0 @@ -207,7 +237,10 @@ class JobPreparationActorSpec val prefetchedValues = Map(prefetchedKey -> prefetchedVal) var keysToPrefetch = List(prefetchedKey) - val actor = TestActorRef(helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List(prefetchedKey)), self) + val actor = TestActorRef( + helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List(prefetchedKey)), + self + ) actor ! 
Start(ValueStore.empty) helper.serviceRegistryProbe.expectMsgPF(max = 100 milliseconds) { @@ -216,10 +249,11 @@ class JobPreparationActorSpec keysToPrefetch = keysToPrefetch diff List(k.key) } - expectMsgPF(5 seconds) { - case success: BackendJobPreparationSucceeded => - success.jobDescriptor.prefetchedKvStoreEntries should be(Map(prefetchedKey -> prefetchedVal)) - success.jobDescriptor.runtimeAttributes(RuntimeAttributesKeys.MemoryKey) shouldBe WomString(s"${taskMemory * nextMultiplier} GB") + expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => + success.jobDescriptor.prefetchedKvStoreEntries should be(Map(prefetchedKey -> prefetchedVal)) + success.jobDescriptor.runtimeAttributes(RuntimeAttributesKeys.MemoryKey) shouldBe WomString( + s"${taskMemory * nextMultiplier} GB" + ) } } } diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala index d4e62c262f6..58498d5ca4c 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala @@ -36,48 +36,56 @@ class JobPreparationTestHelper(implicit val system: ActorSystem) extends MockSug val ioActor: TestProbe = TestProbe() val workflowDockerLookupActor: TestProbe = TestProbe() - val scopedKeyMaker: ScopedKeyMaker = key => ScopedKey(workflowId, KvJobKey("correct.horse.battery.staple", None, 1), key) + val scopedKeyMaker: ScopedKeyMaker = key => + ScopedKey(workflowId, KvJobKey("correct.horse.battery.staple", None, 1), key) - def buildTestJobPreparationActor(backpressureTimeout: FiniteDuration, - noResponseTimeout: FiniteDuration, - dockerHashCredentials: List[Any], - inputsAndAttributes: ErrorOr[(WomEvaluatedCallInputs, Map[LocallyQualifiedName, WomValue])], - kvStoreKeysForPrefetch: List[String], - jobKey: BackendJobDescriptorKey = mockJobKey): Props = { + def buildTestJobPreparationActor( + backpressureTimeout: FiniteDuration, + noResponseTimeout: FiniteDuration, + dockerHashCredentials: List[Any], + inputsAndAttributes: ErrorOr[(WomEvaluatedCallInputs, Map[LocallyQualifiedName, WomValue])], + kvStoreKeysForPrefetch: List[String], + jobKey: BackendJobDescriptorKey = mockJobKey + ): Props = + Props( + new TestJobPreparationActor( + kvStoreKeysForPrefetch = kvStoreKeysForPrefetch, + dockerHashCredentialsInput = dockerHashCredentials, + backpressureWaitTimeInput = backpressureTimeout, + dockerNoResponseTimeoutInput = noResponseTimeout, + inputsAndAttributes = inputsAndAttributes, + workflowDescriptor = workflowDescriptor, + jobKey = jobKey, + workflowDockerLookupActor = workflowDockerLookupActor.ref, + serviceRegistryActor = serviceRegistryProbe.ref, + ioActor = ioActor.ref, + scopedKeyMaker + ) + ) +} - Props(new TestJobPreparationActor( - kvStoreKeysForPrefetch = kvStoreKeysForPrefetch, - dockerHashCredentialsInput = dockerHashCredentials, - backpressureWaitTimeInput = backpressureTimeout, - dockerNoResponseTimeoutInput = noResponseTimeout, - inputsAndAttributes = inputsAndAttributes, +private[preparation] class TestJobPreparationActor( + kvStoreKeysForPrefetch: List[String], + dockerHashCredentialsInput: List[Any], + backpressureWaitTimeInput: FiniteDuration, + dockerNoResponseTimeoutInput: FiniteDuration, + inputsAndAttributes: ErrorOr[(WomEvaluatedCallInputs, 
Map[LocallyQualifiedName, WomValue])], + workflowDescriptor: EngineWorkflowDescriptor, + jobKey: BackendJobDescriptorKey, + workflowDockerLookupActor: ActorRef, + serviceRegistryActor: ActorRef, + ioActor: ActorRef, + scopedKeyMaker: ScopedKeyMaker +) extends JobPreparationActor( workflowDescriptor = workflowDescriptor, jobKey = jobKey, - workflowDockerLookupActor = workflowDockerLookupActor.ref, - serviceRegistryActor = serviceRegistryProbe.ref, - ioActor = ioActor.ref, - scopedKeyMaker)) - } -} - -private[preparation] class TestJobPreparationActor(kvStoreKeysForPrefetch: List[String], - dockerHashCredentialsInput: List[Any], - backpressureWaitTimeInput: FiniteDuration, - dockerNoResponseTimeoutInput: FiniteDuration, - inputsAndAttributes: ErrorOr[(WomEvaluatedCallInputs, Map[LocallyQualifiedName, WomValue])], - workflowDescriptor: EngineWorkflowDescriptor, - jobKey: BackendJobDescriptorKey, - workflowDockerLookupActor: ActorRef, - serviceRegistryActor: ActorRef, - ioActor: ActorRef, - scopedKeyMaker: ScopedKeyMaker) extends JobPreparationActor(workflowDescriptor = workflowDescriptor, - jobKey = jobKey, - factory = null, - workflowDockerLookupActor = workflowDockerLookupActor, - initializationData = None, - serviceRegistryActor = serviceRegistryActor, - ioActor = ioActor, - backendSingletonActor = None) { + factory = null, + workflowDockerLookupActor = workflowDockerLookupActor, + initializationData = None, + serviceRegistryActor = serviceRegistryActor, + ioActor = ioActor, + backendSingletonActor = None + ) { override private[preparation] lazy val kvStoreKeysToPrefetch = kvStoreKeysForPrefetch @@ -93,7 +101,8 @@ private[preparation] class TestJobPreparationActor(kvStoreKeysForPrefetch: List[ initializationData: Option[BackendInitializationData], serviceRegistryActor: ActorRef, ioActor: ActorRef, - backendSingletonActor: Option[ActorRef]) = Props.empty + backendSingletonActor: Option[ActorRef] + ) = Props.empty } object JobPreparationTestHelper { diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStoreSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStoreSpec.scala index 20b1ae40c0a..a1a741c27ff 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStoreSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStoreSpec.scala @@ -13,20 +13,23 @@ import scala.util.Random class ExecutionStoreSpec extends AnyFlatSpec with Matchers with BeforeAndAfter { - it should "allow 10000 unconnected call keys to be enqueued and started in small batches" in { - def jobKeys: Map[JobKey, ExecutionStatus] = (0.until(10000).toList map { - i => BackendJobDescriptorKey(noConnectionsGraphNode, Option(i), 1) -> NotStarted }) - .toMap + def jobKeys: Map[JobKey, ExecutionStatus] = (0.until(10000).toList map { i => + BackendJobDescriptorKey(noConnectionsGraphNode, Option(i), 1) -> NotStarted + }).toMap var store: ExecutionStore = ActiveExecutionStore(jobKeys, needsUpdate = true) var iterationNumber = 0 while (store.needsUpdate) { // Assert that we're increasing the queue size by 1000 each time - store.store.getOrElse(NotStarted, List.empty).size should be(10000 - iterationNumber * ExecutionStore.MaxJobsToStartPerTick) - store.store.getOrElse(QueuedInCromwell, List.empty).size should be(iterationNumber * ExecutionStore.MaxJobsToStartPerTick) + store.store.getOrElse(NotStarted, List.empty).size should be( + 10000 - iterationNumber * 
ExecutionStore.MaxJobsToStartPerTick + ) + store.store.getOrElse(QueuedInCromwell, List.empty).size should be( + iterationNumber * ExecutionStore.MaxJobsToStartPerTick + ) val update = store.update store = update.updatedStore.updateKeys(update.runnableKeys.map(_ -> QueuedInCromwell).toMap) iterationNumber = iterationNumber + 1 @@ -36,7 +39,7 @@ class ExecutionStoreSpec extends AnyFlatSpec with Matchers with BeforeAndAfter { var previouslyRunning = store.store.getOrElse(Running, List.empty).size previouslyRunning should be(0) - while(store.store.getOrElse(Running, List.empty).size < 10000) { + while (store.store.getOrElse(Running, List.empty).size < 10000) { val toStartRunning = store.store(QueuedInCromwell).take(Random.nextInt(1000)) store = store.updateKeys(toStartRunning.map(j => j -> Running).toMap) val nowRunning = store.store.getOrElse(Running, List.empty).size @@ -60,7 +63,7 @@ object ExecutionStoreSpec { val noConnectionsGraphNode: CommandCallNode = CommandCallNode( identifier = WomIdentifier("mock_task", "mock_wf.mock_task"), callable = null, - inputPorts = Set.empty[GraphNodePort.InputPort], + inputPorts = Set.empty[GraphNodePort.InputPort], inputDefinitionMappings = List.empty, nonInputBasedPrerequisites = Set.empty[GraphNode], outputIdentifierCompoundingFunction = (wi, _) => wi, diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActorSpec.scala index 554ad52abd0..f2b2a615e58 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowLogsActorSpec.scala @@ -14,8 +14,7 @@ import scala.concurrent.duration._ import scala.util.control.NoStackTrace import scala.util.{Failure, Try} -class CopyWorkflowLogsActorSpec - extends TestKitSuite with AnyFlatSpecLike with Matchers { +class CopyWorkflowLogsActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers { behavior of "CopyWorkflowLogsActor" @@ -25,9 +24,8 @@ class CopyWorkflowLogsActorSpec private val deathWatch = TestProbe("deathWatch") private val tempDir = DefaultPathBuilder.createTempDirectory("tempDir.") - override protected def beforeAll(): Unit = { + override protected def beforeAll(): Unit = super.beforeAll() - } override protected def afterAll(): Unit = { super.afterAll() @@ -43,7 +41,7 @@ class CopyWorkflowLogsActorSpec ioActor = ioActor.ref, workflowLogConfigurationOption = WorkflowLogger.workflowLogConfiguration, copyCommandBuilder = DefaultIoCommandBuilder, - deleteCommandBuilder = DefaultIoCommandBuilder, + deleteCommandBuilder = DefaultIoCommandBuilder ) val copyWorkflowLogsActor = system.actorOf(props, "testCopyWorkflowLogsActor") @@ -60,7 +58,7 @@ class CopyWorkflowLogsActorSpec copyWorkflowLogsActor ! 
(( workflowId, - IoFailure(copyCommand, new Exception("everything's fine, I am an expected copy fail") with NoStackTrace), + IoFailure(copyCommand, new Exception("everything's fine, I am an expected copy fail") with NoStackTrace) )) // There should now be a delete command sent to the ioActor @@ -83,8 +81,8 @@ class CopyWorkflowLogsActorSpec val workflowId = WorkflowId.randomId() val destinationPath = DefaultPathBuilder.createTempFile(s"test_file_$workflowId.", ".file", Option(tempDir)) val partialIoCommandBuilder = new PartialIoCommandBuilder { - override def copyCommand: PartialFunction[(Path, Path), Try[IoCopyCommand]] = { - case _ => Failure(new Exception("everything's fine, I am an expected copy fail") with NoStackTrace) + override def copyCommand: PartialFunction[(Path, Path), Try[IoCopyCommand]] = { case _ => + Failure(new Exception("everything's fine, I am an expected copy fail") with NoStackTrace) } } val ioCommandBuilder = new IoCommandBuilder(List(partialIoCommandBuilder)) @@ -93,7 +91,7 @@ class CopyWorkflowLogsActorSpec serviceRegistryActor = serviceRegistryActor.ref, ioActor = ioActor.ref, workflowLogConfigurationOption = WorkflowLogger.workflowLogConfiguration, - copyCommandBuilder = ioCommandBuilder, + copyCommandBuilder = ioCommandBuilder ) val copyWorkflowLogsActor = system.actorOf(props, "testCopyWorkflowLogsActorFailCopy") @@ -105,7 +103,9 @@ class CopyWorkflowLogsActorSpec val deleteCommand = DefaultIoDeleteCommand(workflowLogPath, swallowIOExceptions = true) ioActor.expectMsg(msgWait, deleteCommand) - copyWorkflowLogsActor ! IoFailure(deleteCommand, new Exception("everything's fine, I am an expected delete fail") with NoStackTrace) + copyWorkflowLogsActor ! IoFailure(deleteCommand, + new Exception("everything's fine, I am an expected delete fail") with NoStackTrace + ) // Send a shutdown after the delete deathWatch.watch(copyWorkflowLogsActor) @@ -119,8 +119,8 @@ class CopyWorkflowLogsActorSpec val workflowId = WorkflowId.randomId() val destinationPath = DefaultPathBuilder.createTempFile(s"test_file_$workflowId.", ".file", Option(tempDir)) val partialIoCommandBuilder = new PartialIoCommandBuilder { - override def copyCommand: PartialFunction[(Path, Path), Try[IoCopyCommand]] = { - case _ => Failure(new Exception("everything's fine, I am an expected copy fail") with NoStackTrace) + override def copyCommand: PartialFunction[(Path, Path), Try[IoCopyCommand]] = { case _ => + Failure(new Exception("everything's fine, I am an expected copy fail") with NoStackTrace) } } val ioCommandBuilder = new IoCommandBuilder(List(partialIoCommandBuilder)) @@ -128,7 +128,7 @@ class CopyWorkflowLogsActorSpec serviceRegistryActor = serviceRegistryActor.ref, ioActor = ioActor.ref, workflowLogConfigurationOption = WorkflowLogger.workflowLogConfiguration, - copyCommandBuilder = ioCommandBuilder, + copyCommandBuilder = ioCommandBuilder ) val copyWorkflowLogsActor = system.actorOf(props, "testCopyWorkflowLogsActorFailCopyShutdown") @@ -146,7 +146,10 @@ class CopyWorkflowLogsActorSpec // Test that the actor is still alive and receiving messages even after a shutdown was requested EventFilter.error(pattern = "Failed to delete workflow logs", occurrences = 1).intercept { - copyWorkflowLogsActor ! IoFailure(deleteCommand, new Exception("everything's fine, I am an expected delete fail") with NoStackTrace) + copyWorkflowLogsActor ! 
IoFailure(deleteCommand, + new Exception("everything's fine, I am an expected delete fail") + with NoStackTrace + ) } // Then the actor should shutdown @@ -157,12 +160,12 @@ class CopyWorkflowLogsActorSpec val workflowId = WorkflowId.randomId() val destinationPath = DefaultPathBuilder.createTempFile(s"test_file_$workflowId.", ".file", Option(tempDir)) val partialIoCommandBuilder = new PartialIoCommandBuilder { - override def copyCommand: PartialFunction[(Path, Path), Try[IoCopyCommand]] = { - case _ => Failure(new Exception("everything's fine, I am an expected copy fail") with NoStackTrace) + override def copyCommand: PartialFunction[(Path, Path), Try[IoCopyCommand]] = { case _ => + Failure(new Exception("everything's fine, I am an expected copy fail") with NoStackTrace) } - override def deleteCommand: PartialFunction[(Path, Boolean), Try[IoDeleteCommand]] = { - case _ => Failure(new Exception("everything's fine, I am an expected delete fail") with NoStackTrace) + override def deleteCommand: PartialFunction[(Path, Boolean), Try[IoDeleteCommand]] = { case _ => + Failure(new Exception("everything's fine, I am an expected delete fail") with NoStackTrace) } } val ioCommandBuilder = new IoCommandBuilder(List(partialIoCommandBuilder)) @@ -172,7 +175,7 @@ class CopyWorkflowLogsActorSpec ioActor = ioActor.ref, workflowLogConfigurationOption = WorkflowLogger.workflowLogConfiguration, copyCommandBuilder = ioCommandBuilder, - deleteCommandBuilder = ioCommandBuilder, + deleteCommandBuilder = ioCommandBuilder ) val copyWorkflowLogsActor = system.actorOf(props, "testCopyWorkflowLogsActorFailDelete") diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala index 383e75458ff..976a8994c9e 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowCallbackActorSpec.scala @@ -25,22 +25,25 @@ import java.time.Instant import scala.concurrent.Future import scala.concurrent.duration._ -class WorkflowCallbackActorSpec - extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar { +class WorkflowCallbackActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar { behavior of "WorkflowCallbackActor" - private implicit val ec = system.dispatcher + implicit private val ec = system.dispatcher private val msgWait = 10.second.dilated private val awaitAlmostNothing = 1.second private val serviceRegistryActor = TestProbe("testServiceRegistryActor") private val deathWatch = TestProbe("deathWatch") private val mockUri = new URI("http://example.com") - private val basicConfig = WorkflowCallbackConfig.empty.copy(enabled = true).copy(retryBackoff = SimpleExponentialBackoff(100.millis, 200.millis, 1.1)) - private val basicOutputs = CallOutputs(Map( - GraphNodeOutputPort(WomIdentifier("foo", "wf.foo"), WomStringType, null) -> WomString("bar") - )) + private val basicConfig = WorkflowCallbackConfig.empty + .copy(enabled = true) + .copy(retryBackoff = SimpleExponentialBackoff(100.millis, 200.millis, 1.1)) + private val basicOutputs = CallOutputs( + Map( + GraphNodeOutputPort(WomIdentifier("foo", "wf.foo"), WomStringType, null) -> WomString("bar") + ) + ) private val httpSuccess = Future.successful(HttpResponse.apply(StatusCodes.OK)) private val httpFailure = 
Future.successful(HttpResponse.apply(StatusCodes.GatewayTimeout)) @@ -85,7 +88,11 @@ class WorkflowCallbackActorSpec // Do the thing val cmd = PerformCallbackCommand( - workflowId = workflowId, uri = None, terminalState = WorkflowSucceeded, workflowOutputs = basicOutputs, List.empty + workflowId = workflowId, + uri = None, + terminalState = WorkflowSucceeded, + workflowOutputs = basicOutputs, + List.empty ) workflowCallbackActor ! cmd @@ -97,7 +104,7 @@ class WorkflowCallbackActorSpec uriEvent.key shouldBe expectedUriMetadata.key uriEvent.value shouldBe expectedUriMetadata.value timestampEvent.key shouldBe expectedTimestampMetadata.key - // Not checking timestamp value because it won't match + // Not checking timestamp value because it won't match case _ => } @@ -136,7 +143,11 @@ class WorkflowCallbackActorSpec // Do the thing val cmd = PerformCallbackCommand( - workflowId = workflowId, uri = None, terminalState = WorkflowSucceeded, workflowOutputs = basicOutputs, List.empty + workflowId = workflowId, + uri = None, + terminalState = WorkflowSucceeded, + workflowOutputs = basicOutputs, + List.empty ) workflowCallbackActor ! cmd @@ -183,7 +194,7 @@ class WorkflowCallbackActorSpec serviceRegistryActor.ref, basicConfig.copy( retryBackoff = SimpleExponentialBackoff(500.millis, 1.minute, 1.1), - maxRetries = 5, + maxRetries = 5 ), httpClient = mockHttpClient ) @@ -234,7 +245,11 @@ class WorkflowCallbackActorSpec // Do the thing val cmd = PerformCallbackCommand( - workflowId = workflowId, uri = None, terminalState = WorkflowSucceeded, workflowOutputs = basicOutputs, List.empty + workflowId = workflowId, + uri = None, + terminalState = WorkflowSucceeded, + workflowOutputs = basicOutputs, + List.empty ) workflowCallbackActor ! cmd diff --git a/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala b/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala index be7b0d741a3..5f8f846d7d4 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala @@ -9,9 +9,7 @@ object DeclarationMock { } trait DeclarationMock extends MockSugar { - def mockDeclaration(name: String, - womType: WomType, - expression: WdlExpression): Declaration = { + def mockDeclaration(name: String, womType: WomType, expression: WdlExpression): Declaration = { val declaration = mock[Declaration] declaration.unqualifiedName returns name declaration.expression returns Option(expression) diff --git a/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala b/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala index 7ce565a4b6c..77aaa449c3b 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala @@ -12,15 +12,15 @@ trait TaskMock extends MockSugar { runtimeAttributes: WdlRuntimeAttributes = new WdlRuntimeAttributes(Map.empty), commandTemplateString: String = "!!shazam!!", outputs: Seq[DeclarationMockType] = Seq.empty - ): WdlTask = { + ): WdlTask = { val task = mock[WdlTask] task.declarations returns declarations task.runtimeAttributes returns runtimeAttributes task.commandTemplateString returns commandTemplateString task.name returns name task.unqualifiedName returns name - task.outputs returns (outputs map { - case (outputName, womType, expression) => TaskOutput(outputName, womType, expression, mock[Ast], Option(task)) + task.outputs returns (outputs map { case 
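// Each mocked (name, type, expression) tuple is expanded into a TaskOutput whose
// parent is the mock task itself.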
(outputName, womType, expression) => + TaskOutput(outputName, womType, expression, mock[Ast], Option(task)) }) task } diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala index adaadb0fef7..ca2409b4931 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala @@ -18,8 +18,14 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ import scala.util.Random -class JobTokenDispenserActorSpec extends TestKitSuite - with ImplicitSender with AnyFlatSpecLike with Matchers with BeforeAndAfter with BeforeAndAfterAll with Eventually { +class JobTokenDispenserActorSpec + extends TestKitSuite + with ImplicitSender + with AnyFlatSpecLike + with Matchers + with BeforeAndAfter + with BeforeAndAfterAll + with Eventually { val MaxWaitTime: FiniteDuration = 10.seconds implicit val pc: PatienceConfig = PatienceConfig(MaxWaitTime) @@ -30,43 +36,49 @@ class JobTokenDispenserActorSpec extends TestKitSuite val hogGroupB: HogGroup = HogGroup("hogGroupB") private def getActorRefUnderTest(serviceRegistryActorName: String, - jobExecutionTokenDispenserActorName: String, - ): TestActorRef[JobTokenDispenserActor] = { + jobExecutionTokenDispenserActorName: String + ): TestActorRef[JobTokenDispenserActor] = TestActorRef( - factory = - new JobTokenDispenserActor( - serviceRegistryActor = TestProbe(serviceRegistryActorName).ref, - dispensingRate = Rate(10, 100.millis), - logInterval = None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" - ), - name = jobExecutionTokenDispenserActorName, + factory = new JobTokenDispenserActor( + serviceRegistryActor = TestProbe(serviceRegistryActorName).ref, + dispensingRate = Rate(10, 100.millis), + logInterval = None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" + ), + name = jobExecutionTokenDispenserActorName ) - } it should "dispense an infinite token correctly" in { val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-dispense-infinite", - jobExecutionTokenDispenserActorName = "dispense-infinite", + jobExecutionTokenDispenserActorName = "dispense-infinite" ) actorRefUnderTest ! JobTokenRequest(hogGroupA, TestInfiniteTokenType) expectMsg(max = MaxWaitTime, JobTokenDispensed) actorRefUnderTest.underlyingActor.tokenAssignments.contains(self) shouldBe true - actorRefUnderTest.underlyingActor.tokenAssignments(self).tokenLease.get().jobTokenType shouldBe TestInfiniteTokenType + actorRefUnderTest.underlyingActor + .tokenAssignments(self) + .tokenLease + .get() + .jobTokenType shouldBe TestInfiniteTokenType } it should "accept return of an infinite token correctly" in { val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-accept-return", - jobExecutionTokenDispenserActorName = "accept-return", + jobExecutionTokenDispenserActorName = "accept-return" ) actorRefUnderTest ! 
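// First grab an infinite token (maxPoolSize = None) so there is a lease to hand back below.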
JobTokenRequest(hogGroupA, TestInfiniteTokenType) expectMsg(max = MaxWaitTime, JobTokenDispensed) actorRefUnderTest.underlyingActor.tokenAssignments.contains(self) shouldBe true - actorRefUnderTest.underlyingActor.tokenAssignments(self).tokenLease.get().jobTokenType shouldBe TestInfiniteTokenType + actorRefUnderTest.underlyingActor + .tokenAssignments(self) + .tokenLease + .get() + .jobTokenType shouldBe TestInfiniteTokenType actorRefUnderTest ! JobTokenReturn actorRefUnderTest.underlyingActor.tokenAssignments.contains(self) shouldBe false } @@ -75,10 +87,12 @@ class JobTokenDispenserActorSpec extends TestKitSuite val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-dispense-indefinitely", - jobExecutionTokenDispenserActorName = "dispense-indefinitely", + jobExecutionTokenDispenserActorName = "dispense-indefinitely" ) val senders = (1 to 20).map(index => TestProbe(s"sender-dispense-indefinitely-$index")) - senders.foreach(sender => actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, TestInfiniteTokenType), sender = sender.ref)) + senders.foreach(sender => + actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, TestInfiniteTokenType), sender = sender.ref) + ) senders.foreach(_.expectMsg(max = MaxWaitTime, JobTokenDispensed)) actorRefUnderTest.underlyingActor.tokenAssignments.size shouldBe 20 } @@ -87,19 +101,20 @@ class JobTokenDispenserActorSpec extends TestKitSuite // Override with a slower distribution rate for this one test: val actorRefUnderTest = TestActorRef( - factory = - new JobTokenDispenserActor( - serviceRegistryActor = TestProbe("serviceRegistryActor-dispense-correct-amount").ref, - dispensingRate = Rate(10, 4.seconds), - logInterval = None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" - ), - name = "dispense-correct-amount", + factory = new JobTokenDispenserActor( + serviceRegistryActor = TestProbe("serviceRegistryActor-dispense-correct-amount").ref, + dispensingRate = Rate(10, 4.seconds), + logInterval = None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" + ), + name = "dispense-correct-amount" ) val senders = (1 to 20).map(index => TestProbe(s"sender-dispense-correct-amount-$index")) - senders.foreach(sender => actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, TestInfiniteTokenType), sender = sender.ref)) + senders.foreach(sender => + actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, TestInfiniteTokenType), sender = sender.ref) + ) // The first 10 should get their token senders.take(10).foreach(_.expectMsg(max = MaxWaitTime, JobTokenDispensed)) // Couldn't figure out a cleaner way to "verify that none of this probes gets a message in the next X seconds" @@ -113,7 +128,7 @@ class JobTokenDispenserActorSpec extends TestKitSuite val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-dispense-limited", - jobExecutionTokenDispenserActorName = "dispense-limited", + jobExecutionTokenDispenserActorName = "dispense-limited" ) actorRefUnderTest ! JobTokenRequest(hogGroupA, LimitedTo5Tokens) expectMsg(max = MaxWaitTime, JobTokenDispensed) @@ -125,7 +140,7 @@ class JobTokenDispenserActorSpec extends TestKitSuite val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-accept-return-limited", - jobExecutionTokenDispenserActorName = "accept-return-limited", + jobExecutionTokenDispenserActorName = "accept-return-limited" ) actorRefUnderTest ! 
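// Same round trip as the infinite case, but against the bounded pool (LimitedTo5Tokens).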
JobTokenRequest(hogGroupA, LimitedTo5Tokens) expectMsg(max = MaxWaitTime, JobTokenDispensed) @@ -142,23 +157,32 @@ class JobTokenDispenserActorSpec extends TestKitSuite val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-limit-dispensing-limited", - jobExecutionTokenDispenserActorName = "limit-dispensing-limited", + jobExecutionTokenDispenserActorName = "limit-dispensing-limited" ) val senders = (1 to 15).map(index => TestProbe(s"sender-limit-dispensing-limited-$index")) // Ask for 20 tokens - senders.foreach(sender => actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, LimitedTo5Tokens), sender = sender.ref)) + senders.foreach(sender => + actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, LimitedTo5Tokens), sender = sender.ref) + ) // Force token distribution actorRefUnderTest ! TokensAvailable(100) senders.take(5).foreach(_.expectMsg(JobTokenDispensed)) actorRefUnderTest.underlyingActor.tokenAssignments.size shouldBe 5 - actorRefUnderTest.underlyingActor.tokenAssignments.keySet should contain theSameElementsAs senders.map(_.ref).take(5).toSet + actorRefUnderTest.underlyingActor.tokenAssignments.keySet should contain theSameElementsAs senders + .map(_.ref) + .take(5) + .toSet // The last 10 should be queued // At this point [0, 1, 2, 3, 4] are the ones with tokens, and [5, 14] are still queued actorRefUnderTest.underlyingActor.tokenQueues(LimitedTo5Tokens).size shouldBe 10 - actorRefUnderTest.underlyingActor.tokenQueues(LimitedTo5Tokens).queues.flatMap(_._2).toList should contain theSameElementsInOrderAs senders.drop(5).map(asHogGroupAPlaceholder) + actorRefUnderTest.underlyingActor + .tokenQueues(LimitedTo5Tokens) + .queues + .flatMap(_._2) + .toList should contain theSameElementsInOrderAs senders.drop(5).map(asHogGroupAPlaceholder) // Force token distribution actorRefUnderTest ! TokensAvailable(100) @@ -174,7 +198,11 @@ class JobTokenDispenserActorSpec extends TestKitSuite senders.slice(5, 8).foreach(_.expectMsg(JobTokenDispensed)) // At this point [3, 4, 5, 6, 7] are the ones with tokens, and [8, 19] are still queued - actorRefUnderTest.underlyingActor.tokenQueues(LimitedTo5Tokens).queues.flatMap(_._2).toList should contain theSameElementsInOrderAs senders.slice(8, 20).map(asHogGroupAPlaceholder) + actorRefUnderTest.underlyingActor + .tokenQueues(LimitedTo5Tokens) + .queues + .flatMap(_._2) + .toList should contain theSameElementsInOrderAs senders.slice(8, 20).map(asHogGroupAPlaceholder) // Double-check the queue state: when we request a token now, we should still be denied: actorRefUnderTest ! JobTokenRequest(hogGroupA, LimitedTo5Tokens) @@ -182,13 +210,19 @@ class JobTokenDispenserActorSpec extends TestKitSuite actorRefUnderTest ! TokensAvailable(100) expectNoMessage() // We should be enqueued and the last in the queue though - actorRefUnderTest.underlyingActor.tokenQueues(LimitedTo5Tokens).queues.flatMap(_._2).last shouldBe TokenQueuePlaceholder(self, "hogGroupA") + actorRefUnderTest.underlyingActor + .tokenQueues(LimitedTo5Tokens) + .queues + .flatMap(_._2) + .last shouldBe TokenQueuePlaceholder(self, "hogGroupA") // Release all currently owned tokens senders.slice(3, 8).foreach(_.send(actorRefUnderTest, JobTokenReturn)) // Force token distribution actorRefUnderTest ! 
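// With five leases just returned, this forced pass should promote the next five queued
// senders (indices 8..12), as the assertion below verifies.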
TokensAvailable(100) - actorRefUnderTest.underlyingActor.tokenAssignments.keySet should contain theSameElementsAs senders.map(_.ref).slice(8, 13) + actorRefUnderTest.underlyingActor.tokenAssignments.keySet should contain theSameElementsAs senders + .map(_.ref) + .slice(8, 13) // Keep accepting and returning tokens immediately senders.slice(8, 13).foreach(_.expectMsg(JobTokenDispensed)) senders.slice(8, 13).foreach(_.reply(JobTokenReturn)) @@ -203,14 +237,16 @@ class JobTokenDispenserActorSpec extends TestKitSuite actorRefUnderTest.underlyingActor.tokenQueues(LimitedTo5Tokens).size shouldBe 0 // There should be 3 assigned tokens: index 18, 19, and this test actor - actorRefUnderTest.underlyingActor.tokenAssignments.keySet should contain theSameElementsAs senders.map(_.ref).slice(13, 15) :+ self + actorRefUnderTest.underlyingActor.tokenAssignments.keySet should contain theSameElementsAs senders + .map(_.ref) + .slice(13, 15) :+ self } it should "resend the same token to an actor which already has one" in { val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-resend-same", - jobExecutionTokenDispenserActorName = "resend-same", + jobExecutionTokenDispenserActorName = "resend-same" ) 5 indexedTimes { _ => actorRefUnderTest ! JobTokenRequest(hogGroupA, LimitedTo5Tokens) @@ -225,17 +261,18 @@ class JobTokenDispenserActorSpec extends TestKitSuite actorRefUnderTest.underlyingActor.tokenQueues(LimitedTo5Tokens).pool.leased() shouldBe 1 } - - //Incidentally, also covers: it should "not be fooled if the wrong actor returns a token" + // Incidentally, also covers: it should "not be fooled if the wrong actor returns a token" it should "not be fooled by a doubly-returned token" in { val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-not-fooled", - jobExecutionTokenDispenserActorName = "not-fooled", + jobExecutionTokenDispenserActorName = "not-fooled" ) val senders = (1 to 7).map(index => TestProbe(s"sender-not-fooled-$index")) // Ask for 7 tokens - senders.foreach(sender => actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, LimitedTo5Tokens), sender = sender.ref)) + senders.foreach(sender => + actorRefUnderTest.tell(msg = JobTokenRequest(hogGroupA, LimitedTo5Tokens), sender = sender.ref) + ) // Force token distribution actorRefUnderTest ! 
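// Distribute at most five tokens: the pool holds only five, so two of the seven
// requesters must remain queued.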
TokensAvailable(5) @@ -261,16 +298,21 @@ class JobTokenDispenserActorSpec extends TestKitSuite it should s"recover tokens lost to actors which are $name before they hand back their token" in { val actorRefUnderTest = TestActorRef( - new JobTokenDispenserActor(TestProbe(s"serviceRegistryActor-$name").ref, Rate(10, 100.millis), None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" + new JobTokenDispenserActor(TestProbe(s"serviceRegistryActor-$name").ref, + Rate(10, 100.millis), + None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" ), - s"lost-to-$name", + s"lost-to-$name" ) val grabberSupervisor = TestActorRef(new StoppingSupervisor(), s"lost-to-$name-supervisor") // The first 5 get a token and the 6th one is queued val tokenGrabbingActors = (1 to 6).map { i => - TestActorRef[TestTokenGrabbingActor](TestTokenGrabbingActor.props(actorRefUnderTest, LimitedTo5Tokens), grabberSupervisor, s"grabber_${name}_" + i) + TestActorRef[TestTokenGrabbingActor](TestTokenGrabbingActor.props(actorRefUnderTest, LimitedTo5Tokens), + grabberSupervisor, + s"grabber_${name}_" + i + ) } // Force token distribution @@ -289,7 +331,7 @@ class JobTokenDispenserActorSpec extends TestKitSuite deathwatch watch actorToStop stopMethod(actorToStop) deathwatch.expectTerminated(actorToStop) - eventually { nextInLine.underlyingActor.hasToken shouldBe true } + eventually(nextInLine.underlyingActor.hasToken shouldBe true) } } @@ -297,12 +339,15 @@ class JobTokenDispenserActorSpec extends TestKitSuite val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-skip-dead", - jobExecutionTokenDispenserActorName = "skip-dead", + jobExecutionTokenDispenserActorName = "skip-dead" ) val grabberSupervisor = TestActorRef(new StoppingSupervisor(), "skip-dead-supervisor") // The first 5 get a token and the 6th and 7h one are queued val tokenGrabbingActors = (1 to 7).map { i => - TestActorRef[TestTokenGrabbingActor](TestTokenGrabbingActor.props(actorRefUnderTest, LimitedTo5Tokens), grabberSupervisor, s"grabber_" + i) + TestActorRef[TestTokenGrabbingActor](TestTokenGrabbingActor.props(actorRefUnderTest, LimitedTo5Tokens), + grabberSupervisor, + s"grabber_" + i + ) } // Force token distribution @@ -315,7 +360,11 @@ class JobTokenDispenserActorSpec extends TestKitSuite // Check that the next in lines have no tokens and are indeed in the queue nextInLine1.underlyingActor.hasToken shouldBe false nextInLine2.underlyingActor.hasToken shouldBe false - actorRefUnderTest.underlyingActor.tokenQueues(LimitedTo5Tokens).queues.flatMap(_._2).toList should contain theSameElementsInOrderAs List(nextInLine1, nextInLine2).map(asHogGroupAPlaceholder) + actorRefUnderTest.underlyingActor + .tokenQueues(LimitedTo5Tokens) + .queues + .flatMap(_._2) + .toList should contain theSameElementsInOrderAs List(nextInLine1, nextInLine2).map(asHogGroupAPlaceholder) // First, kill off the actor which would otherwise be first in line: val deathwatch = TestProbe("death-watch-skip-dead") @@ -327,19 +376,22 @@ class JobTokenDispenserActorSpec extends TestKitSuite actorRefUnderTest.tell(msg = JobTokenReturn, sender = tokenGrabbingActors.head) // Force token distribution actorRefUnderTest ! 
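// Exactly one token is up for grabs; the dispenser should skip the terminated
// nextInLine1 placeholder and lease it to nextInLine2 instead.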
TokensAvailable(1) - eventually { nextInLine2.underlyingActor.hasToken shouldBe true } + eventually(nextInLine2.underlyingActor.hasToken shouldBe true) } it should "skip over dead actors repeatedly when assigning tokens to the actor queue" in { val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-skip-dead-repeatedly", - jobExecutionTokenDispenserActorName = "skip-dead-repeatedly", + jobExecutionTokenDispenserActorName = "skip-dead-repeatedly" ) val grabberSupervisor = TestActorRef(new StoppingSupervisor(), "skip-dead-repeatedly-supervisor") // The first 5 get a token and the 6th and 7th one are queued val tokenGrabbingActors = (0 until 1000).toVector.map { i => - TestActorRef[TestTokenGrabbingActor](TestTokenGrabbingActor.props(actorRefUnderTest, LimitedTo5Tokens), grabberSupervisor, s"grabber_" + i) + TestActorRef[TestTokenGrabbingActor](TestTokenGrabbingActor.props(actorRefUnderTest, LimitedTo5Tokens), + grabberSupervisor, + s"grabber_" + i + ) } // Create a sliding window of 10 actors, skipping by 10 so the windows do not overlap. @@ -374,7 +426,7 @@ class JobTokenDispenserActorSpec extends TestKitSuite actorRefUnderTest.tell(msg = JobTokenReturn, sender = withTokens(3)) actorRefUnderTest ! TokensAvailable(100) - eventually { nextInLine(3).underlyingActor.hasToken shouldBe true } + eventually(nextInLine(3).underlyingActor.hasToken shouldBe true) // And kill off the rest of the actors: (withTokens :+ nextInLine(3)) foreach { actor => actor ! PoisonPill } @@ -387,7 +439,7 @@ class JobTokenDispenserActorSpec extends TestKitSuite val actorRefUnderTest = getActorRefUnderTest( serviceRegistryActorName = "serviceRegistryActor-resilient-last-request", - jobExecutionTokenDispenserActorName = "resilient-last-request", + jobExecutionTokenDispenserActorName = "resilient-last-request" ) val tokenType = JobTokenType(s"mini", maxPoolSize = Option(6), hogFactor = 2) @@ -404,7 +456,9 @@ class JobTokenDispenserActorSpec extends TestKitSuite actorRefUnderTest.underlyingActor.tokenAssignments.keys should contain(probe.ref) } // And both groups should have one item in the queue: - actorRefUnderTest.underlyingActor.tokenQueues(tokenType).queues.values.foreach { queue => queue.size should be(1) } + actorRefUnderTest.underlyingActor.tokenQueues(tokenType).queues.values.foreach { queue => + queue.size should be(1) + } } // Group B gets bored and aborts all jobs (in reverse order to make sure ): @@ -430,6 +484,7 @@ object JobTokenDispenserActorSpec { } val TestInfiniteTokenType: JobTokenType = JobTokenType("infinite", maxPoolSize = None, hogFactor = 1) - def limitedTokenType(limit: Int): JobTokenType = JobTokenType(s"$limit-limit", maxPoolSize = Option(limit), hogFactor = 1) + def limitedTokenType(limit: Int): JobTokenType = + JobTokenType(s"$limit-limit", maxPoolSize = Option(limit), hogFactor = 1) val LimitedTo5Tokens: JobTokenType = limitedTokenType(5) } diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIteratorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIteratorSpec.scala index b2c22f3bebc..d26cde04faf 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIteratorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIteratorSpec.scala @@ -40,7 +40,9 @@ class RoundRobinQueueIteratorSpec extends TestKitSuite with AnyFlatSpecLike with val probe2 = TestProbe("probe-2").ref val probe3 = TestProbe("probe-3").ref val queues = List( - 
TokenQueue(InfiniteTokenType, tokenEventLogger).enqueue(TokenQueuePlaceholder(probe1, "hogGroupA")).enqueue(TokenQueuePlaceholder(probe3, "hogGroupA")), + TokenQueue(InfiniteTokenType, tokenEventLogger) + .enqueue(TokenQueuePlaceholder(probe1, "hogGroupA")) + .enqueue(TokenQueuePlaceholder(probe3, "hogGroupA")), TokenQueue(Pool2, tokenEventLogger).enqueue(TokenQueuePlaceholder(probe2, "hogGroupA")) ) val iterator = new RoundRobinQueueIterator(queues, 0) diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/TestTokenGrabbingActor.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/TestTokenGrabbingActor.scala index caaef991994..2fe028910ef 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/TestTokenGrabbingActor.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/TestTokenGrabbingActor.scala @@ -13,8 +13,8 @@ class TestTokenGrabbingActor(tokenDispenser: ActorRef, tokenType: JobTokenType) var hasToken: Boolean = false - override def receive = stoppingReceive orElse { - case JobTokenDispensed => hasToken = true + override def receive = stoppingReceive orElse { case JobTokenDispensed => + hasToken = true } tokenDispenser ! JobTokenRequest(HogGroup("hogGroupA"), tokenType) @@ -22,7 +22,9 @@ class TestTokenGrabbingActor(tokenDispenser: ActorRef, tokenType: JobTokenType) object TestTokenGrabbingActor { - def props(tokenDispenserActor: ActorRef, tokenType: JobTokenType) = Props(new TestTokenGrabbingActor(tokenDispenserActor, tokenType)) + def props(tokenDispenserActor: ActorRef, tokenType: JobTokenType) = Props( + new TestTokenGrabbingActor(tokenDispenserActor, tokenType) + ) class StoppingSupervisor extends Actor { override val supervisorStrategy = SupervisorStrategy.stoppingStrategy diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/TokenQueueSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/TokenQueueSpec.scala index 6786c02ae83..306e589a241 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/TokenQueueSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/TokenQueueSpec.scala @@ -156,7 +156,7 @@ class TokenQueueSpec extends TestKitSuite with AnyFlatSpecLike with Matchers { val expectedOrder = (0 until 23).toVector.map(i => s"hogGroup${i + 2}") ++ Vector("hogGroup0", "hogGroup1") usedQueue.queueOrder should be(expectedOrder) - usedQueue.size should be(jobCount - poolSize) + usedQueue.size should be(jobCount - poolSize) } diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPoolSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPoolSpec.scala index f9bbd73fd34..d120b87b605 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPoolSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPoolSpec.scala @@ -2,7 +2,12 @@ package cromwell.engine.workflow.tokens import common.assertion.CromwellTimeoutSpec import cromwell.core.JobToken.JobTokenType -import cromwell.engine.workflow.tokens.UnhoggableTokenPool.{HogLimitExceeded, TokenHoggingLease, TokenTypeExhausted, TokensAvailable} +import cromwell.engine.workflow.tokens.UnhoggableTokenPool.{ + HogLimitExceeded, + TokenHoggingLease, + TokensAvailable, + TokenTypeExhausted +} import org.scalatest.concurrent.Eventually import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -25,7 +30,7 @@ class UnhoggableTokenPoolSpec extends AnyFlatSpec with CromwellTimeoutSpec with JobTokenType("backend", 
Some(150), 200) -> Some(1), JobTokenType("backend", None, 1) -> None, JobTokenType("backend", None, 150) -> None - ) ++ (hogLimitingTokenTypeToHogLimit map { case (k,v) => (k, Some(v)) }) + ) ++ (hogLimitingTokenTypeToHogLimit map { case (k, v) => (k, Some(v)) }) tokenTypeToHogLimit foreach { case (tokenType, expectedHogLimit) => it should s"correctly calculate hogLimit for $tokenType as $expectedHogLimit" in { @@ -44,8 +49,12 @@ class UnhoggableTokenPoolSpec extends AnyFlatSpec with CromwellTimeoutSpec with (0 until hogLimit) foreach { index => pool.tryAcquire(hogGroup) match { case _: TokenHoggingLease => // great! - case TokenTypeExhausted => fail(s"Unhoggable token pool ran out after $index tokens distributed to $hogGroupNumber") - case HogLimitExceeded => fail(s"Unhoggable token pool making unfounded accusations of hogging after $index tokens distributed to $hogGroupNumber") + case TokenTypeExhausted => + fail(s"Unhoggable token pool ran out after $index tokens distributed to $hogGroupNumber") + case HogLimitExceeded => + fail( + s"Unhoggable token pool making unfounded accusations of hogging after $index tokens distributed to $hogGroupNumber" + ) } val acquiredTokensForGroup = index + 1 @@ -89,7 +98,7 @@ class UnhoggableTokenPoolSpec extends AnyFlatSpec with CromwellTimeoutSpec with hogLimitPool.tryAcquire("group1") should be(HogLimitExceeded) lease1.release() - eventually { hogLimitPool.available("group1") shouldBe TokensAvailable } + eventually(hogLimitPool.available("group1") shouldBe TokensAvailable) hogLimitPool.tryAcquire("group1") match { case _: TokenHoggingLease => // Great! case other => fail(s"expected lease but got $other") @@ -97,7 +106,7 @@ class UnhoggableTokenPoolSpec extends AnyFlatSpec with CromwellTimeoutSpec with hogLimitPool.tryAcquire("group1") should be(HogLimitExceeded) lease2.release() - eventually { hogLimitPool.available("group1") shouldBe TokensAvailable } + eventually(hogLimitPool.available("group1") shouldBe TokensAvailable) hogLimitPool.tryAcquire("group1") match { case _: TokenHoggingLease => // Great! 
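// (a fresh lease is the success path here; anything else means the pool failed to
//  free capacity for group1 after the release)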
case other => fail(s"expected lease but got $other") diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/LargeScaleJobTokenDispenserActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/LargeScaleJobTokenDispenserActorSpec.scala index af93aaaa73e..779dd98065a 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/LargeScaleJobTokenDispenserActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/LargeScaleJobTokenDispenserActorSpec.scala @@ -17,7 +17,14 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ -class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDASpec")) with ImplicitSender with AnyFlatSpecLike with Matchers with BeforeAndAfter with BeforeAndAfterAll with Eventually { +class LargeScaleJobTokenDispenserActorSpec + extends TestKit(ActorSystem("LSJETDASpec")) + with ImplicitSender + with AnyFlatSpecLike + with Matchers + with BeforeAndAfter + with BeforeAndAfterAll + with Eventually { val multipleTokenUsingActorIndex: AtomicInteger = new AtomicInteger(0) def multipleTokenUsingActorName() = s"multipleTokenUsingActor${multipleTokenUsingActorIndex.getAndIncrement()}" @@ -32,14 +39,35 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS val totalJobsPerWorkflow = maxConcurrencyToTest + 1 val tokenType = JobTokenType(backendName, Some(maxConcurrencyToTest), hogFactor) - val tokenDispenserUnderTest = TestActorRef(new JobTokenDispenserActor(TestProbe().ref, Rate(maxConcurrencyToTest + 1, 100.millis), None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" - ), "tokenDispenserUnderTest1") + val tokenDispenserUnderTest = TestActorRef( + new JobTokenDispenserActor(TestProbe().ref, + Rate(maxConcurrencyToTest + 1, 100.millis), + None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" + ), + "tokenDispenserUnderTest1" + ) val globalRunningJobsCounter = new RunningJobCounter() - val bigWorkflow1 = TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = "hogGroupA", globalRunningJobsCounter), multipleTokenUsingActorName()) - val bigWorkflow2 = TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = "hogGroupA", globalRunningJobsCounter), multipleTokenUsingActorName()) + val bigWorkflow1 = TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = "hogGroupA", + globalRunningJobsCounter + ), + multipleTokenUsingActorName() + ) + val bigWorkflow2 = TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = "hogGroupA", + globalRunningJobsCounter + ), + multipleTokenUsingActorName() + ) val parentProbe = new TestProbe(system, "parent") @@ -47,11 +75,12 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS parentProbe.send(bigWorkflow2, Begin) (0 until 2) foreach { _ => - parentProbe.expectMsgPF(100.seconds) { - case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => - Assertions.assert(maximumConcurrency <= maxConcurrencyToTest, "(asserting maxActualConcurrency <= maxRequestedConcurrency)") - queueWaits.size should be(totalJobsPerWorkflow) - errors shouldBe List.empty + parentProbe.expectMsgPF(100.seconds) { case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => + 
Assertions.assert(maximumConcurrency <= maxConcurrencyToTest, + "(asserting maxActualConcurrency <= maxRequestedConcurrency)" + ) + queueWaits.size should be(totalJobsPerWorkflow) + errors shouldBe List.empty } } @@ -69,14 +98,35 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS val totalJobsPerWorkflow = maxConcurrencyExpected + 1 val tokenType = JobTokenType(backendName, Some(totalTokensAvailable), hogFactor) - val tokenDispenserUnderTest = TestActorRef(new JobTokenDispenserActor(TestProbe().ref, Rate(maxConcurrencyExpected + 1, 100.millis), None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" - ), "tokenDispenserUnderTest2") + val tokenDispenserUnderTest = TestActorRef( + new JobTokenDispenserActor(TestProbe().ref, + Rate(maxConcurrencyExpected + 1, 100.millis), + None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" + ), + "tokenDispenserUnderTest2" + ) val globalRunningJobsCounter = new RunningJobCounter() - val bigWorkflow1 = TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = "hogGroupA", globalRunningJobsCounter), multipleTokenUsingActorName()) - val bigWorkflow2 = TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = "hogGroupA", globalRunningJobsCounter), multipleTokenUsingActorName()) + val bigWorkflow1 = TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = "hogGroupA", + globalRunningJobsCounter + ), + multipleTokenUsingActorName() + ) + val bigWorkflow2 = TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = "hogGroupA", + globalRunningJobsCounter + ), + multipleTokenUsingActorName() + ) val parentProbe = new TestProbe(system, "parent") @@ -84,11 +134,12 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS parentProbe.send(bigWorkflow2, Begin) (0 until 2) foreach { _ => - parentProbe.expectMsgPF(100.seconds) { - case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => - Assertions.assert(maximumConcurrency <= maxConcurrencyExpected, "(asserting maxActualConcurrency <= maxRequestedConcurrency)") - queueWaits.size should be(totalJobsPerWorkflow) - errors shouldBe List.empty + parentProbe.expectMsgPF(100.seconds) { case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => + Assertions.assert(maximumConcurrency <= maxConcurrencyExpected, + "(asserting maxActualConcurrency <= maxRequestedConcurrency)" + ) + queueWaits.size should be(totalJobsPerWorkflow) + errors shouldBe List.empty } } @@ -106,14 +157,35 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS val totalJobsPerWorkflow = maxConcurrencyPerWorkflow + 1 val tokenType = JobTokenType(backendName, Some(totalTokensAvailable), hogFactor) - val tokenDispenserUnderTest = TestActorRef(new JobTokenDispenserActor(TestProbe().ref, Rate(maxConcurrencyOverall + 1, 100.millis), None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" - ), "tokenDispenserUnderTest3") + val tokenDispenserUnderTest = TestActorRef( + new JobTokenDispenserActor(TestProbe().ref, + Rate(maxConcurrencyOverall + 1, 100.millis), + None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" + ), + "tokenDispenserUnderTest3" + ) val globalRunningJobsCounter = new RunningJobCounter() - val bigWorkflow1 = 
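// Unlike the two tests above, these workflows run under different hog groups, so the
// hog factor splits the shared pool between them rather than having both compete
// inside a single group's allotment.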
TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = "hogGroupA", globalRunningJobsCounter), multipleTokenUsingActorName()) - val bigWorkflow2 = TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = "hogGroupB", globalRunningJobsCounter), multipleTokenUsingActorName()) + val bigWorkflow1 = TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = "hogGroupA", + globalRunningJobsCounter + ), + multipleTokenUsingActorName() + ) + val bigWorkflow2 = TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = "hogGroupB", + globalRunningJobsCounter + ), + multipleTokenUsingActorName() + ) val parentProbe = new TestProbe(system, "parent") @@ -121,11 +193,12 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS parentProbe.send(bigWorkflow2, Begin) (0 until 2) foreach { _ => - parentProbe.expectMsgPF(100.seconds) { - case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => - Assertions.assert(maximumConcurrency == maxConcurrencyPerWorkflow, "(asserting maxActualConcurrency <= maxRequestedConcurrency)") - queueWaits.size should be(totalJobsPerWorkflow) - errors shouldBe List.empty + parentProbe.expectMsgPF(100.seconds) { case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => + Assertions.assert(maximumConcurrency == maxConcurrencyPerWorkflow, + "(asserting maxActualConcurrency <= maxRequestedConcurrency)" + ) + queueWaits.size should be(totalJobsPerWorkflow) + errors shouldBe List.empty } } @@ -143,27 +216,41 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS val totalJobsPerWorkflow = maxConcurrencyPerWorkflow * 2 val tokenType = JobTokenType(backendName, Some(totalTokensAvailable), hogFactor) - val tokenDispenserUnderTest = TestActorRef(new JobTokenDispenserActor(TestProbe().ref, Rate(maxConcurrencyOverall + 1, 100.millis), None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" - ), "tokenDispenserUnderTest4") + val tokenDispenserUnderTest = TestActorRef( + new JobTokenDispenserActor(TestProbe().ref, + Rate(maxConcurrencyOverall + 1, 100.millis), + None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" + ), + "tokenDispenserUnderTest4" + ) val globalRunningJobsCounter = new RunningJobCounter() val workflows = (0 until 100) map { i => - TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = s"hogGroup$i", globalRunningJobsCounter), multipleTokenUsingActorName()) + TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = s"hogGroup$i", + globalRunningJobsCounter + ), + multipleTokenUsingActorName() + ) } val parentProbe = new TestProbe(system, "parent") - workflows foreach { parentProbe.send(_, Begin)} + workflows foreach { parentProbe.send(_, Begin) } workflows.indices foreach { _ => - parentProbe.expectMsgPF(100.seconds) { - case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => - Assertions.assert(maximumConcurrency == maxConcurrencyPerWorkflow, "(asserting maxActualConcurrency <= maxRequestedConcurrency)") - queueWaits.size should be(totalJobsPerWorkflow) - errors shouldBe List.empty + parentProbe.expectMsgPF(100.seconds) { case TokenUsingActorCompletion(queueWaits, maximumConcurrency, 
errors) => + Assertions.assert(maximumConcurrency == maxConcurrencyPerWorkflow, + "(asserting maxActualConcurrency <= maxRequestedConcurrency)" + ) + queueWaits.size should be(totalJobsPerWorkflow) + errors shouldBe List.empty } } @@ -182,28 +269,42 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS val totalJobsPerWorkflow = maxConcurrencyPerHogGroup * 2 val tokenType = JobTokenType(backendName, Some(totalTokensAvailable), hogFactor) - val tokenDispenserUnderTest = TestActorRef(new JobTokenDispenserActor(TestProbe().ref, Rate(maxConcurrencyOverall + 1, 100.millis), None, - dispenserType = "execution", - tokenAllocatedDescription = "Running" - ), "tokenDispenserUnderTest5") + val tokenDispenserUnderTest = TestActorRef( + new JobTokenDispenserActor(TestProbe().ref, + Rate(maxConcurrencyOverall + 1, 100.millis), + None, + dispenserType = "execution", + tokenAllocatedDescription = "Running" + ), + "tokenDispenserUnderTest5" + ) val hogGroupConcurrencyCounters = (0 until totalHogGroups).toVector map { _ => new RunningJobCounter() } val workflows = (0 until totalWorkflows) map { i => val hogGroupNumber = i % totalHogGroups - TestActorRef(new MultipleTokenUsingActor(tokenDispenserUnderTest, tokenType, totalJobsPerWorkflow, hogGroup = s"hogGroup$hogGroupNumber", hogGroupConcurrencyCounters(hogGroupNumber)), multipleTokenUsingActorName()) + TestActorRef( + new MultipleTokenUsingActor(tokenDispenserUnderTest, + tokenType, + totalJobsPerWorkflow, + hogGroup = s"hogGroup$hogGroupNumber", + hogGroupConcurrencyCounters(hogGroupNumber) + ), + multipleTokenUsingActorName() + ) } val parentProbe = new TestProbe(system, "parent") - workflows foreach { parentProbe.send(_, Begin)} + workflows foreach { parentProbe.send(_, Begin) } workflows.indices foreach { _ => - parentProbe.expectMsgPF(100.seconds) { - case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => - Assertions.assert(maximumConcurrency <= maxConcurrencyPerHogGroup, "(asserting maxActualConcurrency per workflow <= maxRequestedConcurrency per hog group)") - queueWaits.size should be(totalJobsPerWorkflow) - errors shouldBe List.empty + parentProbe.expectMsgPF(100.seconds) { case TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) => + Assertions.assert(maximumConcurrency <= maxConcurrencyPerHogGroup, + "(asserting maxActualConcurrency per workflow <= maxRequestedConcurrency per hog group)" + ) + queueWaits.size should be(totalJobsPerWorkflow) + errors shouldBe List.empty } } @@ -219,12 +320,21 @@ class LargeScaleJobTokenDispenserActorSpec extends TestKit(ActorSystem("LSJETDAS (0 until totalHogGroups).toVector foreach { hogGroupNumber => val c: RunningJobCounter = hogGroupConcurrencyCounters(hogGroupNumber) - if (c.getMax == maxConcurrencyPerHogGroup) { exactlyAtLimit += 1 } else { - Assertions.assert(c.getMax <= maxConcurrencyPerHogGroup, s"(asserting maxActualConcurrency for each hog group <= maxRequestedConcurrency per hog group)") - Assertions.assert(c.getMax >= maxConcurrencyPerHogGroup * 0.95, s"(asserting maxActualConcurrency for each hog group >= (95% of maxRequestedConcurrency per hog group))") + if (c.getMax == maxConcurrencyPerHogGroup) { exactlyAtLimit += 1 } + else { + Assertions.assert( + c.getMax <= maxConcurrencyPerHogGroup, + s"(asserting maxActualConcurrency for each hog group <= maxRequestedConcurrency per hog group)" + ) + Assertions.assert( + c.getMax >= maxConcurrencyPerHogGroup * 0.95, + s"(asserting maxActualConcurrency for each hog group >= (95% of 
maxRequestedConcurrency per hog group))" + ) } } - Assertions.assert(exactlyAtLimit >= (totalHogGroups * 0.95), "(at least 95% of the hog groups reached the full concurrency limit)") + Assertions.assert(exactlyAtLimit >= (totalHogGroups * 0.95), + "(at least 95% of the hog groups reached the full concurrency limit)" + ) workflows foreach { system.stop(_) } system.stop(tokenDispenserUnderTest) diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala index 79bf6951948..c35315c19ca 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala @@ -1,6 +1,5 @@ package cromwell.engine.workflow.tokens.large - import akka.actor.{Actor, ActorRef} import cromwell.core.JobToken.JobTokenType import cromwell.engine.workflow.tokens.large.LargeScaleJobTokenDispenserActorSpec.RunningJobCounter @@ -15,7 +14,12 @@ import cromwell.engine.workflow.tokens.large.PatientTokenNeedingActor.{AllDone, * * Because I'm a good citizen, I'm going to record and return a value representing my "peak concurrent tokens distributed" */ -class MultipleTokenUsingActor(tokenDispenser: ActorRef, tokenType: JobTokenType, totalJobs: Int, hogGroup: String, globalRunningJobCounter: RunningJobCounter) extends Actor { +class MultipleTokenUsingActor(tokenDispenser: ActorRef, + tokenType: JobTokenType, + totalJobs: Int, + hogGroup: String, + globalRunningJobCounter: RunningJobCounter +) extends Actor { var hasToken: Boolean = false @@ -32,7 +36,9 @@ class MultipleTokenUsingActor(tokenDispenser: ActorRef, tokenType: JobTokenType, case Begin => starter = sender() (0 until totalJobs) foreach { i => - val jobActor = context.actorOf(PatientTokenNeedingActor.props(tokenDispenser, tokenType, hogGroup), name = self.path.name + s"job$i") + val jobActor = context.actorOf(PatientTokenNeedingActor.props(tokenDispenser, tokenType, hogGroup), + name = self.path.name + s"job$i" + ) jobActor ! 
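// Start each child; every PatientTokenNeedingActor requests a token, waits its turn,
// and reports back so this parent can aggregate queueWaits and peak concurrency.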
Begin } startedJobs = totalJobs @@ -55,7 +61,6 @@ class MultipleTokenUsingActor(tokenDispenser: ActorRef, tokenType: JobTokenType, } } - object MultipleTokenUsingActor { final case class TokenUsingActorCompletion(queueWaits: Seq[Long], maximumConcurrency: Int, errors: List[String]) } diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala index debe5fe2537..817fa46934e 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala @@ -60,5 +60,7 @@ object PatientTokenNeedingActor { // Indicate to myself that I'm done (and gets forwarded to my parent) case object AllDone - def props(tokenDispenser: ActorRef, tokenType: JobTokenType, hogGroup: String): Props = Props(new PatientTokenNeedingActor(tokenDispenser, tokenType, hogGroup)) + def props(tokenDispenser: ActorRef, tokenType: JobTokenType, hogGroup: String): Props = Props( + new PatientTokenNeedingActor(tokenDispenser, tokenType, hogGroup) + ) } diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/TokenDispenserBenchmark.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/TokenDispenserBenchmark.scala index 8a5cbe6729f..590b5624b20 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/TokenDispenserBenchmark.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/TokenDispenserBenchmark.scala @@ -38,7 +38,7 @@ object TokenDispenserBenchmark extends Bench[Double] with DefaultJsonProtocol { def fillQueue(tokenQueueIn: TokenQueue, jobsPerGroup: Int, hogGroups: List[String]): TokenQueue = { var tokenQueue = tokenQueueIn hogGroups foreach { hogGroup => - (0 until jobsPerGroup) foreach { _ => + (0 until jobsPerGroup) foreach { _ => tokenQueue = tokenQueue.enqueue(TokenQueuePlaceholder(actorToQueue, hogGroup)) } } @@ -71,7 +71,8 @@ object TokenDispenserBenchmark extends Bench[Double] with DefaultJsonProtocol { measure method "enqueuing and dequeuing with multiple hog groups" in { val poolSize = 5 * ScaleFactor - val jobCounts: Gen[Int] = Gen.range("initialJobsInQueue")(from = 1 * ScaleFactor, upto = 15 * ScaleFactor, hop = 3 * ScaleFactor) + val jobCounts: Gen[Int] = + Gen.range("initialJobsInQueue")(from = 1 * ScaleFactor, upto = 15 * ScaleFactor, hop = 3 * ScaleFactor) val jobsAtATime = 50 val queues = for { diff --git a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala index fe51c9b065b..497482ddcc2 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala @@ -20,9 +20,12 @@ import spray.json.{JsObject, JsString} import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, Future} - -class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures - with BeforeAndAfterAll { +class SqlWorkflowStoreSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalaFutures + with BeforeAndAfterAll { implicit val ec: ExecutionContextExecutor = ExecutionContext.global implicit val defaultPatience: PatienceConfig = PatienceConfig(scaled(Span(20, Seconds)), 
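// 20s timeout / 100ms poll interval for ScalaFutures, both scaled by ScalaTest's span scale factor.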
scaled(Span(100, Millis))) @@ -112,25 +115,27 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ) DatabaseSystem.All foreach { databaseSystem => - behavior of s"SqlWorkflowStore on ${databaseSystem.name}" val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - lazy val dataAccess = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) - lazy val metadataDataAccess = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem) + lazy val dataAccess = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) + lazy val metadataDataAccess = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem) lazy val workflowStore = SqlWorkflowStore(dataAccess, metadataDataAccess) - def updateWfToRunning(startableWorkflows: List[WorkflowToStart]): Unit = { + def updateWfToRunning(startableWorkflows: List[WorkflowToStart]): Unit = startableWorkflows.foreach { wf => Await.result(workflowStore.sqlDatabase.updateWorkflowState( - wf.id.toString, - WorkflowStoreState.Submitted.toString, - WorkflowStoreState.Running.toString - ), 5.seconds) + wf.id.toString, + WorkflowStoreState.Submitted.toString, + WorkflowStoreState.Running.toString + ), + 5.seconds + ) } - } it should "start container if required" taggedAs DbmsTest in { containerOpt.foreach { @@ -257,52 +262,72 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "start workflows from hog group with lowest count of running workflows" taggedAs DbmsTest in { // first submission of 50 workflows for hogGroup "Goldfinger" - val goldFingerWorkflowIds = (for (_ <- 1 to 50) yield Await.result(workflowStore.add(includedGroupSourceFilesCollection1), 5.seconds)).flatMap(_.map(_.id).toList) + val goldFingerWorkflowIds = + (for (_ <- 1 to 50) yield Await.result(workflowStore.add(includedGroupSourceFilesCollection1), 5.seconds)) + .flatMap(_.map(_.id).toList) // second submission of 50 workflows for hogGroup "Highlander" - val highlanderWorkflowIds = (for (_ <- 1 to 50) yield Await.result(workflowStore.add(includedGroupSourceFilesCollection2), 5.seconds)).flatMap(_.map(_.id).toList) - - for (_ <- 1 to 10) yield { - (for { - // since both hog groups have 0 workflows running, the hog group with oldest submission time is picked first - startableWorkflows1 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, Set.empty[String]) - _ = startableWorkflows1.map(_.hogGroup.value).toSet.head should be("Goldfinger") - _ = startableWorkflows1.map(_.id).foreach(x => goldFingerWorkflowIds.toList should contain(x)) - _ = updateWfToRunning(startableWorkflows1) - - startableWorkflows2 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, Set.empty[String]) - _ = startableWorkflows2.map(_.hogGroup.value).toSet.head should be("Highlander") - _ = startableWorkflows2.map(_.id).foreach(x => highlanderWorkflowIds.toList should contain(x)) - _ = updateWfToRunning(startableWorkflows2) - } yield ()).futureValue - } + val highlanderWorkflowIds = + (for (_ <- 1 to 50) yield Await.result(workflowStore.add(includedGroupSourceFilesCollection2), 5.seconds)) + .flatMap(_.map(_.id).toList) + + for (_ <- 1 to 10) yield (for { + // since both hog groups have 0 workflows running, the hog group with oldest submission time is picked first + 
startableWorkflows1 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, Set.empty[String]) + _ = startableWorkflows1.map(_.hogGroup.value).toSet.head should be("Goldfinger") + _ = startableWorkflows1.map(_.id).foreach(x => goldFingerWorkflowIds.toList should contain(x)) + _ = updateWfToRunning(startableWorkflows1) + + startableWorkflows2 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, Set.empty[String]) + _ = startableWorkflows2.map(_.hogGroup.value).toSet.head should be("Highlander") + _ = startableWorkflows2.map(_.id).foreach(x => highlanderWorkflowIds.toList should contain(x)) + _ = updateWfToRunning(startableWorkflows2) + } yield ()).futureValue // remove entries from WorkflowStore - (goldFingerWorkflowIds ++ highlanderWorkflowIds).foreach(id => Await.result(workflowStore.deleteFromStore(id), 5.seconds)) + (goldFingerWorkflowIds ++ highlanderWorkflowIds).foreach(id => + Await.result(workflowStore.deleteFromStore(id), 5.seconds) + ) } it should "respect excludedHogGroups and start workflows from hog group with lowest count of running workflows" taggedAs DbmsTest in { (for { // first submission of 10 workflows for hogGroup "Goldfinger" - goldFingerSubmissions <- Future.sequence(for (_ <- 1 to 10) yield workflowStore.add(includedGroupSourceFilesCollection1)) + goldFingerSubmissions <- Future.sequence( + for (_ <- 1 to 10) yield workflowStore.add(includedGroupSourceFilesCollection1) + ) goldFingerWorkflowIds = goldFingerSubmissions.flatMap(_.map(_.id).toList) // second submission of 10 workflows for hogGroup "Zardoz" - zardozSubmissions <- Future.sequence(for (_ <- 1 to 10) yield workflowStore.add(excludedGroupSourceFilesCollection)) + zardozSubmissions <- Future.sequence( + for (_ <- 1 to 10) yield workflowStore.add(excludedGroupSourceFilesCollection) + ) zardozWorkflowIds = zardozSubmissions.flatMap(_.map(_.id).toList) - startableWorkflows1 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set("Zardoz")) + startableWorkflows1 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set("Zardoz") + ) _ = startableWorkflows1.map(_.hogGroup.value).toSet.head should be("Goldfinger") _ = startableWorkflows1.map(_.id).foreach(x => goldFingerWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows1) - startableWorkflows2 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set("Zardoz")) + startableWorkflows2 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set("Zardoz") + ) _ = startableWorkflows2.map(_.hogGroup.value).toSet.head should be("Goldfinger") _ = startableWorkflows2.map(_.id).foreach(x => goldFingerWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows2) // there are 10 workflows from hog group "Zardoz" in the store, but since the group is excluded, 0 workflows are returned here - startableWorkflows3 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set("Zardoz")) + startableWorkflows3 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set("Zardoz") + ) _ = startableWorkflows3.size should be(0) // hog group "Zardoz" has tokens to run workflows, hence don't exclude it @@ -319,59 +344,93 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat // remove entries from WorkflowStore workflowsList = goldFingerWorkflowIds ++ zardozWorkflowIds _ = workflowsList.foreach(id => 
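// tidy up so leftover rows don't skew the per-hog-group counts in the next test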
Await.result(workflowStore.deleteFromStore(id), 5.seconds)) - } yield()).futureValue + } yield ()).futureValue } it should "start workflows from hog group with lowest count of running workflows for multiple hog groups" taggedAs DbmsTest in { (for { // first submission of 10 workflows for hogGroup "Goldfinger" - goldFingerSubmissions <- Future.sequence(for (_ <- 1 to 10) yield workflowStore.add(includedGroupSourceFilesCollection1)) + goldFingerSubmissions <- Future.sequence( + for (_ <- 1 to 10) yield workflowStore.add(includedGroupSourceFilesCollection1) + ) goldFingerWorkflowIds = goldFingerSubmissions.flatMap(_.map(_.id).toList) // second submission of 10 workflows for hogGroup "Highlander" - highlanderSubmissions <- Future.sequence(for (_ <- 1 to 15) yield workflowStore.add(includedGroupSourceFilesCollection2)) + highlanderSubmissions <- Future.sequence( + for (_ <- 1 to 15) yield workflowStore.add(includedGroupSourceFilesCollection2) + ) highlanderWorkflowIds = highlanderSubmissions.flatMap(_.map(_.id).toList) // since both hog groups have 0 workflows running, the hog group with oldest submission time is picked first - startableWorkflows1 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set.empty[String]) + startableWorkflows1 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set.empty[String] + ) _ = startableWorkflows1.map(_.hogGroup.value).toSet.head should be("Goldfinger") _ = startableWorkflows1.map(_.id).foreach(x => goldFingerWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows1) - startableWorkflows2 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set.empty[String]) + startableWorkflows2 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set.empty[String] + ) _ = startableWorkflows2.map(_.hogGroup.value).toSet.head should be("Highlander") _ = startableWorkflows2.map(_.id).foreach(x => highlanderWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows2) // new submission for hog group "Finding Forrester" - foresterSubmissions <- Future.sequence(for (_ <- 1 to 10) yield workflowStore.add(includedGroupSourceFilesCollection3)) + foresterSubmissions <- Future.sequence( + for (_ <- 1 to 10) yield workflowStore.add(includedGroupSourceFilesCollection3) + ) foresterWorkflowIds = foresterSubmissions.flatMap(_.map(_.id).toList) // now hog group "Finding Forrester" has 0 workflows running, hence it is picked to run - startableWorkflows3 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set.empty[String]) + startableWorkflows3 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set.empty[String] + ) _ = startableWorkflows3.map(_.hogGroup.value).toSet.head should be("Finding Forrester") _ = startableWorkflows3.map(_.id).foreach(x => foresterWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows3) // since all 3 hog groups have 5 workflows running each, the hog group with oldest submission time is picked first - startableWorkflows5 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set.empty[String]) + startableWorkflows5 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set.empty[String] + ) _ = startableWorkflows5.map(_.hogGroup.value).toSet.head should be("Goldfinger") _ = startableWorkflows5.map(_.id).foreach(x => goldFingerWorkflowIds.toList should contain(x)) _ = 
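// mark Goldfinger's second batch Running; Highlander and Finding Forrester (5 running
// each) are now the least-loaded groups, so submission order decides between them next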
updateWfToRunning(startableWorkflows5) // since both "Highlander" and "Finding Forrester" have 5 workflows in Running state, the hog group with oldest submission time is picked first - startableWorkflows6 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set.empty[String]) + startableWorkflows6 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set.empty[String] + ) _ = startableWorkflows6.map(_.hogGroup.value).toSet.head should be("Highlander") _ = startableWorkflows6.map(_.id).foreach(x => highlanderWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows6) // "Finding Forrester" is now the hog group with least running workflows and has 5 more workflows to run, hence it is picked to run - startableWorkflows4 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set.empty[String]) + startableWorkflows4 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set.empty[String] + ) _ = startableWorkflows4.map(_.hogGroup.value).toSet.head should be("Finding Forrester") _ = startableWorkflows4.map(_.id).foreach(x => foresterWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows4) - startableWorkflows7 <- workflowStore.fetchStartableWorkflows(5, "A08", 5.minutes, excludedGroups = Set.empty[String]) + startableWorkflows7 <- workflowStore.fetchStartableWorkflows(5, + "A08", + 5.minutes, + excludedGroups = Set.empty[String] + ) _ = startableWorkflows7.map(_.hogGroup.value).toSet.head should be("Highlander") _ = startableWorkflows7.map(_.id).foreach(x => highlanderWorkflowIds.toList should contain(x)) _ = updateWfToRunning(startableWorkflows7) @@ -385,10 +444,13 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "accept and honor a requested workflow ID" taggedAs DbmsTest in { val requestedId = WorkflowId.randomId() - val sourcesToSubmit = onHoldSourceFilesCollection.map(c => c.asInstanceOf[WorkflowSourceFilesWithoutImports].copy( - requestedWorkflowId = Option(requestedId), - workflowOnHold = false - )) + val sourcesToSubmit = onHoldSourceFilesCollection.map(c => + c.asInstanceOf[WorkflowSourceFilesWithoutImports] + .copy( + requestedWorkflowId = Option(requestedId), + workflowOnHold = false + ) + ) (for { submissionResponses <- workflowStore.add(sourcesToSubmit) @@ -402,9 +464,11 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "not accept a duplicate workflow ID" taggedAs DbmsTest in { val requestedId = WorkflowId.randomId() - val workflowSourceFilesTemplate = onHoldSourceFilesCollection.head.asInstanceOf[WorkflowSourceFilesWithoutImports].copy( - requestedWorkflowId = Option(requestedId) - ) + val workflowSourceFilesTemplate = onHoldSourceFilesCollection.head + .asInstanceOf[WorkflowSourceFilesWithoutImports] + .copy( + requestedWorkflowId = Option(requestedId) + ) val sourcesToSubmit1 = NonEmptyList.of(workflowSourceFilesTemplate) val sourcesToSubmit2 = NonEmptyList.of(workflowSourceFilesTemplate.copy(workflowOnHold = false)) @@ -412,14 +476,16 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ((for { _ <- workflowStore.add(sourcesToSubmit1) _ <- workflowStore.add(sourcesToSubmit2) - } yield "incorrectly accepted") recoverWith { - case error => for { + } yield "incorrectly accepted") recoverWith { case error => + for { message <- Future { error.getMessage should be(s"Requested workflow IDs are already in use: 
$requestedId") "duplicate ID correctly detected" } stats <- workflowStore.stats - _ = stats should be(Map(WorkflowStoreState.OnHold -> 1)) // Only the original (on-hold) version of requested ID 1 should be in the store + _ = stats should be( + Map(WorkflowStoreState.OnHold -> 1) + ) // Only the original (on-hold) version of requested ID 1 should be in the store _ <- workflowStore.deleteFromStore(requestedId) // tidy up } yield message }).futureValue should be("duplicate ID correctly detected") @@ -430,29 +496,35 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val requestedId2 = WorkflowId.randomId() val requestedId3 = WorkflowId.randomId() - val workflowSourceFilesTemplate = onHoldSourceFilesCollection.head.asInstanceOf[WorkflowSourceFilesWithoutImports].copy( - requestedWorkflowId = Option(requestedId1) - ) + val workflowSourceFilesTemplate = onHoldSourceFilesCollection.head + .asInstanceOf[WorkflowSourceFilesWithoutImports] + .copy( + requestedWorkflowId = Option(requestedId1) + ) val sourcesToSubmit1 = NonEmptyList.of(workflowSourceFilesTemplate) val sourcesToSubmit2 = NonEmptyList.of( workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId2), workflowOnHold = false), workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId3), workflowOnHold = false), - workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId1), workflowOnHold = false) // duplicates the existing ID. + workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId1), + workflowOnHold = false + ) // duplicates the existing ID. ) ((for { _ <- workflowStore.add(sourcesToSubmit1) _ <- workflowStore.add(sourcesToSubmit2) - } yield "incorrectly accepted") recoverWith { - case error => for { + } yield "incorrectly accepted") recoverWith { case error => + for { message <- Future { error.getMessage should be(s"Requested workflow IDs are already in use: $requestedId1") "duplicate ID correctly detected" } stats <- workflowStore.stats - _ = stats should be(Map(WorkflowStoreState.OnHold -> 1)) // Only the original (on-hold) version of requested ID 1 should be in the store + _ = stats should be( + Map(WorkflowStoreState.OnHold -> 1) + ) // Only the original (on-hold) version of requested ID 1 should be in the store _ <- workflowStore.deleteFromStore(requestedId1) } yield message @@ -470,13 +542,15 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId1), workflowOnHold = false), workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId2), workflowOnHold = false), workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId3), workflowOnHold = false), - workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId1), workflowOnHold = false) // duplicates an ID already in the set + workflowSourceFilesTemplate.copy(requestedWorkflowId = Option(requestedId1), + workflowOnHold = false + ) // duplicates an ID already in the set ) ((for { _ <- workflowStore.add(sourcesToSubmit) - } yield "incorrectly accepted") recoverWith { - case error => for { + } yield "incorrectly accepted") recoverWith { case error => + for { message <- Future { error.getMessage should be(s"Requested workflow IDs are duplicated: $requestedId1") "duplicate ID correctly detected" diff --git a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfigSpec.scala 
b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfigSpec.scala index fe4b951ec06..768731b815c 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfigSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowHeartbeatConfigSpec.scala @@ -9,7 +9,11 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.concurrent.duration._ -class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { +class WorkflowHeartbeatConfigSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { behavior of "WorkflowHeartbeatConfig" @@ -17,7 +21,7 @@ class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec w val workflowHeartbeatConfig = WorkflowHeartbeatConfig(WorkflowHeartbeatConfigSpec.DefaultConfig) workflowHeartbeatConfig.cromwellId should startWith("cromid-") workflowHeartbeatConfig.cromwellId should have length 14 - workflowHeartbeatConfig.heartbeatInterval should be (2.minutes) + workflowHeartbeatConfig.heartbeatInterval should be(2.minutes) workflowHeartbeatConfig.ttl should be(10.minutes) workflowHeartbeatConfig.failureShutdownDuration should be(5.minutes) workflowHeartbeatConfig.writeBatchSize should be(10000) @@ -29,22 +33,22 @@ class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec w ( "from an empty config", "system.cromwell_id_random_suffix = false", - WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 5.minutes, 10000, 10000), + WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 5.minutes, 10000, 10000) ), ( "with a specified cromid", """system.cromwell_id = "new_crom_name"""", - WorkflowHeartbeatConfig("new_crom_name", 2.minutes, 10.minutes, 5.minutes, 10000, 10000), + WorkflowHeartbeatConfig("new_crom_name", 2.minutes, 10.minutes, 5.minutes, 10000, 10000) ), ( "with a specified heartbeat interval", "system.workflow-heartbeats.heartbeat-interval = 3 minutes", - WorkflowHeartbeatConfig("cromid", 3.minutes, 10.minutes, 5.minutes, 10000, 10000), + WorkflowHeartbeatConfig("cromid", 3.minutes, 10.minutes, 5.minutes, 10000, 10000) ), ( "with a specified ttl", "system.workflow-heartbeats.ttl = 5 minutes", - WorkflowHeartbeatConfig("cromid", 2.minutes, 5.minutes, 5.minutes, 10000, 10000), + WorkflowHeartbeatConfig("cromid", 2.minutes, 5.minutes, 5.minutes, 10000, 10000) ), ( "with a ttl less than the default heartbeat interval", @@ -52,22 +56,22 @@ class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec w |system.workflow-heartbeats.heartbeat-interval = 59 seconds |system.workflow-heartbeats.write-failure-shutdown-duration = 0 minutes |""".stripMargin, - WorkflowHeartbeatConfig("cromid", 59.seconds, 1.minutes, 0.minutes, 10000, 10000), + WorkflowHeartbeatConfig("cromid", 59.seconds, 1.minutes, 0.minutes, 10000, 10000) ), ( "with a specified shutdown duration", "system.workflow-heartbeats.write-failure-shutdown-duration = 1 minute", - WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 1.minute, 10000, 10000), + WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 1.minute, 10000, 10000) ), ( "with a specified batch size", "system.workflow-heartbeats.write-batch-size = 2000", - WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 5.minutes, 2000, 10000), + WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 5.minutes, 2000, 10000) ), ( "with a specified threshold", 
"system.workflow-heartbeats.write-threshold = 5000", - WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 5.minutes, 10000, 5000), + WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 5.minutes, 10000, 5000) ), ( "when trying to set the ttl below the minimum", @@ -75,28 +79,30 @@ class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec w |system.workflow-heartbeats.heartbeat-interval = 8 seconds |system.workflow-heartbeats.write-failure-shutdown-duration = 0 minutes |""".stripMargin, - WorkflowHeartbeatConfig("cromid", 8.seconds, 10.seconds, 0.minutes, 10000, 10000), + WorkflowHeartbeatConfig("cromid", 8.seconds, 10.seconds, 0.minutes, 10000, 10000) ), ( "when trying to set the interval below the minimum", "system.workflow-heartbeats.heartbeat-interval = 3 seconds", - WorkflowHeartbeatConfig("cromid", 10.seconds / 3, 10.minutes, 5.minutes, 10000, 10000), + WorkflowHeartbeatConfig("cromid", 10.seconds / 3, 10.minutes, 5.minutes, 10000, 10000) ), ( "when trying to set a negative shutdown duration", "system.workflow-heartbeats.write-failure-shutdown-duration = -1 seconds", - WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 0.minutes, 10000, 10000), - ), + WorkflowHeartbeatConfig("cromid", 2.minutes, 10.minutes, 0.minutes, 10000, 10000) + ) ) forAll(validConfigTests) { (description, configString, expected) => it should s"create an instance $description" in { - val config = ConfigFactory.parseString( - // Remove the randomness from the cromid - s"""|system.cromwell_id_random_suffix = false - |$configString - |""".stripMargin - ).withFallback(WorkflowHeartbeatConfigSpec.DefaultConfig) + val config = ConfigFactory + .parseString( + // Remove the randomness from the cromid + s"""|system.cromwell_id_random_suffix = false + |$configString + |""".stripMargin + ) + .withFallback(WorkflowHeartbeatConfigSpec.DefaultConfig) WorkflowHeartbeatConfig(config) should be(expected) } } @@ -114,7 +120,7 @@ class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec w "Errors parsing WorkflowHeartbeatConfig", List( "The system.workflow-heartbeats.heartbeat-interval (2 minutes)" + - " is not less than the system.workflow-heartbeats.ttl (2 minutes).", + " is not less than the system.workflow-heartbeats.ttl (2 minutes)." ) ) ), @@ -129,7 +135,7 @@ class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec w "Errors parsing WorkflowHeartbeatConfig", List( "The system.workflow-heartbeats.heartbeat-interval (2 minutes)" + - " is not less than the system.workflow-heartbeats.ttl (1 minute).", + " is not less than the system.workflow-heartbeats.ttl (1 minute)." ) ) ), @@ -144,10 +150,10 @@ class WorkflowHeartbeatConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec w "Errors parsing WorkflowHeartbeatConfig", List( "The system.workflow-heartbeats.write-failure-shutdown-duration (301 seconds)" + - " is greater than the system.workflow-heartbeats.ttl (5 minutes).", + " is greater than the system.workflow-heartbeats.ttl (5 minutes)." 
) ) - ), + ) ) forAll(invalidConfigTests) { (description, configString, expected: AggregatedMessageException) => diff --git a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActorSpec.scala index b68a7f9e64b..dcc4f87f897 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreCoordinatedAccessActorSpec.scala @@ -10,7 +10,12 @@ import cats.data.NonEmptyVector import cromwell.core._ import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreAbortResponse import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreAbortResponse.WorkflowStoreAbortResponse -import cromwell.engine.workflow.workflowstore.WorkflowStoreCoordinatedAccessActor.{Abort, DeleteFromStore, FetchStartableWorkflows, WriteHeartbeats} +import cromwell.engine.workflow.workflowstore.WorkflowStoreCoordinatedAccessActor.{ + Abort, + DeleteFromStore, + FetchStartableWorkflows, + WriteHeartbeats +} import org.scalatest.flatspec.AsyncFlatSpecLike import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks @@ -18,13 +23,16 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} -class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite - with AsyncFlatSpecLike with Matchers with TableDrivenPropertyChecks { +class WorkflowStoreCoordinatedAccessActorSpec + extends TestKitSuite + with AsyncFlatSpecLike + with Matchers + with TableDrivenPropertyChecks { behavior of "WorkflowStoreCoordinatedWriteActor" // So that we can timeout the asks below, change from the serial execution context to a parallel one - override implicit def executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global + implicit override def executionContext: ExecutionContext = scala.concurrent.ExecutionContext.Implicits.global def sleepAndThrow: Nothing = { Thread.sleep(30.seconds.dilated.toMillis) @@ -35,10 +43,9 @@ class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite val expected = 12345 val workflowStore = new InMemoryWorkflowStore { override def writeWorkflowHeartbeats(workflowIds: Set[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit ec: ExecutionContext): Future[Int] = { + heartbeatDateTime: OffsetDateTime + )(implicit ec: ExecutionContext): Future[Int] = Future.successful(expected) - } } val actor = TestActorRef(new WorkflowStoreCoordinatedAccessActor(workflowStore)) val request = WriteHeartbeats(NonEmptyVector.of((WorkflowId.randomId(), OffsetDateTime.now)), OffsetDateTime.now) @@ -64,12 +71,15 @@ class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite requestedWorkflowId = None ) val now = OffsetDateTime.now() - val expected: List[WorkflowToStart] = List(WorkflowToStart(WorkflowId.randomId(), now, collection, Submitted, HogGroup("foo"))) + val expected: List[WorkflowToStart] = + List(WorkflowToStart(WorkflowId.randomId(), now, collection, Submitted, HogGroup("foo"))) val workflowStore = new InMemoryWorkflowStore { - override def fetchStartableWorkflows(n: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String]) - (implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = { + override def 
fetchStartableWorkflows(n: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = Future.successful(expected) - } } val actor = TestActorRef(new WorkflowStoreCoordinatedAccessActor(workflowStore)) val request = FetchStartableWorkflows(1, "test fetchStartableWorkflows success", 1.second, Set.empty) @@ -95,15 +105,19 @@ class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite requestedWorkflowId = None ) val now = OffsetDateTime.now() - val expected: List[WorkflowToStart] = List(WorkflowToStart(WorkflowId.randomId(), now, collection, Submitted, HogGroup("foo"))) + val expected: List[WorkflowToStart] = + List(WorkflowToStart(WorkflowId.randomId(), now, collection, Submitted, HogGroup("foo"))) val workflowStore = new InMemoryWorkflowStore { - override def fetchStartableWorkflows(n: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String]) - (implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = { + override def fetchStartableWorkflows(n: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit ec: ExecutionContext): Future[List[WorkflowToStart]] = Future.successful(expected) - } } val actor = TestActorRef(new WorkflowStoreCoordinatedAccessActor(workflowStore)) - val request = FetchStartableWorkflows(1, "test fetchStartableWorkflows with workflow url success", 1.second, Set.empty) + val request = + FetchStartableWorkflows(1, "test fetchStartableWorkflows with workflow url success", 1.second, Set.empty) implicit val timeout: Timeout = Timeout(2.seconds.dilated) actor.ask(request).mapTo[List[WorkflowToStart]] map { actual => actual should be(expected) @@ -113,9 +127,8 @@ class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite it should "abort workflows" in { val expected = WorkflowStoreAbortResponse.AbortRequested val workflowStore = new InMemoryWorkflowStore { - override def abort(id: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = { + override def abort(id: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowStoreAbortResponse] = Future.successful(WorkflowStoreAbortResponse.AbortRequested) - } } val actor = TestActorRef(new WorkflowStoreCoordinatedAccessActor(workflowStore)) val request = Abort(WorkflowId.fromString("00001111-2222-3333-aaaa-bbbbccccdddd")) @@ -128,9 +141,8 @@ class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite it should "delete workflow store entries" in { val expected = 1 // 1 row deleted val workflowStore = new InMemoryWorkflowStore { - override def deleteFromStore(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Int] = { + override def deleteFromStore(workflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Int] = Future.successful(1) - } } val actor = TestActorRef(new WorkflowStoreCoordinatedAccessActor(workflowStore)) val request = DeleteFromStore(WorkflowId.fromString("00001111-2222-3333-aaaa-bbbbccccdddd")) @@ -143,17 +155,16 @@ class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite val failureResponses = Table( ("description", "result", "expectedException", "expectedMessagePrefix"), ("a failure", () => Future.failed(new IOException("expected")), classOf[IOException], "expected"), - ("a timeout", () => Future(sleepAndThrow), classOf[AskTimeoutException], "Ask timed out"), + ("a timeout", () => Future(sleepAndThrow), classOf[AskTimeoutException], "Ask timed out") ) 
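
The stubbing pattern used throughout this spec is worth calling out: extend the real in-memory store and override exactly one member, so the actor under test sees a canned Future (success or failure) without any mocking framework. A stand-alone reduction of the idea, with hypothetical names:

    import scala.concurrent.{ExecutionContext, Future}

    object StubbingSketch {
      trait Store {
        def fetch(n: Int)(implicit ec: ExecutionContext): Future[List[String]]
      }

      class InMemoryStore extends Store {
        def fetch(n: Int)(implicit ec: ExecutionContext): Future[List[String]] =
          Future.successful(Nil)
      }

      // Anonymous subclass overriding a single method, exactly as the specs
      // above do with InMemoryWorkflowStore.
      val stubbed: Store = new InMemoryStore {
        override def fetch(n: Int)(implicit ec: ExecutionContext): Future[List[String]] =
          Future.successful(List("wf-1", "wf-2"))
      }
    }
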
forAll(failureResponses) { (description, result, expectedException, expectedMessagePrefix) => it should s"fail to writeHeartBeats due to $description" in { val workflowStore = new InMemoryWorkflowStore { override def writeWorkflowHeartbeats(workflowIds: Set[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit ec: ExecutionContext): Future[Nothing] = { + heartbeatDateTime: OffsetDateTime + )(implicit ec: ExecutionContext): Future[Nothing] = result() - } } val actor = TestActorRef(new WorkflowStoreCoordinatedAccessActor(workflowStore)) val request = WriteHeartbeats(NonEmptyVector.of((WorkflowId.randomId(), OffsetDateTime.now)), OffsetDateTime.now) @@ -166,14 +177,17 @@ class WorkflowStoreCoordinatedAccessActorSpec extends TestKitSuite it should s"fail to fetchStartableWorkflows due to $description" in { val workflowStore = new InMemoryWorkflowStore { - override def fetchStartableWorkflows(n: Int, cromwellId: String, heartbeatTtl: FiniteDuration, excludedGroups: Set[String]) - (implicit ec: ExecutionContext): Future[Nothing] = { + override def fetchStartableWorkflows(n: Int, + cromwellId: String, + heartbeatTtl: FiniteDuration, + excludedGroups: Set[String] + )(implicit ec: ExecutionContext): Future[Nothing] = result() - } } val actor = TestActorRef(new WorkflowStoreCoordinatedAccessActor(workflowStore)) val heartbeatTtlNotReallyUsed = 1.second - val request = FetchStartableWorkflows(1, s"test $description fetchStartableWorkflows", heartbeatTtlNotReallyUsed, Set.empty) + val request = + FetchStartableWorkflows(1, s"test $description fetchStartableWorkflows", heartbeatTtlNotReallyUsed, Set.empty) implicit val timeout: Timeout = Timeout(2.seconds.dilated) actor.ask(request).failed map { actual => actual.getMessage should startWith(expectedMessagePrefix) diff --git a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActorSpec.scala index 6749f53931c..71b6e440470 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreHeartbeatWriteActorSpec.scala @@ -16,8 +16,7 @@ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} import scala.util.control.NoStackTrace -class WorkflowStoreHeartbeatWriteActorSpec extends TestKitSuite - with AnyFlatSpecLike with Matchers with Eventually { +class WorkflowStoreHeartbeatWriteActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually { behavior of "WorkflowStoreHeartbeatWriteActor" @@ -27,22 +26,23 @@ class WorkflowStoreHeartbeatWriteActorSpec extends TestKitSuite val workflowStore = new InMemoryWorkflowStore { override def writeWorkflowHeartbeats(workflowIds: Set[(WorkflowId, OffsetDateTime)], - heartbeatDateTime: OffsetDateTime) - (implicit ec: ExecutionContext): Future[Int] = { + heartbeatDateTime: OffsetDateTime + )(implicit ec: ExecutionContext): Future[Int] = Future.failed(new RuntimeException("this is expected") with NoStackTrace) - } } val workflowStoreAccess = UncoordinatedWorkflowStoreAccess(workflowStore) - val workflowHeartbeatTypesafeConfig = ConfigFactory.parseString( - """|danger.debug.only.minimum-heartbeat-ttl = 10 ms - |system.workflow-heartbeats { - | heartbeat-interval = 500 ms - | write-batch-size = 1 - | write-failure-shutdown-duration = 1 s - |} - |""".stripMargin - 
).withFallback(ConfigFactory.load()) + val workflowHeartbeatTypesafeConfig = ConfigFactory + .parseString( + """|danger.debug.only.minimum-heartbeat-ttl = 10 ms + |system.workflow-heartbeats { + | heartbeat-interval = 500 ms + | write-batch-size = 1 + | write-failure-shutdown-duration = 1 s + |} + |""".stripMargin + ) + .withFallback(ConfigFactory.load()) val workflowHeartbeatConfig = WorkflowHeartbeatConfig(workflowHeartbeatTypesafeConfig) val terminator = new CromwellTerminator { override def beginCromwellShutdown(reason: CoordinatedShutdown.Reason): Future[Done] = { diff --git a/engine/src/test/scala/cromwell/jobstore/JobResultSpec.scala b/engine/src/test/scala/cromwell/jobstore/JobResultSpec.scala index d623e9eee23..a951c479449 100644 --- a/engine/src/test/scala/cromwell/jobstore/JobResultSpec.scala +++ b/engine/src/test/scala/cromwell/jobstore/JobResultSpec.scala @@ -23,7 +23,16 @@ class JobResultSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "write more complicated WdlValues" in { - val success = JobResultSuccess(Some(0), WomMocks.mockOutputExpectations(Map("abc" -> WomMap(WomMapType(WomStringType, WomIntegerType), Map(WomString("hello") -> WomInteger(4), WomString("goodbye") -> WomInteger(6)))))) + val success = JobResultSuccess( + Some(0), + WomMocks.mockOutputExpectations( + Map( + "abc" -> WomMap(WomMapType(WomStringType, WomIntegerType), + Map(WomString("hello") -> WomInteger(4), WomString("goodbye") -> WomInteger(6)) + ) + ) + ) + ) val asJson = success.toJson val jsonString = asJson.toString() diff --git a/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala b/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala index 9f7567e8eee..08e5125e55c 100644 --- a/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala @@ -49,8 +49,8 @@ class EngineStatsActorSpec extends TestKitSuite with AnyFlatSpecLike with Matche object EngineStatsActorSpec { final case class FakeWorkflowActor(jobs: Int) extends Actor { - override def receive = { - case JobCountQuery => sender() ! JobCount(jobs) + override def receive = { case JobCountQuery => + sender() ! 
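
The parseString/withFallback layering seen here is the standard Typesafe Config idiom for tests: only the keys named in the override change, and everything else falls through to the packaged defaults. Reduced to its essentials:

    import com.typesafe.config.{Config, ConfigFactory}

    object ConfigLayeringSketch {
      // Keys set here shadow the same keys from reference.conf/application.conf;
      // all other settings come from the loaded defaults.
      val testConfig: Config = ConfigFactory
        .parseString(
          """system.workflow-heartbeats {
            |  heartbeat-interval = 500 ms
            |  write-batch-size = 1
            |}""".stripMargin
        )
        .withFallback(ConfigFactory.load())
    }
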
JobCount(jobs) } } } diff --git a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala index 700c0274026..b0669721518 100644 --- a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala @@ -22,8 +22,12 @@ import scala.concurrent.Future import scala.concurrent.duration._ import scala.util.Random -class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers - with TableDrivenPropertyChecks with ImplicitSender { +class MetadataBuilderActorSpec + extends TestKitSuite + with AsyncFlatSpecLike + with Matchers + with TableDrivenPropertyChecks + with ImplicitSender { behavior of "MetadataBuilderActor" @@ -37,42 +41,40 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with expectedRes: String, metadataBuilderActorName: String, failedTasks: Boolean = false - ): Future[Assertion] = { + ): Future[Assertion] = { val mockReadMetadataWorkerActor = TestProbe("mockReadMetadataWorkerActor") def readMetadataWorkerMaker = () => mockReadMetadataWorkerActor.props - val mba = system.actorOf( props = MetadataBuilderActor.props(readMetadataWorkerMaker, 1000000), - name = metadataBuilderActorName, + name = metadataBuilderActorName ) val response = mba.ask(action).mapTo[MetadataJsonResponse] mockReadMetadataWorkerActor.expectMsg(defaultTimeout, action) mockReadMetadataWorkerActor.reply( - if(failedTasks) FetchFailedJobsMetadataLookupResponse(events) else MetadataLookupResponse(queryReply, events) + if (failedTasks) FetchFailedJobsMetadataLookupResponse(events) else MetadataLookupResponse(queryReply, events) ) - response map { r => r shouldBe a [SuccessfulMetadataJsonResponse] } - response.mapTo[SuccessfulMetadataJsonResponse] map { b => b.responseJson shouldBe expectedRes.parseJson} + response map { r => r shouldBe a[SuccessfulMetadataJsonResponse] } + response.mapTo[SuccessfulMetadataJsonResponse] map { b => b.responseJson shouldBe expectedRes.parseJson } } - def assertMetadataFailureResponse(action: MetadataServiceAction, metadataServiceResponse: MetadataServiceResponse, expectedException: Exception, - metadataBuilderActorName: String, - ): Future[Assertion] = { + metadataBuilderActorName: String + ): Future[Assertion] = { val mockReadMetadataWorkerActor = TestProbe("mockReadMetadataWorkerActor") val mba = system.actorOf( props = MetadataBuilderActor.props(() => mockReadMetadataWorkerActor.props, defaultSafetyRowNumberThreshold), - name = metadataBuilderActorName, + name = metadataBuilderActorName ) val response = mba.ask(action).mapTo[MetadataServiceResponse] mockReadMetadataWorkerActor.expectMsg(defaultTimeout, action) mockReadMetadataWorkerActor.reply(metadataServiceResponse) - response map { r => r shouldBe a [FailedMetadataJsonResponse] } + response map { r => r shouldBe a[FailedMetadataJsonResponse] } response.mapTo[FailedMetadataJsonResponse] map { b => b.reason.getClass shouldBe expectedException.getClass b.reason.getMessage shouldBe expectedException.getMessage @@ -80,9 +82,8 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with } it should "build workflow scope tree from metadata events" in { - def makeEvent(workflow: WorkflowId, key: Option[MetadataJobKey]) = { + def makeEvent(workflow: WorkflowId, key: Option[MetadataJobKey]) = MetadataEvent(MetadataKey(workflow, key, "NOT_CHECKED"), MetadataValue("NOT_CHECKED")) - } val workflowA 
= WorkflowId.randomId() @@ -145,30 +146,32 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with type EventBuilder = (String, String, OffsetDateTime) - def makeEvent(workflow: WorkflowId)(key: String, value: MetadataValue, offsetDateTime: OffsetDateTime): MetadataEvent = { + def makeEvent( + workflow: WorkflowId + )(key: String, value: MetadataValue, offsetDateTime: OffsetDateTime): MetadataEvent = MetadataEvent(MetadataKey(workflow, None, key), Option(value), offsetDateTime) - } - def makeCallEvent(workflow: WorkflowId) - (key: String, value: MetadataValue, offsetDateTime: OffsetDateTime): MetadataEvent = { + def makeCallEvent( + workflow: WorkflowId + )(key: String, value: MetadataValue, offsetDateTime: OffsetDateTime): MetadataEvent = { val jobKey = MetadataJobKey("fqn", None, 1) MetadataEvent(MetadataKey(workflow, Option(jobKey), key), Option(value), offsetDateTime) } - //noinspection ScalaUnusedSymbol - def makeEmptyValue(workflow: WorkflowId) - (key: String, value: MetadataValue, offsetDateTime: OffsetDateTime): MetadataEvent = { + // noinspection ScalaUnusedSymbol + def makeEmptyValue( + workflow: WorkflowId + )(key: String, value: MetadataValue, offsetDateTime: OffsetDateTime): MetadataEvent = MetadataEvent(MetadataKey(workflow, None, key), None, offsetDateTime) - } def assertMetadataKeyStructure(eventList: List[EventBuilder], expectedJson: String, workflow: WorkflowId = WorkflowId.randomId(), eventMaker: WorkflowId => (String, MetadataValue, OffsetDateTime) => MetadataEvent = - makeEvent, + makeEvent, metadataBuilderActorName: String, isFailedTaskFetch: Boolean = false - ): Future[Assertion] = { + ): Future[Assertion] = { val events = eventList map { e => (e._1, MetadataValue(e._2), e._3) } map Function.tupled(eventMaker(workflow)) val expectedRes = s"""{ "calls": {}, $expectedJson, "id":"$workflow" }""" @@ -180,9 +183,8 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with it should "build the call list for failed tasks when prompted" in { - def makeEvent(workflow: WorkflowId, key: Option[MetadataJobKey]) = { + def makeEvent(workflow: WorkflowId, key: Option[MetadataJobKey]) = MetadataEvent(MetadataKey(workflow, key, "NOT_CHECKED"), MetadataValue("NOT_CHECKED")) - } val workflowA = WorkflowId.randomId() @@ -242,7 +244,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with events = workflowAEvents, expectedRes = expectedRes, failedTasks = true, - metadataBuilderActorName = "mba-failed-tasks", + metadataBuilderActorName = "mba-failed-tasks" ) } @@ -257,7 +259,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, - metadataBuilderActorName = "mba-same-key", + metadataBuilderActorName = "mba-same-key" ) } @@ -272,7 +274,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, - metadataBuilderActorName = "mba-not-workflow-state", + metadataBuilderActorName = "mba-not-workflow-state" ) } @@ -284,30 +286,29 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val workflowId = WorkflowId.randomId() val expectedRes = s""""calls": { - | "fqn": [{ - | "attempt": 1, - | "executionStatus": "Done", - | "shardIndex": -1 - | }] - | }, - | "id": "$workflowId"""".stripMargin + | "fqn": [{ + | "attempt": 1, + | "executionStatus": "Done", + | "shardIndex": 
-1 + | }] + | }, + | "id": "$workflowId"""".stripMargin assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, workflow = workflowId, eventMaker = makeCallEvent, - metadataBuilderActorName = "mba-not-execution-status", + metadataBuilderActorName = "mba-not-execution-status" ) } - it should "use reverse date ordering (oldest first) for event start and stop values" in { val eventBuilderList = List( ("start", "1990-12-20T12:30:00.000Z", OffsetDateTime.now), ("start", "1990-12-20T12:30:01.000Z", OffsetDateTime.now.plusSeconds(1)), ("end", "2018-06-02T12:30:00.000Z", OffsetDateTime.now.plusSeconds(2)), - ("end", "2018-06-02T12:30:01.000Z", OffsetDateTime.now.plusSeconds(3)), + ("end", "2018-06-02T12:30:01.000Z", OffsetDateTime.now.plusSeconds(3)) ) val workflowId = WorkflowId.randomId() val expectedRes = @@ -326,11 +327,10 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with expectedJson = expectedRes, workflow = workflowId, eventMaker = makeCallEvent, - metadataBuilderActorName = "mba-start-end-values", + metadataBuilderActorName = "mba-start-end-values" ) } - it should "build JSON object structure from dotted key syntax" in { val eventBuilderList = List( ("a:b:c", "abc", OffsetDateTime.now), @@ -352,11 +352,10 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, - metadataBuilderActorName = "mba-object-key", + metadataBuilderActorName = "mba-object-key" ) } - it should "build numerically sorted JSON list structure from dotted key syntax" in { val eventBuilderList = List( ("l[1]", "l1", OffsetDateTime.now), @@ -375,7 +374,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, - metadataBuilderActorName = "mba-list-key", + metadataBuilderActorName = "mba-list-key" ) } @@ -397,7 +396,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, - metadataBuilderActorName = "mba-same-index", + metadataBuilderActorName = "mba-same-index" ) } @@ -442,7 +441,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, - metadataBuilderActorName = "mba-nest-objects", + metadataBuilderActorName = "mba-nest-objects" ) } @@ -467,7 +466,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with assertMetadataKeyStructure( eventList = eventBuilderList, expectedJson = expectedRes, - metadataBuilderActorName = "mba-nest-lists", + metadataBuilderActorName = "mba-nest-lists" ) } @@ -486,7 +485,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with eventList = eventBuilderList, expectedJson = expectedRes, eventMaker = makeEmptyValue, - metadataBuilderActorName = "mba-nest-empty", + metadataBuilderActorName = "mba-nest-empty" ) } @@ -497,19 +496,18 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val kiv4 = ("key[0]", "value4", OffsetDateTime.now.plusSeconds(3)) val tuples = List( - ("mba-json-1", List(kv), """"key": "value""""), - ("mba-json-2", List(kv, ksv2), """"key": { "subkey": "value2" }"""), - ("mba-json-3", List(kv, ksv2, kisv3), """"key": [ { "subkey": "value3" } ]"""), - ("mba-json-4", List(kv, ksv2, kisv3, 
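
The dotted-key cases above verify that flat metadata rows such as "a:b:c" are unflattened into nested JSON. A toy unflattener for the object case, using spray-json as the spec does; list indices like "l[1]" are omitted to keep the sketch small, and this is not Cromwell's implementation.

    import spray.json._

    object UnflattenSketch {
      def unflatten(entries: Map[String, String]): JsObject = {
        // Recursively descend the colon-separated path, creating objects as needed.
        def insert(obj: Map[String, JsValue], path: List[String], value: String): Map[String, JsValue] =
          path match {
            case key :: Nil => obj.updated(key, JsString(value))
            case key :: rest =>
              val child = obj.get(key) match {
                case Some(JsObject(fields)) => fields
                case _ => Map.empty[String, JsValue]
              }
              obj.updated(key, JsObject(insert(child, rest, value)))
            case Nil => obj
          }
        JsObject(entries.foldLeft(Map.empty[String, JsValue]) { case (acc, (k, v)) =>
          insert(acc, k.split(':').toList, v)
        })
      }

      // unflatten(Map("a:b:c" -> "abc", "a:d" -> "ad")) yields
      //   {"a": {"b": {"c": "abc"}, "d": "ad"}}
    }
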
kiv4), """"key": [ "value4" ]""") - ) - - Future.sequence(tuples map { - case (metadataBuilderActorName, eventList, expectedJson) => - assertMetadataKeyStructure( - eventList = eventList, - expectedJson = expectedJson, - metadataBuilderActorName = metadataBuilderActorName, - ) + ("mba-json-1", List(kv), """"key": "value""""), + ("mba-json-2", List(kv, ksv2), """"key": { "subkey": "value2" }"""), + ("mba-json-3", List(kv, ksv2, kisv3), """"key": [ { "subkey": "value3" } ]"""), + ("mba-json-4", List(kv, ksv2, kisv3, kiv4), """"key": [ "value4" ]""") + ) + + Future.sequence(tuples map { case (metadataBuilderActorName, eventList, expectedJson) => + assertMetadataKeyStructure( + eventList = eventList, + expectedJson = expectedJson, + metadataBuilderActorName = metadataBuilderActorName + ) }) map { assertions => assertions should contain only Succeeded } @@ -530,17 +528,17 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val expectedResponse = s"""{ - | "calls": {}, - | "a": 2, - | "b": 2, - | "c": 2, - | "d": 2.9, - | "e": 2.9, - | "f": true, - | "g": false, - | "h": "false", - | "id":"$workflowId" - | } + | "calls": {}, + | "a": 2, + | "b": 2, + | "c": 2, + | "d": 2.9, + | "e": 2.9, + | "f": true, + | "g": false, + | "h": "false", + | "id":"$workflowId" + | } """.stripMargin val mdQuery = MetadataQuery(workflowId, None, None, None, None, expandSubWorkflows = false) @@ -550,7 +548,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with queryReply = mdQuery, events = events, expectedRes = expectedResponse, - metadataBuilderActorName = "mba-coerce-type", + metadataBuilderActorName = "mba-coerce-type" ) } @@ -564,10 +562,10 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val expectedResponse = s"""{ - | "calls": {}, - | "i": "UnknownClass(50)", - | "id":"$workflowId" - |} + | "calls": {}, + | "i": "UnknownClass(50)", + | "id":"$workflowId" + |} """.stripMargin val mdQuery = MetadataQuery(workflowId, None, None, None, None, expandSubWorkflows = false) @@ -577,7 +575,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with queryReply = mdQuery, events = events, expectedRes = expectedResponse, - metadataBuilderActorName = "mba-unknown-type", + metadataBuilderActorName = "mba-unknown-type" ) } @@ -590,10 +588,10 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val expectedResponse = s"""{ - | "calls": {}, - | "i": "notAnInt", - | "id":"$workflowId" - |} + | "calls": {}, + | "i": "notAnInt", + | "id":"$workflowId" + |} """.stripMargin val mdQuery = MetadataQuery(workflowId, None, None, None, None, expandSubWorkflows = false) @@ -603,7 +601,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with queryReply = mdQuery, events = events, expectedRes = expectedResponse, - metadataBuilderActorName = "mba-coerce-fails", + metadataBuilderActorName = "mba-coerce-fails" ) } @@ -624,11 +622,11 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val expectedEmptyResponse = s"""{ - | "calls": {}, - | "hey": {}, - | "emptyList": [], - | "id":"$workflowId" - |} + | "calls": {}, + | "hey": {}, + | "emptyList": [], + | "id":"$workflowId" + |} """.stripMargin val mdQuery = MetadataQuery(workflowId, None, None, None, None, expandSubWorkflows = false) @@ -638,16 +636,16 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with queryReply = mdQuery, events = emptyEvents, expectedRes 
= expectedEmptyResponse, - metadataBuilderActorName = "mba-empty-values", + metadataBuilderActorName = "mba-empty-values" ) val expectedNonEmptyResponse = s"""{ - | "calls": {}, - | "hey": "something", - | "emptyList": ["something", "something"], - | "id":"$workflowId" - |} + | "calls": {}, + | "hey": "something", + | "emptyList": ["something", "something"], + | "id":"$workflowId" + |} """.stripMargin assertMetadataResponse( @@ -655,7 +653,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with queryReply = mdQuery, events = valueEvents, expectedRes = expectedNonEmptyResponse, - metadataBuilderActorName = "mba-non-empty-values", + metadataBuilderActorName = "mba-non-empty-values" ) } @@ -664,7 +662,9 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val subWorkflowId = WorkflowId.randomId() val mainEvents = List( - MetadataEvent(MetadataKey(mainWorkflowId, Option(MetadataJobKey("callA", None, 1)), "subWorkflowId"), MetadataValue(subWorkflowId)) + MetadataEvent(MetadataKey(mainWorkflowId, Option(MetadataJobKey("callA", None, 1)), "subWorkflowId"), + MetadataValue(subWorkflowId) + ) ) val subEvents = List( @@ -686,7 +686,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with TestActorRef( props = MetadataBuilderActor.props(readMetadataWorkerMaker, 1000000), supervisor = parentProbe.ref, - name = s"MetadataActor-$mainWorkflowId", + name = s"MetadataActor-$mainWorkflowId" ) val response = metadataBuilder.ask(mainQueryAction).mapTo[MetadataJsonResponse] mockReadMetadataWorkerActor.expectMsg(defaultTimeout, mainQueryAction) @@ -714,9 +714,9 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with |} """.stripMargin - response map { r => r shouldBe a [SuccessfulMetadataJsonResponse] } + response map { r => r shouldBe a[SuccessfulMetadataJsonResponse] } val bmr = response.mapTo[SuccessfulMetadataJsonResponse] - bmr map { b => b.responseJson shouldBe expandedRes.parseJson} + bmr map { b => b.responseJson shouldBe expandedRes.parseJson } } it should "NOT expand sub workflow metadata when NOT asked for" in { @@ -724,7 +724,9 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val subWorkflowId = WorkflowId.randomId() val mainEvents = List( - MetadataEvent(MetadataKey(mainWorkflowId, Option(MetadataJobKey("callA", None, 1)), "subWorkflowId"), MetadataValue(subWorkflowId)) + MetadataEvent(MetadataKey(mainWorkflowId, Option(MetadataJobKey("callA", None, 1)), "subWorkflowId"), + MetadataValue(subWorkflowId) + ) ) val queryNoExpand = MetadataQuery(mainWorkflowId, None, None, None, None, expandSubWorkflows = false) @@ -733,18 +735,17 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val parentProbe = TestProbe("parentProbe") val mockReadMetadataWorkerActor = TestProbe("mockReadMetadataWorkerActor") - def readMetadataWorkerMaker= () => mockReadMetadataWorkerActor.props + def readMetadataWorkerMaker = () => mockReadMetadataWorkerActor.props val metadataBuilder = TestActorRef( props = MetadataBuilderActor.props(readMetadataWorkerMaker, 1000000), supervisor = parentProbe.ref, - name = s"MetadataActor-$mainWorkflowId", + name = s"MetadataActor-$mainWorkflowId" ) val response = metadataBuilder.ask(queryNoExpandAction).mapTo[MetadataJsonResponse] mockReadMetadataWorkerActor.expectMsg(defaultTimeout, queryNoExpandAction) mockReadMetadataWorkerActor.reply(MetadataLookupResponse(queryNoExpand, mainEvents)) - val nonExpandedRes = s""" |{ 
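
These two sub-workflow tests exercise one substitution: when a call carries a "subWorkflowId" entry and expansion is requested, the builder replaces that pointer with the sub-workflow's own metadata tree, otherwise the bare id is kept. A sketch of that substitution; the fetch function and JSON shapes are assumptions for illustration, not the builder's real API.

    import spray.json._

    object SubWorkflowExpansionSketch {
      // Swap a sub-workflow pointer for the fetched sub-workflow metadata.
      def expandCall(call: JsObject, fetchSubMetadata: String => JsObject, expand: Boolean): JsObject =
        call.fields.get("subWorkflowId") match {
          case Some(JsString(subId)) if expand =>
            JsObject(call.fields - "subWorkflowId" + ("subWorkflowMetadata" -> fetchSubMetadata(subId)))
          case _ => call
        }
    }
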
@@ -761,9 +762,9 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with |} """.stripMargin - response map { r => r shouldBe a [SuccessfulMetadataJsonResponse] } + response map { r => r shouldBe a[SuccessfulMetadataJsonResponse] } val bmr = response.mapTo[SuccessfulMetadataJsonResponse] - bmr map { b => b.responseJson shouldBe nonExpandedRes.parseJson} + bmr map { b => b.responseJson shouldBe nonExpandedRes.parseJson } } @@ -788,7 +789,6 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with ee(workflowId, Bar, eventIndex = 5, StartTime, Interval4.start), ee(workflowId, Bar, eventIndex = 5, EndTime, Interval4.end), ee(workflowId, Bar, eventIndex = 5, Grouping, Delocalizing), - ee(workflowId, Baz, eventIndex = 6, StartTime, Interval2.start), ee(workflowId, Baz, eventIndex = 6, EndTime, Interval2.end), ee(workflowId, Baz, eventIndex = 6, Grouping, Localizing), @@ -804,7 +804,6 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with // cause problems. pe(workflowId, Quux, StartTime, Interval8.start), pe(workflowId, Quux, EndTime, Interval8.end), - pe(workflowId, Corge, StartTime, Interval9.start), pe(workflowId, Corge, EndTime, Interval9.end) ) @@ -817,36 +816,32 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with ee(workflowId, Foo, eventIndex = 1, StartTime, Interval1.start), ee(workflowId, Foo, eventIndex = 1, EndTime, Interval2.end), ee(workflowId, Foo, eventIndex = 1, Description, Localizing.name), - ee(workflowId, Bar, eventIndex = 3, StartTime, Interval3.start), ee(workflowId, Bar, eventIndex = 3, Description, Delocalizing.name), - ee(workflowId, Baz, eventIndex = 6, StartTime, Interval1.start), ee(workflowId, Baz, eventIndex = 6, EndTime, Interval2.end), ee(workflowId, Baz, eventIndex = 6, Description, Localizing.name), - ee(workflowId, Qux, eventIndex = 8, StartTime, Interval7.start), ee(workflowId, Qux, eventIndex = 8, EndTime, Interval7.end), - pe(workflowId, Quux, StartTime, Interval8.start), pe(workflowId, Quux, EndTime, Interval8.end), - pe(workflowId, Corge, StartTime, Interval9.start), pe(workflowId, Corge, EndTime, Interval9.end) ) val actual = MetadataBuilderActor.groupEvents(events).toSet - def filterEventsByCall(events: Iterable[MetadataEvent])(call: Call): Iterable[MetadataEvent] = { - events collect { case e@MetadataEvent(MetadataKey(_, Some(MetadataJobKey(n, _, _)), _), _, _) if call.name == n => e} - } + def filterEventsByCall(events: Iterable[MetadataEvent])(call: Call): Iterable[MetadataEvent] = + events collect { + case e @ MetadataEvent(MetadataKey(_, Some(MetadataJobKey(n, _, _)), _), _, _) if call.name == n => e + } val calls = List(Foo, Bar, Baz, Qux, Quux, Corge) val actuals = calls map filterEventsByCall(actual) val expecteds = calls map filterEventsByCall(expectations) - val matchesExpectations = (actuals zip expecteds) map { - case (as, es) => as.toList.map(_.toString).sorted == es.toList.map(_.toString).sorted + val matchesExpectations = (actuals zip expecteds) map { case (as, es) => + as.toList.map(_.toString).sorted == es.toList.map(_.toString).sorted } matchesExpectations.reduceLeft(_ && _) shouldBe true } @@ -854,9 +849,10 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with it should "correctly order statuses (even unused ones)" in { val workflowId = WorkflowId.randomId() - def statusEvent(callName: String, status: String) = { - MetadataEvent(MetadataKey(workflowId, Option(MetadataJobKey(callName, None, 1)), 
"executionStatus"), MetadataValue(status)) - } + def statusEvent(callName: String, status: String) = + MetadataEvent(MetadataKey(workflowId, Option(MetadataJobKey(callName, None, 1)), "executionStatus"), + MetadataValue(status) + ) // Combines standard "setup" statuses plus the conclusion status(es). def setupStatusesPlusConclusion(callName: String, conclusionStatuses: String*): Vector[MetadataEvent] = Vector( @@ -874,11 +870,11 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val events = setupStatusesPlusConclusion("Foo", "Done") ++ - setupStatusesPlusConclusion("Bar", "Aborting", "Aborted") ++ - setupStatusesPlusConclusion("Baz", "Failed") ++ - setupStatusesPlusConclusion("Qux", "RetryableFailure") ++ - setupStatusesPlusConclusion("Quux", "Bypassed") ++ - setupStatusesPlusConclusion("Quuux", "Unstartable") + setupStatusesPlusConclusion("Bar", "Aborting", "Aborted") ++ + setupStatusesPlusConclusion("Baz", "Failed") ++ + setupStatusesPlusConclusion("Qux", "RetryableFailure") ++ + setupStatusesPlusConclusion("Quux", "Bypassed") ++ + setupStatusesPlusConclusion("Quuux", "Unstartable") val expectedRes = s"""{ @@ -909,12 +905,13 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val action = GetMetadataAction(mdQuery) val metadataRowNumber = 100500 - val expectedException = new MetadataTooLargeNumberOfRowsException(workflowId, metadataRowNumber, defaultSafetyRowNumberThreshold) + val expectedException = + new MetadataTooLargeNumberOfRowsException(workflowId, metadataRowNumber, defaultSafetyRowNumberThreshold) assertMetadataFailureResponse( action = action, metadataServiceResponse = MetadataLookupFailedTooLargeResponse(mdQuery, metadataRowNumber), expectedException = expectedException, - metadataBuilderActorName = "mba-too-large", + metadataBuilderActorName = "mba-too-large" ) } @@ -929,7 +926,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with action = action, metadataServiceResponse = MetadataLookupFailedTimeoutResponse(mdQuery), expectedException = expectedException, - metadataBuilderActorName = "mba-read-timeout", + metadataBuilderActorName = "mba-read-timeout" ) } } @@ -990,8 +987,8 @@ object MetadataBuilderActorSpec { def executionEventName(i: Int, a: Attr): String = s"executionEvents[$i]:${a.name}" - def executionEventKey(workflowId: WorkflowId, call: Call, eventIndex: Int, attr: Attr): MetadataKey = MetadataKey(workflowId, Option(MetadataJobKey(call.name, None, 1)), executionEventName(eventIndex, attr)) - + def executionEventKey(workflowId: WorkflowId, call: Call, eventIndex: Int, attr: Attr): MetadataKey = + MetadataKey(workflowId, Option(MetadataJobKey(call.name, None, 1)), executionEventName(eventIndex, attr)) def ee(workflowId: WorkflowId, call: Call, eventIndex: Int, attr: Attr, value: Any): MetadataEvent = { val metadataValue = value match { @@ -1001,7 +998,9 @@ object MetadataBuilderActorSpec { new MetadataEvent(executionEventKey(workflowId, call, eventIndex, attr), Option(MetadataValue(metadataValue)), y2k) } - def eventKey(workflowId: WorkflowId, call: Call, attr: Attr): MetadataKey = MetadataKey(workflowId, Option(MetadataJobKey(call.name, None, 1)), attr.name) + def eventKey(workflowId: WorkflowId, call: Call, attr: Attr): MetadataKey = + MetadataKey(workflowId, Option(MetadataJobKey(call.name, None, 1)), attr.name) - def pe(workflowId: WorkflowId, call: Call, attr: Attr, value: Any): MetadataEvent = new MetadataEvent(eventKey(workflowId, call, attr), 
Option(MetadataValue(value)), y2k) + def pe(workflowId: WorkflowId, call: Call, attr: Attr, value: Any): MetadataEvent = + new MetadataEvent(eventKey(workflowId, call, attr), Option(MetadataValue(value)), y2k) } diff --git a/engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala b/engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala index f19e3c667ab..d1b86bfa216 100644 --- a/engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/PartialWorkflowSourcesSpec.scala @@ -16,13 +16,13 @@ class PartialWorkflowSourcesSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val input1 = Map("wf.a1" -> "hello", "wf.a2" -> "world").toJson.toString val input2 = Map.empty[String, String].toJson.toString val overrideInput1 = Map("wf.a2" -> "universe").toJson.toString - val mergedMapsErrorOr = PartialWorkflowSources.mergeMaps(Seq(Option(input1), Option(input2), Option(overrideInput1))) + val mergedMapsErrorOr = + PartialWorkflowSources.mergeMaps(Seq(Option(input1), Option(input2), Option(overrideInput1))) mergedMapsErrorOr match { - case Valid(inputs) => { - inputs.fields.keys should contain allOf("wf.a1", "wf.a2") + case Valid(inputs) => + inputs.fields.keys should contain allOf ("wf.a1", "wf.a2") inputs.fields("wf.a2") should be(JsString("universe")) - } case Invalid(error) => fail(s"This is unexpected! This test should pass! Error: $error") } } @@ -33,7 +33,8 @@ class PartialWorkflowSourcesSpec extends AnyFlatSpec with CromwellTimeoutSpec wi mergedMapsErrorOr match { case Valid(_) => fail("This is unexpected! This test is designed to fail!") - case Invalid(error) => error.head shouldBe "Submitted input '\"invalidInput\"' of type JsString is not a valid JSON object." + case Invalid(error) => + error.head shouldBe "Submitted input '\"invalidInput\"' of type JsString is not a valid JSON object." } } @@ -44,7 +45,8 @@ class PartialWorkflowSourcesSpec extends AnyFlatSpec with CromwellTimeoutSpec wi mergedMapsErrorOr match { case Valid(_) => fail("This is unexpected! This test is designed to fail!") - case Invalid(error) => error.head shouldBe "Failed to parse input: 'invalidInput', which is not a valid json. Please check for syntactical errors. (reason 1 of 1): Unexpected character 'i' at input index 0 (line 1, position 1), expected JSON Value:\ninvalidInput\n^\n" + case Invalid(error) => + error.head shouldBe "Failed to parse input: 'invalidInput', which is not a valid json. Please check for syntactical errors. 
(reason 1 of 1): Unexpected character 'i' at input index 0 (line 1, position 1), expected JSON Value:\ninvalidInput\n^\n" } } @@ -76,9 +78,8 @@ class PartialWorkflowSourcesSpec extends AnyFlatSpec with CromwellTimeoutSpec wi |}] |""".stripMargin - val expected = Vector( - """{"mywf.inInt":1,"mywf.inString":"one"}""", - """{"mywf.inInt":2,"mywf.inString":"two"}""").validNel + val expected = + Vector("""{"mywf.inInt":1,"mywf.inString":"one"}""", """{"mywf.inInt":2,"mywf.inString":"two"}""").validNel val actual = PartialWorkflowSources.workflowInputsValidation(input) diff --git a/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala index 24cce8ae932..897e4beeec0 100644 --- a/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala @@ -15,8 +15,13 @@ import org.yaml.snakeyaml.{LoaderOptions, Yaml => SnakeYaml} import scala.jdk.CollectionConverters._ -class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with SwaggerService with ScalatestRouteTest with Matchers - with TableDrivenPropertyChecks { +class SwaggerServiceSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with SwaggerService + with ScalatestRouteTest + with Matchers + with TableDrivenPropertyChecks { def actorRefFactory = system behavior of "SwaggerService" @@ -28,7 +33,8 @@ class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Swagg status should be(StatusCodes.OK) val body = responseAs[String] - val yaml = new SnakeYaml(new UniqueKeyConstructor(new LoaderOptions)).loadAs(body, classOf[java.util.Map[String, AnyRef]]) + val yaml = new SnakeYaml(new UniqueKeyConstructor(new LoaderOptions)) + .loadAs(body, classOf[java.util.Map[String, AnyRef]]) yaml.get("swagger") should be("2.0") } @@ -53,27 +59,42 @@ class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Swagg resultWithInfo.getSwagger.getDefinitions.asScala foreach { // If no properties, `getProperties` returns `null` instead of an empty map - case (defKey, defVal) => Option(defVal.getProperties).map(_.asScala).getOrElse(Map.empty) foreach { - /* + case (defKey, defVal) => + Option(defVal.getProperties).map(_.asScala).getOrElse(Map.empty) foreach { + /* Two against one. Swagger parser implementation lets a RefProperty have descriptions. http://swagger.io/specification/#referenceObject & http://editor.swagger.io both say it's ref ONLY! 
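
The mergeMaps cases above assert two behaviours: later input files win key-by-key over earlier ones, and every input must parse to a JSON object. A simplified stand-in, returning the first error rather than accumulating an ErrorOr as the real PartialWorkflowSources.mergeMaps does:

    import scala.util.{Failure, Success, Try}
    import spray.json._

    object MergeInputsSketch {
      def mergeInputs(inputs: Seq[String]): Either[String, JsObject] =
        inputs.foldLeft[Either[String, JsObject]](Right(JsObject.empty)) { (acc, raw) =>
          for {
            merged <- acc
            parsed <- Try(raw.parseJson) match {
              case Success(o: JsObject) => Right(o)
              case Success(other) => Left(s"Submitted input '$other' is not a valid JSON object.")
              case Failure(e) => Left(s"Failed to parse input: '$raw'. ${e.getMessage}")
            }
          } yield JsObject(merged.fields ++ parsed.fields) // later keys override earlier ones
        }
    }
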
- */ - case (propKey, propVal: RefProperty) => - withClue(s"RefProperty $defKey.$propKey has a description: ") { - propVal.getDescription should be(null) - } - case _ => /* ignore */ - } + */ + case (propKey, propVal: RefProperty) => + withClue(s"RefProperty $defKey.$propKey has a description: ") { + propVal.getDescription should be(null) + } + case _ => /* ignore */ + } } } } it should "return status OK when getting OPTIONS on paths" in { - val pathExamples = Table("path", "/", "/swagger", "/swagger/cromwell.yaml", "/swagger/index.html", "/api", - "/api/workflows/", "/api/workflows/v1", "/workflows/v1/outputs", "/workflows/v1/status", - "/api/workflows/v1/validate", "/workflows", "/workflows/v1", "/workflows/v1/outputs", "/workflows/v1/status", - "/workflows/v1/validate") + val pathExamples = Table( + "path", + "/", + "/swagger", + "/swagger/cromwell.yaml", + "/swagger/index.html", + "/api", + "/api/workflows/", + "/api/workflows/v1", + "/workflows/v1/outputs", + "/workflows/v1/status", + "/api/workflows/v1/validate", + "/workflows", + "/workflows/v1", + "/workflows/v1/outputs", + "/workflows/v1/status", + "/workflows/v1/validate" + ) forAll(pathExamples) { path => Options(path) ~> diff --git a/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala index 6afcf12bac8..e574f93dd0a 100644 --- a/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/SwaggerUiHttpServiceSpec.scala @@ -11,18 +11,37 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.concurrent.duration._ -trait SwaggerUiHttpServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalatestRouteTest with SwaggerUiHttpService - -trait SwaggerResourceHttpServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalatestRouteTest with -TableDrivenPropertyChecks with SwaggerResourceHttpService { - val testPathsForOptions = Table("endpoint", "/", "/swagger", "/swagger/index.html", "/api", "/api/example", - "/api/example?with=param", "/api/example/path") +trait SwaggerUiHttpServiceSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalatestRouteTest + with SwaggerUiHttpService + +trait SwaggerResourceHttpServiceSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalatestRouteTest + with TableDrivenPropertyChecks + with SwaggerResourceHttpService { + val testPathsForOptions = Table("endpoint", + "/", + "/swagger", + "/swagger/index.html", + "/api", + "/api/example", + "/api/example?with=param", + "/api/example/path" + ) implicit val timeout: RouteTestTimeout = RouteTestTimeout(5.seconds) } -trait SwaggerUiResourceHttpServiceSpec extends SwaggerUiHttpServiceSpec with SwaggerResourceHttpServiceSpec with -SwaggerUiResourceHttpService +trait SwaggerUiResourceHttpServiceSpec + extends SwaggerUiHttpServiceSpec + with SwaggerResourceHttpServiceSpec + with SwaggerUiResourceHttpService object SwaggerUiHttpServiceSpec { val SwaggerIndexPreamble = @@ -95,7 +114,6 @@ class OverrideBasePathSwaggerUiHttpServiceSpec extends SwaggerResourceHttpServic } } - class YamlSwaggerResourceHttpServiceSpec extends SwaggerResourceHttpServiceSpec { override def swaggerServiceName = "testservice" @@ -194,7 +212,6 @@ class YamlSwaggerUiResourceHttpServiceSpec extends SwaggerUiResourceHttpServiceS } } - class JsonSwaggerUiResourceHttpServiceSpec extends SwaggerUiResourceHttpServiceSpec { override def 
swaggerServiceName = "testservice" diff --git a/engine/src/test/scala/cromwell/webservice/WorkflowJsonSupportSpec.scala b/engine/src/test/scala/cromwell/webservice/WorkflowJsonSupportSpec.scala index fc25bd3cd2f..c6d944c3285 100644 --- a/engine/src/test/scala/cromwell/webservice/WorkflowJsonSupportSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/WorkflowJsonSupportSpec.scala @@ -8,7 +8,8 @@ import spray.json._ class WorkflowJsonSupportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - val sampleSuccessResponse1 = SuccessResponse("good", "msg", Option(JsArray(Vector(JsString("data1"), JsString("data2"))))) + val sampleSuccessResponse1 = + SuccessResponse("good", "msg", Option(JsArray(Vector(JsString("data1"), JsString("data2"))))) val sampleSuccessResponse2 = SuccessResponse("good", "msg", None) val sampleSuccessResponseJson1 = """{ diff --git a/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala index 4e7caf932ef..b7440bbe31c 100644 --- a/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala @@ -9,14 +9,24 @@ import akka.stream.ActorMaterializer import com.typesafe.scalalogging.StrictLogging import common.util.VersionUtil import cromwell.core._ -import cromwell.core.abort.{WorkflowAbortFailureResponse, WorkflowAbortRequestedResponse, WorkflowAbortedResponse} +import cromwell.core.abort.{WorkflowAbortedResponse, WorkflowAbortFailureResponse, WorkflowAbortRequestedResponse} import cromwell.engine.workflow.WorkflowManagerActor import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException import cromwell.engine.workflow.workflowstore.WorkflowStoreActor._ -import cromwell.engine.workflow.workflowstore.WorkflowStoreEngineActor.{WorkflowOnHoldToSubmittedFailure, WorkflowOnHoldToSubmittedSuccess} -import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.{WorkflowSubmittedToStore, WorkflowsBatchSubmittedToStore} +import cromwell.engine.workflow.workflowstore.WorkflowStoreEngineActor.{ + WorkflowOnHoldToSubmittedFailure, + WorkflowOnHoldToSubmittedSuccess +} +import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.{ + WorkflowsBatchSubmittedToStore, + WorkflowSubmittedToStore +} import cromwell.services._ -import cromwell.services.healthmonitor.ProtoHealthMonitorServiceActor.{GetCurrentStatus, StatusCheckResponse, SubsystemStatus} +import cromwell.services.healthmonitor.ProtoHealthMonitorServiceActor.{ + GetCurrentStatus, + StatusCheckResponse, + SubsystemStatus +} import cromwell.services.instrumentation.InstrumentationService.InstrumentationServiceMessage import cromwell.services.metadata.MetadataArchiveStatus._ import cromwell.services.metadata.MetadataService._ @@ -71,64 +81,62 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with "REST ENGINE /status endpoint" should "return 200 for status when all is well" in { Get(s"/engine/$version/status") ~> akkaHttpService.engineRoutes ~> - check { - status should be(StatusCodes.OK) - contentType should be(ContentTypes.`application/json`) - val resp = responseAs[JsObject] - val db = resp.fields("Engine Database").asJsObject - db.fields("ok").asInstanceOf[JsBoolean].value should be(true) - } + check { + status should be(StatusCodes.OK) + contentType should be(ContentTypes.`application/json`) + val resp = responseAs[JsObject] + val db = 
resp.fields("Engine Database").asJsObject + db.fields("ok").asInstanceOf[JsBoolean].value should be(true) + } } + behavior of "REST API /abort endpoint" + it should "return 404 for abort of unknown workflow" in { + val workflowId = CromwellApiServiceSpec.UnrecognizedWorkflowId - behavior of "REST API /abort endpoint" - it should "return 404 for abort of unknown workflow" in { - val workflowId = CromwellApiServiceSpec.UnrecognizedWorkflowId - - Post(s"/workflows/$version/$workflowId/abort") ~> - akkaHttpService.workflowRoutes ~> - check { - assertResult(StatusCodes.NotFound) { - status - } - assertResult { - s"""|{ - | "status": "error", - | "message": "Couldn't abort $workflowId because no workflow with that ID is in progress" - |} - |""".stripMargin.trim - } { - responseAs[String] - } - assertResult(ContentTypes.`application/json`)(contentType) + Post(s"/workflows/$version/$workflowId/abort") ~> + akkaHttpService.workflowRoutes ~> + check { + assertResult(StatusCodes.NotFound) { + status } - } - - it should "return 400 for abort of a malformed workflow id" in { - Post(s"/workflows/$version/foobar/abort") ~> - akkaHttpService.workflowRoutes ~> - check { - assertResult(StatusCodes.BadRequest) { - status - } - assertResult( - """{ - | "status": "fail", - | "message": "Invalid workflow ID: 'foobar'." - |}""".stripMargin - ) { - responseAs[String] - } - assertResult(ContentTypes.`application/json`)(contentType) + assertResult { + s"""|{ + | "status": "error", + | "message": "Couldn't abort $workflowId because no workflow with that ID is in progress" + |} + |""".stripMargin.trim + } { + responseAs[String] } - } + assertResult(ContentTypes.`application/json`)(contentType) + } + } - it should "return 200 Aborted for abort of a workflow which a workflow is in OnHold state" in { - Post(s"/workflows/$version/${CromwellApiServiceSpec.OnHoldWorkflowId}/abort") ~> - akkaHttpService.workflowRoutes ~> + it should "return 400 for abort of a malformed workflow id" in { + Post(s"/workflows/$version/foobar/abort") ~> + akkaHttpService.workflowRoutes ~> check { + assertResult(StatusCodes.BadRequest) { + status + } assertResult( - s"""{"id":"${CromwellApiServiceSpec.OnHoldWorkflowId.toString}","status":"Aborted"}""") { + """{ + | "status": "fail", + | "message": "Invalid workflow ID: 'foobar'." 
+            |}""".stripMargin
+        ) {
+          responseAs[String]
+        }
+        assertResult(ContentTypes.`application/json`)(contentType)
+      }
+  }
+
+  it should "return 200 Aborted for abort of a workflow which is in OnHold state" in {
+    Post(s"/workflows/$version/${CromwellApiServiceSpec.OnHoldWorkflowId}/abort") ~>
+      akkaHttpService.workflowRoutes ~>
+      check {
+        assertResult(s"""{"id":"${CromwellApiServiceSpec.OnHoldWorkflowId.toString}","status":"Aborted"}""") {
          responseAs[String]
        }
        assertResult(StatusCodes.OK) {
@@ -136,14 +144,13 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with
        }
        assertResult(ContentTypes.`application/json`)(contentType)
      }
-    }
+  }
 
   it should "return 200 Aborted for abort of a workflow which a workflow is in Submitted state" in {
     Post(s"/workflows/$version/${CromwellApiServiceSpec.SubmittedWorkflowId}/abort") ~>
       akkaHttpService.workflowRoutes ~>
       check {
-        assertResult(
-          s"""{"id":"${CromwellApiServiceSpec.SubmittedWorkflowId.toString}","status":"Aborted"}""") {
+        assertResult(s"""{"id":"${CromwellApiServiceSpec.SubmittedWorkflowId.toString}","status":"Aborted"}""") {
           responseAs[String]
         }
         assertResult(StatusCodes.OK) {
@@ -153,55 +160,63 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with
     }
   }
 
-    it should "return 200 Aborting for abort of a known workflow id which is currently running" in {
-      Post(s"/workflows/$version/${CromwellApiServiceSpec.AbortingWorkflowId}/abort") ~>
-        akkaHttpService.workflowRoutes ~>
-        check {
-          assertResult(
-            s"""{"id":"${CromwellApiServiceSpec.AbortingWorkflowId.toString}","status":"Aborting"}""") {
-            responseAs[String]
-          }
-          assertResult(StatusCodes.OK) {
-            status
-          }
-          assertResult(ContentTypes.`application/json`)(contentType)
+  it should "return 200 Aborting for abort of a known workflow id which is currently running" in {
+    Post(s"/workflows/$version/${CromwellApiServiceSpec.AbortingWorkflowId}/abort") ~>
+      akkaHttpService.workflowRoutes ~>
+      check {
+        assertResult(s"""{"id":"${CromwellApiServiceSpec.AbortingWorkflowId.toString}","status":"Aborting"}""") {
+          responseAs[String]
        }
-    }
+        assertResult(StatusCodes.OK) {
+          status
+        }
+        assertResult(ContentTypes.`application/json`)(contentType)
+      }
+  }
 
-    behavior of "REST API submission endpoint"
-    it should "return 201 for a successful workflow submission " in {
-      val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()))
-      val workflowInputs = Multipart.FormData.BodyPart("workflowInputs", HttpEntity(MediaTypes.`application/json`, HelloWorld.rawInputs.toJson.toString()))
-      val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity()
-      Post(s"/workflows/$version", formData) ~>
-        akkaHttpService.workflowRoutes ~>
-        check {
-          assertResult(
-            s"""{
-            | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}",
-            | "status": "Submitted"
-            |}""".stripMargin) {
-            responseAs[String].parseJson.prettyPrint
-          }
-          assertResult(StatusCodes.Created) {
-            status
-          }
-          headers should be(Seq.empty)
+  behavior of "REST API submission endpoint"
+  it should "return 201 for a successful workflow submission " in {
+    val workflowSource =
+      Multipart.FormData.BodyPart("workflowSource",
+                                  HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())
+      )
+    val workflowInputs =
+      Multipart.FormData.BodyPart("workflowInputs",
+                                  HttpEntity(MediaTypes.`application/json`, HelloWorld.rawInputs.toJson.toString())
+      )
+    val formData = Multipart.FormData(workflowSource, 
workflowInputs).toEntity() + Post(s"/workflows/$version", formData) ~> + akkaHttpService.workflowRoutes ~> + check { + assertResult(s"""{ + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "Submitted" + |}""".stripMargin) { + responseAs[String].parseJson.prettyPrint } - } + assertResult(StatusCodes.Created) { + status + } + headers should be(Seq.empty) + } + } it should "return 201 for a successful workflow submission using workflowUrl" in { - val workflowUrl = Multipart.FormData.BodyPart("workflowUrl", HttpEntity("https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl")) + val workflowUrl = Multipart.FormData.BodyPart( + "workflowUrl", + HttpEntity( + "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl" + ) + ) val formData = Multipart.FormData(workflowUrl).toEntity() Post(s"/workflows/$version", formData) ~> akkaHttpService.workflowRoutes ~> check { - assertResult( - s"""{ - | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", - | "status": "Submitted" - |}""".stripMargin) { + assertResult(s"""{ + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "Submitted" + |}""".stripMargin) { responseAs[String].parseJson.prettyPrint } assertResult(StatusCodes.Created) { @@ -212,17 +227,21 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with } it should "return 400 for a workflow submission using workflowUrl with invalid protocol" in { - val workflowUrl = Multipart.FormData.BodyPart("workflowUrl", HttpEntity("htpps://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl")) + val workflowUrl = Multipart.FormData.BodyPart( + "workflowUrl", + HttpEntity( + "htpps://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl" + ) + ) val formData = Multipart.FormData(workflowUrl).toEntity() Post(s"/workflows/$version", formData) ~> akkaHttpService.workflowRoutes ~> check { - assertResult( - s"""{ - | "message": "Error(s): Error while validating workflow url: unknown protocol: htpps", - | "status": "fail" - |}""".stripMargin) { + assertResult(s"""{ + | "message": "Error(s): Error while validating workflow url: unknown protocol: htpps", + | "status": "fail" + |}""".stripMargin) { responseAs[String].parseJson.prettyPrint } assertResult(StatusCodes.BadRequest) { @@ -233,18 +252,23 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with } it should "return 201 for a successful workflow submission with onHold = true" in { - val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) - val workflowInputs = Multipart.FormData.BodyPart("workflowInputs", HttpEntity(MediaTypes.`application/json`, HelloWorld.rawInputs.toJson.toString())) - val onHold = Multipart.FormData.BodyPart("workflowOnHold", HttpEntity("true")) + val workflowSource = + Multipart.FormData.BodyPart("workflowSource", + HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) + ) + val workflowInputs = + Multipart.FormData.BodyPart("workflowInputs", + HttpEntity(MediaTypes.`application/json`, HelloWorld.rawInputs.toJson.toString()) + ) + val onHold = 
Multipart.FormData.BodyPart("workflowOnHold", HttpEntity("true")) val formData = Multipart.FormData(workflowSource, workflowInputs, onHold).toEntity() Post(s"/workflows/$version", formData) ~> akkaHttpService.workflowRoutes ~> check { - assertResult( - s"""{ - | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", - | "status": "On Hold" - |}""".stripMargin) { + assertResult(s"""{ + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "On Hold" + |}""".stripMargin) { responseAs[String].parseJson.prettyPrint } assertResult(StatusCodes.Created) { @@ -259,11 +283,10 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with Post(s"/workflows/$version/$id/releaseHold") ~> akkaHttpService.workflowRoutes ~> check { - assertResult( - s"""{ - | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", - | "status": "Submitted" - |}""".stripMargin) { + assertResult(s"""{ + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "Submitted" + |}""".stripMargin) { responseAs[String].parseJson.prettyPrint } assertResult(StatusCodes.OK) { @@ -282,7 +305,8 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with s"""{ | "message": "Unrecognized workflow ID: ${CromwellApiServiceSpec.UnrecognizedWorkflowId.toString}", | "status": "fail" - |}""".stripMargin) { + |}""".stripMargin + ) { responseAs[String].parseJson.prettyPrint } assertResult(StatusCodes.NotFound) { @@ -293,8 +317,8 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with } it should "return 201 with warnings for a successful v1 workflow submission still using wdlSource" in { - val workflowSource = Multipart.FormData.BodyPart("wdlSource", - HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) + val workflowSource = + Multipart.FormData.BodyPart("wdlSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) val formData = Multipart.FormData(workflowSource).toEntity() Post(s"/workflows/v1", formData) ~> akkaHttpService.workflowRoutes ~> @@ -313,205 +337,230 @@ class CromwellApiServiceSpec extends AsyncFlatSpec with ScalatestRouteTest with warningHeader shouldNot be(empty) warningHeader.get.value should fullyMatch regex s"""299 cromwell/(\\d+-([0-9a-f]){7}(-SNAP)?|${VersionUtil.defaultMessage("cromwell-engine")}) """ + - "\"The 'wdlSource' parameter name has been deprecated in favor of 'workflowSource'. " + - "Support for 'wdlSource' will be removed from future versions of Cromwell. " + - "Please switch to using 'workflowSource' in future submissions.\"" + "\"The 'wdlSource' parameter name has been deprecated in favor of 'workflowSource'. " + + "Support for 'wdlSource' will be removed from future versions of Cromwell. 
" + + "Please switch to using 'workflowSource' in future submissions.\"" } } - it should "return 400 for an unrecognized form data request parameter " in { - val formData = Multipart.FormData(Map( - "incorrectParameter" -> HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()), - "incorrectParameter2" -> HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) - )).toEntity - - Post(s"/workflows/$version", formData) ~> - akkaHttpService.workflowRoutes ~> - check { - assertResult( - s"""{ - | "status": "fail", - | "message": "Error(s): Unexpected body part name: incorrectParameter\\nUnexpected body part name: incorrectParameter2\\nworkflowSource or workflowUrl needs to be supplied" - |}""".stripMargin) { - responseAs[String] - } - assertResult(StatusCodes.BadRequest) { - status - } - assertResult(ContentTypes.`application/json`)(contentType) + it should "return 400 for an unrecognized form data request parameter " in { + val formData = Multipart + .FormData( + Map( + "incorrectParameter" -> HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()), + "incorrectParameter2" -> HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) + ) + ) + .toEntity + + Post(s"/workflows/$version", formData) ~> + akkaHttpService.workflowRoutes ~> + check { + assertResult( + s"""{ + | "status": "fail", + | "message": "Error(s): Unexpected body part name: incorrectParameter\\nUnexpected body part name: incorrectParameter2\\nworkflowSource or workflowUrl needs to be supplied" + |}""".stripMargin + ) { + responseAs[String] } - } + assertResult(StatusCodes.BadRequest) { + status + } + assertResult(ContentTypes.`application/json`)(contentType) + } + } - it should "return 400 for a workflow submission with unsupported workflow option keys" in { - val options = """ - |{ - | "defaultRuntimeOptions": { - | "cpu":1 - | } - |} - |""".stripMargin - - val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) - val workflowInputs = Multipart.FormData.BodyPart("workflowOptions", HttpEntity(MediaTypes.`application/json`, options)) - val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() - - Post(s"/workflows/$version", formData) ~> - akkaHttpService.workflowRoutes ~> - check { - assertResult(StatusCodes.BadRequest) { - status - } - assertResult { - """|{ - | "status": "fail", - | "message": "Error(s): Invalid workflow options provided: Unsupported key/value pair in WorkflowOptions: defaultRuntimeOptions -> {\"cpu\":1}" - |} - |""".stripMargin.trim - } { - responseAs[String] - } - assertResult(ContentTypes.`application/json`)(contentType) + it should "return 400 for a workflow submission with unsupported workflow option keys" in { + val options = """ + |{ + | "defaultRuntimeOptions": { + | "cpu":1 + | } + |} + |""".stripMargin + + val workflowSource = + Multipart.FormData.BodyPart("workflowSource", + HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) + ) + val workflowInputs = + Multipart.FormData.BodyPart("workflowOptions", HttpEntity(MediaTypes.`application/json`, options)) + val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() + + Post(s"/workflows/$version", formData) ~> + akkaHttpService.workflowRoutes ~> + check { + assertResult(StatusCodes.BadRequest) { + status } - } + assertResult { + """|{ + | "status": "fail", + | "message": "Error(s): Invalid workflow options provided: Unsupported key/value pair in 
WorkflowOptions: defaultRuntimeOptions -> {\"cpu\":1}" + |} + |""".stripMargin.trim + } { + responseAs[String] + } + assertResult(ContentTypes.`application/json`)(contentType) + } + } + + it should "return 400 for a workflow submission with malformed workflow options json" in { + val options = s""" + |{"read_from_cache": "true" + |""".stripMargin + + val workflowSource = + Multipart.FormData.BodyPart("workflowSource", + HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) + ) + val workflowInputs = + Multipart.FormData.BodyPart("workflowOptions", HttpEntity(MediaTypes.`application/json`, options)) + val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() - it should "return 400 for a workflow submission with malformed workflow options json" in { - val options = s""" - |{"read_from_cache": "true" - |""".stripMargin - - val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) - val workflowInputs = Multipart.FormData.BodyPart("workflowOptions", HttpEntity(MediaTypes.`application/json`, options)) - val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() - - Post(s"/workflows/$version", formData) ~> - akkaHttpService.workflowRoutes ~> - check { - assertResult(StatusCodes.BadRequest) { - status - } - assertResult { - """|{ - | "status": "fail", - | "message": "Error(s): Invalid workflow options provided: Unexpected end-of-input at input index 28 (line 3, position 1), expected '}':\n\n^\n" - |} - |""".stripMargin.trim - } { - responseAs[String] - } - assertResult(ContentTypes.`application/json`)(contentType) + Post(s"/workflows/$version", formData) ~> + akkaHttpService.workflowRoutes ~> + check { + assertResult(StatusCodes.BadRequest) { + status } - } + assertResult { + """|{ + | "status": "fail", + | "message": "Error(s): Invalid workflow options provided: Unexpected end-of-input at input index 28 (line 3, position 1), expected '}':\n\n^\n" + |} + |""".stripMargin.trim + } { + responseAs[String] + } + assertResult(ContentTypes.`application/json`)(contentType) + } + } - it should "return 400 for a workflow submission with invalid workflow custom labels" in { - val labels = s""" - |{"key with more than 255 characters-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpas":"value with more than 255 characters-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa"} - |""".stripMargin + it should "return 400 for a workflow submission with invalid workflow custom labels" in { + val labels = + s""" + |{"key with more than 255 characters-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpas":"value with more than 255 characters-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa"} + |""".stripMargin - val workflowSource = Multipart.FormData.BodyPart("workflowSource", 
HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) - val customLabels = Multipart.FormData.BodyPart("labels", HttpEntity(MediaTypes.`application/json`, labels)) - val onHold = Multipart.FormData.BodyPart("workflowOnHold", HttpEntity("true")) - val formData = Multipart.FormData(workflowSource, customLabels, onHold).toEntity() + val workflowSource = + Multipart.FormData.BodyPart("workflowSource", + HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) + ) + val customLabels = Multipart.FormData.BodyPart("labels", HttpEntity(MediaTypes.`application/json`, labels)) + val onHold = Multipart.FormData.BodyPart("workflowOnHold", HttpEntity("true")) + val formData = Multipart.FormData(workflowSource, customLabels, onHold).toEntity() - Post(s"/workflows/$version", formData) ~> - akkaHttpService.workflowRoutes ~> - check { - assertResult(StatusCodes.BadRequest) { - status - } - assertResult(ContentTypes.`application/json`)(contentType) + Post(s"/workflows/$version", formData) ~> + akkaHttpService.workflowRoutes ~> + check { + assertResult(StatusCodes.BadRequest) { + status } - } + assertResult(ContentTypes.`application/json`)(contentType) + } + } + + behavior of "REST API batch submission endpoint" + it should "return 200 for a successful workflow submission " in { + val inputs = HelloWorld.rawInputs.toJson + val workflowSource = + Multipart.FormData.BodyPart("workflowSource", + HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) + ) + val workflowInputs = + Multipart.FormData.BodyPart("workflowInputs", HttpEntity(MediaTypes.`application/json`, s"[$inputs, $inputs]")) + val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() - behavior of "REST API batch submission endpoint" - it should "return 200 for a successful workflow submission " in { - val inputs = HelloWorld.rawInputs.toJson - val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource())) - val workflowInputs = Multipart.FormData.BodyPart("workflowInputs", HttpEntity(MediaTypes.`application/json`, s"[$inputs, $inputs]")) - val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() - - Post(s"/workflows/$version/batch", formData) ~> - akkaHttpService.workflowRoutes ~> - check { - assertResult( - s"""[{ - | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", - | "status": "Submitted" - |}, { - | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", - | "status": "Submitted" - |}]""".stripMargin) { - responseAs[String].parseJson.prettyPrint - } - assertResult(StatusCodes.Created) { - status - } - assertResult(ContentTypes.`application/json`)(contentType) + Post(s"/workflows/$version/batch", formData) ~> + akkaHttpService.workflowRoutes ~> + check { + assertResult(s"""[{ + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "Submitted" + |}, { + | "id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}", + | "status": "Submitted" + |}]""".stripMargin) { + responseAs[String].parseJson.prettyPrint } - } + assertResult(StatusCodes.Created) { + status + } + assertResult(ContentTypes.`application/json`)(contentType) + } + } + + it should "return 400 for an submission with no inputs" in { + val formData = Multipart + .FormData( + Multipart.FormData.BodyPart("workflowSource", + HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()) + ) + ) + .toEntity() - it should "return 400 for an submission with no inputs" in { - 
val formData = Multipart.FormData(Multipart.FormData.BodyPart("workflowSource", HttpEntity(MediaTypes.`application/json`, HelloWorld.workflowSource()))).toEntity()
-
-    Post(s"/workflows/$version/batch", formData) ~>
-      akkaHttpService.workflowRoutes ~>
-      check {
-        assertResult(
-          s"""{
-             | "status": "fail",
-             | "message": "Error(s): No inputs were provided"
-             |}""".stripMargin) {
-          responseAs[String]
-        }
-        assertResult(StatusCodes.BadRequest) {
-          status
-        }
-        assertResult(ContentTypes.`application/json`)(contentType)
+    Post(s"/workflows/$version/batch", formData) ~>
+      akkaHttpService.workflowRoutes ~>
+      check {
+        assertResult(s"""{
+                        | "status": "fail",
+                        | "message": "Error(s): No inputs were provided"
+                        |}""".stripMargin) {
+          responseAs[String]
         }
-      }
+        assertResult(StatusCodes.BadRequest) {
+          status
+        }
+        assertResult(ContentTypes.`application/json`)(contentType)
+      }
+  }
 
-    behavior of "REST API /timing endpoint"
-    it should "return 200 with an HTML document for the timings route" in {
-      Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/timing") ~>
-        akkaHttpService.workflowRoutes ~>
-        check {
-          assertResult(StatusCodes.OK) { status }
-          assertResult(`text/html(UTF-8)`) { contentType }
-          assertResult("<html>") {
-            responseAs[String].substring(0, 6)
-          }
+  behavior of "REST API /timing endpoint"
+  it should "return 200 with an HTML document for the timings route" in {
+    Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/timing") ~>
+      akkaHttpService.workflowRoutes ~>
+      check {
+        assertResult(StatusCodes.OK)(status)
+        assertResult(`text/html(UTF-8)`)(contentType)
+        assertResult("<html>") {
+          responseAs[String].substring(0, 6)
        }
-    }
+  }
 
-    it should "return 404 when unrecognized workflow id is submitted" in {
-      Get(s"/workflows/$version/${CromwellApiServiceSpec.UnrecognizedWorkflowId}/timing") ~>
-        akkaHttpService.workflowRoutes ~>
-        check {
-          assertResult(StatusCodes.NotFound) { status }
-          assertResult(
-            s"""{
-            | "message": "Unrecognized workflow ID: ${CromwellApiServiceSpec.UnrecognizedWorkflowId.toString}",
-            | "status": "fail"
-            |}""".stripMargin) {
-            responseAs[String].parseJson.prettyPrint
-          }
+  it should "return 404 when unrecognized workflow id is submitted" in {
+    Get(s"/workflows/$version/${CromwellApiServiceSpec.UnrecognizedWorkflowId}/timing") ~>
+      akkaHttpService.workflowRoutes ~>
+      check {
+        assertResult(StatusCodes.NotFound)(status)
+        assertResult(
+          s"""{
+             | "message": "Unrecognized workflow ID: ${CromwellApiServiceSpec.UnrecognizedWorkflowId.toString}",
+             | "status": "fail"
+             |}""".stripMargin
+        ) {
+          responseAs[String].parseJson.prettyPrint
        }
-    }
+  }
 
-    it should "return 400 when invalid workflow id is submitted" in {
-      Get(s"/workflows/$version/foo/timing") ~>
-        akkaHttpService.workflowRoutes ~>
-        check {
-          assertResult(StatusCodes.BadRequest) { status }
-          assertResult(
-            s"""{
-            | "message": "Invalid workflow ID: 'foo'.",
-            | "status": "fail"
-            |}""".stripMargin) {
-            responseAs[String].parseJson.prettyPrint
-          }
-          assertResult(ContentTypes.`application/json`)(contentType)
+  it should "return 400 when invalid workflow id is submitted" in {
+    Get(s"/workflows/$version/foo/timing") ~>
+      akkaHttpService.workflowRoutes ~>
+      check {
+        assertResult(StatusCodes.BadRequest)(status)
+        assertResult(s"""{
+                        | "message": "Invalid workflow ID: 'foo'.",
+                        | "status": "fail"
+                        |}""".stripMargin) {
+          responseAs[String].parseJson.prettyPrint
        }
-    }
+        assertResult(ContentTypes.`application/json`)(contentType)
+      }
+  }
 }
 
 object CromwellApiServiceSpec {
@@ 
-561,26 +610,25 @@ object CromwellApiServiceSpec { object MockServiceRegistryActor { - private def fullMetadataResponse(workflowId: WorkflowId) = { + private def fullMetadataResponse(workflowId: WorkflowId) = List( MetadataEvent(MetadataKey(workflowId, None, "testKey1a"), MetadataValue("myValue1a", MetadataString)), MetadataEvent(MetadataKey(workflowId, None, "testKey1b"), MetadataValue("myValue1b", MetadataString)), - MetadataEvent(MetadataKey(workflowId, None, "testKey2a"), MetadataValue("myValue2a", MetadataString)), + MetadataEvent(MetadataKey(workflowId, None, "testKey2a"), MetadataValue("myValue2a", MetadataString)) ) - } - private def wesFullMetadataResponse(workflowId: WorkflowId) = { + private def wesFullMetadataResponse(workflowId: WorkflowId) = List( MetadataEvent(MetadataKey(workflowId, None, "status"), MetadataValue("Running", MetadataString)), - MetadataEvent(MetadataKey(workflowId, None, "submittedFiles:workflow"), MetadataValue("myValue2a", MetadataString)), - + MetadataEvent(MetadataKey(workflowId, None, "submittedFiles:workflow"), + MetadataValue("myValue2a", MetadataString) + ) ) - } - def responseMetadataValues(workflowId: WorkflowId, withKeys: List[String], withoutKeys: List[String]): JsObject = { def keyFilter(keys: List[String])(m: MetadataEvent) = keys.exists(k => m.key.key.startsWith(k)) - val metadataEvents = if (workflowId == wesWorkflowId) wesFullMetadataResponse(workflowId) else fullMetadataResponse(workflowId) + val metadataEvents = + if (workflowId == wesWorkflowId) wesFullMetadataResponse(workflowId) else fullMetadataResponse(workflowId) val events = metadataEvents .filter(m => withKeys.isEmpty || keyFilter(withKeys)(m)) .filter(m => withoutKeys.isEmpty || !keyFilter(withoutKeys)(m)) @@ -592,9 +640,16 @@ object CromwellApiServiceSpec { MetadataQuery(workflowId, None, None, None, None, expandSubWorkflows = false) def logsEvents(id: WorkflowId) = { - val stdout = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), CallMetadataKeys.Stdout), MetadataValue("stdout.txt", MetadataString)) - val stderr = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), CallMetadataKeys.Stderr), MetadataValue("stderr.txt", MetadataString)) - val backend = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), s"${CallMetadataKeys.BackendLogsPrefix}:log"), MetadataValue("backend.log", MetadataString)) + val stdout = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), CallMetadataKeys.Stdout), + MetadataValue("stdout.txt", MetadataString) + ) + val stderr = MetadataEvent(MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), CallMetadataKeys.Stderr), + MetadataValue("stderr.txt", MetadataString) + ) + val backend = MetadataEvent( + MetadataKey(id, Some(MetadataJobKey("mycall", None, 1)), s"${CallMetadataKeys.BackendLogsPrefix}:log"), + MetadataValue("backend.log", MetadataString) + ) Vector(stdout, stderr, backend) } } @@ -604,13 +659,30 @@ object CromwellApiServiceSpec { override def receive = { case QueryForWorkflowsMatchingParameters(parameters) => - val labels: Option[Map[String, String]] = { - parameters.contains(("additionalQueryResultFields", "labels")).option( - Map("key1" -> "label1", "key2" -> "label2")) - } - - val response = WorkflowQuerySuccess(WorkflowQueryResponse(List(WorkflowQueryResult(ExistingWorkflowId.toString, - None, Some(WorkflowSucceeded.toString), None, None, None, labels, Option("pid"), Option("rid"), Unarchived)), 1), None) + val labels: Option[Map[String, String]] = + parameters + 
.contains(("additionalQueryResultFields", "labels")) + .option(Map("key1" -> "label1", "key2" -> "label2")) + + val response = WorkflowQuerySuccess( + WorkflowQueryResponse( + List( + WorkflowQueryResult(ExistingWorkflowId.toString, + None, + Some(WorkflowSucceeded.toString), + None, + None, + None, + labels, + Option("pid"), + Option("rid"), + Unarchived + ) + ), + 1 + ), + None + ) sender() ! response case ValidateWorkflowIdInMetadata(id) => if (RecognizedWorkflowIds.contains(id)) sender() ! MetadataService.RecognizedWorkflowId @@ -620,15 +692,16 @@ object CromwellApiServiceSpec { else sender() ! MetadataService.UnrecognizedWorkflowId case FetchWorkflowMetadataArchiveStatusAndEndTime(id) => id match { - case ArchivedAndDeletedWorkflowId => sender() ! WorkflowMetadataArchivedStatusAndEndTime(ArchivedAndDeleted, Option(OffsetDateTime.now)) - case ArchivedWorkflowId => sender() ! WorkflowMetadataArchivedStatusAndEndTime(Archived, Option(OffsetDateTime.now)) + case ArchivedAndDeletedWorkflowId => + sender() ! WorkflowMetadataArchivedStatusAndEndTime(ArchivedAndDeleted, Option(OffsetDateTime.now)) + case ArchivedWorkflowId => + sender() ! WorkflowMetadataArchivedStatusAndEndTime(Archived, Option(OffsetDateTime.now)) case _ => sender() ! WorkflowMetadataArchivedStatusAndEndTime(Unarchived, Option(OffsetDateTime.now)) } case GetCurrentStatus => - sender() ! StatusCheckResponse( - ok = true, - systems = Map( - "Engine Database" -> SubsystemStatus(ok = true, messages = None))) + sender() ! StatusCheckResponse(ok = true, + systems = Map("Engine Database" -> SubsystemStatus(ok = true, messages = None)) + ) case request @ GetStatus(id) => val status = id match { case OnHoldWorkflowId => WorkflowOnHold @@ -641,12 +714,22 @@ object CromwellApiServiceSpec { } sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processStatusResponse(id, status)) case request @ GetLabels(id) => - sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processLabelsResponse(id, Map("key1" -> "label1", "key2" -> "label2"))) + sender() ! SuccessfulMetadataJsonResponse( + request, + MetadataBuilderActor.processLabelsResponse(id, Map("key1" -> "label1", "key2" -> "label2")) + ) case request @ WorkflowOutputs(id) => - val event = Vector(MetadataEvent(MetadataKey(id, None, "outputs:test.hello.salutation"), MetadataValue("Hello foo!", MetadataString))) + val event = Vector( + MetadataEvent(MetadataKey(id, None, "outputs:test.hello.salutation"), + MetadataValue("Hello foo!", MetadataString) + ) + ) sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processOutputsResponse(id, event)) case request @ GetLogs(id) => - sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.workflowMetadataResponse(id, logsEvents(id), includeCallsIfEmpty = false, Map.empty)) + sender() ! SuccessfulMetadataJsonResponse( + request, + MetadataBuilderActor.workflowMetadataResponse(id, logsEvents(id), includeCallsIfEmpty = false, Map.empty) + ) case request @ FetchFailedJobsMetadataWithWorkflowId(id) => sender() ! SuccessfulMetadataJsonResponse(request, responseMetadataValues(id, List.empty, List.empty)) case request @ GetMetadataAction(MetadataQuery(id, _, _, withKeys, withoutKeys, _), _) => @@ -657,8 +740,12 @@ object CromwellApiServiceSpec { events.head.key.workflowId match { case CromwellApiServiceSpec.ExistingWorkflowId => sender() ! MetadataWriteSuccess(events) case CromwellApiServiceSpec.SummarizedWorkflowId => sender() ! 
MetadataWriteSuccess(events) - case CromwellApiServiceSpec.AbortedWorkflowId => sender() ! MetadataWriteFailure(new Exception("mock exception of db failure"), events) - case WorkflowId(_) => throw new Exception("Something untoward happened, this situation is not believed to be possible at this time") + case CromwellApiServiceSpec.AbortedWorkflowId => + sender() ! MetadataWriteFailure(new Exception("mock exception of db failure"), events) + case WorkflowId(_) => + throw new Exception( + "Something untoward happened, this situation is not believed to be possible at this time" + ) case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } case DescribeRequest(sourceFiles) => @@ -677,7 +764,9 @@ object CromwellApiServiceSpec { s"[reading back DescribeRequest contents] version: ${sourceFiles.workflowTypeVersion}" ) - sender() ! DescribeSuccess(description = WorkflowDescription(valid = true, errors = readBack, validWorkflow = true)) + sender() ! DescribeSuccess(description = + WorkflowDescription(valid = true, errors = readBack, validWorkflow = true) + ) } case _: InstrumentationServiceMessage => // Do nothing. case m => logger.error("Unexpected message received by MockServiceRegistryActor: {}", m) @@ -698,7 +787,11 @@ object CromwellApiServiceSpec { val message = id match { case AbortingWorkflowId => WorkflowAbortRequestedResponse(id) case OnHoldWorkflowId | SubmittedWorkflowId => WorkflowAbortedResponse(id) - case UnrecognizedWorkflowId => WorkflowAbortFailureResponse(id, new WorkflowNotFoundException(s"Couldn't abort $id because no workflow with that ID is in progress")) + case UnrecognizedWorkflowId => + WorkflowAbortFailureResponse( + id, + new WorkflowNotFoundException(s"Couldn't abort $id because no workflow with that ID is in progress") + ) case WorkflowId(_) => throw new Exception("Something untoward happened") case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } diff --git a/engine/src/test/scala/cromwell/webservice/routes/MetadataRouteSupportSpec.scala b/engine/src/test/scala/cromwell/webservice/routes/MetadataRouteSupportSpec.scala index 73cc257e828..64d5ae24376 100644 --- a/engine/src/test/scala/cromwell/webservice/routes/MetadataRouteSupportSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/routes/MetadataRouteSupportSpec.scala @@ -2,7 +2,7 @@ package cromwell.webservice.routes import akka.actor.{ActorSystem, Props} import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ -import akka.http.scaladsl.model.headers.{HttpEncodings, `Accept-Encoding`} +import akka.http.scaladsl.model.headers.{`Accept-Encoding`, HttpEncodings} import akka.http.scaladsl.model.{ContentTypes, HttpEntity, StatusCodes} import akka.http.scaladsl.server.Route import akka.http.scaladsl.testkit.{RouteTestTimeout, ScalatestRouteTest} @@ -83,7 +83,7 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit akkaHttpService.metadataRoutes ~> check { status should be(StatusCodes.OK) - responseAs[JsObject].fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.Outputs) + responseAs[JsObject].fields.keys should contain allOf (WorkflowMetadataKeys.Id, WorkflowMetadataKeys.Outputs) contentType should be(ContentTypes.`application/json`) } } @@ -116,19 +116,28 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit akkaHttpService.metadataRoutes ~> check { status should be(StatusCodes.OK) - responseAs[JsObject].fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.Outputs) + responseAs[JsObject].fields.keys should contain allOf (WorkflowMetadataKeys.Id, WorkflowMetadataKeys.Outputs) contentType should be(ContentTypes.`application/json`) } } - def validateArchivedMetadataResponseMessage(responseJson: JsObject, includeAvailabilityMessage: Boolean, includeLabelsMessage: Boolean) = { + def validateArchivedMetadataResponseMessage(responseJson: JsObject, + includeAvailabilityMessage: Boolean, + includeLabelsMessage: Boolean + ) = { val responseMessage = responseJson.fields("message").asInstanceOf[JsString].value val expectedSuffix = - (if (includeAvailabilityMessage) " It is available in the archive bucket, or via a support request in the case of a managed instance." else "") + - (if (includeLabelsMessage) " As a result, new labels can't be added or existing labels can't be updated for this workflow." else "") - - responseMessage should startWith("Cromwell has archived this workflow's metadata " + - "according to the lifecycle policy. The workflow completed at ") + (if (includeAvailabilityMessage) + " It is available in the archive bucket, or via a support request in the case of a managed instance." + else "") + + (if (includeLabelsMessage) + " As a result, new labels can't be added or existing labels can't be updated for this workflow." + else "") + + responseMessage should startWith( + "Cromwell has archived this workflow's metadata " + + "according to the lifecycle policy. The workflow completed at " + ) // The missing middle of the message looks like "The workflow completed at x timestamp, which was y milliseconds ago." 
responseMessage should endWith(s"ago.${expectedSuffix}") } @@ -139,10 +148,13 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) - val responseJson = responseAs[JsObject] - responseJson.fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) + val responseJson = responseAs[JsObject] + responseJson.fields.keys should contain allOf (WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) responseJson.fields("metadataArchiveStatus").asInstanceOf[JsString].value shouldBe "ArchivedAndDeleted" - validateArchivedMetadataResponseMessage(responseJson, includeAvailabilityMessage = true, includeLabelsMessage = false) + validateArchivedMetadataResponseMessage(responseJson, + includeAvailabilityMessage = true, + includeLabelsMessage = false + ) contentType should be(ContentTypes.`application/json`) } } @@ -154,11 +166,12 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) - val call = responseAs[JsObject].fields("calls").convertTo[JsObject].fields("mycall").convertTo[Seq[JsObject]].head + val call = + responseAs[JsObject].fields("calls").convertTo[JsObject].fields("mycall").convertTo[Seq[JsObject]].head call.fields("stdout") should be(JsString("stdout.txt")) call.fields("stderr") should be(JsString("stderr.txt")) call.fields("stdout") should be(JsString("stdout.txt")) - call.fields("backendLogs").convertTo[JsObject].fields("log") should be (JsString("backend.log")) + call.fields("backendLogs").convertTo[JsObject].fields("log") should be(JsString("backend.log")) } } @@ -178,11 +191,12 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) - val call = responseAs[JsObject].fields("calls").convertTo[JsObject].fields("mycall").convertTo[Seq[JsObject]].head + val call = + responseAs[JsObject].fields("calls").convertTo[JsObject].fields("mycall").convertTo[Seq[JsObject]].head call.fields("stdout") should be(JsString("stdout.txt")) call.fields("stderr") should be(JsString("stderr.txt")) call.fields("stdout") should be(JsString("stdout.txt")) - call.fields("backendLogs").convertTo[JsObject].fields("log") should be (JsString("backend.log")) + call.fields("backendLogs").convertTo[JsObject].fields("log") should be(JsString("backend.log")) } } @@ -192,11 +206,14 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) - val responseJson = responseAs[JsObject] - responseJson.fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) + val responseJson = responseAs[JsObject] + responseJson.fields.keys should contain allOf (WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) responseJson.fields("metadataArchiveStatus").asInstanceOf[JsString].value shouldBe "ArchivedAndDeleted" - validateArchivedMetadataResponseMessage(responseJson, includeAvailabilityMessage = true, includeLabelsMessage = false) - } + validateArchivedMetadataResponseMessage(responseJson, + includeAvailabilityMessage = true, + includeLabelsMessage = false + ) + } } behavior of "REST API /metadata endpoint" @@ -206,7 +223,7 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should 
be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") + result.fields.keys should contain allOf ("testKey1a", "testKey1b", "testKey2a") result.fields.keys shouldNot contain("testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) result.fields("testKey1b") should be(JsString("myValue1b")) @@ -220,7 +237,7 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") + result.fields.keys should contain allOf ("testKey1a", "testKey1b", "testKey2a") result.fields.keys shouldNot contain("testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) result.fields("testKey1b") should be(JsString("myValue1b")) @@ -229,7 +246,8 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit } it should "return with gzip encoding when requested" in { - Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata").addHeader(`Accept-Encoding`(HttpEncodings.gzip)) ~> + Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata") + .addHeader(`Accept-Encoding`(HttpEncodings.gzip)) ~> akkaHttpService.metadataRoutes ~> check { response.headers.find(_.name == "Content-Encoding").get.value should be("gzip") @@ -245,13 +263,15 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit } it should "return with included metadata from the metadata route" in { - Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?includeKey=testKey1&includeKey=testKey2a") ~> + Get( + s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?includeKey=testKey1&includeKey=testKey2a" + ) ~> akkaHttpService.metadataRoutes ~> check { status should be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") - result.fields.keys should contain noneOf("testKey2b", "testKey3") + result.fields.keys should contain allOf ("testKey1a", "testKey1b", "testKey2a") + result.fields.keys should contain noneOf ("testKey2b", "testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) result.fields("testKey1b") should be(JsString("myValue1b")) result.fields("testKey2a") should be(JsString("myValue2a")) @@ -259,28 +279,32 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit } it should "return with excluded metadata from the metadata route" in { - Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?excludeKey=testKey2&excludeKey=testKey3") ~> + Get( + s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?excludeKey=testKey2&excludeKey=testKey3" + ) ~> akkaHttpService.metadataRoutes ~> check { status should be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1a", "testKey1b") - result.fields.keys should contain noneOf("testKey2a", "testKey3") + result.fields.keys should contain allOf ("testKey1a", "testKey1b") + result.fields.keys should contain noneOf ("testKey2a", "testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) result.fields("testKey1b") should be(JsString("myValue1b")) } } it should "correctly include and exclude metadata keys in workflow details requests" in { - 
Get(s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?includeKey=testKey1&excludeKey=testKey1a") ~> + Get( + s"/workflows/$version/${CromwellApiServiceSpec.ExistingWorkflowId}/metadata?includeKey=testKey1&excludeKey=testKey1a" + ) ~> akkaHttpService.metadataRoutes ~> check { val r = responseAs[String] withClue(s"From response $r") { status should be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allElementsOf(List("testKey1b")) - result.fields.keys should contain noneOf("testKey1a", "testKey2") + result.fields.keys should contain allElementsOf (List("testKey1b")) + result.fields.keys should contain noneOf ("testKey1a", "testKey2") result.fields("testKey1b") should be(JsString("myValue1b")) } } @@ -292,7 +316,7 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") + result.fields.keys should contain allOf ("testKey1a", "testKey1b", "testKey2a") result.fields.keys shouldNot contain("testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) result.fields("testKey1b") should be(JsString("myValue1b")) @@ -306,10 +330,13 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) - val responseJson = responseAs[JsObject] - responseJson.fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) + val responseJson = responseAs[JsObject] + responseJson.fields.keys should contain allOf (WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) responseJson.fields("metadataArchiveStatus").asInstanceOf[JsString].value shouldBe "ArchivedAndDeleted" - validateArchivedMetadataResponseMessage(responseJson, includeAvailabilityMessage = true, includeLabelsMessage = false) + validateArchivedMetadataResponseMessage(responseJson, + includeAvailabilityMessage = true, + includeLabelsMessage = false + ) } } @@ -319,7 +346,7 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status should be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allOf("testKey1a", "testKey1b", "testKey2a") + result.fields.keys should contain allOf ("testKey1a", "testKey1b", "testKey2a") result.fields.keys shouldNot contain("testKey3") result.fields("testKey1a") should be(JsString("myValue1a")) result.fields("testKey1b") should be(JsString("myValue1b")) @@ -341,7 +368,9 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit } it should "return labels if specified in additionalQueryResultFields param" in { - Get(s"/workflows/$version/query?additionalQueryResultFields=labels&id=${CromwellApiServiceSpec.ExistingWorkflowId}") ~> + Get( + s"/workflows/$version/query?additionalQueryResultFields=labels&id=${CromwellApiServiceSpec.ExistingWorkflowId}" + ) ~> akkaHttpService.metadataRoutes ~> check { status should be(StatusCodes.OK) @@ -355,7 +384,9 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit } it should "return parentWorkflowId if specified in additionalQueryResultFields param" in { - Get(s"/workflows/$version/query?additionalQueryResultFields=parentWorkflowId&id=${CromwellApiServiceSpec.ExistingWorkflowId}") ~> + Get( + 
s"/workflows/$version/query?additionalQueryResultFields=parentWorkflowId&id=${CromwellApiServiceSpec.ExistingWorkflowId}" + ) ~> akkaHttpService.metadataRoutes ~> check { status should be(StatusCodes.OK) @@ -382,7 +413,9 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit } it should "return labels if specified in additionalQueryResultFields param" in { - Post(s"/workflows/$version/query", HttpEntity(ContentTypes.`application/json`, """[{"additionalQueryResultFields":"labels"}]""")) ~> + Post(s"/workflows/$version/query", + HttpEntity(ContentTypes.`application/json`, """[{"additionalQueryResultFields":"labels"}]""") + ) ~> akkaHttpService.metadataRoutes ~> check { assertResult(StatusCodes.OK) { @@ -396,7 +429,9 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit } it should "return parentWorkflowId if specified in additionalQueryResultFields param" in { - Post(s"/workflows/$version/query", HttpEntity(ContentTypes.`application/json`, """[{"additionalQueryResultFields":"parentWorkflowId"}]""")) ~> + Post(s"/workflows/$version/query", + HttpEntity(ContentTypes.`application/json`, """[{"additionalQueryResultFields":"parentWorkflowId"}]""") + ) ~> akkaHttpService.metadataRoutes ~> check { assertResult(StatusCodes.OK) { @@ -480,9 +515,12 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status shouldBe StatusCodes.BadRequest val actualResult = responseAs[JsObject] - actualResult.fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) + actualResult.fields.keys should contain allOf (WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) actualResult.fields("metadataArchiveStatus").asInstanceOf[JsString].value shouldBe "Archived" - validateArchivedMetadataResponseMessage(actualResult, includeAvailabilityMessage = false, includeLabelsMessage = true) + validateArchivedMetadataResponseMessage(actualResult, + includeAvailabilityMessage = false, + includeLabelsMessage = true + ) } } @@ -502,9 +540,12 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit check { status shouldBe StatusCodes.BadRequest val actualResult = responseAs[JsObject] - actualResult.fields.keys should contain allOf(WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) + actualResult.fields.keys should contain allOf (WorkflowMetadataKeys.Id, WorkflowMetadataKeys.MetadataArchiveStatus, WorkflowMetadataKeys.Message) actualResult.fields("metadataArchiveStatus").asInstanceOf[JsString].value shouldBe "ArchivedAndDeleted" - validateArchivedMetadataResponseMessage(actualResult, includeAvailabilityMessage = true, includeLabelsMessage = true) + validateArchivedMetadataResponseMessage(actualResult, + includeAvailabilityMessage = true, + includeLabelsMessage = true + ) } } @@ -518,7 +559,9 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest wit """.stripMargin val unsummarizedId = CromwellApiServiceSpec.ExistingWorkflowId - Patch(s"/workflows/$version/$unsummarizedId/labels", HttpEntity(ContentTypes.`application/json`, validLabelsJson)) ~> + Patch(s"/workflows/$version/$unsummarizedId/labels", + HttpEntity(ContentTypes.`application/json`, validLabelsJson) + ) ~> akkaHttpService.metadataRoutes ~> check { status shouldBe StatusCodes.NotFound @@ -528,7 +571,8 @@ class MetadataRouteSupportSpec extends AsyncFlatSpec with 
ScalatestRouteTest wit } object MetadataRouteSupportSpec { - class MockMetadataRouteSupport()(implicit val system: ActorSystem, routeTestTimeout: RouteTestTimeout) extends MetadataRouteSupport { + class MockMetadataRouteSupport()(implicit val system: ActorSystem, routeTestTimeout: RouteTestTimeout) + extends MetadataRouteSupport { override def actorRefFactory = system override val ec = system.dispatcher override val timeout = routeTestTimeout.duration diff --git a/engine/src/test/scala/cromwell/webservice/routes/WomtoolRouteSupportSpec.scala b/engine/src/test/scala/cromwell/webservice/routes/WomtoolRouteSupportSpec.scala index 24f8ce18ee7..06c48a42b77 100644 --- a/engine/src/test/scala/cromwell/webservice/routes/WomtoolRouteSupportSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/routes/WomtoolRouteSupportSpec.scala @@ -16,7 +16,6 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ - // N.B. this suite only tests the routing and creation of the WorkflowSourceFilesCollection, it uses the MockServiceRegistryActor // to return fake results instead of going to a real WomtoolServiceActor class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Matchers { @@ -29,12 +28,24 @@ class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with behavior of "/describe endpoint" object BodyParts { - val workflowSource = Multipart.FormData.BodyPart("workflowSource", HttpEntity(ContentTypes.`text/plain(UTF-8)`, "This is not a WDL, but that's OK for this test of request routing.")) - val workflowUrl = Multipart.FormData.BodyPart("workflowUrl", HttpEntity(MediaTypes.`application/json`, - "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl")) - val workflowInputs = Multipart.FormData.BodyPart("workflowInputs", HttpEntity(MediaTypes.`application/json`, "{\"a\":\"is for apple\"}")) + val workflowSource = Multipart.FormData.BodyPart( + "workflowSource", + HttpEntity(ContentTypes.`text/plain(UTF-8)`, "This is not a WDL, but that's OK for this test of request routing.") + ) + val workflowUrl = Multipart.FormData.BodyPart( + "workflowUrl", + HttpEntity( + MediaTypes.`application/json`, + "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl" + ) + ) + val workflowInputs = Multipart.FormData.BodyPart( + "workflowInputs", + HttpEntity(MediaTypes.`application/json`, "{\"a\":\"is for apple\"}") + ) val workflowType = Multipart.FormData.BodyPart("workflowType", HttpEntity(ContentTypes.`text/plain(UTF-8)`, "WDL")) - val workflowVersion = Multipart.FormData.BodyPart("workflowTypeVersion", HttpEntity(ContentTypes.`text/plain(UTF-8)`, "1.0")) + val workflowVersion = + Multipart.FormData.BodyPart("workflowTypeVersion", HttpEntity(ContentTypes.`text/plain(UTF-8)`, "1.0")) val workflowSourceTriggerDescribeFailure = Multipart.FormData.BodyPart("workflowSource", HttpEntity(ContentTypes.`text/plain(UTF-8)`, "fail to describe")) @@ -43,16 +54,17 @@ class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with } it should "return Bad Request if the actor returns DescribeFailure" in { - Post(s"/womtool/$version/describe", Multipart.FormData(BodyParts.workflowSourceTriggerDescribeFailure).toEntity()) ~> + Post(s"/womtool/$version/describe", + Multipart.FormData(BodyParts.workflowSourceTriggerDescribeFailure).toEntity() + ) ~> 
akkaHttpService.womtoolRoutes ~> check { status should be(StatusCodes.BadRequest) - assertResult( - s"""{ - | "status": "fail", - | "message": "as requested, failing to describe" - |}""".stripMargin) { + assertResult(s"""{ + | "status": "fail", + | "message": "as requested, failing to describe" + |}""".stripMargin) { responseAs[String] } } @@ -75,7 +87,8 @@ class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with status should be(StatusCodes.OK) assertResult { - WorkflowDescription(valid = true, + WorkflowDescription( + valid = true, errors = List( "this is fake data from the mock SR actor", "[reading back DescribeRequest contents] workflow hashcode: Some(580529622)", @@ -86,7 +99,7 @@ class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with ), validWorkflow = true ) - } { responseAs[WorkflowDescription] } + }(responseAs[WorkflowDescription]) } } @@ -97,7 +110,8 @@ class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with status should be(StatusCodes.OK) assertResult { - WorkflowDescription(valid = true, + WorkflowDescription( + valid = true, errors = List( "this is fake data from the mock SR actor", "[reading back DescribeRequest contents] workflow hashcode: None", @@ -108,18 +122,24 @@ class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with ), validWorkflow = true ) - } { responseAs[WorkflowDescription] } + }(responseAs[WorkflowDescription]) } } it should "include inputs, workflow type, and workflow version in the WorkflowSourceFilesCollection" in { - Post(s"/womtool/$version/describe", Multipart.FormData(BodyParts.workflowSource, BodyParts.workflowInputs, BodyParts.workflowType, BodyParts.workflowVersion).toEntity()) ~> + Post( + s"/womtool/$version/describe", + Multipart + .FormData(BodyParts.workflowSource, BodyParts.workflowInputs, BodyParts.workflowType, BodyParts.workflowVersion) + .toEntity() + ) ~> akkaHttpService.womtoolRoutes ~> check { status should be(StatusCodes.OK) assertResult { - WorkflowDescription(valid = true, + WorkflowDescription( + valid = true, errors = List( "this is fake data from the mock SR actor", "[reading back DescribeRequest contents] workflow hashcode: Some(580529622)", @@ -130,17 +150,18 @@ class WomtoolRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with ), validWorkflow = true ) - } { responseAs[WorkflowDescription] } + }(responseAs[WorkflowDescription]) } } } object WomtoolRouteSupportSpec { - class MockWomtoolRouteSupport()(implicit val system: ActorSystem, routeTestTimeout: RouteTestTimeout) extends WomtoolRouteSupport { + class MockWomtoolRouteSupport()(implicit val system: ActorSystem, routeTestTimeout: RouteTestTimeout) + extends WomtoolRouteSupport { override def actorRefFactory = system override val ec = system.dispatcher override val timeout = routeTestTimeout.duration override val serviceRegistryActor = actorRefFactory.actorOf(Props(new MockServiceRegistryActor())) - override implicit val materializer = ActorMaterializer() + implicit override val materializer = ActorMaterializer() } } diff --git a/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala b/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala index ba60258476c..e75e3236254 100644 --- a/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala @@ -22,7 +22,8 @@ class ServiceInfoSpec extends AsyncFlatSpec with ScalatestRouteTest with 
Matcher behavior of "ServiceInfo" - val expectedResponse = WesStatusInfoResponse(Map("WDL" -> Set("draft-2", "1.0", "biscayne", "cascades")), + val expectedResponse = WesStatusInfoResponse( + Map("WDL" -> Set("draft-2", "1.0", "biscayne", "cascades")), List("1.0"), Set("ftp", "s3", "drs", "gcs", "http"), Map("Cromwell" -> CromwellApiService.cromwellVersion), @@ -30,8 +31,8 @@ class ServiceInfoSpec extends AsyncFlatSpec with ScalatestRouteTest with Matcher Map(WesState.Running -> 5, WesState.Queued -> 3, WesState.Canceling -> 2), "https://cromwell.readthedocs.io/en/stable/", "https://cromwell.readthedocs.io/en/stable/", - Map()) - + Map() + ) it should "should eventually build the right WesResponse" in { ServiceInfo.toWesResponse(workflowStoreActor) map { r => diff --git a/engine/src/test/scala/cromwell/webservice/routes/wes/WesRouteSupportSpec.scala b/engine/src/test/scala/cromwell/webservice/routes/wes/WesRouteSupportSpec.scala index 00a361a3174..80d4ec23f6a 100644 --- a/engine/src/test/scala/cromwell/webservice/routes/wes/WesRouteSupportSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/routes/wes/WesRouteSupportSpec.scala @@ -7,7 +7,11 @@ import akka.http.scaladsl.server.MethodRejection import akka.http.scaladsl.testkit.{RouteTestTimeout, ScalatestRouteTest} import cromwell.util.SampleWdl.HelloWorld import cromwell.webservice.routes.CromwellApiServiceSpec -import cromwell.webservice.routes.CromwellApiServiceSpec.{MockServiceRegistryActor, MockWorkflowManagerActor, MockWorkflowStoreActor} +import cromwell.webservice.routes.CromwellApiServiceSpec.{ + MockServiceRegistryActor, + MockWorkflowManagerActor, + MockWorkflowStoreActor +} import cromwell.webservice.routes.wes.WesResponseJsonSupport._ import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers @@ -18,11 +22,10 @@ import scala.concurrent.duration._ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Matchers with WesRouteSupport { val actorRefFactory = system - override implicit val ec = system.dispatcher + implicit override val ec = system.dispatcher override val timeout = routeTestTimeout.duration implicit def routeTestTimeout = RouteTestTimeout(5.seconds) - override val workflowStoreActor = actorRefFactory.actorOf(Props(new MockWorkflowStoreActor())) override val serviceRegistryActor = actorRefFactory.actorOf(Props(new MockServiceRegistryActor())) override val workflowManagerActor = actorRefFactory.actorOf(Props(new MockWorkflowManagerActor())) @@ -33,16 +36,20 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat it should "return PAUSED when on hold" in { Get(s"/ga4gh/wes/$version/runs/${CromwellApiServiceSpec.OnHoldWorkflowId}/status") ~> wesRoutes ~> - check { - responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.OnHoldWorkflowId.toString, WesState.Paused) - } + check { + responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.OnHoldWorkflowId.toString, + WesState.Paused + ) + } } it should "return QUEUED when submitted" in { Get(s"/ga4gh/wes/$version/runs/${CromwellApiServiceSpec.ExistingWorkflowId}/status") ~> wesRoutes ~> check { - responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.ExistingWorkflowId.toString, WesState.Queued) + responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.ExistingWorkflowId.toString, + WesState.Queued + ) } } @@ -50,7 +57,9 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat 
Get(s"/ga4gh/wes/$version/runs/${CromwellApiServiceSpec.RunningWorkflowId}/status") ~> wesRoutes ~> check { - responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.RunningWorkflowId.toString, WesState.Running) + responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.RunningWorkflowId.toString, + WesState.Running + ) } } @@ -58,7 +67,9 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat Get(s"/ga4gh/wes/$version/runs/${CromwellApiServiceSpec.AbortingWorkflowId}/status") ~> wesRoutes ~> check { - responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.AbortingWorkflowId.toString, WesState.Canceling) + responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.AbortingWorkflowId.toString, + WesState.Canceling + ) } } @@ -66,7 +77,9 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat Get(s"/ga4gh/wes/$version/runs/${CromwellApiServiceSpec.AbortedWorkflowId}/status") ~> wesRoutes ~> check { - responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.AbortedWorkflowId.toString, WesState.Canceled) + responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.AbortedWorkflowId.toString, + WesState.Canceled + ) } } @@ -74,7 +87,9 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat Get(s"/ga4gh/wes/$version/runs/${CromwellApiServiceSpec.SucceededWorkflowId}/status") ~> wesRoutes ~> check { - responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.SucceededWorkflowId.toString, WesState.Complete) + responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.SucceededWorkflowId.toString, + WesState.Complete + ) } } @@ -82,7 +97,9 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat Get(s"/ga4gh/wes/$version/runs/${CromwellApiServiceSpec.FailedWorkflowId}/status") ~> wesRoutes ~> check { - responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.FailedWorkflowId.toString, WesState.ExecutorError) + responseAs[WesRunStatus] shouldEqual WesRunStatus(CromwellApiServiceSpec.FailedWorkflowId.toString, + WesState.ExecutorError + ) } } @@ -107,7 +124,9 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat status } - responseAs[WesErrorResponse] shouldEqual WesErrorResponse("Invalid workflow ID: 'foobar'.", StatusCodes.InternalServerError.intValue) + responseAs[WesErrorResponse] shouldEqual WesErrorResponse("Invalid workflow ID: 'foobar'.", + StatusCodes.InternalServerError.intValue + ) } } @@ -157,16 +176,24 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat behavior of "WES API /runs POST endpoint" it should "return 201 for a successful workflow submission" in { - val workflowSource = Multipart.FormData.BodyPart("workflow_url", HttpEntity(MediaTypes.`application/json`, "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl")) - val workflowInputs = Multipart.FormData.BodyPart("workflow_params", HttpEntity(MediaTypes.`application/json`, HelloWorld.rawInputs.toJson.toString())) + val workflowSource = Multipart.FormData.BodyPart( + "workflow_url", + HttpEntity( + MediaTypes.`application/json`, + "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl" + ) + ) + val workflowInputs = + 
Multipart.FormData.BodyPart("workflow_params", + HttpEntity(MediaTypes.`application/json`, HelloWorld.rawInputs.toJson.toString()) + ) val formData = Multipart.FormData(workflowSource, workflowInputs).toEntity() Post(s"/ga4gh/wes/$version/runs", formData) ~> wesRoutes ~> check { - assertResult( - s"""{ - | "run_id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}" - |}""".stripMargin) { + assertResult(s"""{ + | "run_id": "${CromwellApiServiceSpec.ExistingWorkflowId.toString}" + |}""".stripMargin) { responseAs[String].parseJson.prettyPrint } assertResult(StatusCodes.Created) { @@ -176,7 +203,6 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat } } - behavior of "WES API /runs GET endpoint" it should "return results for a good query" in { Get(s"/ga4gh/wes/v1/runs") ~> @@ -197,9 +223,9 @@ class WesRouteSupportSpec extends AsyncFlatSpec with ScalatestRouteTest with Mat check { status should be(StatusCodes.OK) val result = responseAs[JsObject] - result.fields.keys should contain allOf("request", "run_id", "state") + result.fields.keys should contain allOf ("request", "run_id", "state") result.fields("state") should be(JsString("RUNNING")) result.fields("run_id") should be(JsString(CromwellApiServiceSpec.wesWorkflowId.toString)) } } -} \ No newline at end of file +} diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemConfig.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemConfig.scala index c5467c78ffe..172b3e8a9d0 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemConfig.scala +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemConfig.scala @@ -10,12 +10,12 @@ import java.util.UUID // WSM config is needed for accessing WSM-managed blob containers created in Terra workspaces. // If the identity executing Cromwell has native access to the blob container, this can be ignored. 
-final case class WorkspaceManagerConfig(url: WorkspaceManagerURL, - overrideWsmAuthToken: Option[String]) // dev-only +final case class WorkspaceManagerConfig(url: WorkspaceManagerURL, overrideWsmAuthToken: Option[String]) // dev-only final case class BlobFileSystemConfig(subscriptionId: Option[SubscriptionId], expiryBufferMinutes: Long, - workspaceManagerConfig: Option[WorkspaceManagerConfig]) + workspaceManagerConfig: Option[WorkspaceManagerConfig] +) object BlobFileSystemConfig { @@ -36,8 +36,7 @@ object BlobFileSystemConfig { (wsmURL, overrideWsmAuthToken) .mapN(WorkspaceManagerConfig) .map(Option(_)) - } - else None.validNel + } else None.validNel (subscriptionId, expiryBufferMinutes, wsmConfig) .mapN(BlobFileSystemConfig.apply) @@ -45,16 +44,16 @@ object BlobFileSystemConfig { } private def parseString(config: Config, path: String) = - validate[String] { config.as[String](path) } + validate[String](config.as[String](path)) private def parseStringOpt(config: Config, path: String) = - validate[Option[String]] { config.as[Option[String]](path) } + validate[Option[String]](config.as[Option[String]](path)) private def parseUUIDOpt(config: Config, path: String) = - validate[Option[UUID]] { config.as[Option[String]](path).map(UUID.fromString) } + validate[Option[UUID]](config.as[Option[String]](path).map(UUID.fromString)) private def parseLongOpt(config: Config, path: String) = - validate[Option[Long]] { config.as[Option[Long]](path) } + validate[Option[Long]](config.as[Option[Long]](path)) } // Our filesystem setup magic can't use BlobFileSystemConfig.apply directly, so we need this diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala index 8f03dbe7e33..21316250146 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobFileSystemManager.scala @@ -23,9 +23,12 @@ import scala.util.{Failure, Success, Try} // actually connecting to Blob storage. case class AzureFileSystemAPI(private val provider: FileSystemProvider = new AzureFileSystemProvider()) { def getFileSystem(uri: URI): Try[AzureFileSystem] = Try(provider.getFileSystem(uri).asInstanceOf[AzureFileSystem]) - def newFileSystem(uri: URI, config: Map[String, Object]): Try[AzureFileSystem] = Try(provider.newFileSystem(uri, config.asJava).asInstanceOf[AzureFileSystem]) + def newFileSystem(uri: URI, config: Map[String, Object]): Try[AzureFileSystem] = Try( + provider.newFileSystem(uri, config.asJava).asInstanceOf[AzureFileSystem] + ) def closeFileSystem(uri: URI): Option[Unit] = getFileSystem(uri).toOption.map(_.close) } + /** * The BlobFileSystemManager is an object that is responsible for managing the open filesystem, * and refreshing the SAS token that is used to access the blob container containing that filesystem. @@ -37,27 +40,33 @@ object BlobFileSystemManager { def buildConfigMap(credential: AzureSasCredential, container: BlobContainerName): Map[String, Object] = { // Special handling is done here to provide a special key value pair if the placeholder token is provided // This is due to the BlobClient requiring an auth token even for public blob paths. 
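 // A usage sketch of the two cases (`realSas` and the container names are illustrative
 // stand-ins, not values produced here): a public container is keyed with
 // AZURE_STORAGE_PUBLIC_ACCESS_CREDENTIAL -> PLACEHOLDER_TOKEN, a private one with
 // AZURE_STORAGE_SAS_TOKEN_CREDENTIAL -> the real credential, and both also carry the
 // container name and the skip-initial-container-check flag:
 //
 //   val publicConfig = buildConfigMap(PLACEHOLDER_TOKEN, BlobContainerName("my-public-container"))
 //   val privateConfig = buildConfigMap(new AzureSasCredential(realSas), BlobContainerName("sc-..."))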
- val sasTuple = if (credential == PLACEHOLDER_TOKEN) (AzureFileSystem.AZURE_STORAGE_PUBLIC_ACCESS_CREDENTIAL, PLACEHOLDER_TOKEN) - else (AzureFileSystem.AZURE_STORAGE_SAS_TOKEN_CREDENTIAL, credential) + val sasTuple = + if (credential == PLACEHOLDER_TOKEN) (AzureFileSystem.AZURE_STORAGE_PUBLIC_ACCESS_CREDENTIAL, PLACEHOLDER_TOKEN) + else (AzureFileSystem.AZURE_STORAGE_SAS_TOKEN_CREDENTIAL, credential) - Map(sasTuple, (AzureFileSystem.AZURE_STORAGE_FILE_STORES, container.value), - (AzureFileSystem.AZURE_STORAGE_SKIP_INITIAL_CONTAINER_CHECK, java.lang.Boolean.TRUE)) + Map( + sasTuple, + (AzureFileSystem.AZURE_STORAGE_FILE_STORES, container.value), + (AzureFileSystem.AZURE_STORAGE_SKIP_INITIAL_CONTAINER_CHECK, java.lang.Boolean.TRUE) + ) } - def combinedEnpointContainerUri(endpoint: EndpointURL, container: BlobContainerName) = new URI("azb://?endpoint=" + endpoint + "/" + container.value) + def combinedEnpointContainerUri(endpoint: EndpointURL, container: BlobContainerName) = new URI( + "azb://?endpoint=" + endpoint + "/" + container.value + ) val PLACEHOLDER_TOKEN = new AzureSasCredential("this-is-a-public-sas") } class BlobFileSystemManager(val expiryBufferMinutes: Long, val blobTokenGenerator: BlobSasTokenGenerator, - val fileSystemAPI: AzureFileSystemAPI = AzureFileSystemAPI()) extends LazyLogging { + val fileSystemAPI: AzureFileSystemAPI = AzureFileSystemAPI() +) extends LazyLogging { - def this(config: BlobFileSystemConfig) = { + def this(config: BlobFileSystemConfig) = this( config.expiryBufferMinutes, BlobSasTokenGenerator.createBlobTokenGeneratorFromConfig(config) ) - } def this(rawConfig: Config) = this(BlobFileSystemConfig(rawConfig)) @@ -68,18 +77,16 @@ class BlobFileSystemManager(val expiryBufferMinutes: Long, synchronized { fileSystemAPI.getFileSystem(uri).filter(!_.isExpired(buffer)).recoverWith { // If no filesystem already exists, this will create a new connection, with the provided configs - case _: FileSystemNotFoundException => { + case _: FileSystemNotFoundException => logger.info(s"Creating new blob filesystem for URI $uri") generateFilesystem(uri, container, endpoint) - } - case _ : NoSuchElementException => { + case _: NoSuchElementException => // When the filesystem expires, the above filter results in a // NoSuchElementException. 
If expired, close the filesystem
          // and reopen the filesystem with the fresh token
          logger.info(s"Closing & regenerating token for existing blob filesystem at URI $uri")
          fileSystemAPI.closeFileSystem(uri)
          generateFilesystem(uri, container, endpoint)
-        }
       }
     }
   }
@@ -93,15 +100,17 @@ class BlobFileSystemManager(val expiryBufferMinutes: Long,
    * @param endpoint the endpoint containing the storage account for the container to open
    * @return a try with either the successfully created filesystem, or a failure containing the exception
    */
-  private def generateFilesystem(uri: URI, container: BlobContainerName, endpoint: EndpointURL): Try[AzureFileSystem] = {
-    blobTokenGenerator.generateBlobSasToken(endpoint, container)
-      .flatMap((token: AzureSasCredential) => {
+  private def generateFilesystem(uri: URI, container: BlobContainerName, endpoint: EndpointURL): Try[AzureFileSystem] =
+    blobTokenGenerator
+      .generateBlobSasToken(endpoint, container)
+      .flatMap { (token: AzureSasCredential) =>
         fileSystemAPI.newFileSystem(uri, BlobFileSystemManager.buildConfigMap(token, container))
-      })
-  }
+      }
 }
 
-sealed trait BlobSasTokenGenerator { def generateBlobSasToken(endpoint: EndpointURL, container: BlobContainerName): Try[AzureSasCredential] }
+sealed trait BlobSasTokenGenerator {
+  def generateBlobSasToken(endpoint: EndpointURL, container: BlobContainerName): Try[AzureSasCredential]
+}
 
 object BlobSasTokenGenerator {
 
   /**
@@ -123,16 +132,18 @@ object BlobSasTokenGenerator {
    * @return An appropriate BlobSasTokenGenerator
    */
   def createBlobTokenGeneratorFromConfig(config: BlobFileSystemConfig): BlobSasTokenGenerator =
-    config.workspaceManagerConfig.map { wsmConfig =>
-      val wsmClient: WorkspaceManagerApiClientProvider = new HttpWorkspaceManagerClientProvider(wsmConfig.url)
+    config.workspaceManagerConfig
+      .map { wsmConfig =>
+        val wsmClient: WorkspaceManagerApiClientProvider = new HttpWorkspaceManagerClientProvider(wsmConfig.url)
 
-      // WSM-mediated mediated SAS token generator
-      // parameterizing client instead of URL to make injecting mock client possible
-      BlobSasTokenGenerator.createBlobTokenGenerator(wsmClient, wsmConfig.overrideWsmAuthToken)
-    }.getOrElse(
-      // Native SAS token generator
-      BlobSasTokenGenerator.createBlobTokenGenerator(config.subscriptionId)
-    )
+        // WSM-mediated SAS token generator
+        // parameterizing client instead of URL to make injecting mock client possible
+        BlobSasTokenGenerator.createBlobTokenGenerator(wsmClient, wsmConfig.overrideWsmAuthToken)
+      }
+      .getOrElse(
+        // Native SAS token generator
+        BlobSasTokenGenerator.createBlobTokenGenerator(config.subscriptionId)
+      )
 
   /**
    * Native SAS token generator, uses the DefaultAzureCredentialBuilder in the local environment
@@ -143,9 +154,8 @@ object BlobSasTokenGenerator {
    * @return A NativeBlobTokenGenerator, able to produce a valid SAS token for accessing the provided blob
    * container and endpoint locally
    */
-  def createBlobTokenGenerator(subscription: Option[SubscriptionId]): BlobSasTokenGenerator = {
+  def createBlobTokenGenerator(subscription: Option[SubscriptionId]): BlobSasTokenGenerator =
     NativeBlobSasTokenGenerator(subscription)
-  }
 
   /**
    * WSM-mediated SAS token generator, uses the DefaultAzureCredentialBuilder in the cloud environment
@@ -160,16 +170,17 @@ object BlobSasTokenGenerator {
    * container and endpoint that is managed by WSM
    */
   def createBlobTokenGenerator(workspaceManagerClient: WorkspaceManagerApiClientProvider,
-                               overrideWsmAuthToken: Option[String]): BlobSasTokenGenerator = {
+                               overrideWsmAuthToken: Option[String]
+  
): BlobSasTokenGenerator = new WSMBlobSasTokenGenerator(workspaceManagerClient, overrideWsmAuthToken) - } } case class WSMTerraCoordinates(wsmEndpoint: String, workspaceId: UUID, containerResourceId: UUID) class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClientProvider, - overrideWsmAuthToken: Option[String]) extends BlobSasTokenGenerator { + overrideWsmAuthToken: Option[String] +) extends BlobSasTokenGenerator { /** * Generate a BlobSasToken by using the available authorization information @@ -184,7 +195,7 @@ class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClientProvi val wsmAuthToken: Try[String] = getWsmAuth container.workspaceId match { // If this is a Terra workspace, request a token from WSM - case Success(workspaceId) => { + case Success(workspaceId) => (for { wsmAuth <- wsmAuthToken wsmAzureResourceClient = wsmClientProvider.getControlledAzureResourceApi(wsmAuth) @@ -194,7 +205,6 @@ class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClientProvi // If the storage account was still not found in WSM, this may be a public filesystem case exception: ApiException if exception.getCode == 404 => Try(BlobFileSystemManager.PLACEHOLDER_TOKEN) } - } // Otherwise assume that the container is public and use a placeholder // SAS token to bypass the BlobClient authentication requirement case Failure(_) => Try(BlobFileSystemManager.PLACEHOLDER_TOKEN) @@ -205,36 +215,40 @@ class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClientProvi // Optionally provide wsmAuth to avoid acquiring it twice in generateBlobSasToken. // In the case that the resourceId is not cached and no auth is provided, this function will acquire a new auth as necessary. - private def getContainerResourceId(workspaceId: UUID, container: BlobContainerName, precomputedWsmAuth: Option[String]): Try[UUID] = { + private def getContainerResourceId(workspaceId: UUID, + container: BlobContainerName, + precomputedWsmAuth: Option[String] + ): Try[UUID] = cachedContainerResourceIds.get(container) match { - case Some(id) => Try(id) //cache hit - case _ => { //cache miss + case Some(id) => Try(id) // cache hit + case _ => // cache miss val auth: Try[String] = precomputedWsmAuth.map(auth => Try(auth)).getOrElse(getWsmAuth) val resourceId = for { wsmAuth <- auth wsmResourceApi = wsmClientProvider.getResourceApi(wsmAuth) resourceId <- wsmResourceApi.findContainerResourceId(workspaceId, container) } yield resourceId - resourceId.map(id => cachedContainerResourceIds.put(container, id)) //NB: Modifying cache state here. + resourceId.map(id => cachedContainerResourceIds.put(container, id)) // NB: Modifying cache state here. cachedContainerResourceIds.get(container) match { case Some(uuid) => Try(uuid) case _ => Failure(new NoSuchElementException("Could not retrieve container resource ID from WSM")) } - } } - } - private def getWsmAuth: Try[String] = { + private def getWsmAuth: Try[String] = overrideWsmAuthToken match { case Some(t) => Success(t) case None => AzureCredentials.getAccessToken(None).toTry } - } - private def parseTerraWorkspaceIdFromPath(blobPath: BlobPath): Try[UUID] = { + private def parseTerraWorkspaceIdFromPath(blobPath: BlobPath): Try[UUID] = if (blobPath.container.value.startsWith("sc-")) Try(UUID.fromString(blobPath.container.value.substring(3))) - else Failure(new Exception("Could not parse workspace ID from storage container. 
Are you sure this is a file in a Terra Workspace?")) - } + else + Failure( + new Exception( + "Could not parse workspace ID from storage container. Are you sure this is a file in a Terra Workspace?" + ) + ) /** * Return a REST endpoint that will reply with a sas token for the blob storage container associated with the provided blob path. @@ -245,13 +259,14 @@ class WSMBlobSasTokenGenerator(wsmClientProvider: WorkspaceManagerApiClientProvi */ def getWSMSasFetchEndpoint(blobPath: BlobPath, tokenDuration: Option[Duration] = None): Try[String] = { val wsmEndpoint = wsmClientProvider.getBaseWorkspaceManagerUrl - val lifetimeQueryParameters: String = tokenDuration.map(d => s"?sasExpirationDuration=${d.toSeconds.intValue}").getOrElse("") + val lifetimeQueryParameters: String = + tokenDuration.map(d => s"?sasExpirationDuration=${d.toSeconds.intValue}").getOrElse("") val terraInfo: Try[WSMTerraCoordinates] = for { workspaceId <- parseTerraWorkspaceIdFromPath(blobPath) containerResourceId <- getContainerResourceId(workspaceId, blobPath.container, None) coordinates = WSMTerraCoordinates(wsmEndpoint, workspaceId, containerResourceId) } yield coordinates - terraInfo.map{terraCoordinates => + terraInfo.map { terraCoordinates => s"${terraCoordinates.wsmEndpoint}/api/workspaces/v1/${terraCoordinates.workspaceId.toString}/resources/controlled/azure/storageContainer/${terraCoordinates.containerResourceId.toString}/getSasToken${lifetimeQueryParameters}" } } @@ -273,11 +288,14 @@ case class NativeBlobSasTokenGenerator(subscription: Option[SubscriptionId] = No * @return an AzureSasCredential for accessing a blob container */ def generateBlobSasToken(endpoint: EndpointURL, container: BlobContainerName): Try[AzureSasCredential] = { - val c = AzureUtils.buildContainerClientFromLocalEnvironment(container.toString, endpoint.toString, subscription.map(_.toString)) + val c = AzureUtils.buildContainerClientFromLocalEnvironment(container.toString, + endpoint.toString, + subscription.map(_.toString) + ) c.map { bcc => - val bsssv = new BlobServiceSasSignatureValues(OffsetDateTime.now.plusDays(1), bcsp) - new AzureSasCredential(bcc.generateSas(bsssv)) - }.orElse(Try(BlobFileSystemManager.PLACEHOLDER_TOKEN)) + val bsssv = new BlobServiceSasSignatureValues(OffsetDateTime.now.plusDays(1), bcsp) + new AzureSasCredential(bcc.generateSas(bsssv)) + }.orElse(Try(BlobFileSystemManager.PLACEHOLDER_TOKEN)) } } diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala index 3acb99857e0..22bf6b44742 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala @@ -17,11 +17,17 @@ object BlobPathBuilder { case class ValidBlobPath(path: String, container: BlobContainerName, endpoint: EndpointURL) extends BlobPathValidation case class UnparsableBlobPath(errorMessage: Throwable) extends BlobPathValidation - def invalidBlobHostMessage(endpoint: EndpointURL) = s"Malformed Blob URL for this builder: The endpoint $endpoint doesn't contain the expected host string '{SA}.blob.core.windows.net/'" - def invalidBlobContainerMessage(endpoint: EndpointURL) = s"Malformed Blob URL for this builder: Could not parse container" + def invalidBlobHostMessage(endpoint: EndpointURL) = + s"Malformed Blob URL for this builder: The endpoint $endpoint doesn't contain the expected host string '{SA}.blob.core.windows.net/'" + def 
invalidBlobContainerMessage(endpoint: EndpointURL) = + s"Malformed Blob URL for this builder: Could not parse container" def parseURI(string: String): Try[URI] = Try(URI.create(UrlEscapers.urlFragmentEscaper().escape(string))) - def parseStorageAccount(uri: URI): Try[StorageAccountName] = uri.getHost.split("\\.").find(_.nonEmpty).map(StorageAccountName(_)) - .map(Success(_)).getOrElse(Failure(new Exception("Could not parse storage account"))) + def parseStorageAccount(uri: URI): Try[StorageAccountName] = uri.getHost + .split("\\.") + .find(_.nonEmpty) + .map(StorageAccountName(_)) + .map(Success(_)) + .getOrElse(Failure(new Exception("Could not parse storage account"))) /** * Validates a that a path from a string is a valid BlobPath of the format: @@ -41,7 +47,7 @@ object BlobPathBuilder { * * If the configured container and storage account do not match, the string is considered unparsable */ - def validateBlobPath(string: String): BlobPathValidation = { + def validateBlobPath(string: String): BlobPathValidation = { val blobValidation = for { testUri <- parseURI(string) testEndpoint = EndpointURL(testUri.getScheme + "://" + testUri.getHost()) @@ -49,10 +55,8 @@ object BlobPathBuilder { testContainer = testUri.getPath.split("/").find(_.nonEmpty) isBlobHost = testUri.getHost().contains(blobHostnameSuffix) && testUri.getScheme().contains("https") blobPathValidation = (isBlobHost, testContainer) match { - case (true, Some(container)) => ValidBlobPath( - testUri.getPath.replaceFirst("/" + container, ""), - BlobContainerName(container), - testEndpoint) + case (true, Some(container)) => + ValidBlobPath(testUri.getPath.replaceFirst("/" + container, ""), BlobContainerName(container), testEndpoint) case (false, _) => UnparsableBlobPath(new MalformedURLException(invalidBlobHostMessage(testEndpoint))) case (true, None) => UnparsableBlobPath(new MalformedURLException(invalidBlobContainerMessage(testEndpoint))) } @@ -63,12 +67,11 @@ object BlobPathBuilder { class BlobPathBuilder()(private val fsm: BlobFileSystemManager) extends PathBuilder { - def build(string: String): Try[BlobPath] = { + def build(string: String): Try[BlobPath] = validateBlobPath(string) match { case ValidBlobPath(path, container, endpoint) => Try(BlobPath(path, endpoint, container)(fsm)) case UnparsableBlobPath(errorMessage: Throwable) => Failure(errorMessage) } - } override def name: String = "Azure Blob Storage" } @@ -103,33 +106,36 @@ object BlobPath { s"${containerName}:/${pathInContainer}" case _ => nioString } - pathStr.substring(pathStr.indexOf(":")+1) + pathStr.substring(pathStr.indexOf(":") + 1) } def apply(nioPath: NioPath, endpoint: EndpointURL, container: BlobContainerName, - fsm: BlobFileSystemManager): BlobPath = { + fsm: BlobFileSystemManager + ): BlobPath = BlobPath(cleanedNioPathString(nioPath.toString), endpoint, container)(fsm) - } } -case class BlobPath private[blob](pathString: String, endpoint: EndpointURL, container: BlobContainerName)(private val fsm: BlobFileSystemManager) extends Path { +case class BlobPath private[blob] (pathString: String, endpoint: EndpointURL, container: BlobContainerName)( + private val fsm: BlobFileSystemManager +) extends Path { override def nioPath: NioPath = findNioPath(pathString) override protected def newPath(nioPath: NioPath): Path = BlobPath(nioPath, endpoint, container, fsm) override def pathAsString: String = List(endpoint, container, pathString.stripPrefix("/")).mkString("/") - //This is purposefully an unprotected get because if the endpoint cannot be parsed this should 
fail loudly rather than quietly
-  override def pathWithoutScheme: String = parseURI(endpoint.value).map(u => List(u.getHost, container, pathString.stripPrefix("/")).mkString("/")).get
+  // This is purposefully an unprotected get because if the endpoint cannot be parsed this should fail loudly rather than quietly
+  override def pathWithoutScheme: String =
+    parseURI(endpoint.value).map(u => List(u.getHost, container, pathString.stripPrefix("/")).mkString("/")).get
 
   private def findNioPath(path: String): NioPath = (for {
     fileSystem <- fsm.retrieveFilesystem(endpoint, container)
     // The Azure NIO library uses `{container}:` to represent the root of the path
     nioPath = fileSystem.getPath(s"${container.value}:", path)
-    // This is purposefully an unprotected get because the NIO API needing an unwrapped path object.
-    // If an error occurs the api expects a thrown exception
+      // This is purposefully an unprotected get because the NIO API needs an unwrapped path object.
+      // If an error occurs the api expects a thrown exception
   } yield nioPath).get
 
   def blobFileAttributes: Try[AzureBlobFileAttributes] =
@@ -168,15 +174,13 @@ case class BlobPath private[blob](pathString: String, endpoint: EndpointURL, con
    * Return the pathString of this BlobPath, with the given prefix removed if this path shares that
    * prefix.
    */
-  def pathStringWithoutPrefix(prefix: Path): String = {
+  def pathStringWithoutPrefix(prefix: Path): String =
     if (this.startsWith(prefix)) {
       prefix.relativize(this) match {
         case b: BlobPath => b.pathString // path inside the blob container
        case p: Path => p.pathAsString // full path
       }
-    }
-    else pathString
-  }
+    } else pathString
 
   /**
    * Returns the path relative to the container root.
@@ -184,10 +188,10 @@ case class BlobPath private[blob](pathString: String, endpoint: EndpointURL, con
    * will be returned as path/to/my/file.
   * @return Path string relative to the container root. 
    
*/ - def pathWithoutContainer : String = pathString + def pathWithoutContainer: String = pathString def getFilesystemManager: BlobFileSystemManager = fsm - override def getSymlinkSafePath(options: LinkOption*): Path = toAbsolutePath + override def getSymlinkSafePath(options: LinkOption*): Path = toAbsolutePath } diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala index 47245552dc2..2dafa6ab5d2 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala @@ -10,17 +10,16 @@ import java.util.UUID import scala.concurrent.{ExecutionContext, Future} import scala.util.Try -final case class SubscriptionId(value: UUID) {override def toString: String = value.toString} +final case class SubscriptionId(value: UUID) { override def toString: String = value.toString } final case class BlobContainerName(value: String) { override def toString: String = value - lazy val workspaceId: Try[UUID] = { - Try(UUID.fromString(value.replaceFirst("sc-",""))) - } + lazy val workspaceId: Try[UUID] = + Try(UUID.fromString(value.replaceFirst("sc-", ""))) } -final case class StorageAccountName(value: String) {override def toString: String = value} +final case class StorageAccountName(value: String) { override def toString: String = value } final case class EndpointURL(value: String) { override def toString: String = value - lazy val storageAccountName : Try[StorageAccountName] = { + lazy val storageAccountName: Try[StorageAccountName] = { val sa = for { host <- value.split("//").findLast(_.nonEmpty) storageAccountName <- host.split("\\.").find(_.nonEmpty) @@ -28,17 +27,19 @@ final case class EndpointURL(value: String) { sa.toRight(new Exception(s"Storage account name could not be parsed from $value")).toTry } } -final case class WorkspaceId(value: UUID) {override def toString: String = value.toString} -final case class ContainerResourceId(value: UUID) {override def toString: String = value.toString} -final case class WorkspaceManagerURL(value: String) {override def toString: String = value} +final case class WorkspaceId(value: UUID) { override def toString: String = value.toString } +final case class ContainerResourceId(value: UUID) { override def toString: String = value.toString } +final case class WorkspaceManagerURL(value: String) { override def toString: String = value } -final case class BlobPathBuilderFactory(globalConfig: Config, instanceConfig: Config, fsm: BlobFileSystemManager) extends PathBuilderFactory { +final case class BlobPathBuilderFactory(globalConfig: Config, instanceConfig: Config, fsm: BlobFileSystemManager) + extends PathBuilderFactory { - override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[BlobPathBuilder] = { + override def withOptions( + options: WorkflowOptions + )(implicit as: ActorSystem, ec: ExecutionContext): Future[BlobPathBuilder] = Future { new BlobPathBuilder()(fsm) } - } override def priority: Int = PriorityBlob } diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala index 490d0fcc704..8698dc7f859 100644 --- a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala +++ 
b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/WorkspaceManagerApiClientProvider.scala @@ -23,7 +23,8 @@ trait WorkspaceManagerApiClientProvider { def getBaseWorkspaceManagerUrl: String } -class HttpWorkspaceManagerClientProvider(baseWorkspaceManagerUrl: WorkspaceManagerURL) extends WorkspaceManagerApiClientProvider { +class HttpWorkspaceManagerClientProvider(baseWorkspaceManagerUrl: WorkspaceManagerURL) + extends WorkspaceManagerApiClientProvider { private def getApiClient: ApiClient = { val client: ApiClient = new ApiClient() client.setBasePath(baseWorkspaceManagerUrl.value) @@ -44,27 +45,37 @@ class HttpWorkspaceManagerClientProvider(baseWorkspaceManagerUrl: WorkspaceManag def getBaseWorkspaceManagerUrl: String = baseWorkspaceManagerUrl.value } -case class WsmResourceApi(resourcesApi : ResourceApi) { - def findContainerResourceId(workspaceId : UUID, container: BlobContainerName): Try[UUID] = { +case class WsmResourceApi(resourcesApi: ResourceApi) { + def findContainerResourceId(workspaceId: UUID, container: BlobContainerName): Try[UUID] = for { - workspaceResources <- Try(resourcesApi.enumerateResources(workspaceId, 0, 10, ResourceType.AZURE_STORAGE_CONTAINER, StewardshipType.CONTROLLED).getResources()) - workspaceStorageContainerOption = workspaceResources.asScala.find(r => r.getMetadata().getName() == container.value) - workspaceStorageContainer <- workspaceStorageContainerOption.toRight(new Exception("No storage container found for this workspace")).toTry + workspaceResources <- Try( + resourcesApi + .enumerateResources(workspaceId, 0, 10, ResourceType.AZURE_STORAGE_CONTAINER, StewardshipType.CONTROLLED) + .getResources() + ) + workspaceStorageContainerOption = workspaceResources.asScala.find(r => + r.getMetadata().getName() == container.value + ) + workspaceStorageContainer <- workspaceStorageContainerOption + .toRight(new Exception("No storage container found for this workspace")) + .toTry resourceId = workspaceStorageContainer.getMetadata().getResourceId() } yield resourceId - } } -case class WsmControlledAzureResourceApi(controlledAzureResourceApi : ControlledAzureResourceApi) { - def createAzureStorageContainerSasToken(workspaceId: UUID, resourceId: UUID): Try[AzureSasCredential] = { +case class WsmControlledAzureResourceApi(controlledAzureResourceApi: ControlledAzureResourceApi) { + def createAzureStorageContainerSasToken(workspaceId: UUID, resourceId: UUID): Try[AzureSasCredential] = for { - sas <- Try(controlledAzureResourceApi.createAzureStorageContainerSasToken( - workspaceId, - resourceId, - null, - null, - null, - null - ).getToken) + sas <- Try( + controlledAzureResourceApi + .createAzureStorageContainerSasToken( + workspaceId, + resourceId, + null, + null, + null, + null + ) + .getToken + ) } yield new AzureSasCredential(sas) - } } diff --git a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/AzureFileSystemSpec.scala b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/AzureFileSystemSpec.scala index 9b8362ced80..0626a008de0 100644 --- a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/AzureFileSystemSpec.scala +++ b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/AzureFileSystemSpec.scala @@ -23,34 +23,36 @@ class AzureFileSystemSpec extends AnyFlatSpec with Matchers { val combinedEndpoint = BlobFileSystemManager.combinedEnpointContainerUri(storageEndpoint, container) val provider = new AzureFileSystemProvider() - provider.newFileSystem( - combinedEndpoint, - BlobFileSystemManager.buildConfigMap(creds, container).asJava 
- ).asInstanceOf[AzureFileSystem] + provider + .newFileSystem( + combinedEndpoint, + BlobFileSystemManager.buildConfigMap(creds, container).asJava + ) + .asInstanceOf[AzureFileSystem] } it should "parse an expiration from a sas token" in { val now = Instant.now() - val filesystem : AzureFileSystem = makeFilesystemWithExpiration(now) + val filesystem: AzureFileSystem = makeFilesystemWithExpiration(now) filesystem.getExpiry.asScala shouldBe Some(now) filesystem.getFileStores.asScala.map(_.name()).exists(_ == "testContainer") shouldBe true } it should "not be expired when the token is fresh" in { val anHourFromNow = Instant.now().plusSeconds(3600) - val filesystem : AzureFileSystem = makeFilesystemWithExpiration(anHourFromNow) + val filesystem: AzureFileSystem = makeFilesystemWithExpiration(anHourFromNow) filesystem.isExpired(fiveMinutes) shouldBe false } it should "be expired when we're within the buffer" in { val threeMinutesFromNow = Instant.now().plusSeconds(180) - val filesystem : AzureFileSystem = makeFilesystemWithExpiration(threeMinutesFromNow) + val filesystem: AzureFileSystem = makeFilesystemWithExpiration(threeMinutesFromNow) filesystem.isExpired(fiveMinutes) shouldBe true } it should "be expired when the token is stale" in { val anHourAgo = Instant.now().minusSeconds(3600) - val filesystem : AzureFileSystem = makeFilesystemWithExpiration(anHourAgo) + val filesystem: AzureFileSystem = makeFilesystemWithExpiration(anHourAgo) filesystem.isExpired(fiveMinutes) shouldBe true } diff --git a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobFileSystemConfigSpec.scala b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobFileSystemConfigSpec.scala index 68804113763..30d580a6e49 100644 --- a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobFileSystemConfigSpec.scala +++ b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobFileSystemConfigSpec.scala @@ -12,8 +12,7 @@ class BlobFileSystemConfigSpec extends AnyFlatSpec with Matchers { it should "parse configs for a minimal functioning factory with native blob access" in { val config = BlobFileSystemConfig( - ConfigFactory.parseString( - s""" + ConfigFactory.parseString(s""" """.stripMargin) ) config.expiryBufferMinutes should equal(BlobFileSystemConfig.defaultExpiryBufferMinutes) @@ -21,14 +20,13 @@ class BlobFileSystemConfigSpec extends AnyFlatSpec with Matchers { it should "parse configs for a functioning factory with WSM-mediated blob access" in { val config = BlobFileSystemConfig( - ConfigFactory.parseString( - s""" - |expiry-buffer-minutes = "20" - |workspace-manager { - | url = "$workspaceManagerURL" - | b2cToken = "$b2cToken" - |} - | + ConfigFactory.parseString(s""" + |expiry-buffer-minutes = "20" + |workspace-manager { + | url = "$workspaceManagerURL" + | b2cToken = "$b2cToken" + |} + | """.stripMargin) ) config.expiryBufferMinutes should equal(20L) @@ -39,13 +37,12 @@ class BlobFileSystemConfigSpec extends AnyFlatSpec with Matchers { it should "fail when partial WSM config is supplied" in { val rawConfig = - ConfigFactory.parseString( - s""" - |expiry-buffer-minutes = "10" - |workspace-manager { - | b2cToken = "$b2cToken" - |} - | + ConfigFactory.parseString(s""" + |expiry-buffer-minutes = "10" + |workspace-manager { + | b2cToken = "$b2cToken" + |} + | """.stripMargin) val error = intercept[AggregatedMessageException](BlobFileSystemConfig(rawConfig)) diff --git a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderFactorySpec.scala 
b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderFactorySpec.scala index 24783c15780..2b46a8b80b8 100644 --- a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderFactorySpec.scala +++ b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderFactorySpec.scala @@ -14,12 +14,11 @@ import java.time.{Duration, Instant, ZoneId} import java.util.UUID import scala.util.{Failure, Success, Try} - object BlobPathBuilderFactorySpec { def buildExampleSasToken(expiry: Instant): AzureSasCredential = { val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd").withZone(ZoneId.systemDefault()) val sv = formatter.format(expiry) - val se = expiry.toString().replace(":","%3A") + val se = expiry.toString().replace(":", "%3A") new AzureSasCredential(s"sv=$sv&se=$se&sr=c&sp=rcl") } } @@ -47,15 +46,15 @@ class BlobPathBuilderFactorySpec extends AnyFlatSpec with Matchers with MockSuga it should "test retrieveFileSystem with expired Terra filesystem" in { val endpoint = BlobPathBuilderSpec.buildEndpoint("storageAccount") - //val expiredToken = generateTokenExpiration(9L) + // val expiredToken = generateTokenExpiration(9L) val refreshedToken = generateTokenExpiration(69L) val sasToken = BlobPathBuilderFactorySpec.buildExampleSasToken(refreshedToken) val container = BlobContainerName("sc-" + UUID.randomUUID().toString()) val configMap = BlobFileSystemManager.buildConfigMap(sasToken, container) val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint, container) - //Mocking this final class requires the plugin Mock Maker Inline plugin, configured here - //at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker + // Mocking this final class requires the plugin Mock Maker Inline plugin, configured here + // at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker val azureFileSystem = mock[AzureFileSystem] when(azureFileSystem.isExpired(Duration.ofMinutes(10L))).thenReturn(true) val fileSystems = mock[AzureFileSystemAPI] @@ -73,15 +72,15 @@ class BlobPathBuilderFactorySpec extends AnyFlatSpec with Matchers with MockSuga it should "test retrieveFileSystem with an unexpired Terra fileSystem" in { val endpoint = BlobPathBuilderSpec.buildEndpoint("storageAccount") - //val initialToken = generateTokenExpiration(11L) + // val initialToken = generateTokenExpiration(11L) val refreshedToken = generateTokenExpiration(71L) val sasToken = BlobPathBuilderFactorySpec.buildExampleSasToken(refreshedToken) val container = BlobContainerName("sc-" + UUID.randomUUID().toString()) val configMap = BlobFileSystemManager.buildConfigMap(sasToken, container) - val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint,container) + val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint, container) - //Mocking this final class requires the plugin Mock Maker Inline plugin, configured here - //at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker + // Mocking this final class requires the plugin Mock Maker Inline plugin, configured here + // at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker val azureFileSystem = mock[AzureFileSystem] when(azureFileSystem.isExpired(Duration.ofMinutes(10L))).thenReturn(false) val fileSystems = mock[AzureFileSystemAPI] @@ -106,8 +105,8 @@ class BlobPathBuilderFactorySpec extends AnyFlatSpec with Matchers with MockSuga val configMap = 
BlobFileSystemManager.buildConfigMap(sasToken, container) val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint, container) - //Mocking this final class requires the plugin Mock Maker Inline plugin, configured here - //at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker + // Mocking this final class requires the plugin Mock Maker Inline plugin, configured here + // at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker val azureFileSystem = mock[AzureFileSystem] when(azureFileSystem.isExpired(Duration.ofMinutes(10L))).thenReturn(false) val fileSystems = mock[AzureFileSystemAPI] @@ -131,8 +130,8 @@ class BlobPathBuilderFactorySpec extends AnyFlatSpec with Matchers with MockSuga val configMap = BlobFileSystemManager.buildConfigMap(sasToken, container) val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint, container) - //Mocking this final class requires the plugin Mock Maker Inline plugin, configured here - //at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker + // Mocking this final class requires the plugin Mock Maker Inline plugin, configured here + // at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker val azureFileSystem = mock[AzureFileSystem] when(azureFileSystem.isExpired(Duration.ofMinutes(10L))).thenReturn(true) val fileSystems = mock[AzureFileSystemAPI] @@ -153,10 +152,10 @@ class BlobPathBuilderFactorySpec extends AnyFlatSpec with Matchers with MockSuga val sasToken = BlobFileSystemManager.PLACEHOLDER_TOKEN val container = BlobContainerName("sc-" + UUID.randomUUID().toString()) val configMap = BlobFileSystemManager.buildConfigMap(sasToken, container) - val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint,container) + val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint, container) - //Mocking this final class requires the plugin Mock Maker Inline plugin, configured here - //at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker + // Mocking this final class requires the plugin Mock Maker Inline plugin, configured here + // at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker val azureFileSystem = mock[AzureFileSystem] when(azureFileSystem.isExpired(Duration.ofMinutes(10L))).thenReturn(false) val fileSystems = mock[AzureFileSystemAPI] @@ -180,8 +179,8 @@ class BlobPathBuilderFactorySpec extends AnyFlatSpec with Matchers with MockSuga val configMap = BlobFileSystemManager.buildConfigMap(sasToken, container) val azureUri = BlobFileSystemManager.combinedEnpointContainerUri(endpoint, container) - //Mocking this final class requires the plugin Mock Maker Inline plugin, configured here - //at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker + // Mocking this final class requires the plugin Mock Maker Inline plugin, configured here + // at filesystems/blob/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker val azureFileSystem = mock[AzureFileSystem] when(azureFileSystem.isExpired(Duration.ofMinutes(10L))).thenReturn(false) val fileSystems = mock[AzureFileSystemAPI] diff --git a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala index a8ca7d58d6f..fbff8e47431 100644 --- 
a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala +++ b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala @@ -19,11 +19,10 @@ class BlobPathBuilderSpec extends AnyFlatSpec with Matchers with MockSugar { val evalPath = "/path/to/file" val testString = endpoint.value + "/" + container + evalPath BlobPathBuilder.validateBlobPath(testString) match { - case BlobPathBuilder.ValidBlobPath(path, parsedContainer, parsedEndpoint) => { + case BlobPathBuilder.ValidBlobPath(path, parsedContainer, parsedEndpoint) => path should equal(evalPath) parsedContainer should equal(container) parsedEndpoint should equal(endpoint) - } case BlobPathBuilder.UnparsableBlobPath(errorMessage) => fail(errorMessage) } } @@ -119,9 +118,14 @@ class BlobPathBuilderSpec extends AnyFlatSpec with Matchers with MockSugar { val builder = makeBlobPathBuilder() val rootString = s"${endpoint.value}/${container.value}/cromwell-execution" val blobRoot: BlobPath = builder build rootString getOrElse fail() - blobRoot.toAbsolutePath.pathAsString should equal ("https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution") - val otherFile = blobRoot.resolve("https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution/test/inputFile.txt") - otherFile.toAbsolutePath.pathAsString should equal ("https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution/test/inputFile.txt") + blobRoot.toAbsolutePath.pathAsString should equal( + "https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution" + ) + val otherFile = + blobRoot.resolve("https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution/test/inputFile.txt") + otherFile.toAbsolutePath.pathAsString should equal( + "https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution/test/inputFile.txt" + ) } it should "build a blob path from a test string and read a file" in { @@ -136,8 +140,8 @@ class BlobPathBuilderSpec extends AnyFlatSpec with Matchers with MockSugar { blobPath.pathAsString should equal(testString) blobPath.pathWithoutScheme should equal(endpointHost + "/" + container + evalPath) val is = blobPath.newInputStream() - val fileText = (is.readAllBytes.map(_.toChar)).mkString - fileText should include ("This is my test file!!!! Did it work?") + val fileText = is.readAllBytes.map(_.toChar).mkString + fileText should include("This is my test file!!!! Did it work?") } it should "build duplicate blob paths in the same filesystem" in { @@ -149,38 +153,47 @@ class BlobPathBuilderSpec extends AnyFlatSpec with Matchers with MockSugar { val blobPath2: BlobPath = builder build testString getOrElse fail() blobPath1 should equal(blobPath2) val is = blobPath1.newInputStream() - val fileText = (is.readAllBytes.map(_.toChar)).mkString - fileText should include ("This is my test file!!!! Did it work?") + val fileText = is.readAllBytes.map(_.toChar).mkString + fileText should include("This is my test file!!!! 
Did it work?") } it should "resolve a path without duplicating container name" in { val builder = makeBlobPathBuilder() val rootString = s"${endpoint.value}/${container.value}/cromwell-execution" val blobRoot: BlobPath = builder build rootString getOrElse fail() - blobRoot.toAbsolutePath.pathAsString should equal ("https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution") + blobRoot.toAbsolutePath.pathAsString should equal( + "https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution" + ) val otherFile = blobRoot.resolve("test/inputFile.txt") - otherFile.toAbsolutePath.pathAsString should equal ("https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution/test/inputFile.txt") + otherFile.toAbsolutePath.pathAsString should equal( + "https://centaurtesting.blob.core.windows.net/test-blob/cromwell-execution/test/inputFile.txt" + ) } it should "correctly remove a prefix from the blob path" in { val builder = makeBlobPathBuilder() val rootString = s"${endpoint.value}/${container.value}/cromwell-execution/" - val execDirString = s"${endpoint.value}/${container.value}/cromwell-execution/abc123/myworkflow/task1/def4356/execution/" - val fileString = s"${endpoint.value}/${container.value}/cromwell-execution/abc123/myworkflow/task1/def4356/execution/stdout" + val execDirString = + s"${endpoint.value}/${container.value}/cromwell-execution/abc123/myworkflow/task1/def4356/execution/" + val fileString = + s"${endpoint.value}/${container.value}/cromwell-execution/abc123/myworkflow/task1/def4356/execution/stdout" val blobRoot: BlobPath = builder build rootString getOrElse fail() val execDir: BlobPath = builder build execDirString getOrElse fail() val blobFile: BlobPath = builder build fileString getOrElse fail() - blobFile.pathStringWithoutPrefix(blobRoot) should equal ("abc123/myworkflow/task1/def4356/execution/stdout") - blobFile.pathStringWithoutPrefix(execDir) should equal ("stdout") - blobFile.pathStringWithoutPrefix(blobFile) should equal ("") + blobFile.pathStringWithoutPrefix(blobRoot) should equal("abc123/myworkflow/task1/def4356/execution/stdout") + blobFile.pathStringWithoutPrefix(execDir) should equal("stdout") + blobFile.pathStringWithoutPrefix(blobFile) should equal("") } it should "not change a path if it doesn't start with a prefix" in { val builder = makeBlobPathBuilder() val otherRootString = s"${endpoint.value}/${container.value}/foobar/" - val fileString = s"${endpoint.value}/${container.value}/cromwell-execution/abc123/myworkflow/task1/def4356/execution/stdout" + val fileString = + s"${endpoint.value}/${container.value}/cromwell-execution/abc123/myworkflow/task1/def4356/execution/stdout" val otherBlobRoot: BlobPath = builder build otherRootString getOrElse fail() val blobFile: BlobPath = builder build fileString getOrElse fail() - blobFile.pathStringWithoutPrefix(otherBlobRoot) should equal ("/cromwell-execution/abc123/myworkflow/task1/def4356/execution/stdout") + blobFile.pathStringWithoutPrefix(otherBlobRoot) should equal( + "/cromwell-execution/abc123/myworkflow/task1/def4356/execution/stdout" + ) } } diff --git a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPath.scala b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPath.scala index 5856e41f97b..59b056f7247 100644 --- a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPath.scala +++ b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPath.scala @@ -6,14 +6,12 @@ import cromwell.core.path.{NioPath, Path} import java.io.IOException - case 
class DrsPath(drsPath: CloudNioPath, requesterPaysProjectIdOption: Option[String]) extends Path { override def nioPath: NioPath = drsPath - override protected def newPath(nioPath: NioPath): Path = { + override protected def newPath(nioPath: NioPath): Path = DrsPath(nioPath.asInstanceOf[CloudNioPath], requesterPaysProjectIdOption) - } override def pathAsString: String = drsPath.cloudHost @@ -28,9 +26,15 @@ case class DrsPath(drsPath: CloudNioPath, requesterPaysProjectIdOption: Option[S case Some(fileAttributes) => fileAttributes.fileHash match { case Some(fileHash) => fileHash - case None => throw new IOException(s"Error while resolving DRS path $this. The response from DRS Resolver doesn't contain the 'md5' hash for the file.") + case None => + throw new IOException( + s"Error while resolving DRS path $this. The response from DRS Resolver doesn't contain the 'md5' hash for the file." + ) } - case None => throw new IOException(s"Error getting file hash of DRS path $this. Reason: File attributes class DrsCloudNioRegularFileAttributes wasn't defined in DrsCloudNioFileProvider.") + case None => + throw new IOException( + s"Error getting file hash of DRS path $this. Reason: File attributes class DrsCloudNioRegularFileAttributes wasn't defined in DrsCloudNioFileProvider." + ) } } } diff --git a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala index b464772eec8..4b0b6c9758f 100644 --- a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala +++ b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala @@ -9,20 +9,20 @@ import scala.util.{Failure, Success, Try} case class DrsPathBuilder(fileSystemProvider: DrsCloudNioFileSystemProvider, requesterPaysProjectIdOption: Option[String], - preResolve: Boolean = false, - ) extends PreResolvePathBuilder with StrictLogging { + preResolve: Boolean = false +) extends PreResolvePathBuilder + with StrictLogging { private val drsScheme: String = fileSystemProvider.getScheme override def name: String = "DRS" - override def build(pathAsString: String, pathBuilders: PathBuilders): Try[Path] = { + override def build(pathAsString: String, pathBuilders: PathBuilders): Try[Path] = if (pathAsString.startsWith(s"$drsScheme://")) { Try(createDrsOrOtherPath(pathAsString, pathBuilders)) } else { Failure(new IllegalArgumentException(s"$pathAsString does not have a $drsScheme scheme.")) } - } private def createDrsOrOtherPath(pathAsString: String, pathBuilders: PathBuilders): Path = { def drsPath = DrsPath(fileSystemProvider.getCloudNioPath(pathAsString), requesterPaysProjectIdOption) @@ -35,17 +35,15 @@ case class DrsPathBuilder(fileSystemProvider: DrsCloudNioFileSystemProvider, private def maybeCreateOtherPath(pathAsString: String, pathBuilders: PathBuilders): Option[Path] = { - def logAttempt[A](description: String, attempt: => A): Option[A] = { + def logAttempt[A](description: String, attempt: => A): Option[A] = logTry(description, Try(attempt)) - } - def logTry[A](description: String, tried: Try[A]): Option[A] = { + def logTry[A](description: String, tried: Try[A]): Option[A] = tried match { case Success(result) => Option(result) case Failure(exception) => logFailure(description, exception) } - } def logFailure(description: String, reason: Any): None.type = { logger.debug(s"Unable to $description, will use a DrsPath to access '$pathAsString': $reason") @@ -66,7 +64,7 @@ case class DrsPathBuilder(fileSystemProvider: 
DrsCloudNioFileSystemProvider, gcsUrlWithNoCreds <- gsUriOption gcsPath <- logAttempt( s"create a GcsPath for '$gcsUrlWithNoCreds'", - PathFactory.buildPath(gcsUrlWithNoCreds, pathBuilders), + PathFactory.buildPath(gcsUrlWithNoCreds, pathBuilders) ) // Extra: Make sure the GcsPath _actually_ has permission to access the path _ <- logAttempt(s"access '$gcsPath' with GCS credentials", gcsPath.size) diff --git a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilderFactory.scala b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilderFactory.scala index 873874912a0..b39dde25358 100644 --- a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilderFactory.scala +++ b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilderFactory.scala @@ -17,8 +17,8 @@ import scala.concurrent.{ExecutionContext, Future} */ class DrsFileSystemConfig(val config: Config) - -class DrsPathBuilderFactory(globalConfig: Config, instanceConfig: Config, singletonConfig: DrsFileSystemConfig) extends PathBuilderFactory { +class DrsPathBuilderFactory(globalConfig: Config, instanceConfig: Config, singletonConfig: DrsFileSystemConfig) + extends PathBuilderFactory { private lazy val googleConfiguration: GoogleConfiguration = GoogleConfiguration(globalConfig) private lazy val scheme = instanceConfig.getString("auth") @@ -26,7 +26,9 @@ class DrsPathBuilderFactory(globalConfig: Config, instanceConfig: Config, single // For Azure support - this should be the UAMI client id private val dataAccessIdentityKey = "data_access_identity" - override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = { + override def withOptions( + options: WorkflowOptions + )(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = Future { val drsResolverScopes = List( // Profile and Email scopes are requirements for interacting with DRS Resolvers @@ -36,13 +38,20 @@ class DrsPathBuilderFactory(globalConfig: Config, instanceConfig: Config, single val (googleAuthMode, drsCredentials) = scheme match { case "azure" => (None, AzureDrsCredentials(options.get(dataAccessIdentityKey).toOption)) - case googleAuthScheme => googleConfiguration.auth(googleAuthScheme) match { - case Valid(auth) => ( - Option(auth), - GoogleOauthDrsCredentials(auth.credentials(options.get(_).get, drsResolverScopes), singletonConfig.config) - ) - case Invalid(error) => throw new RuntimeException(s"Error while instantiating DRS path builder factory. Errors: ${error.toString}") - } + case googleAuthScheme => + googleConfiguration.auth(googleAuthScheme) match { + case Valid(auth) => + ( + Option(auth), + GoogleOauthDrsCredentials(auth.credentials(options.get(_).get, drsResolverScopes), + singletonConfig.config + ) + ) + case Invalid(error) => + throw new RuntimeException( + s"Error while instantiating DRS path builder factory. Errors: ${error.toString}" + ) + } } // Unlike PAPI we're not going to fall back to a "default" project from the backend config. 
@@ -58,8 +67,7 @@ class DrsPathBuilderFactory(globalConfig: Config, instanceConfig: Config, single .getBoolean("override_preresolve_for_test") .toOption .getOrElse( - singletonConfig - .config + singletonConfig.config .getBoolean("resolver.preresolve") ) @@ -67,15 +75,15 @@ class DrsPathBuilderFactory(globalConfig: Config, instanceConfig: Config, single new DrsCloudNioFileSystemProvider( singletonConfig.config, drsCredentials, - DrsReader.readInterpreter(googleAuthMode, options, requesterPaysProjectIdOption), + DrsReader.readInterpreter(googleAuthMode, options, requesterPaysProjectIdOption) ), requesterPaysProjectIdOption, - preResolve, + preResolve ) } - } } case class UrlNotFoundException(scheme: String) extends Exception(s"No $scheme url associated with given DRS path.") -case class DrsResolverResponseMissingKeyException(missingKey: String) extends Exception(s"The response from the DRS Resolver doesn't contain the key '$missingKey'.") +case class DrsResolverResponseMissingKeyException(missingKey: String) + extends Exception(s"The response from the DRS Resolver doesn't contain the key '$missingKey'.") diff --git a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsReader.scala b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsReader.scala index 3256d6da248..e82790b454c 100644 --- a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsReader.scala +++ b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsReader.scala @@ -21,49 +21,48 @@ object DrsReader { options: WorkflowOptions, requesterPaysProjectIdOption: Option[String], drsPathResolver: DrsPathResolver, - drsResolverResponse: DrsResolverResponse): IO[DrsReader] = { + drsResolverResponse: DrsResolverResponse + ): IO[DrsReader] = (drsResolverResponse.accessUrl, drsResolverResponse.gsUri, googleAuthMode) match { case (Some(accessUrl), _, _) => IO.pure(AccessUrlReader(drsPathResolver, accessUrl)) case (_, Some(gcsPath), Some(authMode)) => - IO.pure(GcsReader( - authMode, - options, - requesterPaysProjectIdOption, - gcsPath, - drsResolverResponse.googleServiceAccount, - )) + IO.pure( + GcsReader( + authMode, + options, + requesterPaysProjectIdOption, + gcsPath, + drsResolverResponse.googleServiceAccount + ) + ) case (_, Some(_), _) => IO.raiseError(new RuntimeException("GCS URI found in the DRS Resolver response, but no Google auth found!")) case _ => IO.raiseError(new RuntimeException(DrsPathResolver.ExtractUriErrorMsg)) } - } def readInterpreter(googleAuthMode: Option[GoogleAuthMode], options: WorkflowOptions, - requesterPaysProjectIdOption: Option[String]) - (drsPathResolver: DrsPathResolver, - drsResolverResponse: DrsResolverResponse): IO[ReadableByteChannel] = { + requesterPaysProjectIdOption: Option[String] + )(drsPathResolver: DrsPathResolver, drsResolverResponse: DrsResolverResponse): IO[ReadableByteChannel] = for { reader <- reader(googleAuthMode, options, requesterPaysProjectIdOption, drsPathResolver, drsResolverResponse) channel <- reader.read() } yield channel - } } case class AccessUrlReader(drsPathResolver: DrsPathResolver, accessUrl: AccessUrl) extends DrsReader { - override def read(): IO[ReadableByteChannel] = { + override def read(): IO[ReadableByteChannel] = drsPathResolver.openChannel(accessUrl) - } } case class GcsReader(googleAuthMode: GoogleAuthMode, options: WorkflowOptions, requesterPaysProjectIdOption: Option[String], gsUri: String, - googleServiceAccount: Option[SADataObject], - ) extends DrsReader { + googleServiceAccount: Option[SADataObject] +) extends DrsReader { override def 
read(): IO[ReadableByteChannel] = { val readScopes = List(StorageScopes.DEVSTORAGE_READ_ONLY) val credentialsIo = googleServiceAccount match { @@ -71,7 +70,7 @@ case class GcsReader(googleAuthMode: GoogleAuthMode, IO( UserServiceAccountMode("drs_resolver_service_account").credentials( Map(GoogleAuthMode.UserServiceAccountKey -> googleSA.data.noSpaces), - readScopes, + readScopes ) ) case None => @@ -86,25 +85,23 @@ case class GcsReader(googleAuthMode: GoogleAuthMode, private def gcsInputStream(gcsFile: String, credentials: OAuth2Credentials, - requesterPaysProjectIdOption: Option[String], - ): IO[ReadableByteChannel] = { + requesterPaysProjectIdOption: Option[String] + ): IO[ReadableByteChannel] = for { storage <- IO(StorageOptions.newBuilder().setCredentials(credentials).build().getService) gcsBucketAndName <- IO(getGcsBucketAndName(gcsFile)) (bucketName, objectName) = gcsBucketAndName - readChannel <- IO(storage.get(bucketName, objectName).reader()) handleErrorWith { - throwable => - (requesterPaysProjectIdOption, throwable) match { - case (Some(requesterPaysProjectId), storageException: StorageException) + readChannel <- IO(storage.get(bucketName, objectName).reader()) handleErrorWith { throwable => + (requesterPaysProjectIdOption, throwable) match { + case (Some(requesterPaysProjectId), storageException: StorageException) if storageException.getMessage.contains("requester pays bucket but no user project") => - IO( - storage - .get(bucketName, objectName, BlobGetOption.userProject(requesterPaysProjectId)) - .reader(Blob.BlobSourceOption.userProject(requesterPaysProjectId)) - ) - case _ => IO.raiseError(throwable) - } + IO( + storage + .get(bucketName, objectName, BlobGetOption.userProject(requesterPaysProjectId)) + .reader(Blob.BlobSourceOption.userProject(requesterPaysProjectId)) + ) + case _ => IO.raiseError(throwable) + } } } yield readChannel - } } diff --git a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsResolver.scala b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsResolver.scala index 4aa760f17e1..39115e1633e 100644 --- a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsResolver.scala +++ b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsResolver.scala @@ -8,7 +8,6 @@ import cromwell.core.path.{DefaultPathBuilder, Path} import org.apache.commons.lang3.exception.ExceptionUtils import shapeless.syntax.typeable._ - object DrsResolver { private val GcsScheme: String = "gs" @@ -16,18 +15,18 @@ object DrsResolver { private val DrsLocalizationPathsContainer = "drs_localization_paths" - private def resolveError[A](pathAsString: String)(throwable: Throwable): IO[A] = { + private def resolveError[A](pathAsString: String)(throwable: Throwable): IO[A] = IO.raiseError( new RuntimeException( s"Error while resolving DRS path: $pathAsString. Error: ${ExceptionUtils.getMessage(throwable)}" ) ) - } private def getDrsPathResolver(drsPath: DrsPath): IO[DrsPathResolver] = { val drsFileSystemProviderOption = drsPath.drsPath.getFileSystem.provider.cast[DrsCloudNioFileSystemProvider] - val noFileSystemForDrsError = s"Unable to cast file system provider to DrsCloudNioFileSystemProvider for DRS path $drsPath." + val noFileSystemForDrsError = + s"Unable to cast file system provider to DrsCloudNioFileSystemProvider for DRS path $drsPath." 
for { drsFileSystemProvider <- toIO(drsFileSystemProviderOption, noFileSystemForDrsError) @@ -37,11 +36,17 @@ object DrsResolver { case class DrsResolverLocalizationData(gsUri: Option[String], fileName: Option[String], bondProvider: Option[String], - localizationPath: Option[String]) + localizationPath: Option[String] + ) private def getDrsResolverLocalizationData(pathAsString: String, - drsPathResolver: DrsPathResolver): IO[DrsResolverLocalizationData] = { - val fields = NonEmptyList.of(DrsResolverField.GsUri, DrsResolverField.FileName, DrsResolverField.BondProvider, DrsResolverField.LocalizationPath) + drsPathResolver: DrsPathResolver + ): IO[DrsResolverLocalizationData] = { + val fields = NonEmptyList.of(DrsResolverField.GsUri, + DrsResolverField.FileName, + DrsResolverField.BondProvider, + DrsResolverField.LocalizationPath + ) drsPathResolver.resolveDrs(pathAsString, fields) map { r => DrsResolverLocalizationData(r.gsUri, r.fileName, r.bondProvider, r.localizationPath) @@ -49,7 +54,7 @@ object DrsResolver { } /** Returns the `gsUri` if it ends in the `fileName` and the `bondProvider` is empty. */ - private def getSimpleGsUri(localizationData: DrsResolverLocalizationData): Option[String] = { + private def getSimpleGsUri(localizationData: DrsResolverLocalizationData): Option[String] = localizationData match { // `gsUri` not defined so no gsUri can be returned. case DrsResolverLocalizationData(None, _, _, _) => None @@ -60,11 +65,9 @@ object DrsResolver { // Barring any of the situations above return the `gsUri`. case DrsResolverLocalizationData(Some(gsUri), _, _, _) => Option(gsUri) } - } /** Returns the `gsUri` if it ends in the `fileName` and the `bondProvider` is empty. */ - def getSimpleGsUri(pathAsString: String, - drsPathResolver: DrsPathResolver): IO[Option[String]] = { + def getSimpleGsUri(pathAsString: String, drsPathResolver: DrsPathResolver): IO[Option[String]] = { val gsUriIO = getDrsResolverLocalizationData(pathAsString, drsPathResolver) map getSimpleGsUri @@ -72,12 +75,11 @@ object DrsResolver { } /** Returns the `gsUri` if it ends in the `fileName` and the `bondProvider` is empty. 
*/ - def getSimpleGsUri(drsPath: DrsPath): IO[Option[String]] = { + def getSimpleGsUri(drsPath: DrsPath): IO[Option[String]] = for { drsPathResolver <- getDrsPathResolver(drsPath) gsUri <- getSimpleGsUri(drsPath.pathAsString, drsPathResolver) } yield gsUri - } def getContainerRelativePath(drsPath: DrsPath): IO[String] = { val pathIO = for { diff --git a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala index d207bba8d86..5fc3257ed9d 100644 --- a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala +++ b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala @@ -6,7 +6,7 @@ import cromwell.core.filesystem.CromwellFileSystems import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -class DrsPathBuilderFactorySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers{ +class DrsPathBuilderFactorySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "DrsPathBuilderFactory" diff --git a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderSpec.scala b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderSpec.scala index 315e51b5d04..65f71e029c7 100644 --- a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderSpec.scala +++ b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderSpec.scala @@ -51,9 +51,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a path with non-ascii", path = s"drs://$bucket/hello/world/with non ascii £€", @@ -66,9 +65,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a gs uri path with encoded characters", path = s"drs://$bucket/hello/world/encoded%20spaces", @@ -81,9 +79,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a file at the top of the bucket", path = s"drs://$bucket/hello", @@ -96,9 +93,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a path ending in /", path = s"drs://$bucket/hello/world/", @@ -111,7 +107,7 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), // Special paths @@ -128,9 +124,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket with a path ..", path = s"drs://$bucket/..", @@ -143,9 +138,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket including . 
in the path", path = s"drs://$bucket/hello/./world", @@ -158,9 +152,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket including .. in the path", path = s"drs://$bucket/hello/../world", @@ -173,7 +166,7 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), // Normalized @@ -190,9 +183,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket with a normalized path ..", path = s"drs://$bucket/..", @@ -205,9 +197,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket including . in the normalized path", path = s"drs://$bucket/hello/./world", @@ -220,9 +211,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket including .. in the normalized path", path = s"drs://$bucket/hello/../world", @@ -235,9 +225,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket with an underscore", path = s"drs://hello_underscore/world", @@ -250,9 +239,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a bucket named .", path = s"drs://./hello/world", @@ -265,9 +253,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "a non ascii bucket name", path = s"drs://nonasciibucket£€/hello/world", @@ -280,9 +267,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "an non-absolute path without a host", path = s"drs://blah/", @@ -295,9 +281,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), - GoodPath( description = "an absolute path without a host", path = s"drs://blah", @@ -310,7 +295,7 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), // No spec says this is illegal... 
so pass it to the DRS Resolver's various GCFs JIC @@ -326,7 +311,7 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), // Sample via: https://docs.google.com/document/d/1Wf4enSGOEXD5_AE-uzLoYqjIp5MnePbZ6kYTVFp1WoM/edit @@ -340,12 +325,11 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers "drs.data.humancellatlas.org/8aca942c-17f7-4e34-b8fd-3c12e50f9291?version=2019-07-04T151444.185805Z", parent = null, getParent = null, - root = - "drs://drs.data.humancellatlas.org/8aca942c-17f7-4e34-b8fd-3c12e50f9291?version=2019-07-04T151444.185805Z", + root = "drs://drs.data.humancellatlas.org/8aca942c-17f7-4e34-b8fd-3c12e50f9291?version=2019-07-04T151444.185805Z", name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, + isAbsolute = false ), // Sample via: https://docs.google.com/document/d/1Wf4enSGOEXD5_AE-uzLoYqjIp5MnePbZ6kYTVFp1WoM/edit @@ -361,8 +345,8 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 1, - isAbsolute = false, - ), + isAbsolute = false + ) ) private def badPaths = Seq( @@ -371,7 +355,7 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers BadPath("a https path", "https://hello/world", "https://hello/world does not have a drs scheme."), BadPath("a file uri path", "file:///hello/world", "file:///hello/world does not have a drs scheme."), BadPath("a relative file path", "hello/world", "hello/world does not have a drs scheme."), - BadPath("an absolute file path", "/hello/world", "/hello/world does not have a drs scheme."), + BadPath("an absolute file path", "/hello/world", "/hello/world does not have a drs scheme.") ) private val drsReadInterpreter: DrsReadInterpreter = (_, _) => @@ -387,7 +371,10 @@ class DrsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers private lazy val fakeCredentials = NoCredentials.getInstance private lazy val drsPathBuilder = DrsPathBuilder( - new DrsCloudNioFileSystemProvider(drsResolverConfig, GoogleOauthDrsCredentials(fakeCredentials, 1.minutes), drsReadInterpreter), - None, + new DrsCloudNioFileSystemProvider(drsResolverConfig, + GoogleOauthDrsCredentials(fakeCredentials, 1.minutes), + drsReadInterpreter + ), + None ) } diff --git a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala index b5bee55b301..2cee6c49b12 100644 --- a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala +++ b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala @@ -31,7 +31,12 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche val googleServiceAccount = None val drsResolverResponse = DrsResolverResponse(gsUri = Option(gsUri), googleServiceAccount = googleServiceAccount) val readerIo = - DrsReader.reader(Option(googleAuthMode), workflowOptions, requesterPaysProjectIdOption, drsPathResolver, drsResolverResponse) + DrsReader.reader(Option(googleAuthMode), + workflowOptions, + requesterPaysProjectIdOption, + drsPathResolver, + drsResolverResponse + ) readerIo.unsafeRunSync() should be( GcsReader(googleAuthMode, workflowOptions, requesterPaysProjectIdOption, gsUri, googleServiceAccount) ) @@ -45,7 +50,12 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche val accessUrl = 
AccessUrl("https://host/object/path", Option(Map("hello" -> "world"))) val drsResolverResponse = DrsResolverResponse(accessUrl = Option(accessUrl)) val readerIo = - DrsReader.reader(Option(googleAuthMode), workflowOptions, requesterPaysProjectIdOption, drsPathResolver, drsResolverResponse) + DrsReader.reader(Option(googleAuthMode), + workflowOptions, + requesterPaysProjectIdOption, + drsPathResolver, + drsResolverResponse + ) readerIo.unsafeRunSync() should be( AccessUrlReader(drsPathResolver, accessUrl) ) @@ -58,7 +68,12 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche val drsPathResolver = new MockEngineDrsPathResolver() val drsResolverResponse = DrsResolverResponse() val readerIo = - DrsReader.reader(Option(googleAuthMode), workflowOptions, requesterPaysProjectIdOption, drsPathResolver, drsResolverResponse) + DrsReader.reader(Option(googleAuthMode), + workflowOptions, + requesterPaysProjectIdOption, + drsPathResolver, + drsResolverResponse + ) the[RuntimeException] thrownBy { readerIo.unsafeRunSync() } should have message DrsPathResolver.ExtractUriErrorMsg @@ -80,16 +95,18 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche val accessUrl = AccessUrl("https://host/object/path", Option(Map("hello" -> "world"))) val drsResolverResponse = DrsResolverResponse(accessUrl = Option(accessUrl)) val channelIo = - DrsReader.readInterpreter(Option(MockAuthMode("unused")), WorkflowOptions.empty, None)(drsPathResolver, drsResolverResponse) + DrsReader.readInterpreter(Option(MockAuthMode("unused")), WorkflowOptions.empty, None)(drsPathResolver, + drsResolverResponse + ) val channel = channelIo.unsafeRunSync() val buffer = ByteBuffer.allocate(exampleBytes.length) - channel.isOpen should be (true) + channel.isOpen should be(true) DrsReaderSpec.readToBuffer(channel, buffer) channel.close() val httpGetCapture = capture[HttpGet] - channel.isOpen should be (false) + channel.isOpen should be(false) buffer.array() should be(exampleBytes) verify(httpClient).execute(httpGetCapture.capture) verify(httpClient).close() @@ -104,7 +121,7 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche object DrsReaderSpec { @tailrec - def readToBuffer(input: ReadableByteChannel, buffer: ByteBuffer): Unit = { + def readToBuffer(input: ReadableByteChannel, buffer: ByteBuffer): Unit = if (buffer.remaining() > 0) { if (input.read(buffer) >= 0) { readToBuffer(input, buffer) @@ -112,5 +129,4 @@ object DrsReaderSpec { throw new EOFException(s"input exhausted with ${buffer.remaining()} expected bytes") } } - } } diff --git a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala index 23dcb1bcd66..ac37fe26f59 100644 --- a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala +++ b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala @@ -8,7 +8,6 @@ import org.scalatest.matchers.should.Matchers import scala.jdk.CollectionConverters._ - class DrsResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { private val drsResolverConfig: Config = ConfigFactory.parseMap( @@ -21,37 +20,40 @@ class DrsResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers private val mockFileSystemProvider = new MockDrsCloudNioFileSystemProvider(config = drsResolverConfig) private val drsPathBuilder = DrsPathBuilder(mockFileSystemProvider, None) - behavior of "DrsResolver" it should 
"find DRS path from a GCS path" in { val drsPath = drsPathBuilder.build(MockDrsPaths.drsPathResolvingGcsPath).get.asInstanceOf[DrsPath] - DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be (MockDrsPaths.drsRelativePath) + DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be(MockDrsPaths.drsRelativePath) } it should "find DRS path from a path replacing characters" in { val drsPath = drsPathBuilder.build(MockDrsPaths.drsPathWithNonPathChars).get.asInstanceOf[DrsPath] - DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be (MockDrsPaths.drsReplacedChar) + DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be(MockDrsPaths.drsReplacedChar) } it should "find DRS path from a file name" in { val drsPath = drsPathBuilder.build(MockDrsPaths.drsPathResolvingWithFileName).get.asInstanceOf[DrsPath] - DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be (MockDrsPaths.gcsRelativePathWithFileName) + DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be(MockDrsPaths.gcsRelativePathWithFileName) } it should "find DRS path from a localization path" in { val drsPath = drsPathBuilder.build(MockDrsPaths.drsPathResolvingWithLocalizationPath).get.asInstanceOf[DrsPath] - DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be (MockDrsPaths.gcsRelativePathWithFileNameFromLocalizationPath) + DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be( + MockDrsPaths.gcsRelativePathWithFileNameFromLocalizationPath + ) } it should "find DRS path from all the paths" in { val drsPath = drsPathBuilder.build(MockDrsPaths.drsPathResolvingWithAllThePaths).get.asInstanceOf[DrsPath] - DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be (MockDrsPaths.gcsRelativePathWithFileNameFromAllThePaths) + DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() should be( + MockDrsPaths.gcsRelativePathWithFileNameFromAllThePaths + ) } it should "throw GcsUrlNotFoundException when DRS path doesn't resolve to at least one GCS url" in { @@ -69,7 +71,7 @@ class DrsResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() } should have message s"Error while resolving DRS path: ${drsPath.pathAsString}. " + - s"Error: RuntimeException: Unexpected response resolving ${drsPath.pathAsString} " + - s"through DRS Resolver url https://drshub-url/drshub_v4. Error: 404 Not Found." + s"Error: RuntimeException: Unexpected response resolving ${drsPath.pathAsString} " + + s"through DRS Resolver url https://drshub-url/drshub_v4. Error: 404 Not Found." 
} } diff --git a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/CromwellFtpFileSystems.scala b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/CromwellFtpFileSystems.scala index 70593c41e68..27fb1698fd7 100644 --- a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/CromwellFtpFileSystems.scala +++ b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/CromwellFtpFileSystems.scala @@ -25,19 +25,21 @@ object CromwellFtpFileSystems { } def parseConfig(config: Config): FtpFileSystemsConfiguration = { - val cacheTTL = validate[FiniteDuration] { config.as[FiniteDuration]("cache-ttl") } - val leaseTimeout = validate[Option[FiniteDuration]] { config.getAs[FiniteDuration]("obtain-connection-timeout") } + val cacheTTL = validate[FiniteDuration](config.as[FiniteDuration]("cache-ttl")) + val leaseTimeout = validate[Option[FiniteDuration]](config.getAs[FiniteDuration]("obtain-connection-timeout")) // Cannot be less than 2, otherwise we can't copy files as we need 2 connections to copy a file (one for downstream and one for upstream) - val capacity: ErrorOr[Int] = validate[Int] { config.as[Int]("max-connection-per-server-per-user") } map { c => Math.max(2, c) } - val idleConnectionTimeout = validate[FiniteDuration] { config.as[FiniteDuration]("idle-connection-timeout") } - val connectionPort = validate[Int] { config.as[Int]("connection-port") } - val connectionMode = validate[ConnectionMode] { config.as[ConnectionMode]("connection-mode") } + val capacity: ErrorOr[Int] = validate[Int](config.as[Int]("max-connection-per-server-per-user")) map { c => + Math.max(2, c) + } + val idleConnectionTimeout = validate[FiniteDuration](config.as[FiniteDuration]("idle-connection-timeout")) + val connectionPort = validate[Int](config.as[Int]("connection-port")) + val connectionMode = validate[ConnectionMode](config.as[ConnectionMode]("connection-mode")) (cacheTTL, leaseTimeout, capacity, idleConnectionTimeout, connectionPort, connectionMode) .mapN(FtpFileSystemsConfiguration.apply) .unsafe("Failed to parse FTP configuration") } - + val Default = new CromwellFtpFileSystems(FtpFileSystems.Default) } diff --git a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpInstanceConfiguration.scala b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpInstanceConfiguration.scala index 6af3c8863c5..23c8504872f 100644 --- a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpInstanceConfiguration.scala +++ b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpInstanceConfiguration.scala @@ -25,12 +25,12 @@ case class FtpInstanceConfiguration(ftpCredentials: FtpCredentials) object FtpInstanceConfiguration { lazy val Default = FtpInstanceConfiguration(FtpAnonymousCredentials) - + def apply(conf: Config): FtpInstanceConfiguration = { val credentials: ErrorOr[FtpCredentials] = conf.getAs[Config]("auth") map { authConfig => - val username = validate { authConfig.as[String]("username") } - val password = validate { authConfig.as[String]("password") } - val account = validate { authConfig.getAs[String]("account") } + val username = validate(authConfig.as[String]("username")) + val password = validate(authConfig.as[String]("password")) + val account = validate(authConfig.getAs[String]("account")) (username, password, account) mapN FtpAuthenticatedCredentials.apply } getOrElse Default.ftpCredentials.validNel diff --git a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilder.scala b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilder.scala index 
409a4027437..ed708ef41e0 100644 --- a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilder.scala +++ b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilder.scala @@ -16,7 +16,7 @@ object FtpPathBuilder { case class FtpPathBuilder(fileSystemProvider: CloudNioFileSystemProvider) extends PathBuilder { override def name = "FTP" - override def build(string: String) = { + override def build(string: String) = if (string == "ftp://") Failure(new IllegalArgumentException(s"$string does not have a valid host")) else { Try(URI.create(UrlEscapers.urlFragmentEscaper().escape(string))) flatMap { uri => @@ -31,5 +31,4 @@ case class FtpPathBuilder(fileSystemProvider: CloudNioFileSystemProvider) extend } } } - } } diff --git a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilderFactory.scala b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilderFactory.scala index cd599349d27..da55640d4e1 100644 --- a/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilderFactory.scala +++ b/filesystems/ftp/src/main/scala/cromwell/filesystems/ftp/FtpPathBuilderFactory.scala @@ -19,24 +19,37 @@ object FtpPathBuilderFactory { def credentialsFromWorkflowOptions(workflowOptions: WorkflowOptions) = { def getValue(key: String) = workflowOptions.get(key).toOption - (getValue(WorkflowOptions.FtpUsername), getValue(WorkflowOptions.FtpPassword), getValue(WorkflowOptions.FtpAccount)) match { + (getValue(WorkflowOptions.FtpUsername), + getValue(WorkflowOptions.FtpPassword), + getValue(WorkflowOptions.FtpAccount) + ) match { case (Some(username), Some(password), account) => Option(FtpAuthenticatedCredentials(username, password, account)) case _ => None } } } -class FtpPathBuilderFactory(globalConfig: Config, instanceConfig: Config, cromwellFtpFileSystems: CromwellFtpFileSystems) extends PathBuilderFactory { - private [ftp] lazy val configFtpConfiguration = FtpInstanceConfiguration(instanceConfig) - private lazy val defaultFtpProvider = new FtpCloudNioFileSystemProvider(instanceConfig, configFtpConfiguration.ftpCredentials, cromwellFtpFileSystems.ftpFileSystems) - - override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext) = Future.successful { - val provider = credentialsFromWorkflowOptions(options) match { - case Some(overriddenCredentials) => - new FtpCloudNioFileSystemProvider(instanceConfig, overriddenCredentials, cromwellFtpFileSystems.ftpFileSystems) - case _ => defaultFtpProvider +class FtpPathBuilderFactory(globalConfig: Config, + instanceConfig: Config, + cromwellFtpFileSystems: CromwellFtpFileSystems +) extends PathBuilderFactory { + private[ftp] lazy val configFtpConfiguration = FtpInstanceConfiguration(instanceConfig) + private lazy val defaultFtpProvider = new FtpCloudNioFileSystemProvider(instanceConfig, + configFtpConfiguration.ftpCredentials, + cromwellFtpFileSystems.ftpFileSystems + ) + + override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext) = + Future.successful { + val provider = credentialsFromWorkflowOptions(options) match { + case Some(overriddenCredentials) => + new FtpCloudNioFileSystemProvider(instanceConfig, + overriddenCredentials, + cromwellFtpFileSystems.ftpFileSystems + ) + case _ => defaultFtpProvider + } + + FtpPathBuilder(provider) } - - FtpPathBuilder(provider) - } } diff --git a/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/CromwellFtpFileSystemsSpec.scala 
b/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/CromwellFtpFileSystemsSpec.scala index 90229c30cc2..ea1985797e3 100644 --- a/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/CromwellFtpFileSystemsSpec.scala +++ b/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/CromwellFtpFileSystemsSpec.scala @@ -13,14 +13,13 @@ class CromwellFtpFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec wi behavior of "CromwellFtpFileSystemsSpec" it should "parse configuration" in { - val config = ConfigFactory.parseString( - """cache-ttl = 10 days - |obtain-connection-timeout = 12 hours - |max-connection-per-server-per-user = 1 - |idle-connection-timeout = 14 hours - |connection-port: 212 - |connection-mode = "active" """.stripMargin) - + val config = ConfigFactory.parseString("""cache-ttl = 10 days + |obtain-connection-timeout = 12 hours + |max-connection-per-server-per-user = 1 + |idle-connection-timeout = 14 hours + |connection-port: 212 + |connection-mode = "active" """.stripMargin) + val fs = new CromwellFtpFileSystems(config) fs.ftpFileSystems.config.cacheTTL shouldBe 10.days fs.ftpFileSystems.config.leaseTimeout shouldBe Some(12.hours) diff --git a/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpInstanceConfigurationSpec.scala b/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpInstanceConfigurationSpec.scala index 5fa5de22ea2..09ce6ec863c 100644 --- a/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpInstanceConfigurationSpec.scala +++ b/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpInstanceConfigurationSpec.scala @@ -15,21 +15,19 @@ class FtpInstanceConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec } it should "parse authenticated credentials" in { - FtpInstanceConfiguration(ConfigFactory.parseString( - """ - |auth { - | username = "me" - | password = "mot de passe" - |} + FtpInstanceConfiguration(ConfigFactory.parseString(""" + |auth { + | username = "me" + | password = "mot de passe" + |} """.stripMargin)).ftpCredentials shouldBe FtpAuthenticatedCredentials("me", "mot de passe", None) - FtpInstanceConfiguration(ConfigFactory.parseString( - """ - |auth { - | username = "me" - | password = "mot de passe" - | account = "account" - |} + FtpInstanceConfiguration(ConfigFactory.parseString(""" + |auth { + | username = "me" + | password = "mot de passe" + | account = "account" + |} """.stripMargin)).ftpCredentials shouldBe FtpAuthenticatedCredentials("me", "mot de passe", Option("account")) } diff --git a/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpPathSpec.scala b/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpPathSpec.scala index 9224b4f4124..0c164127514 100644 --- a/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpPathSpec.scala +++ b/filesystems/ftp/src/test/scala/cromwell/filesystems/ftp/FtpPathSpec.scala @@ -16,9 +16,10 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit behavior of "FtpPathSpec" - val pathBuilderFactory = new FtpPathBuilderFactory(ConfigFactory.empty(), ConfigFactory.empty(), CromwellFtpFileSystems.Default) { - override private [ftp] lazy val configFtpConfiguration = new FtpInstanceConfiguration(FtpAnonymousCredentials) - } + val pathBuilderFactory = + new FtpPathBuilderFactory(ConfigFactory.empty(), ConfigFactory.empty(), CromwellFtpFileSystems.Default) { + override private[ftp] lazy val configFtpConfiguration = new FtpInstanceConfiguration(FtpAnonymousCredentials) + } val pathBuilder = 
Await.result(pathBuilderFactory.withOptions(WorkflowOptions.empty)(null, null), 1.second) @@ -41,7 +42,10 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit ("ftp://ftp-server.com/path/to/my//dir", "ftp://ftp-server.com/path/to/my/dir/file", "dir/file"), ("ftp://ftp-server.com/path/to/my//dir", "ftp://ftp-server.com/path/to/my/dir//file", "dir//file"), ("ftp://ftp-server.com/path/to/my/dir", "ftp://ftp-server.com/path/./to/my/dir/file", "./to/my/dir/file"), - ("ftp://ftp-server.com/path/to/my/dir/with/file", "ftp://ftp-server.com/path/to/other/dir/with/file", "other/dir/with/file") + ("ftp://ftp-server.com/path/to/my/dir/with/file", + "ftp://ftp-server.com/path/to/other/dir/with/file", + "other/dir/with/file" + ) ) private def goodPaths = Seq( @@ -57,8 +61,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "with spaces", getFileName = s"ftp://ftp-server.com/with spaces", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path with non-ascii", path = s"ftp://ftp-server.com/hello/world/with non ascii £€", @@ -71,8 +75,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "with non ascii £€", getFileName = s"ftp://ftp-server.com/with non ascii £€", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "an ftp uri path with encoded characters", path = s"ftp://ftp-server.com/hello/world/encoded%20spaces", @@ -85,8 +89,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "encoded%20spaces", getFileName = s"ftp://ftp-server.com/encoded%20spaces", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a hostname only path (root path)", path = s"ftp://ftp-server.com", @@ -99,8 +103,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "", getFileName = null, getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a hostname only path ending in a /", path = s"ftp://ftp-server.com/", @@ -113,8 +117,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "", getFileName = null, getNameCount = 0, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a file at the top of the hostname", path = s"ftp://ftp-server.com/hello", @@ -127,8 +131,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "hello", getFileName = s"ftp://ftp-server.com/hello", getNameCount = 1, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path ending in /", path = s"ftp://ftp-server.com/hello/world/", @@ -141,7 +145,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "world", getFileName = s"ftp://ftp-server.com/world", getNameCount = 2, - isAbsolute = true), + isAbsolute = true + ), // Special paths @@ -157,8 +162,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "", getFileName = s"ftp://ftp-server.com/.", getNameCount = 1, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a hostname with a path ..", path = s"ftp://ftp-server.com/..", @@ -171,8 +176,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "", getFileName = s"ftp://ftp-server.com/..", getNameCount = 1, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( 
description = "a bucket including . in the path", path = s"ftp://ftp-server.com/hello/./world", @@ -185,8 +190,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "world", getFileName = s"ftp://ftp-server.com/world", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path including .. in the path", path = s"ftp://ftp-server.com/hello/../world", @@ -199,7 +204,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "world", getFileName = s"ftp://ftp-server.com/world", getNameCount = 3, - isAbsolute = true), + isAbsolute = true + ), // Normalized @@ -215,8 +221,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "", getFileName = null, getNameCount = 0, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path with a normalized path ..", path = s"ftp://ftp-server.com/..", @@ -229,8 +235,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "", getFileName = null, getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a path including . in the normalized path", path = s"ftp://ftp-server.com/hello/./world", @@ -243,8 +249,8 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "world", getFileName = s"ftp://ftp-server.com/world", getNameCount = 2, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path including .. in the normalized path", path = s"ftp://ftp-server.com/hello/../world", @@ -257,14 +263,18 @@ class FtpPathSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit name = "world", getFileName = s"ftp://ftp-server.com/world", getNameCount = 1, - isAbsolute = true), + isAbsolute = true + ) ) private def badPaths = Seq( BadPath("an empty path", "", " does not have an ftp scheme"), BadPath("a hostless path", "ftp://", "ftp:// does not have a valid host"), BadPath("a bucket named .", "ftp://./hello/world", "ftp://./hello/world does not have a valid host"), - BadPath("a non ascii bucket name", "ftp://nonasciibucket£€/hello/world", "ftp://nonasciibucket£€/hello/world does not have a valid host"), + BadPath("a non ascii bucket name", + "ftp://nonasciibucket£€/hello/world", + "ftp://nonasciibucket£€/hello/world does not have a valid host" + ), BadPath("a https path", "https://hello/world", "https://hello/world does not have an ftp scheme"), BadPath("a file uri path", "file:///hello/world", "file:///hello/world does not have an ftp scheme"), BadPath("a relative file path", "hello/world", "hello/world does not have an ftp scheme"), diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsEnhancedRequest.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsEnhancedRequest.scala index 76887905387..638d23e50ec 100644 --- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsEnhancedRequest.scala +++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsEnhancedRequest.scala @@ -11,20 +11,19 @@ import java.io.FileNotFoundException object GcsEnhancedRequest { // If the request fails because no project was passed, recover the request, this time setting the project - def recoverFromProjectNotProvided[A](path: GcsPath, f: Boolean => A) = { - IO(f(false)).handleErrorWith({ - // Only retry with the the project if the error is right - case error: StorageException if isProjectNotProvidedError(error) => - IO(f(true)) - // Use 
NoSuchFileException for better error reporting
-      case e: StorageException if e.getCode == StatusCodes.NotFound.intValue =>
-        IO.raiseError(new FileNotFoundException(s"File not found: ${path.pathAsString}"))
-      case e: GoogleJsonResponseException if isProjectNotProvidedError(e) =>
-        IO(f(true))
-      case e: GoogleJsonResponseException if e.getStatusCode == StatusCodes.NotFound.intValue =>
-        IO.raiseError(new FileNotFoundException(s"File not found: ${path.pathAsString}"))
-      case e =>
-        IO.raiseError(e)
-    })
-  }
+  def recoverFromProjectNotProvided[A](path: GcsPath, f: Boolean => A) =
+    IO(f(false)).handleErrorWith {
+      // Only retry with the project if the error is right
+      case error: StorageException if isProjectNotProvidedError(error) =>
+        IO(f(true))
+      // Use NoSuchFileException for better error reporting
+      case e: StorageException if e.getCode == StatusCodes.NotFound.intValue =>
+        IO.raiseError(new FileNotFoundException(s"File not found: ${path.pathAsString}"))
+      case e: GoogleJsonResponseException if isProjectNotProvidedError(e) =>
+        IO(f(true))
+      case e: GoogleJsonResponseException if e.getStatusCode == StatusCodes.NotFound.intValue =>
+        IO.raiseError(new FileNotFoundException(s"File not found: ${path.pathAsString}"))
+      case e =>
+        IO.raiseError(e)
+    }
 }
diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilder.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilder.scala
index 00aee93e537..2a618e48aa0 100644
--- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilder.scala
+++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilder.scala
@@ -29,11 +29,11 @@ import scala.language.postfixOps
 import scala.util.{Failure, Try}

 object GcsPathBuilder {
   /*
-   * Provides some level of validation of GCS bucket names
-   * This is meant to alert the user early if they mistyped a gcs path in their workflow / inputs and not to validate
-   * exact bucket syntax, which is done by GCS.
-   * See https://cloud.google.com/storage/docs/naming for full spec
-   */
+   * Provides some level of validation of GCS bucket names
+   * This is meant to alert the user early if they mistyped a gcs path in their workflow / inputs and not to validate
+   * exact bucket syntax, which is done by GCS.
+   * See https://cloud.google.com/storage/docs/naming for full spec
+   */
   private val GcsBucketPattern =
     """
       (?x)                                      # Turn on comments and whitespace insensitivity
@@ -57,13 +57,12 @@ object GcsPathBuilder {
     override def errorMessage = s"Cloud Storage URIs must have 'gs' scheme: $pathString"
   }
   final case class InvalidFullGcsPath(pathString: String) extends InvalidGcsPath {
-    override def errorMessage: String = {
+    override def errorMessage: String =
       s"""
          |The path '$pathString' does not seem to be a valid GCS path.
          |Please check that it starts with gs:// and that the bucket and object follow GCS naming guidelines at
          |https://cloud.google.com/storage/docs/naming.
""".stripMargin.replace("\n", " ").trim - } } final case class UnparseableGcsPath(pathString: String, throwable: Throwable) extends InvalidGcsPath { override def errorMessage: String = @@ -79,7 +78,7 @@ object GcsPathBuilder { case _ => None } - def validateGcsPath(string: String): GcsPathValidation = { + def validateGcsPath(string: String): GcsPathValidation = Try { val uri = URI.create(UrlEscapers.urlFragmentEscaper().escape(string)) if (uri.getScheme == null) PossiblyValidRelativeGcsPath @@ -89,39 +88,32 @@ object GcsPathBuilder { } else ValidFullGcsPath(uri.getHost, uri.getPath) } else InvalidScheme(string) } recover { case t => UnparseableGcsPath(string, t) } get - } - def isGcsPath(nioPath: NioPath): Boolean = { + def isGcsPath(nioPath: NioPath): Boolean = nioPath.getFileSystem.provider().getScheme.equalsIgnoreCase(CloudStorageFileSystem.URI_SCHEME) - } def fromAuthMode(authMode: GoogleAuthMode, applicationName: String, retrySettings: RetrySettings, cloudStorageConfiguration: CloudStorageConfiguration, options: WorkflowOptions, - defaultProject: Option[String])(implicit as: ActorSystem, ec: ExecutionContext): Future[GcsPathBuilder] = { + defaultProject: Option[String] + )(implicit as: ActorSystem, ec: ExecutionContext): Future[GcsPathBuilder] = authMode.retryCredentials(options, List(StorageScopes.DEVSTORAGE_FULL_CONTROL)) map { credentials => - fromCredentials(credentials, - applicationName, - retrySettings, - cloudStorageConfiguration, - options, - defaultProject - ) + fromCredentials(credentials, applicationName, retrySettings, cloudStorageConfiguration, options, defaultProject) } - } def fromCredentials(credentials: Credentials, applicationName: String, retrySettings: RetrySettings, cloudStorageConfiguration: CloudStorageConfiguration, options: WorkflowOptions, - defaultProject: Option[String]): GcsPathBuilder = { + defaultProject: Option[String] + ): GcsPathBuilder = { // Grab the google project from Workflow Options if specified and set // that to be the project used by the StorageOptions Builder. 
If it's not // specified use the default project mentioned in config file - val project: Option[String] = options.get("google_project").toOption match { + val project: Option[String] = options.get("google_project").toOption match { case Some(googleProject) => Option(googleProject) case None => defaultProject } @@ -139,8 +131,9 @@ object GcsPathBuilder { class GcsPathBuilder(apiStorage: com.google.api.services.storage.Storage, cloudStorageConfiguration: CloudStorageConfiguration, - storageOptions: StorageOptions) extends PathBuilder { - private [gcs] val projectId = storageOptions.getProjectId + storageOptions: StorageOptions +) extends PathBuilder { + private[gcs] val projectId = storageOptions.getProjectId private lazy val cloudStorage = storageOptions.getService /** @@ -153,7 +146,7 @@ class GcsPathBuilder(apiStorage: com.google.api.services.storage.Storage, * * Also see https://github.com/GoogleCloudPlatform/google-cloud-java/issues/1343 */ - def build(string: String): Try[GcsPath] = { + def build(string: String): Try[GcsPath] = validateGcsPath(string) match { case ValidFullGcsPath(bucket, path) => Try { @@ -161,18 +154,19 @@ class GcsPathBuilder(apiStorage: com.google.api.services.storage.Storage, val cloudStoragePath = fileSystem.getPath(path) GcsPath(cloudStoragePath, apiStorage, cloudStorage, projectId) } - case PossiblyValidRelativeGcsPath => Failure(new IllegalArgumentException(s"""Path "$string" does not have a gcs scheme""")) + case PossiblyValidRelativeGcsPath => + Failure(new IllegalArgumentException(s"""Path "$string" does not have a gcs scheme""")) case invalid: InvalidGcsPath => Failure(new IllegalArgumentException(invalid.errorMessage)) } - } override def name: String = "Google Cloud Storage" } -case class GcsPath private[gcs](nioPath: NioPath, - apiStorage: com.google.api.services.storage.Storage, - cloudStorage: com.google.cloud.storage.Storage, - projectId: String) extends Path { +case class GcsPath private[gcs] (nioPath: NioPath, + apiStorage: com.google.api.services.storage.Storage, + cloudStorage: com.google.cloud.storage.Storage, + projectId: String +) extends Path { lazy val objectBlobId: Try[BlobId] = Try { val bucketName = cloudStoragePath.bucket val objectName = cloudStoragePath.toRealPath().toString @@ -202,13 +196,12 @@ case class GcsPath private[gcs](nioPath: NioPath, BlobId.of(bucketName, objectName) } - lazy val bucketOrObjectBlobId: Try[BlobId] = { + lazy val bucketOrObjectBlobId: Try[BlobId] = Try { val bucketName = cloudStoragePath.bucket val objectName = cloudStoragePath.toRealPath().toString BlobId.of(bucketName, objectName) } - } override protected def newPath(nioPath: NioPath): GcsPath = GcsPath(nioPath, apiStorage, cloudStorage, projectId) @@ -218,9 +211,9 @@ case class GcsPath private[gcs](nioPath: NioPath, s"${CloudStorageFileSystem.URI_SCHEME}://$host/$path" } - override def writeContent(content: String) - (openOptions: OpenOptions, codec: Codec, compressPayload: Boolean) - (implicit ec: ExecutionContext): GcsPath.this.type = { + override def writeContent(content: String)(openOptions: OpenOptions, codec: Codec, compressPayload: Boolean)(implicit + ec: ExecutionContext + ): GcsPath.this.type = { def request(withProject: Boolean) = { val builder = BlobInfo.newBuilder(objectBlobId.get).setContentType(ContentTypes.`text/plain(UTF-8)`.value) if (compressPayload) { @@ -243,14 +236,14 @@ case class GcsPath private[gcs](nioPath: NioPath, } override def mediaInputStream(implicit ec: ExecutionContext): InputStream = { - def request(withProject: Boolean) = 
{
+    def request(withProject: Boolean) =
       // Use apiStorage here instead of cloudStorage, because apiStorage throws now if the bucket has requester pays,
       // whereas cloudStorage creates the input stream anyway and only throws once `read` is called (which only happens in NioFlow)
-      apiStorage.objects()
+      apiStorage
+        .objects()
         .get(objectBlobId.get.getBucket, objectBlobId.get.getName)
         .setUserProject(withProject.option(projectId).orNull)
         .executeMediaAsInputStream()
-    }
     // Since the NIO interface is synchronous we need to run this synchronously here. It is however wrapped in a Future
     // in the NioFlow so we don't need to worry about exceptions
     runOnEc(recoverFromProjectNotProvided(this, request)).unsafeRunSync()
diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilderFactory.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilderFactory.scala
index 74d88b05476..f63ef73b4dd 100644
--- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilderFactory.scala
+++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GcsPathBuilderFactory.scala
@@ -14,8 +14,7 @@ import org.threeten.bp.Duration

 import scala.concurrent.{ExecutionContext, Future}

-final case class GcsPathBuilderFactory(globalConfig: Config, instanceConfig: Config)
-  extends PathBuilderFactory {
+final case class GcsPathBuilderFactory(globalConfig: Config, instanceConfig: Config) extends PathBuilderFactory {
   import net.ceedubs.ficus.Ficus._
   // Parse the configuration and create a GoogleConfiguration
   val googleConf: GoogleConfiguration = GoogleConfiguration(globalConfig)
@@ -30,8 +29,9 @@ final case class GcsPathBuilderFactory(globalConfig: Config, instanceConfig: Con

   val defaultProject = instanceConfig.as[Option[String]]("project")

-  lazy val defaultRetrySettings: RetrySettings = {
-    RetrySettings.newBuilder()
+  lazy val defaultRetrySettings: RetrySettings =
+    RetrySettings
+      .newBuilder()
       .setMaxAttempts(maxAttempts)
       .setTotalTimeout(Duration.ofSeconds(30))
       .setInitialRetryDelay(Duration.ofMillis(100))
@@ -41,9 +41,8 @@ final case class GcsPathBuilderFactory(globalConfig: Config, instanceConfig: Con
       .setRpcTimeoutMultiplier(1.1)
       .setMaxRpcTimeout(Duration.ofSeconds(5))
       .build()
-  }

-  def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[GcsPathBuilder] = {
+  def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[GcsPathBuilder] =
     GcsPathBuilder.fromAuthMode(
       authMode,
       applicationName,
@@ -52,7 +51,6 @@ final case class GcsPathBuilderFactory(globalConfig: Config, instanceConfig: Con
       options,
       defaultProject
     )
-  }
 }

 object GcsPathBuilderFactory {
diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleUtil.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleUtil.scala
index 481a3f36d10..8149c822670 100644
--- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleUtil.scala
+++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/GoogleUtil.scala
@@ -11,43 +11,43 @@ import cromwell.core.{CromwellFatalExceptionMarker, WorkflowOptions}
 import scala.concurrent.{ExecutionContext, Future}

 object GoogleUtil {
+
   /**
    * Extract status code from an exception if it's a com.google.api.client.http.HttpResponseException
    */
-  def extractStatusCode(exception: Throwable): Option[Int] = {
+  def extractStatusCode(exception: Throwable): Option[Int] =
    exception match {
      case t: HttpResponseException => Option(t.getStatusCode)
      case t: BaseServiceException =>
Option(t.getCode) case _ => None } - } implicit class EnhancedGoogleAuthMode(val googleAuthMode: GoogleAuthMode) extends AnyVal { + /** * Retries getting the credentials three times. * * There is nothing GCS specific about this method. This package just happens to be the lowest level with access * to core's version of Retry + cloudSupport's implementation of GoogleAuthMode. */ - def retryCredentials(options: WorkflowOptions, scopes: Iterable[String]) - (implicit actorSystem: ActorSystem, executionContext: ExecutionContext): Future[Credentials] = { - def credential(): Credentials = { - - try { + def retryCredentials(options: WorkflowOptions, scopes: Iterable[String])(implicit + actorSystem: ActorSystem, + executionContext: ExecutionContext + ): Future[Credentials] = { + def credential(): Credentials = + try googleAuthMode.credentials(options.get(_).get, scopes) - } catch { + catch { case exception: OptionLookupException => throw new IllegalArgumentException(s"Missing parameters in workflow options: ${exception.key}", exception) with CromwellFatalExceptionMarker } - } - def isFatal(throwable: Throwable): Boolean = { + def isFatal(throwable: Throwable): Boolean = throwable match { case _: IllegalArgumentException => Option(throwable.getCause).exists(isFatal) case _ => GoogleAuthMode.isFatal(throwable) } - } Retry.withRetry( () => Future(credential()), diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/RequesterPaysErrors.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/RequesterPaysErrors.scala index 26ee0868c65..cf693698d18 100644 --- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/RequesterPaysErrors.scala +++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/RequesterPaysErrors.scala @@ -12,7 +12,7 @@ object RequesterPaysErrors { def isProjectNotProvidedError(storageException: StorageException) = storageException.getCode == BucketIsRequesterPaysErrorCode && - StringUtils.contains(storageException.getMessage, BucketIsRequesterPaysErrorMessage) + StringUtils.contains(storageException.getMessage, BucketIsRequesterPaysErrorMessage) def isProjectNotProvidedError(googleJsonError: GoogleJsonError) = googleJsonError.getCode == BucketIsRequesterPaysErrorCode && diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchCommandBuilder.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchCommandBuilder.scala index 5c7b2b724d3..77c15d09a84 100644 --- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchCommandBuilder.scala +++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchCommandBuilder.scala @@ -7,32 +7,32 @@ import cromwell.filesystems.gcs.GcsPath import scala.util.Try private case object PartialGcsBatchCommandBuilder extends PartialIoCommandBuilder { - override def sizeCommand: PartialFunction[Path, Try[GcsBatchSizeCommand]] = { - case gcsPath: GcsPath => GcsBatchSizeCommand.forPath(gcsPath) + override def sizeCommand: PartialFunction[Path, Try[GcsBatchSizeCommand]] = { case gcsPath: GcsPath => + GcsBatchSizeCommand.forPath(gcsPath) } - + override def deleteCommand: PartialFunction[(Path, Boolean), Try[GcsBatchDeleteCommand]] = { case (gcsPath: GcsPath, swallowIoExceptions) => GcsBatchDeleteCommand.forPath(gcsPath, swallowIoExceptions) } - + override def copyCommand: PartialFunction[(Path, Path), Try[GcsBatchCopyCommand]] = { case (gcsSrc: GcsPath, gcsDest: GcsPath) => GcsBatchCopyCommand.forPaths(gcsSrc, gcsDest) } - - override def hashCommand: 
PartialFunction[Path, Try[GcsBatchCrc32Command]] = { - case gcsPath: GcsPath => GcsBatchCrc32Command.forPath(gcsPath) + + override def hashCommand: PartialFunction[Path, Try[GcsBatchCrc32Command]] = { case gcsPath: GcsPath => + GcsBatchCrc32Command.forPath(gcsPath) } - override def touchCommand: PartialFunction[Path, Try[GcsBatchTouchCommand]] = { - case gcsPath: GcsPath => GcsBatchTouchCommand.forPath(gcsPath) + override def touchCommand: PartialFunction[Path, Try[GcsBatchTouchCommand]] = { case gcsPath: GcsPath => + GcsBatchTouchCommand.forPath(gcsPath) } - override def existsCommand: PartialFunction[Path, Try[GcsBatchExistsCommand]] = { - case gcsPath: GcsPath => GcsBatchExistsCommand.forPath(gcsPath) + override def existsCommand: PartialFunction[Path, Try[GcsBatchExistsCommand]] = { case gcsPath: GcsPath => + GcsBatchExistsCommand.forPath(gcsPath) } - override def isDirectoryCommand: PartialFunction[Path, Try[GcsBatchIsDirectoryCommand]] = { - case gcsPath: GcsPath => GcsBatchIsDirectoryCommand.forPath(gcsPath) + override def isDirectoryCommand: PartialFunction[Path, Try[GcsBatchIsDirectoryCommand]] = { case gcsPath: GcsPath => + GcsBatchIsDirectoryCommand.forPath(gcsPath) } } diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala index ed07fdb3605..f8d1239992a 100644 --- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala +++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala @@ -24,6 +24,7 @@ import scala.util.Try * @tparam U Return type of the Google response */ sealed trait GcsBatchIoCommand[T, U] extends IoCommand[T] { + /** * StorageRequest operation to be executed by this command */ @@ -50,7 +51,9 @@ sealed trait GcsBatchIoCommand[T, U] extends IoCommand[T] { /** * Override to handle a failure differently and potentially return a successful response. */ - def onFailure(googleJsonError: GoogleJsonError, httpHeaders: HttpHeaders): Option[Either[T, GcsBatchIoCommand[T, U]]] = None + def onFailure(googleJsonError: GoogleJsonError, + httpHeaders: HttpHeaders + ): Option[Either[T, GcsBatchIoCommand[T, U]]] = None /** * Use to signal that the request has failed because the user project was not set and that it can be retried with it. 
@@ -60,25 +63,26 @@ sealed trait GcsBatchIoCommand[T, U] extends IoCommand[T] { sealed trait SingleFileGcsBatchIoCommand[T, U] extends GcsBatchIoCommand[T, U] with SingleFileIoCommand[T] { override def file: GcsPath - //noinspection MutatorLikeMethodIsParameterless + // noinspection MutatorLikeMethodIsParameterless def setUserProject: Boolean def userProject: String = setUserProject.option(file.projectId).orNull } case class GcsBatchCopyCommand( - override val source: GcsPath, - sourceBlob: BlobId, - override val destination: GcsPath, - destinationBlob: BlobId, - rewriteToken: Option[String] = None, - setUserProject: Boolean = false - ) - extends IoCopyCommand(source, destination) with GcsBatchIoCommand[Unit, RewriteResponse] { + override val source: GcsPath, + sourceBlob: BlobId, + override val destination: GcsPath, + destinationBlob: BlobId, + rewriteToken: Option[String] = None, + setUserProject: Boolean = false +) extends IoCopyCommand(source, destination) + with GcsBatchIoCommand[Unit, RewriteResponse] { override def commandDescription: String = s"GcsBatchCopyCommand source '$source' destination '$destination' " + s"setUserProject '$setUserProject' rewriteToken '$rewriteToken'" override def operation: StorageRequest[RewriteResponse] = { - val rewriteOperation = source.apiStorage.objects() + val rewriteOperation = source.apiStorage + .objects() .rewrite(sourceBlob.getBucket, sourceBlob.getName, destinationBlob.getBucket, destinationBlob.getName, null) .setUserProject(setUserProject.option(source.projectId).orNull) @@ -92,13 +96,14 @@ case class GcsBatchCopyCommand( */ def withRewriteToken(rewriteToken: String): GcsBatchCopyCommand = copy(rewriteToken = Option(rewriteToken)) - override def onSuccess(response: RewriteResponse, httpHeaders: HttpHeaders): ErrorOr[Either[Unit, GcsBatchCopyCommand]] = { + override def onSuccess(response: RewriteResponse, + httpHeaders: HttpHeaders + ): ErrorOr[Either[Unit, GcsBatchCopyCommand]] = if (response.getDone) { mapGoogleResponse(response) map Left.apply } else { Right(withRewriteToken(response.getRewriteToken)).validNel } - } override def mapGoogleResponse(response: RewriteResponse): ErrorOr[Unit] = ().validNel @@ -106,31 +111,29 @@ case class GcsBatchCopyCommand( } object GcsBatchCopyCommand { - def forPaths(source: GcsPath, destination: GcsPath): Try[GcsBatchCopyCommand] = { + def forPaths(source: GcsPath, destination: GcsPath): Try[GcsBatchCopyCommand] = for { sourceBlob <- source.objectBlobId destinationBlob <- destination.objectBlobId } yield GcsBatchCopyCommand(source, sourceBlob, destination, destinationBlob) - } } case class GcsBatchDeleteCommand( - override val file: GcsPath, - blob: BlobId, - override val swallowIOExceptions: Boolean, - setUserProject: Boolean = false - ) extends IoDeleteCommand(file, swallowIOExceptions) with SingleFileGcsBatchIoCommand[Unit, Void] { - override def operation: StorageRequest[Void] = { + override val file: GcsPath, + blob: BlobId, + override val swallowIOExceptions: Boolean, + setUserProject: Boolean = false +) extends IoDeleteCommand(file, swallowIOExceptions) + with SingleFileGcsBatchIoCommand[Unit, Void] { + override def operation: StorageRequest[Void] = file.apiStorage.objects().delete(blob.getBucket, blob.getName).setUserProject(userProject) - } override def mapGoogleResponse(response: Void): ErrorOr[Unit] = ().validNel override def onFailure(googleJsonError: GoogleJsonError, - httpHeaders: HttpHeaders, - ): Option[Either[Unit, GcsBatchDeleteCommand]] = { + httpHeaders: HttpHeaders + ): 
Option[Either[Unit, GcsBatchDeleteCommand]] = if (swallowIOExceptions) Option(Left(())) else None - } override def withUserProject: GcsBatchDeleteCommand = this.copy(setUserProject = true) override def commandDescription: String = s"GcsBatchDeleteCommand file '$file' swallowIOExceptions " + @@ -138,9 +141,8 @@ case class GcsBatchDeleteCommand( } object GcsBatchDeleteCommand { - def forPath(file: GcsPath, swallowIOExceptions: Boolean): Try[GcsBatchDeleteCommand] = { + def forPath(file: GcsPath, swallowIOExceptions: Boolean): Try[GcsBatchDeleteCommand] = file.objectBlobId.map(GcsBatchDeleteCommand(file, _, swallowIOExceptions)) - } } /** @@ -149,21 +151,18 @@ object GcsBatchDeleteCommand { sealed trait GcsBatchGetCommand[T] extends SingleFileGcsBatchIoCommand[T, StorageObject] { def file: GcsPath def blob: BlobId - override def operation: StorageRequest[StorageObject] = { + override def operation: StorageRequest[StorageObject] = file.apiStorage.objects().get(blob.getBucket, blob.getName).setUserProject(userProject) - } } -case class GcsBatchSizeCommand(override val file: GcsPath, - override val blob: BlobId, - setUserProject: Boolean = false, - ) extends IoSizeCommand(file) with GcsBatchGetCommand[Long] { - override def mapGoogleResponse(response: StorageObject): ErrorOr[Long] = { +case class GcsBatchSizeCommand(override val file: GcsPath, override val blob: BlobId, setUserProject: Boolean = false) + extends IoSizeCommand(file) + with GcsBatchGetCommand[Long] { + override def mapGoogleResponse(response: StorageObject): ErrorOr[Long] = Option(response.getSize) match { case None => s"'${file.pathAsString}' in project '${file.projectId}' returned null size".invalidNel case Some(size) => size.longValue().validNel } - } override def withUserProject: GcsBatchSizeCommand = this.copy(setUserProject = true) @@ -171,21 +170,18 @@ case class GcsBatchSizeCommand(override val file: GcsPath, } object GcsBatchSizeCommand { - def forPath(file: GcsPath): Try[GcsBatchSizeCommand] = { + def forPath(file: GcsPath): Try[GcsBatchSizeCommand] = file.objectBlobId.map(GcsBatchSizeCommand(file, _)) - } } -case class GcsBatchCrc32Command(override val file: GcsPath, - override val blob: BlobId, - setUserProject: Boolean = false, - ) extends IoHashCommand(file) with GcsBatchGetCommand[String] { - override def mapGoogleResponse(response: StorageObject): ErrorOr[String] = { +case class GcsBatchCrc32Command(override val file: GcsPath, override val blob: BlobId, setUserProject: Boolean = false) + extends IoHashCommand(file) + with GcsBatchGetCommand[String] { + override def mapGoogleResponse(response: StorageObject): ErrorOr[String] = Option(response.getCrc32c) match { case None => s"'${file.pathAsString}' in project '${file.projectId}' returned null CRC32C checksum".invalidNel case Some(crc32c) => crc32c.validNel } - } override def withUserProject: GcsBatchCrc32Command = this.copy(setUserProject = true) @@ -193,15 +189,13 @@ case class GcsBatchCrc32Command(override val file: GcsPath, } object GcsBatchCrc32Command { - def forPath(file: GcsPath): Try[GcsBatchCrc32Command] = { + def forPath(file: GcsPath): Try[GcsBatchCrc32Command] = file.objectBlobId.map(GcsBatchCrc32Command(file, _)) - } } -case class GcsBatchTouchCommand(override val file: GcsPath, - override val blob: BlobId, - setUserProject: Boolean = false, - ) extends IoTouchCommand(file) with GcsBatchGetCommand[Unit] { +case class GcsBatchTouchCommand(override val file: GcsPath, override val blob: BlobId, setUserProject: Boolean = false) + extends 
IoTouchCommand(file) + with GcsBatchGetCommand[Unit] { override def mapGoogleResponse(response: StorageObject): ErrorOr[Unit] = ().validNel override def withUserProject: GcsBatchTouchCommand = this.copy(setUserProject = true) @@ -210,9 +204,8 @@ case class GcsBatchTouchCommand(override val file: GcsPath, } object GcsBatchTouchCommand { - def forPath(file: GcsPath): Try[GcsBatchTouchCommand] = { + def forPath(file: GcsPath): Try[GcsBatchTouchCommand] = file.objectBlobId.map(GcsBatchTouchCommand(file, _)) - } } /* @@ -220,47 +213,46 @@ object GcsBatchTouchCommand { * Specifically, list objects that have this path as a prefix. Since we don't really care about what's inside here, * set max results to 1 to avoid unnecessary payload. */ -case class GcsBatchIsDirectoryCommand(override val file: GcsPath, - blob: BlobId, - setUserProject: Boolean = false, - ) - extends IoIsDirectoryCommand(file) with SingleFileGcsBatchIoCommand[Boolean, Objects] { - override def operation: StorageRequest[Objects] = { - file.apiStorage.objects().list(blob.getBucket).setPrefix(blob.getName.ensureSlashed).setMaxResults(1L).setUserProject(userProject) - } - - override def mapGoogleResponse(response: Objects): ErrorOr[Boolean] = { +case class GcsBatchIsDirectoryCommand(override val file: GcsPath, blob: BlobId, setUserProject: Boolean = false) + extends IoIsDirectoryCommand(file) + with SingleFileGcsBatchIoCommand[Boolean, Objects] { + override def operation: StorageRequest[Objects] = + file.apiStorage + .objects() + .list(blob.getBucket) + .setPrefix(blob.getName.ensureSlashed) + .setMaxResults(1L) + .setUserProject(userProject) + + override def mapGoogleResponse(response: Objects): ErrorOr[Boolean] = // Wrap in an Option because getItems can (always ?) return null if there are no objects Option(response.getItems).map(_.asScala).exists(_.nonEmpty).validNel - } override def withUserProject: GcsBatchIsDirectoryCommand = this.copy(setUserProject = true) override def commandDescription: String = s"GcsBatchIsDirectoryCommand file '$file' setUserProject '$setUserProject'" } object GcsBatchIsDirectoryCommand { - def forPath(file: GcsPath): Try[GcsBatchIsDirectoryCommand] = { + def forPath(file: GcsPath): Try[GcsBatchIsDirectoryCommand] = file.bucketOrObjectBlobId.map(GcsBatchIsDirectoryCommand(file, _)) - } } -case class GcsBatchExistsCommand(override val file: GcsPath, - override val blob: BlobId, - setUserProject: Boolean = false, - ) extends IoExistsCommand(file) with GcsBatchGetCommand[Boolean] { +case class GcsBatchExistsCommand(override val file: GcsPath, override val blob: BlobId, setUserProject: Boolean = false) + extends IoExistsCommand(file) + with GcsBatchGetCommand[Boolean] { override def mapGoogleResponse(response: StorageObject): ErrorOr[Boolean] = true.validNel - override def onFailure(googleJsonError: GoogleJsonError, httpHeaders: HttpHeaders): Option[Either[Boolean, GcsBatchIoCommand[Boolean, StorageObject]]] = { + override def onFailure(googleJsonError: GoogleJsonError, + httpHeaders: HttpHeaders + ): Option[Either[Boolean, GcsBatchIoCommand[Boolean, StorageObject]]] = // If the object can't be found, don't fail the request but just return false as we were testing for existence if (googleJsonError.getCode == 404) Option(Left(false)) else None - } override def withUserProject: GcsBatchExistsCommand = this.copy(setUserProject = true) override def commandDescription: String = s"GcsBatchExistsCommand file '$file' setUserProject '$setUserProject'" } object GcsBatchExistsCommand { - def forPath(file: GcsPath): 
Try[GcsBatchExistsCommand] = { + def forPath(file: GcsPath): Try[GcsBatchExistsCommand] = file.objectBlobId.map(GcsBatchExistsCommand(file, _)) - } } /** A GcsBatchIoCommand for use in tests. */ diff --git a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala index 01a75e21ee5..be91888a610 100644 --- a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala +++ b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala @@ -22,9 +22,11 @@ class GcsEnhancedRequestSpec extends AnyFlatSpec with CromwellTimeoutSpec with M CloudStorageFileSystem.forBucket("bucket").getPath("test"), mock[com.google.api.services.storage.Storage], mock[com.google.cloud.storage.Storage], - "GcsEnhancedRequest-project", + "GcsEnhancedRequest-project" ) - val requesterPaysException = new StorageException(BucketIsRequesterPaysErrorCode, "Bucket is a requester pays bucket but no user project provided.") + val requesterPaysException = new StorageException(BucketIsRequesterPaysErrorCode, + "Bucket is a requester pays bucket but no user project provided." + ) it should "attempt first without project, and not retry if the requests succeeds" in { val testFunction = mock[Boolean => String] @@ -54,7 +56,9 @@ class GcsEnhancedRequestSpec extends AnyFlatSpec with CromwellTimeoutSpec with M when(testFunction.apply(false)).thenThrow(requesterPaysException) // We expect it to be called a second time with withProject = true this time, and fail for another reason when(testFunction.apply(true)).thenThrow(new RuntimeException("it really doesn't work")) - a[RuntimeException] should be thrownBy GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() + a[RuntimeException] should be thrownBy GcsEnhancedRequest + .recoverFromProjectNotProvided(path, testFunction) + .unsafeRunSync() } it should "not retry requests if the error does not match" in { @@ -62,7 +66,9 @@ class GcsEnhancedRequestSpec extends AnyFlatSpec with CromwellTimeoutSpec with M // Throw an unrelated exception, should only be called once when(testFunction.apply(false)).thenThrow(new RuntimeException("it really doesn't work")) - a[RuntimeException] should be thrownBy GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() + a[RuntimeException] should be thrownBy GcsEnhancedRequest + .recoverFromProjectNotProvided(path, testFunction) + .unsafeRunSync() verify(testFunction).apply(false) verify(testFunction).apply(anyBoolean) } @@ -72,7 +78,9 @@ class GcsEnhancedRequestSpec extends AnyFlatSpec with CromwellTimeoutSpec with M // Throw an unrelated exception, should only be called once when(testFunction.apply(false)).thenThrow(new StorageException(404, "gs://does/not/exist")) - a[FileNotFoundException] should be thrownBy GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() + a[FileNotFoundException] should be thrownBy GcsEnhancedRequest + .recoverFromProjectNotProvided(path, testFunction) + .unsafeRunSync() verify(testFunction).apply(false) verify(testFunction).apply(anyBoolean) } @@ -86,7 +94,9 @@ class GcsEnhancedRequestSpec extends AnyFlatSpec with CromwellTimeoutSpec with M // Throw an unrelated exception, should only be called once when(testFunction.apply(false)).thenAnswer(_ => throw new GoogleJsonResponseException(builder, error)) - a[FileNotFoundException] should be thrownBy 
GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() + a[FileNotFoundException] should be thrownBy GcsEnhancedRequest + .recoverFromProjectNotProvided(path, testFunction) + .unsafeRunSync() verify(testFunction).apply(false) verify(testFunction).apply(anyBoolean) } diff --git a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsPathBuilderSpec.scala b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsPathBuilderSpec.scala index 22e63ea0f32..a275d163e4a 100644 --- a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsPathBuilderSpec.scala +++ b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsPathBuilderSpec.scala @@ -15,7 +15,12 @@ import org.scalatest.prop.Tables.Table import java.io.ByteArrayInputStream -class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with PathBuilderSpecUtils with ServiceAccountTestSupport { +class GcsPathBuilderSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with PathBuilderSpecUtils + with ServiceAccountTestSupport { behavior of "GcsPathBuilder" @@ -64,8 +69,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "with spaces", getFileName = s"gs://$bucket/with spaces", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path with non-ascii", path = s"gs://$bucket/hello/world/with non ascii £€", @@ -78,8 +83,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "with non ascii £€", getFileName = s"gs://$bucket/with non ascii £€", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a gs uri path with encoded characters", path = s"gs://$bucket/hello/world/encoded%20spaces", @@ -92,8 +97,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "encoded%20spaces", getFileName = s"gs://$bucket/encoded%20spaces", getNameCount = 3, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a bucket only path", path = s"gs://$bucket", @@ -106,8 +111,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = s"gs://$bucket/", getNameCount = 1, - isAbsolute = false), - + isAbsolute = false + ), GoodPath( description = "a bucket only path ending in a /", path = s"gs://$bucket/", @@ -120,8 +125,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = null, getNameCount = 0, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a file at the top of the bucket", path = s"gs://$bucket/hello", @@ -134,8 +139,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "hello", getFileName = s"gs://$bucket/hello", getNameCount = 1, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a path ending in /", path = s"gs://$bucket/hello/world/", @@ -148,7 +153,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "world", getFileName = s"gs://$bucket/world", getNameCount = 2, - isAbsolute = true), + isAbsolute = true + ), // Special paths @@ -164,8 +170,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers name = "", getFileName = s"gs://$bucket/.", getNameCount = 1, - isAbsolute = true), - + isAbsolute = true + ), GoodPath( description = "a bucket with a path ..", path = s"gs://$bucket/..", @@ -178,8 +184,8 @@ class 
GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "",
       getFileName = s"gs://$bucket/..",
       getNameCount = 1,
-      isAbsolute = true),
-
+      isAbsolute = true
+    ),
     GoodPath(
       description = "a bucket including . in the path",
       path = s"gs://$bucket/hello/./world",
@@ -192,8 +198,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"gs://$bucket/world",
       getNameCount = 3,
-      isAbsolute = true),
-
+      isAbsolute = true
+    ),
     GoodPath(
       description = "a bucket including .. in the path",
       path = s"gs://$bucket/hello/../world",
@@ -206,7 +212,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"gs://$bucket/world",
       getNameCount = 3,
-      isAbsolute = true),
+      isAbsolute = true
+    ),

     // Normalized

@@ -222,8 +229,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "",
       getFileName = null,
       getNameCount = 0,
-      isAbsolute = true),
-
+      isAbsolute = true
+    ),
     GoodPath(
       description = "a bucket with a normalized path ..",
       path = s"gs://$bucket/..",
@@ -236,8 +243,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "",
       getFileName = s"gs://$bucket/",
       getNameCount = 1,
-      isAbsolute = false),
-
+      isAbsolute = false
+    ),
     GoodPath(
       description = "a bucket including . in the normalized path",
       path = s"gs://$bucket/hello/./world",
@@ -250,8 +257,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"gs://$bucket/world",
       getNameCount = 2,
-      isAbsolute = true),
-
+      isAbsolute = true
+    ),
    GoodPath(
       description = "a bucket including .. in the normalized path",
       path = s"gs://$bucket/hello/../world",
@@ -264,8 +271,8 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"gs://$bucket/world",
       getNameCount = 1,
-      isAbsolute = true),
-
+      isAbsolute = true
+    ),
    GoodPath(
       description = "a bucket with an underscore",
       path = s"gs://hello_underscore/world",
@@ -278,24 +285,34 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"gs://hello_underscore/world",
       getNameCount = 1,
-      isAbsolute = true)
+      isAbsolute = true
+    )
   )

   private def badPaths = Seq(
     BadPath("an empty path", "", "Path \"\" does not have a gcs scheme"),
-    BadPath("an bucketless path", "gs://", "The specified GCS path 'gs://' does not parse as a URI.\nExpected authority at index 5: gs://"),
-    BadPath("a bucket named .", "gs://./hello/world", "The path 'gs://./hello/world' does not seem to be a valid GCS path. Please check that it starts with gs:// and that the bucket and object follow GCS naming guidelines at https://cloud.google.com/storage/docs/naming."),
-    BadPath("a non ascii bucket name", "gs://nonasciibucket£€/hello/world",
-      "The path 'gs://nonasciibucket£€/hello/world' does not seem to be a valid GCS path. Please check that it starts with gs:// and that the bucket and object follow GCS naming guidelines at https://cloud.google.com/storage/docs/naming."),
+    BadPath("a bucketless path",
+            "gs://",
+            "The specified GCS path 'gs://' does not parse as a URI.\nExpected authority at index 5: gs://"
+    ),
+    BadPath(
+      "a bucket named .",
+      "gs://./hello/world",
+      "The path 'gs://./hello/world' does not seem to be a valid GCS path. 
Please check that it starts with gs:// and that the bucket and object follow GCS naming guidelines at https://cloud.google.com/storage/docs/naming." + ), + BadPath( + "a non ascii bucket name", + "gs://nonasciibucket£€/hello/world", + "The path 'gs://nonasciibucket£€/hello/world' does not seem to be a valid GCS path. Please check that it starts with gs:// and that the bucket and object follow GCS naming guidelines at https://cloud.google.com/storage/docs/naming." + ), BadPath("a https path", "https://hello/world", "Cloud Storage URIs must have 'gs' scheme: https://hello/world"), BadPath("a file uri path", "file:///hello/world", "Cloud Storage URIs must have 'gs' scheme: file:///hello/world"), BadPath("a relative file path", "hello/world", "Path \"hello/world\" does not have a gcs scheme"), BadPath("an absolute file path", "/hello/world", "Path \"/hello/world\" does not have a gcs scheme") ) - private lazy val pathBuilder = { + private lazy val pathBuilder = MockGcsPathBuilder.instance - } it should "not mix up credentials" in { def retrySettings: RetrySettings = RetrySettings.newBuilder().build() @@ -304,22 +321,25 @@ class GcsPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers val noCredentials = NoCredentials.getInstance() val noCredentialsPathBuilder: GcsPathBuilder = { val noCredentialsStorage = GcsStorage.gcsStorage("no-credentials", noCredentials, retrySettings) - val noCredentialsStorageOptions = GcsStorage.gcsStorageOptions(noCredentials, retrySettings, Option("proj-no-credentials")) + val noCredentialsStorageOptions = + GcsStorage.gcsStorageOptions(noCredentials, retrySettings, Option("proj-no-credentials")) new GcsPathBuilder(noCredentialsStorage, cloudStorageConfig, noCredentialsStorageOptions) } - val serviceAccountCredentials = ServiceAccountCredentials.fromStream( - new ByteArrayInputStream(serviceAccountJsonContents.getBytes)) + val serviceAccountCredentials = + ServiceAccountCredentials.fromStream(new ByteArrayInputStream(serviceAccountJsonContents.getBytes)) val serviceAccountPathBuilder: GcsPathBuilder = { val serviceAccountStorage = GcsStorage.gcsStorage("service-account", serviceAccountCredentials, retrySettings) - val serviceAccountStorageOptions = GcsStorage.gcsStorageOptions(serviceAccountCredentials, retrySettings, Option("proj-service-account")) + val serviceAccountStorageOptions = + GcsStorage.gcsStorageOptions(serviceAccountCredentials, retrySettings, Option("proj-service-account")) new GcsPathBuilder(serviceAccountStorage, cloudStorageConfig, serviceAccountStorageOptions) } def credentialsForPath(gcsPath: GcsPath): Credentials = { - val cloudFilesystemProvider = gcsPath.nioPath.getFileSystem.provider().asInstanceOf[CloudStorageFileSystemProvider] + val cloudFilesystemProvider = + gcsPath.nioPath.getFileSystem.provider().asInstanceOf[CloudStorageFileSystemProvider] val storageOptionsField = cloudFilesystemProvider.getClass.getDeclaredField("storageOptions") storageOptionsField.setAccessible(true) val storageOptions = storageOptionsField.get(cloudFilesystemProvider) diff --git a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/MockGcsPathBuilder.scala b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/MockGcsPathBuilder.scala index c42b7b324d0..8834d4c6476 100644 --- a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/MockGcsPathBuilder.scala +++ b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/MockGcsPathBuilder.scala @@ -12,12 +12,13 @@ import scala.concurrent.ExecutionContext object MockGcsPathBuilder { implicit 
val ec = ExecutionContext.fromExecutor(Executors.newSingleThreadExecutor()) - private def makeStorageOptions(project: Option[String] = Option("cromwell-test")) = GcsStorage.gcsStorageOptions(NoCredentials.getInstance(), RetrySettings.newBuilder().build(), project) + private def makeStorageOptions(project: Option[String] = Option("cromwell-test")) = + GcsStorage.gcsStorageOptions(NoCredentials.getInstance(), RetrySettings.newBuilder().build(), project) private val storageOptions = makeStorageOptions() private val apiStorage = GcsStorage.gcsStorage("cromwell-test-app", storageOptions) lazy val instance = new GcsPathBuilder(apiStorage, CloudStorageConfiguration.DEFAULT, storageOptions) - + def withOptions(workflowOptions: WorkflowOptions) = { val customStorageOptions = makeStorageOptions(workflowOptions.get("google_project").toOption) new GcsPathBuilder(apiStorage, GcsStorage.DefaultCloudStorageConfiguration, customStorageOptions) diff --git a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala index d54662f3535..a4c936a463d 100644 --- a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala +++ b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala @@ -182,7 +182,9 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte response.setDone(false) response.setRewriteToken("token") - command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.toOption.get.rewriteToken should be(Option("token")) + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.toOption.get.rewriteToken should be( + Option("token") + ) command.onFailure(new GoogleJsonError(), new HttpHeaders()) should be(None) } diff --git a/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilder.scala b/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilder.scala index 89bb22021df..fa1861dd81a 100644 --- a/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilder.scala +++ b/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilder.scala @@ -16,19 +16,20 @@ object HttpPathBuilder { def accepts(url: String): Boolean = url.matches("^http[s]?://.*") } - class HttpPathBuilder extends PathBuilder { override def name: String = "HTTP" - override def build(pathAsString: String): Try[Path] = { + override def build(pathAsString: String): Try[Path] = if (HttpPathBuilder.accepts(pathAsString)) Try { HttpPath(Paths.get(pathAsString)) - } else { + } + else { Failure(new IllegalArgumentException(s"$pathAsString does not have an http or https scheme")) } - } - def content(url: String)(implicit actorContext: ActorContext, actorMaterializer: ActorMaterializer): Future[NioPath] = { + def content( + url: String + )(implicit actorContext: ActorContext, actorMaterializer: ActorMaterializer): Future[NioPath] = { implicit val actorSystem: ActorSystem = actorContext.system implicit val executionContext: ExecutionContext = actorContext.dispatcher @@ -54,19 +55,19 @@ case class HttpPath(nioPath: NioPath) extends Path { override def pathWithoutScheme: String = pathAsString.replaceFirst("http[s]?://", "") - def fetchSize(implicit executionContext: ExecutionContext, actorSystem: ActorSystem): Future[Long] = { + def fetchSize(implicit executionContext: ExecutionContext, actorSystem: ActorSystem): Future[Long] = Http().singleRequest(HttpRequest(uri = 
pathAsString, method = HttpMethods.HEAD)).map { response => response.discardEntityBytes() - val length = if (response.status.isSuccess()) - response.entity.contentLengthOption - else - None + val length = + if (response.status.isSuccess()) + response.entity.contentLengthOption + else + None length.getOrElse( throw new RuntimeException( s"Couldn't fetch size for $pathAsString, missing Content-Length header or path doesn't exist (HTTP ${response.status.toString()})." ) ) } - } } diff --git a/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilderFactory.scala b/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilderFactory.scala index 7a42c113921..bf65ed34a31 100644 --- a/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilderFactory.scala +++ b/filesystems/http/src/main/scala/cromwell/filesystems/http/HttpPathBuilderFactory.scala @@ -8,10 +8,10 @@ import cromwell.core.path.{PathBuilder, PathBuilderFactory} import scala.concurrent.{ExecutionContext, Future} class HttpPathBuilderFactory(globalConfig: Config, instanceConfig: Config) extends PathBuilderFactory { - override def withOptions(options: WorkflowOptions) - (implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = { + override def withOptions( + options: WorkflowOptions + )(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = Future { new HttpPathBuilder } - } } diff --git a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilder.scala b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilder.scala index 0713afa1a71..4de28288cd6 100644 --- a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilder.scala +++ b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilder.scala @@ -76,13 +76,12 @@ object S3PathBuilder { override def errorMessage: String = s"S3 URIs must have 's3' scheme: $pathString" } final case class InvalidFullS3Path(pathString: String) extends InvalidS3Path { - override def errorMessage: String = { + override def errorMessage: String = s""" |The path '$pathString' does not seem to be a valid S3 path. |Please check that it starts with s3:// and that the bucket and object follow S3 naming guidelines at |https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html """.stripMargin.replace("\n", " ").trim - } } final case class UnparseableS3Path(pathString: String, throwable: Throwable) extends InvalidS3Path { override def errorMessage: String = @@ -98,7 +97,7 @@ object S3PathBuilder { def pathToUri(string: String): URI = URI.create(UrlEscapers.urlFragmentEscaper.escape(string)) - def validatePath(string: String): S3PathValidation = { + def validatePath(string: String): S3PathValidation = Try { val uri = pathToUri(string) if (uri.getScheme == null) { PossiblyValidRelativeS3Path } @@ -108,56 +107,46 @@ object S3PathBuilder { } else { ValidFullS3Path(uri.getHost, uri.getPath) } } else { InvalidScheme(string) } } recover { case t => UnparseableS3Path(string, t) } get - } def fromAuthMode(authMode: AwsAuthMode, configuration: S3Configuration, options: WorkflowOptions, - storageRegion: Option[Region])(implicit ec: ExecutionContext): Future[S3PathBuilder] = { + storageRegion: Option[Region] + )(implicit ec: ExecutionContext): Future[S3PathBuilder] = { val provider = authMode.provider() // Other backends needed retry here. In case we need retry, we'll return // a future. 
This will allow us to add capability without changing signature - Future(fromProvider(provider, - configuration, - options, - storageRegion - )) + Future(fromProvider(provider, configuration, options, storageRegion)) } def fromProvider(provider: AwsCredentialsProvider, configuration: S3Configuration, options: WorkflowOptions, - storageRegion: Option[Region]): S3PathBuilder = { + storageRegion: Option[Region] + ): S3PathBuilder = new S3PathBuilder(configuration) - } } -class S3PathBuilder(configuration: S3Configuration - ) extends PathBuilder { +class S3PathBuilder(configuration: S3Configuration) extends PathBuilder { // Tries to create a new S3Path from a String representing an absolute s3 path: s3://[/]. - def build(string: String): Try[S3Path] = { + def build(string: String): Try[S3Path] = validatePath(string) match { case ValidFullS3Path(bucket, path) => Try { val s3Path = new S3FileSystemProvider() .getFileSystem(URI.create("s3:////"), System.getenv) .getPath(s"""/$bucket/$path""") - S3Path(s3Path, bucket, - new AmazonS3ClientFactory().getS3Client(URI.create("s3:////"), System.getProperties)) + S3Path(s3Path, bucket, new AmazonS3ClientFactory().getS3Client(URI.create("s3:////"), System.getProperties)) } case PossiblyValidRelativeS3Path => Failure(new IllegalArgumentException(s"$string does not have a s3 scheme")) case invalid: InvalidS3Path => Failure(new IllegalArgumentException(invalid.errorMessage)) } - } override def name: String = "s3" } -case class S3Path private[s3](nioPath: NioPath, - bucket: String, - client: S3Client - ) extends Path { +case class S3Path private[s3] (nioPath: NioPath, bucket: String, client: S3Client) extends Path { override protected def newPath(nioPath: NioPath): S3Path = S3Path(nioPath, bucket, client) override def pathAsString: String = s"s3://$pathWithoutScheme" @@ -188,7 +177,7 @@ case class S3Path private[s3](nioPath: NioPath, val originalPath = s3Path.toString if (originalPath.startsWith("s3")) return s3Path.toAbsolutePath.toString originalPath.charAt(0) match { - case '/' => s3Path.toAbsolutePath.toString + case '/' => s3Path.toAbsolutePath.toString case _ => s3Path.resolve(s"/$bucket/$originalPath").toAbsolutePath.toString } } diff --git a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilderFactory.scala b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilderFactory.scala index 880b0b984c1..ad505a35929 100644 --- a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilderFactory.scala +++ b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/S3PathBuilderFactory.scala @@ -46,8 +46,8 @@ import scala.concurrent.{ExecutionContext, Future} // The constructor of this class is required to be Config, Config by cromwell // So, we need to take this config and get the AuthMode out of it -final case class S3PathBuilderFactory private(globalConfig: Config, instanceConfig: Config) - extends PathBuilderFactory { +final case class S3PathBuilderFactory private (globalConfig: Config, instanceConfig: Config) + extends PathBuilderFactory { // Grab the authMode out of configuration val conf: AwsConfiguration = AwsConfiguration(globalConfig) @@ -55,19 +55,16 @@ final case class S3PathBuilderFactory private(globalConfig: Config, instanceConf val authModeValidation: ErrorOr[AwsAuthMode] = conf.auth(authModeAsString) val authMode = authModeValidation.unsafe(s"Failed to get authentication mode for $authModeAsString") - def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[S3PathBuilder] = { 
- S3PathBuilder.fromAuthMode(authMode, S3Storage.DefaultConfiguration, options, conf.region) - } + def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[S3PathBuilder] = + S3PathBuilder.fromAuthMode(authMode, S3Storage.DefaultConfiguration, options, conf.region) // Ignores the authMode and creates an S3PathBuilder using the passed credentials directly. // Can be used when the Credentials are already available. - def fromProvider(options: WorkflowOptions, provider: AwsCredentialsProvider): S3PathBuilder = { + def fromProvider(options: WorkflowOptions, provider: AwsCredentialsProvider): S3PathBuilder = S3PathBuilder.fromProvider(provider, S3Storage.DefaultConfiguration, options, conf.region) - } } object S3PathBuilderFactory { - def apply(globalConfig: Config, instanceConfig: Config): S3PathBuilderFactory = { + def apply(globalConfig: Config, instanceConfig: Config): S3PathBuilderFactory = new S3PathBuilderFactory(globalConfig, instanceConfig) - } } diff --git a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchCommandBuilder.scala b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchCommandBuilder.scala index 8058d897975..74ec4e1b6b6 100644 --- a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchCommandBuilder.scala +++ b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchCommandBuilder.scala @@ -40,8 +40,8 @@ import scala.util.Try * Generates commands for IO operations on S3 */ private case object PartialS3BatchCommandBuilder extends PartialIoCommandBuilder { - override def sizeCommand: PartialFunction[Path, Try[S3BatchSizeCommand]] = { - case path: S3Path => Try(S3BatchSizeCommand(path)) + override def sizeCommand: PartialFunction[Path, Try[S3BatchSizeCommand]] = { case path: S3Path => + Try(S3BatchSizeCommand(path)) } override def deleteCommand: PartialFunction[(Path, Boolean), Try[S3BatchDeleteCommand]] = { @@ -52,16 +52,16 @@ private case object PartialS3BatchCommandBuilder extends PartialIoCommandBuilder case (src: S3Path, dest: S3Path) => Try(S3BatchCopyCommand(src, dest)) } - override def hashCommand: PartialFunction[Path, Try[S3BatchEtagCommand]] = { - case path: S3Path => Try(S3BatchEtagCommand(path)) + override def hashCommand: PartialFunction[Path, Try[S3BatchEtagCommand]] = { case path: S3Path => + Try(S3BatchEtagCommand(path)) } - override def touchCommand: PartialFunction[Path, Try[S3BatchTouchCommand]] = { - case path: S3Path => Try(S3BatchTouchCommand(path)) + override def touchCommand: PartialFunction[Path, Try[S3BatchTouchCommand]] = { case path: S3Path => + Try(S3BatchTouchCommand(path)) } - override def existsCommand: PartialFunction[Path, Try[S3BatchExistsCommand]] = { - case path: S3Path => Try(S3BatchExistsCommand(path)) + override def existsCommand: PartialFunction[Path, Try[S3BatchExistsCommand]] = { case path: S3Path => + Try(S3BatchExistsCommand(path)) } } diff --git a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchIoCommand.scala b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchIoCommand.scala index 75c224f6418..696c4d0240c 100644 --- a/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchIoCommand.scala +++ b/filesystems/s3/src/main/scala/cromwell/filesystems/s3/batch/S3BatchIoCommand.scala @@ -32,14 +32,16 @@ package cromwell.filesystems.s3.batch import software.amazon.awssdk.core.exception.SdkException -import software.amazon.awssdk.services.s3.model.{HeadObjectResponse, CopyObjectResponse, 
NoSuchKeyException} -import cromwell.core.io.{IoCommand, - IoDeleteCommand, - IoSizeCommand, - IoHashCommand, - IoTouchCommand, - IoExistsCommand, - IoCopyCommand} +import software.amazon.awssdk.services.s3.model.{CopyObjectResponse, HeadObjectResponse, NoSuchKeyException} +import cromwell.core.io.{ + IoCommand, + IoCopyCommand, + IoDeleteCommand, + IoExistsCommand, + IoHashCommand, + IoSizeCommand, + IoTouchCommand +} import cromwell.filesystems.s3.S3Path @@ -49,6 +51,7 @@ import cromwell.filesystems.s3.S3Path * @tparam U Return type of the response */ sealed trait S3BatchIoCommand[T, U] extends IoCommand[T] { + /** * Maps the response of type U to the Cromwell Io response of type T */ @@ -61,9 +64,8 @@ sealed trait S3BatchIoCommand[T, U] extends IoCommand[T] { * Right(newCommand) means the command is not complete and needs another request to be executed. * Most commands will reply with Left(value). */ - def onSuccess(response: U): Either[T, S3BatchIoCommand[T, U]] = { + def onSuccess(response: U): Either[T, S3BatchIoCommand[T, U]] = Left(mapResponse(response)) - } /** * Override to handle a failure differently and potentially return a successful response. @@ -72,19 +74,22 @@ sealed trait S3BatchIoCommand[T, U] extends IoCommand[T] { } case class S3BatchCopyCommand( - override val source: S3Path, - override val destination: S3Path, - ) extends IoCopyCommand(source, destination) with S3BatchIoCommand[Unit, CopyObjectResponse] { + override val source: S3Path, + override val destination: S3Path +) extends IoCopyCommand(source, destination) + with S3BatchIoCommand[Unit, CopyObjectResponse] { override def mapResponse(response: CopyObjectResponse): Unit = () override def commandDescription: String = s"S3BatchCopyCommand source '$source' destination '$destination'" } case class S3BatchDeleteCommand( - override val file: S3Path, - override val swallowIOExceptions: Boolean - ) extends IoDeleteCommand(file, swallowIOExceptions) with S3BatchIoCommand[Unit, Void] { + override val file: S3Path, + override val swallowIOExceptions: Boolean +) extends IoDeleteCommand(file, swallowIOExceptions) + with S3BatchIoCommand[Unit, Void] { override protected def mapResponse(response: Void): Unit = () - override def commandDescription: String = s"S3BatchDeleteCommand file '$file' swallowIOExceptions '$swallowIOExceptions'" + override def commandDescription: String = + s"S3BatchDeleteCommand file '$file' swallowIOExceptions '$swallowIOExceptions'" } /** @@ -126,14 +131,15 @@ case class S3BatchTouchCommand(override val file: S3Path) extends IoTouchCommand * `IoCommand` to determine the existence of an object in S3 * @param file the path to the object */ -case class S3BatchExistsCommand(override val file: S3Path) extends IoExistsCommand(file) with S3BatchHeadCommand[Boolean] { +case class S3BatchExistsCommand(override val file: S3Path) + extends IoExistsCommand(file) + with S3BatchHeadCommand[Boolean] { override def mapResponse(response: HeadObjectResponse): Boolean = true - override def onFailure(error: SdkException): Option[Left[Boolean, Nothing]] = { + override def onFailure(error: SdkException): Option[Left[Boolean, Nothing]] = // If the object can't be found, don't fail the request but just return false as we were testing for existence error match { - case _ : NoSuchKeyException => Option(Left(false)) + case _: NoSuchKeyException => Option(Left(false)) case _ => None } - } override def commandDescription: String = s"S3BatchExistsCommand file '$file'" } diff --git 
a/filesystems/s3/src/test/scala/cromwell/filesystems/s3/S3PathBuilderSpec.scala b/filesystems/s3/src/test/scala/cromwell/filesystems/s3/S3PathBuilderSpec.scala
index 262ad2cc7de..05703f548d2 100644
--- a/filesystems/s3/src/test/scala/cromwell/filesystems/s3/S3PathBuilderSpec.scala
+++ b/filesystems/s3/src/test/scala/cromwell/filesystems/s3/S3PathBuilderSpec.scala
@@ -107,7 +107,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "with spaces",
       getFileName = s"s3://$bucket/with spaces",
       getNameCount = 3,
-      isAbsolute = true),
+      isAbsolute = true
+    ),

 //    GoodPath(
 //      description = "a path with non-ascii",
@@ -134,7 +135,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "with non ascii £€",
       getFileName = s"s3://$bucket/with non ascii £€",
       getNameCount = 3,
-      isAbsolute = true),
+      isAbsolute = true
+    ),

 //    GoodPath(
 //      description = "a s3 uri path with encoded characters",
@@ -162,7 +164,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "encoded paces",
       getFileName = s"s3://$bucket/encoded paces",
       getNameCount = 3,
-      isAbsolute = true),
+      isAbsolute = true
+    ),

     // TODO: In order for this to pass tests, S3Path needs to implement the
     // Path trait directly and cannot inherit. We will work on this later
@@ -192,8 +195,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "",
       getFileName = null,
       getNameCount = 0,
-      isAbsolute = true),
-
+      isAbsolute = true
+    ),
     GoodPath(
       description = "a file at the top of the bucket",
       path = s"s3://$bucket/hello",
@@ -206,7 +209,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "hello",
       getFileName = s"s3://$bucket/hello",
       getNameCount = 1,
-      isAbsolute = true),
+      isAbsolute = true
+    ),

     // parent/getParent do not end in a "/".
     // TODO: Determine if this is critical. Note
@@ -236,7 +240,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"s3://$bucket/world",
       getNameCount = 2,
-      isAbsolute = true),
+      isAbsolute = true
+    ),

     // Special paths

@@ -252,7 +257,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "",
       getFileName = s"s3://$bucket/.",
       getNameCount = 1,
-      isAbsolute = true),
+      isAbsolute = true
+    ),

 //    GoodPath(
 //      description = "a bucket with a path ..",
@@ -352,8 +358,8 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"s3://$bucket/world",
       getNameCount = 1,
-      isAbsolute = true),
-
+      isAbsolute = true
+    ),
    GoodPath(
       description = "a bucket with an underscore",
       path = s"s3://hello_underscore/world",
@@ -366,27 +372,37 @@ class S3PathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
       name = "world",
       getFileName = s"s3://hello_underscore/world",
       getNameCount = 1,
-      isAbsolute = true)
+      isAbsolute = true
+    )
   )

   private def badPaths = Seq(
     BadPath("an empty path", "", " does not have a s3 scheme"),
-    BadPath("a bucketless path", "s3://", "The specified S3 path 's3://' does not parse as a URI.\nExpected authority at index 5: s3://"),
-    BadPath("a bucket named .", "s3://./hello/world", "The path 's3://./hello/world' does not seem to be a valid S3 path. 
Please check that it starts with s3:// and that the bucket and object follow S3 naming guidelines at https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html"), - BadPath("a non ascii bucket name", "s3://nonasciibucket£€/hello/world", - "The path 's3://nonasciibucket£€/hello/world' does not seem to be a valid S3 path. Please check that it starts with s3:// and that the bucket and object follow S3 naming guidelines at https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html"), + BadPath("a bucketless path", + "s3://", + "The specified S3 path 's3://' does not parse as a URI.\nExpected authority at index 5: s3://" + ), + BadPath( + "a bucket named .", + "s3://./hello/world", + "The path 's3://./hello/world' does not seem to be a valid S3 path. Please check that it starts with s3:// and that the bucket and object follow S3 naming guidelines at https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html" + ), + BadPath( + "a non ascii bucket name", + "s3://nonasciibucket£€/hello/world", + "The path 's3://nonasciibucket£€/hello/world' does not seem to be a valid S3 path. Please check that it starts with s3:// and that the bucket and object follow S3 naming guidelines at https://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html" + ), BadPath("a https path", "https://hello/world", "S3 URIs must have 's3' scheme: https://hello/world"), BadPath("a file uri path", "file:///hello/world", "S3 URIs must have 's3' scheme: file:///hello/world"), BadPath("a relative file path", "hello/world", "hello/world does not have a s3 scheme"), BadPath("an absolute file path", "/hello/world", "/hello/world does not have a s3 scheme") ) - private lazy val pathBuilder = { + private lazy val pathBuilder = S3PathBuilder.fromProvider( AnonymousCredentialsProvider.create, S3Storage.s3Configuration(), WorkflowOptions.empty, Option(Region.US_EAST_1) ) - } } diff --git a/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilder.scala b/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilder.scala index 7b539709079..1455a8898ea 100644 --- a/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilder.scala +++ b/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilder.scala @@ -27,6 +27,8 @@ case class SraPath(accession: String, path: String) extends Path { override def pathAsString: String = SraPath.Scheme + pathWithoutScheme override def pathWithoutScheme: String = accession + "/" + path - protected def newPath(nioPath: NioPath): Path = throw new UnsupportedOperationException("'newPath' not implemented for SraPath") + protected def newPath(nioPath: NioPath): Path = throw new UnsupportedOperationException( + "'newPath' not implemented for SraPath" + ) def nioPath: NioPath = throw new UnsupportedOperationException("'nioPath' not implemented for SraPath") } diff --git a/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilderFactory.scala b/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilderFactory.scala index bc487f1dd73..c3855aa4635 100644 --- a/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilderFactory.scala +++ b/filesystems/sra/src/main/scala/cromwell/filesystems/sra/SraPathBuilderFactory.scala @@ -7,9 +7,9 @@ import cromwell.core.path.PathBuilderFactory import scala.concurrent.{ExecutionContext, Future} -final case class SraPathBuilderFactory(globalConfig: Config, instanceConfig: Config) - extends PathBuilderFactory { - def withOptions(options: 
WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[SraPathBuilder] = Future.successful(SraPathBuilderFactory.pathBuilder) +final case class SraPathBuilderFactory(globalConfig: Config, instanceConfig: Config) extends PathBuilderFactory { + def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[SraPathBuilder] = + Future.successful(SraPathBuilderFactory.pathBuilder) } object SraPathBuilderFactory { diff --git a/filesystems/sra/src/test/scala/cromwell/filesystems/sra/SraPathBuilderSpec.scala b/filesystems/sra/src/test/scala/cromwell/filesystems/sra/SraPathBuilderSpec.scala index bbc44b55793..193e684c0ae 100644 --- a/filesystems/sra/src/test/scala/cromwell/filesystems/sra/SraPathBuilderSpec.scala +++ b/filesystems/sra/src/test/scala/cromwell/filesystems/sra/SraPathBuilderSpec.scala @@ -8,12 +8,12 @@ import org.scalatest.matchers.should.Matchers class SraPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with PathBuilderSpecUtils { behavior of "SraPathBuilder" - goodPaths foreach { - good => it should behave like buildGood(good) + goodPaths foreach { good => + it should behave like buildGood(good) } - badPaths foreach { - bad => it should behave like buildBad(bad) + badPaths foreach { bad => + it should behave like buildBad(bad) } private def buildGood(good: Good): Unit = { @@ -28,11 +28,11 @@ class SraPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers val sraPath = path.asInstanceOf[SraPath] it should "match expected accession" in { - sraPath.accession should be (good.accession) + sraPath.accession should be(good.accession) } it should "match expected path" in { - sraPath.path should be (good.path) + sraPath.path should be(good.path) } } @@ -40,50 +40,47 @@ class SraPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers behavior of s"Building ${bad.description}" it should "fail to build an SraPath" in { - pathBuilder.build(bad.in).isSuccess should be (false) + pathBuilder.build(bad.in).isSuccess should be(false) } } private lazy val pathBuilder = new SraPathBuilder - private case class Good(description: String, - in: String, - accession: String, - path: String) + private case class Good(description: String, in: String, accession: String, path: String) private def goodPaths = Seq( Good( description = "well-formed path", in = "sra://SXP000001/asdf.bam", accession = "SXP000001", - path = "asdf.bam", + path = "asdf.bam" ), Good( description = "nested path", in = "sra://SRA42424242/first/second/third.bam.bai", accession = "SRA42424242", - path = "first/second/third.bam.bai", + path = "first/second/third.bam.bai" ), Good( description = "path with spaces", in = "sra://SXP111111/top level/nested level/file name.bz2", accession = "SXP111111", - path = "top level/nested level/file name.bz2", - ), + path = "top level/nested level/file name.bz2" + ) ) private case class Bad(description: String, in: String) private def badPaths = Seq( Bad( description = "not an SRA path", - in = "gcs://some/gcs/path/thing.txt", + in = "gcs://some/gcs/path/thing.txt" ), Bad( description = "missing accession", - in = "sra://", + in = "sra://" ), Bad( description = "missing path within accession", - in = "sra://SRA00001", - ), + in = "sra://SRA00001" + ) ) } diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/LanguageFactory.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/LanguageFactory.scala index f16054f15a7..e2f1d48e9f4 100644 --- 
a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/LanguageFactory.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/LanguageFactory.scala @@ -23,27 +23,31 @@ trait LanguageFactory { import net.ceedubs.ficus.Ficus._ lazy val enabled = !config.as[Option[Boolean]]("enabled").contains(false) - lazy val enabledCheck: Checked[Unit] = if (enabled) ().validNelCheck else - s"The language factory for $languageName ($languageVersionName) is not currently enabled in this Cromwell".invalidNelCheck - + lazy val enabledCheck: Checked[Unit] = + if (enabled) ().validNelCheck + else + s"The language factory for $languageName ($languageVersionName) is not currently enabled in this Cromwell".invalidNelCheck lazy val strictValidation: Boolean = !config.as[Option[Boolean]]("strict-validation").contains(false) - lazy val womOutputRuntimeExtractor: Checked[Option[WomOutputRuntimeExtractor]] = config.getAs[Config]("output-runtime-extractor") match { - case Some(c) => WomOutputRuntimeExtractor.fromConfig(c).map(Option.apply).toEither - case _ => None.validNelCheck - } + lazy val womOutputRuntimeExtractor: Checked[Option[WomOutputRuntimeExtractor]] = + config.getAs[Config]("output-runtime-extractor") match { + case Some(c) => WomOutputRuntimeExtractor.fromConfig(c).map(Option.apply).toEither + case _ => None.validNelCheck + } def getWomBundle(workflowSource: WorkflowSource, workflowSourceOrigin: Option[ResolvedImportRecord], workflowOptionsJson: WorkflowOptionsJson, importResolvers: List[ImportResolver], languageFactories: List[LanguageFactory], - convertNestedScatterToSubworkflow : Boolean = true): Checked[WomBundle] + convertNestedScatterToSubworkflow: Boolean = true + ): Checked[WomBundle] def createExecutable(womBundle: WomBundle, inputs: WorkflowJson, - ioFunctions: IoFunctionSet): Checked[ValidatedWomNamespace] + ioFunctions: IoFunctionSet + ): Checked[ValidatedWomNamespace] def validateNamespace(source: WorkflowSourceFilesCollection, workflowSource: WorkflowSource, @@ -51,7 +55,8 @@ trait LanguageFactory { importLocalFilesystem: Boolean, workflowIdForLogging: WorkflowId, ioFunctions: IoFunctionSet, - importResolvers: List[ImportResolver]): IOChecked[ValidatedWomNamespace] + importResolvers: List[ImportResolver] + ): IOChecked[ValidatedWomNamespace] /** * In case no version is specified: does this language factory feel like it might be suitable for this file? 
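A note on the `enabled` lookup reformatted above: `!config.as[Option[Boolean]]("enabled").contains(false)` treats an absent key as permissive, so a language factory stays enabled unless its config explicitly sets `enabled = false` (and `strict-validation` follows the same pattern). A minimal self-contained sketch of that idiom, assuming only Typesafe Config and Ficus on the classpath; the object name and `println` checks are illustrative, not Cromwell code:

    import com.typesafe.config.{Config, ConfigFactory}
    import net.ceedubs.ficus.Ficus._

    object EnabledFlagSketch extends App {
      // Same idiom as LanguageFactory.enabled: only an explicit `false` disables.
      def enabled(config: Config): Boolean =
        !config.as[Option[Boolean]]("enabled").contains(false)

      println(enabled(ConfigFactory.empty()))                        // true: key absent
      println(enabled(ConfigFactory.parseString("enabled = true")))  // true
      println(enabled(ConfigFactory.parseString("enabled = false"))) // false
    }
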
diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/ValidatedWomNamespace.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/ValidatedWomNamespace.scala index 306a442672b..828b9c2bbc0 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/ValidatedWomNamespace.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/ValidatedWomNamespace.scala @@ -11,4 +11,5 @@ import wom.values.WomValue */ final case class ValidatedWomNamespace(executable: Executable, womValueInputs: Map[OutputPort, WomValue], - importedFileContent: Map[String, String]) + importedFileContent: Map[String, String] +) diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/CromwellLanguages.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/CromwellLanguages.scala index b7c4460df98..e05b48d991b 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/CromwellLanguages.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/CromwellLanguages.scala @@ -5,10 +5,11 @@ import cromwell.languages.config.CromwellLanguages.{CromwellLanguageName, Cromwe import cromwell.languages.LanguageFactory // Construct a singleton instance of this class using 'initLanguages' below. -final case class CromwellLanguages private(languageConfig: LanguagesConfiguration) { +final case class CromwellLanguages private (languageConfig: LanguagesConfiguration) { val languages: Map[CromwellLanguageName, LanguageVersions] = makeLanguages - val default: LanguageVersions = languages.find(lang => languageConfig.default.contains(lang._1)).getOrElse(languages.head)._2 + val default: LanguageVersions = + languages.find(lang => languageConfig.default.contains(lang._1)).getOrElse(languages.head)._2 private def makeLanguages: Map[CromwellLanguageName, LanguageVersions] = (languageConfig.languages map { lc => val versions = lc.versions map { case (languageVersion, languageConfigEntryFields) => @@ -19,18 +20,20 @@ final case class CromwellLanguages private(languageConfig: LanguagesConfiguratio lc.name.toUpperCase -> LanguageVersions(versions, default) }).toMap - private def makeLanguageFactory(className: String, config: Config) = { - Class.forName(className) + private def makeLanguageFactory(className: String, config: Config) = + Class + .forName(className) .getConstructor(classOf[Config]) .newInstance(config) .asInstanceOf[LanguageFactory] - } } /** * Holds all the registered versions of a language. 
*/ -final case class LanguageVersions private(allVersions: Map[CromwellLanguageVersion, LanguageFactory], default: LanguageFactory) +final case class LanguageVersions private (allVersions: Map[CromwellLanguageVersion, LanguageFactory], + default: LanguageFactory +) object CromwellLanguages { type CromwellLanguageName = String @@ -39,7 +42,6 @@ object CromwellLanguages { private var _instance: CromwellLanguages = _ lazy val instance: CromwellLanguages = _instance - def initLanguages(backendEntries: LanguagesConfiguration): Unit = { + def initLanguages(backendEntries: LanguagesConfiguration): Unit = _instance = CromwellLanguages(backendEntries) - } } diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala index 2cb09dd8a3d..153bdfd029c 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala @@ -8,18 +8,22 @@ import cromwell.languages.config.CromwellLanguages.{CromwellLanguageName, Cromwe import scala.jdk.CollectionConverters._ final case class LanguagesConfiguration(languages: List[LanguageVersionConfigurationEntry], default: Option[String]) -final case class LanguageVersionConfigurationEntry(name: CromwellLanguageName, versions: Map[CromwellLanguageVersion, LanguageVersionConfig], default: Option[String]) +final case class LanguageVersionConfigurationEntry(name: CromwellLanguageName, + versions: Map[CromwellLanguageVersion, LanguageVersionConfig], + default: Option[String] +) final case class LanguageVersionConfig(className: String, config: Config) object LanguageConfiguration { private val LanguagesConfig = ConfigFactory.load.getConfig("languages") - private val DefaultLanguageName: Option[String] = if (LanguagesConfig.hasPath("default")) Option(LanguagesConfig.getString("default")) else None + private val DefaultLanguageName: Option[String] = + if (LanguagesConfig.hasPath("default")) Option(LanguagesConfig.getString("default")) else None - private val LanguageNames: Set[String] = LanguagesConfig.entrySet().asScala.map(findFirstKey).filterNot(_ == "default").toSet + private val LanguageNames: Set[String] = + LanguagesConfig.entrySet().asScala.map(findFirstKey).filterNot(_ == "default").toSet val AllLanguageEntries: LanguagesConfiguration = { val languages = LanguageNames.toList map { languageName => - val languageConfig = LanguagesConfig.getConfig(languageName) val versionSet = languageConfig.getConfig("versions") val allLanguageVersionNames: Set[String] = versionSet.entrySet().asScala.map(findFirstKey).toSet @@ -29,7 +33,8 @@ object LanguageConfiguration { val versions = (languageVersionNames.toList map { languageVersionName => val configEntry = versionSet.getConfig(s""""$languageVersionName"""") val className: String = configEntry.getString("language-factory") - val factoryConfig: Config = if (configEntry.hasPath("config")) configEntry.getConfig("config") else ConfigFactory.empty() + val factoryConfig: Config = + if (configEntry.hasPath("config")) configEntry.getConfig("config") else ConfigFactory.empty() val fields = LanguageVersionConfig(className, factoryConfig) languageVersionName -> fields }).toMap diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala 
b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala index f7fd70cfc56..e3690248b09 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala @@ -32,13 +32,18 @@ import scala.util.{Failure, Success, Try} object ImportResolver { case class ImportResolutionRequest(toResolve: String, currentResolvers: List[ImportResolver]) - case class ResolvedImportBundle(source: WorkflowSource, newResolvers: List[ImportResolver], resolvedImportRecord: ResolvedImportRecord) + case class ResolvedImportBundle(source: WorkflowSource, + newResolvers: List[ImportResolver], + resolvedImportRecord: ResolvedImportRecord + ) trait ImportResolver { def name: String protected def innerResolver(path: String, currentResolvers: List[ImportResolver]): Checked[ResolvedImportBundle] def resolver: CheckedAtoB[ImportResolutionRequest, ResolvedImportBundle] = CheckedAtoB.fromCheck { request => - innerResolver(request.toResolve, request.currentResolvers).contextualizeErrors(s"resolve '${request.toResolve}' using resolver: '$name'") + innerResolver(request.toResolve, request.currentResolvers).contextualizeErrors( + s"resolve '${request.toResolve}' using resolver: '$name'" + ) } def cleanupIfNecessary(): ErrorOr[Unit] @@ -48,7 +53,10 @@ object ImportResolver { } object DirectoryResolver { - private def apply(directory: Path, allowEscapingDirectory: Boolean, customName: Option[String]): DirectoryResolver = { + private def apply(directory: Path, + allowEscapingDirectory: Boolean, + customName: Option[String] + ): DirectoryResolver = { val dontEscapeFrom = if (allowEscapingDirectory) None else Option(directory.toJava.getCanonicalPath) DirectoryResolver(directory, dontEscapeFrom, customName, deleteOnClose = false, directoryHash = None) } @@ -77,19 +85,23 @@ object ImportResolver { dontEscapeFrom: Option[String] = None, customName: Option[String], deleteOnClose: Boolean, - directoryHash: Option[String]) extends ImportResolver { + directoryHash: Option[String] + ) extends ImportResolver { lazy val absolutePathToDirectory: String = directory.toJava.getCanonicalPath override def innerResolver(path: String, currentResolvers: List[ImportResolver]): Checked[ResolvedImportBundle] = { - def updatedResolverSet(oldRootDirectory: Path, newRootDirectory: Path, current: List[ImportResolver]): List[ImportResolver] = { + def updatedResolverSet(oldRootDirectory: Path, + newRootDirectory: Path, + current: List[ImportResolver] + ): List[ImportResolver] = current map { - case d if d == this => DirectoryResolver(newRootDirectory, dontEscapeFrom, customName, deleteOnClose = false, directoryHash = None) + case d if d == this => + DirectoryResolver(newRootDirectory, dontEscapeFrom, customName, deleteOnClose = false, directoryHash = None) case other => other } - } - def fetchContentFromAbsolutePath(absolutePathToFile: NioPath): ErrorOr[String] = { + def fetchContentFromAbsolutePath(absolutePathToFile: NioPath): ErrorOr[String] = checkLocation(absolutePathToFile, path) flatMap { _ => val file = File(absolutePathToFile) if (file.exists) { @@ -98,7 +110,6 @@ object ImportResolver { s"File not found: $path".invalidNel } } - } val errorOr = for { resolvedPath <- resolvePath(path) @@ -111,7 +122,9 @@ object ImportResolver { } private def resolvePath(path: String): ErrorOr[Path] = Try(directory.resolve(path)).toErrorOr - private def makeAbsolute(resolvedPath: 
Path): ErrorOr[NioPath] = Try(Paths.get(resolvedPath.toFile.getCanonicalPath)).toErrorOr + private def makeAbsolute(resolvedPath: Path): ErrorOr[NioPath] = Try( + Paths.get(resolvedPath.toFile.getCanonicalPath) + ).toErrorOr private def checkLocation(absoluteNioPath: NioPath, reportedPathIfBad: String): ErrorOr[Unit] = if (dontEscapeFrom.forall(absoluteNioPath.startsWith)) ().validNel @@ -135,7 +148,8 @@ object ImportResolver { s"relative to directory $relativePathToDirectory (without escaping $relativePathToDontEscapeFrom)" case (None, None) => - val shortPathToDirectory = Paths.get(absolutePathToDirectory).toFile.getCanonicalFile.toPath.getFileName.toString + val shortPathToDirectory = + Paths.get(absolutePathToDirectory).toFile.getCanonicalFile.toPath.getFileName.toString s"relative to directory [...]/$shortPathToDirectory (escaping allowed)" } @@ -148,20 +162,25 @@ object ImportResolver { else ().validNel - override def hashKey: ErrorOr[String] = directoryHash.map(_.validNel).getOrElse("No hashKey available for directory importer".invalidNel) + override def hashKey: ErrorOr[String] = + directoryHash.map(_.validNel).getOrElse("No hashKey available for directory importer".invalidNel) } def zippedImportResolver(zippedImports: Array[Byte], workflowId: WorkflowId): ErrorOr[DirectoryResolver] = { val zipHash = new String(MessageDigest.getInstance("MD5").digest(zippedImports)) LanguageFactoryUtil.createImportsDirectory(zippedImports, workflowId) map { dir => - DirectoryResolver(dir, Option(dir.toJava.getCanonicalPath), None, deleteOnClose = true, directoryHash = Option(zipHash)) + DirectoryResolver(dir, + Option(dir.toJava.getCanonicalPath), + None, + deleteOnClose = true, + directoryHash = Option(zipHash) + ) } } - case class HttpResolver(relativeTo: Option[String], - headers: Map[String, String], - hostAllowlist: Option[List[String]]) extends ImportResolver { + case class HttpResolver(relativeTo: Option[String], headers: Map[String, String], hostAllowlist: Option[List[String]]) + extends ImportResolver { import HttpResolver._ override def name: String = relativeTo match { @@ -192,7 +211,7 @@ object ImportResolver { case None => true } - override def innerResolver(str: String, currentResolvers: List[ImportResolver]): Checked[ResolvedImportBundle] = { + override def innerResolver(str: String, currentResolvers: List[ImportResolver]): Checked[ResolvedImportBundle] = pathToLookup(str) flatMap { toLookup: WorkflowSource => (Try { val uri: Uri = uri"$toLookup" @@ -207,7 +226,6 @@ object ImportResolver { case Failure(e) => s"HTTP resolver with headers had an unexpected error (${e.getMessage})".invalidNelCheck }).contextualizeErrors(s"download $toLookup") } - } private def getUri(toLookup: WorkflowSource): Either[NonEmptyList[WorkflowSource], ResolvedImportBundle] = { implicit val sttpBackend = HttpResolver.sttpBackend() @@ -215,7 +233,9 @@ object ImportResolver { // temporary situation to get functionality working before // starting in on async-ifying the entire WdlNamespace flow - val result: Checked[WorkflowSource] = Await.result(responseIO.unsafeToFuture(), 15.seconds).body.leftMap { e => NonEmptyList(e.toString.trim, List.empty) } + val result: Checked[WorkflowSource] = Await.result(responseIO.unsafeToFuture(), 15.seconds).body.leftMap { e => + NonEmptyList(e.toString.trim, List.empty) + } result map { ResolvedImportBundle(_, newResolverList(toLookup), ResolvedImportRecord(toLookup)) @@ -232,8 +252,7 @@ object ImportResolver { import common.util.IntrospectableLazy import 
common.util.IntrospectableLazy._ - def apply(relativeTo: Option[String] = None, - headers: Map[String, String] = Map.empty): HttpResolver = { + def apply(relativeTo: Option[String] = None, headers: Map[String, String] = Map.empty): HttpResolver = { val config = ConfigFactory.load().getConfig("languages.WDL.http-allow-list") val allowListEnabled = config.as[Option[Boolean]]("enabled").getOrElse(false) val allowList: Option[List[String]] = diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala index da0860c3a86..8fabb445bac 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala @@ -32,7 +32,9 @@ object LanguageFactoryUtil { def createImportsDirectory(zipContents: Array[Byte], workflowId: WorkflowId): ErrorOr[Path] = { def makeZipFile: Try[Path] = Try { - DefaultPathBuilder.createTempFile(s"imports_workflow_${workflowId}_", ".zip").writeByteArray(zipContents)(OpenOptions.default) + DefaultPathBuilder + .createTempFile(s"imports_workflow_${workflowId}_", ".zip") + .writeByteArray(zipContents)(OpenOptions.default) } def unZipFile(f: Path) = Try(f.unzip()) @@ -49,25 +51,28 @@ object LanguageFactoryUtil { } } - def validateWomNamespace(womExecutable: Executable, ioFunctions: IoFunctionSet): Checked[ValidatedWomNamespace] = for { - evaluatedInputs <- validateExecutableInputs(womExecutable.resolvedExecutableInputs, ioFunctions).toEither - validatedWomNamespace = ValidatedWomNamespace(womExecutable, evaluatedInputs, Map.empty) - _ <- validateWdlFiles(validatedWomNamespace.womValueInputs) - } yield validatedWomNamespace + def validateWomNamespace(womExecutable: Executable, ioFunctions: IoFunctionSet): Checked[ValidatedWomNamespace] = + for { + evaluatedInputs <- validateExecutableInputs(womExecutable.resolvedExecutableInputs, ioFunctions).toEither + validatedWomNamespace = ValidatedWomNamespace(womExecutable, evaluatedInputs, Map.empty) + _ <- validateWdlFiles(validatedWomNamespace.womValueInputs) + } yield validatedWomNamespace /* - * At this point input values are either a WomValue (if it was provided through the input file) - * or a WomExpression (if we fell back to the default). - * We assume that default expressions do NOT reference any "variables" (other inputs, call outputs ...) - * Should this assumption prove not sufficient InstantiatedExpressions or ExpressionNodes would need to be provided - * instead so that they can be evaluated JIT. - * Note that the ioFunctions use engine level pathBuilders. This means that their credentials come from the engine section - * of the configuration, and are not backend specific. + * At this point input values are either a WomValue (if it was provided through the input file) + * or a WomExpression (if we fell back to the default). + * We assume that default expressions do NOT reference any "variables" (other inputs, call outputs ...) + * Should this assumption prove not sufficient InstantiatedExpressions or ExpressionNodes would need to be provided + * instead so that they can be evaluated JIT. + * Note that the ioFunctions use engine level pathBuilders. This means that their credentials come from the engine section + * of the configuration, and are not backend specific. 
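+   * For example (illustrative): a default such as `Int n = 4` evaluates with no lookups, while `Int m = n + 1`
+   * would reference another input and falls outside this assumption.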
*/ - private def validateExecutableInputs(inputs: ResolvedExecutableInputs, ioFunctions: IoFunctionSet): ErrorOr[Map[OutputPort, WomValue]] = { + private def validateExecutableInputs(inputs: ResolvedExecutableInputs, + ioFunctions: IoFunctionSet + ): ErrorOr[Map[OutputPort, WomValue]] = { import common.validation.ErrorOr.MapTraversal - inputs.traverse { - case (key, value) => value.fold(ResolvedExecutableInputsPoly).apply(ioFunctions) map { key -> _ } + inputs.traverse { case (key, value) => + value.fold(ResolvedExecutableInputsPoly).apply(ioFunctions) map { key -> _ } } } @@ -88,16 +93,18 @@ object LanguageFactoryUtil { } } - def simpleLooksParseable(startsWithOptions: List[String], commentIndicators: List[String])(content: String): Boolean = { + def simpleLooksParseable(startsWithOptions: List[String], commentIndicators: List[String])( + content: String + ): Boolean = { val fileWithoutInitialWhitespace = content.linesIterator.toList.dropWhile { l => l.forall(_.isWhitespace) || commentIndicators.exists(l.dropWhile(_.isWhitespace).startsWith(_)) } val firstCodeLine = fileWithoutInitialWhitespace.headOption.map(_.dropWhile(_.isWhitespace)) - firstCodeLine.exists { line => startsWithOptions.contains(line.trim) } + firstCodeLine.exists(line => startsWithOptions.contains(line.trim)) } - def chooseFactory(workflowSource: WorkflowSource, wsfc: WorkflowSourceFilesCollection): ErrorOr[LanguageFactory] = { + def chooseFactory(workflowSource: WorkflowSource, wsfc: WorkflowSourceFilesCollection): ErrorOr[LanguageFactory] = wsfc.workflowType match { case Some(languageName) if CromwellLanguages.instance.languages.contains(languageName.toUpperCase) => val language = CromwellLanguages.instance.languages(languageName.toUpperCase) @@ -110,21 +117,25 @@ object LanguageFactoryUtil { case Some(other) => s"Unknown workflow type: $other".invalidNel[LanguageFactory] case None => val allFactories = CromwellLanguages.instance.languages.values.flatMap(_.allVersions.values) - allFactories.find(_.looksParsable(workflowSource)).getOrElse(CromwellLanguages.instance.default.default).validNel + allFactories + .find(_.looksParsable(workflowSource)) + .getOrElse(CromwellLanguages.instance.default.default) + .validNel } - } def findWorkflowSource(workflowSource: Option[WorkflowSource], workflowUrl: Option[WorkflowUrl], - resolvers: List[ImportResolver]): Checked[(WorkflowSource, List[ImportResolver])] = { + resolvers: List[ImportResolver] + ): Checked[(WorkflowSource, List[ImportResolver])] = (workflowSource, workflowUrl) match { case (Some(source), None) => (source, resolvers).validNelCheck case (None, Some(url)) => - val compoundImportResolver: CheckedAtoB[ImportResolutionRequest, ResolvedImportBundle] = CheckedAtoB.firstSuccess(resolvers.map(_.resolver), s"resolve workflowUrl '$url'") - val wfSourceAndResolvers: Checked[ResolvedImportBundle] = compoundImportResolver.run(ImportResolutionRequest(url, resolvers)) + val compoundImportResolver: CheckedAtoB[ImportResolutionRequest, ResolvedImportBundle] = + CheckedAtoB.firstSuccess(resolvers.map(_.resolver), s"resolve workflowUrl '$url'") + val wfSourceAndResolvers: Checked[ResolvedImportBundle] = + compoundImportResolver.run(ImportResolutionRequest(url, resolvers)) wfSourceAndResolvers map { v => (v.source, v.newResolvers) } case (Some(_), Some(_)) => "Both workflow source and url can't be supplied".invalidNelCheck case (None, None) => "Either workflow source or url has to be supplied".invalidNelCheck } - } } diff --git 
a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ParserCache.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ParserCache.scala index 5d6feda9882..839c4e855f8 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ParserCache.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ParserCache.scala @@ -22,23 +22,27 @@ import scala.concurrent.duration._ trait ParserCache[A] extends StrictLogging { this: LanguageFactory => - def retrieveOrCalculate(cacheInputs: ParserCacheInputs, - calculationCallable: Callable[ErrorOr[A]]): ErrorOr[A] = { - + def retrieveOrCalculate(cacheInputs: ParserCacheInputs, calculationCallable: Callable[ErrorOr[A]]): ErrorOr[A] = (cache map { c: Cache[String, ErrorOr[A]] => - workflowHashKey(cacheInputs.workflowSource, cacheInputs.workflowUrl, cacheInputs.workflowRoot, cacheInputs.importResolvers) match { + workflowHashKey(cacheInputs.workflowSource, + cacheInputs.workflowUrl, + cacheInputs.workflowRoot, + cacheInputs.importResolvers + ) match { case Valid(hashKey) => c.get(hashKey, calculationCallable) case Invalid(errors) => - logger.info(s"Failed to calculate hash key for 'workflow source to WOM' cache: {}", errors.toList.mkString(", ")) + logger.info(s"Failed to calculate hash key for 'workflow source to WOM' cache: {}", + errors.toList.mkString(", ") + ) calculationCallable.call } }).getOrElse(calculationCallable.call()) - } private[this] def workflowHashKey(workflowSource: Option[WorkflowSource], workflowUrl: Option[WorkflowUrl], workflowRoot: Option[String], - importResolvers: List[ImportResolver]): ErrorOr[String] = { + importResolvers: List[ImportResolver] + ): ErrorOr[String] = { def stringOptionToHash(opt: Option[String]): String = opt map { _.md5Sum } getOrElse "" val importResolversToHash: ErrorOr[String] = importResolvers.traverse(_.hashKey).map(_.mkString(":")) @@ -48,17 +52,21 @@ trait ParserCache[A] extends StrictLogging { this: LanguageFactory => } } - private[this] lazy val cacheConfig: Option[CacheConfig] = { + private[this] lazy val cacheConfig: Option[CacheConfig] = // Caching is an opt-in activity: for { _ <- enabled.option(()) cachingConfigSection <- config.as[Option[Config]]("caching") - cc <- CacheConfig.optionalConfig(cachingConfigSection, defaultConcurrency = 2, defaultSize = 1000L, defaultTtl = 20.minutes) + cc <- CacheConfig.optionalConfig(cachingConfigSection, + defaultConcurrency = 2, + defaultSize = 1000L, + defaultTtl = 20.minutes + ) } yield cc - } private[this] lazy val cache: Option[Cache[String, ErrorOr[A]]] = cacheConfig map { c => - CacheBuilder.newBuilder() + CacheBuilder + .newBuilder() .concurrencyLevel(c.concurrency) .expireAfterAccess(c.ttl.length, c.ttl.unit) .maximumSize(c.size) @@ -70,5 +78,6 @@ object ParserCache { final case class ParserCacheInputs(workflowSource: Option[WorkflowSource], workflowUrl: Option[WorkflowUrl], workflowRoot: Option[String], - importResolvers: List[ImportResolver]) + importResolvers: List[ImportResolver] + ) } diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ResolvedExecutableInputsPoly.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ResolvedExecutableInputsPoly.scala index dce4da0df38..888be25cfb5 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ResolvedExecutableInputsPoly.scala +++ 
b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ResolvedExecutableInputsPoly.scala @@ -8,10 +8,13 @@ import wom.expression.{IoFunctionSet, WomExpression} import wom.values.WomValue object ResolvedExecutableInputsPoly extends Poly1 { - implicit def fromWdlValue: Case.Aux[WomValue, IoFunctionSet => ErrorOr[WomValue]] = at[WomValue] { wdlValue => - _: IoFunctionSet => wdlValue.validNel + implicit def fromWdlValue: Case.Aux[WomValue, IoFunctionSet => ErrorOr[WomValue]] = at[WomValue] { + wdlValue => _: IoFunctionSet => wdlValue.validNel } - implicit def fromWomExpression: Case.Aux[WomExpression, IoFunctionSet => ErrorOr[WomValue]] = at[WomExpression] { womExpression => - ioFunctions: IoFunctionSet => womExpression.evaluateValue(Map.empty, ioFunctions).contextualizeErrors(s"evaluate expression '${womExpression.sourceString}'") + implicit def fromWomExpression: Case.Aux[WomExpression, IoFunctionSet => ErrorOr[WomValue]] = at[WomExpression] { + womExpression => ioFunctions: IoFunctionSet => + womExpression + .evaluateValue(Map.empty, ioFunctions) + .contextualizeErrors(s"evaluate expression '${womExpression.sourceString}'") } } diff --git a/languageFactories/language-factory-core/src/test/scala/cromwell/languages/util/ImportResolverSpec.scala b/languageFactories/language-factory-core/src/test/scala/cromwell/languages/util/ImportResolverSpec.scala index 813a688a573..720a7f279af 100644 --- a/languageFactories/language-factory-core/src/test/scala/cromwell/languages/util/ImportResolverSpec.scala +++ b/languageFactories/language-factory-core/src/test/scala/cromwell/languages/util/ImportResolverSpec.scala @@ -11,7 +11,6 @@ import cromwell.languages.util.ImportResolver.{DirectoryResolver, HttpResolver} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class ImportResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "HttpResolver" @@ -49,7 +48,8 @@ class ImportResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match it should "resolve a path and store the import in ResolvedImportRecord" in { val resolver = HttpResolver(None, Map.empty, None) - val importUri = "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/centaur/src/main/resources/standardTestCases/hello/hello.wdl" + val importUri = + "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/centaur/src/main/resources/standardTestCases/hello/hello.wdl" val resolvedBundle = resolver.innerResolver(importUri, List(resolver)) resolvedBundle.map(_.resolvedImportRecord) match { @@ -158,7 +158,8 @@ class ImportResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match behavior of "directory resolver from root" val workingDirectory = sys.props("user.dir") - val rootDirectoryResolver = DirectoryResolver(DefaultPath(Paths.get("/")), customName = None, deleteOnClose = false, directoryHash = None) + val rootDirectoryResolver = + DirectoryResolver(DefaultPath(Paths.get("/")), customName = None, deleteOnClose = false, directoryHash = None) it should "resolve a random path" in { val pathToLookup = rootDirectoryResolver.resolveAndMakeAbsolute("/path/to/file.wdl") @@ -177,10 +178,18 @@ class ImportResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match behavior of "unprotected relative directory resolver" - val relativeDirectoryResolver = DirectoryResolver(DefaultPath(Paths.get("/path/to/imports/")), customName = None, deleteOnClose = false, directoryHash = None) + val relativeDirectoryResolver = 
DirectoryResolver(DefaultPath(Paths.get("/path/to/imports/")), + customName = None, + deleteOnClose = false, + directoryHash = None + ) val relativeDirForSampleWf = s"$workingDirectory/languageFactories/language-factory-core/src/test/" - val relativeDirResolverForSampleWf = DirectoryResolver(DefaultPath(Paths.get(relativeDirForSampleWf)), customName = None, deleteOnClose = false, directoryHash = None) + val relativeDirResolverForSampleWf = DirectoryResolver(DefaultPath(Paths.get(relativeDirForSampleWf)), + customName = None, + deleteOnClose = false, + directoryHash = None + ) it should "resolve an absolute path" in { val pathToLookup = relativeDirectoryResolver.resolveAndMakeAbsolute("/path/to/file.wdl") @@ -208,14 +217,24 @@ class ImportResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match resolvedBundle.map(_.resolvedImportRecord) match { case Left(e) => fail(s"Expected ResolvedImportBundle but got $e") - case Right(resolvedImport) => resolvedImport.importPath shouldBe(relativeDirForSampleWf + path) + case Right(resolvedImport) => resolvedImport.importPath shouldBe (relativeDirForSampleWf + path) } } behavior of "protected relative directory resolver" - val protectedRelativeDirectoryResolver = DirectoryResolver(DefaultPath(Paths.get("/path/to/imports/")), Some("/path/to/imports/"), customName = None, deleteOnClose = false, directoryHash = None) - val protectedRelativeDirResolverForSampleWf = DirectoryResolver(DefaultPath(Paths.get(relativeDirForSampleWf)), Some(relativeDirForSampleWf), customName = None, deleteOnClose = false, directoryHash = None) + val protectedRelativeDirectoryResolver = DirectoryResolver(DefaultPath(Paths.get("/path/to/imports/")), + Some("/path/to/imports/"), + customName = None, + deleteOnClose = false, + directoryHash = None + ) + val protectedRelativeDirResolverForSampleWf = DirectoryResolver(DefaultPath(Paths.get(relativeDirForSampleWf)), + Some(relativeDirForSampleWf), + customName = None, + deleteOnClose = false, + directoryHash = None + ) it should "resolve a good relative path" in { val pathToLookup = protectedRelativeDirectoryResolver.resolveAndMakeAbsolute("path/to/file.wdl") @@ -224,11 +243,12 @@ class ImportResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match it should "resolve a good relative path to sampleWorkflow" in { val path = "resources/sampleWorkflow.wdl" - val resolvedBundle = protectedRelativeDirResolverForSampleWf.innerResolver(path, List(protectedRelativeDirResolverForSampleWf)) + val resolvedBundle = + protectedRelativeDirResolverForSampleWf.innerResolver(path, List(protectedRelativeDirResolverForSampleWf)) resolvedBundle.map(_.resolvedImportRecord) match { case Left(e) => fail(s"Expected ResolvedImportBundle but got $e") - case Right(resolvedImport) => resolvedImport.importPath shouldBe(relativeDirForSampleWf + path) + case Right(resolvedImport) => resolvedImport.importPath shouldBe (relativeDirForSampleWf + path) } } @@ -252,7 +272,7 @@ class ImportResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match resolver.resolveAndMakeAbsolute("QC.wdl").map(Files.exists(_)).toOption shouldBe Some(true) resolver.resolveAndMakeAbsolute("tasks/cutadapt.wdl").map(Files.exists(_)).toOption shouldBe Some(true) resolver.resolveAndMakeAbsolute("tasks/fastqc.wdl").map(Files.exists(_)).toOption shouldBe Some(true) - // Make sure above testing is correct by testing for a non-existent wdl. + // Make sure above testing is correct by testing for a non-existent wdl. 
resolver.resolveAndMakeAbsolute("machine_learning_skynet.wdl").map(Files.exists(_)).toOption shouldBe Some(false) } diff --git a/languageFactories/wdl-biscayne/src/main/scala/languages/wdl/biscayne/WdlBiscayneLanguageFactory.scala b/languageFactories/wdl-biscayne/src/main/scala/languages/wdl/biscayne/WdlBiscayneLanguageFactory.scala index 355a637439b..eace0c91f9b 100644 --- a/languageFactories/wdl-biscayne/src/main/scala/languages/wdl/biscayne/WdlBiscayneLanguageFactory.scala +++ b/languageFactories/wdl-biscayne/src/main/scala/languages/wdl/biscayne/WdlBiscayneLanguageFactory.scala @@ -38,13 +38,19 @@ class WdlBiscayneLanguageFactory(override val config: Config) extends LanguageFa importLocalFilesystem: Boolean, workflowIdForLogging: WorkflowId, ioFunctions: IoFunctionSet, - importResolvers: List[ImportResolver]): IOChecked[ValidatedWomNamespace] = { + importResolvers: List[ImportResolver] + ): IOChecked[ValidatedWomNamespace] = { val factories: List[LanguageFactory] = List(this) val checked: Checked[ValidatedWomNamespace] = for { _ <- enabledCheck - bundle <- getWomBundle(workflowSource, workflowSourceOrigin = None, source.workflowOptions.asPrettyJson, importResolvers, factories) + bundle <- getWomBundle(workflowSource, + workflowSourceOrigin = None, + source.workflowOptions.asPrettyJson, + importResolvers, + factories + ) executable <- createExecutable(bundle, source.inputsJson, ioFunctions) } yield executable @@ -57,17 +63,31 @@ class WdlBiscayneLanguageFactory(override val config: Config) extends LanguageFa workflowOptionsJson: WorkflowOptionsJson, importResolvers: List[ImportResolver], languageFactories: List[LanguageFactory], - convertNestedScatterToSubworkflow : Boolean = true): Checked[WomBundle] = { - - val converter: CheckedAtoB[FileStringParserInput, WomBundle] = stringToAst andThen wrapAst andThen astToFileElement.map(FileElementToWomBundleInputs(_, workflowOptionsJson, convertNestedScatterToSubworkflow, importResolvers, languageFactories, workflowDefinitionElementToWomWorkflowDefinition, taskDefinitionElementToWomTaskDefinition)) andThen fileElementToWomBundle + convertNestedScatterToSubworkflow: Boolean = true + ): Checked[WomBundle] = { + + val converter: CheckedAtoB[FileStringParserInput, WomBundle] = + stringToAst andThen wrapAst andThen astToFileElement.map( + FileElementToWomBundleInputs( + _, + workflowOptionsJson, + convertNestedScatterToSubworkflow, + importResolvers, + languageFactories, + workflowDefinitionElementToWomWorkflowDefinition, + taskDefinitionElementToWomTaskDefinition + ) + ) andThen fileElementToWomBundle lazy val validationCallable = new Callable[ErrorOr[WomBundle]] { def call: ErrorOr[WomBundle] = converter .run(FileStringParserInput(workflowSource, workflowSourceOrigin.map(_.importPath).getOrElse("input.wdl"))) - .map(b => b.copyResolvedImportRecord(b, workflowSourceOrigin)).toValidated + .map(b => b.copyResolvedImportRecord(b, workflowSourceOrigin)) + .toValidated } - lazy val parserCacheInputs = ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), None, importResolvers) + lazy val parserCacheInputs = + ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), None, importResolvers) for { _ <- enabledCheck @@ -75,13 +95,16 @@ class WdlBiscayneLanguageFactory(override val config: Config) extends LanguageFa } yield womBundle } - override def createExecutable(womBundle: WomBundle, inputsJson: WorkflowJson, ioFunctions: IoFunctionSet): Checked[ValidatedWomNamespace] = { + override def 
createExecutable(womBundle: WomBundle, + inputsJson: WorkflowJson, + ioFunctions: IoFunctionSet + ): Checked[ValidatedWomNamespace] = for { _ <- enabledCheck executable <- womBundle.toWomExecutable(Option(inputsJson), ioFunctions, strictValidation) validated <- LanguageFactoryUtil.validateWomNamespace(executable, ioFunctions) } yield validated - } - override def looksParsable(content: String): Boolean = LanguageFactoryUtil.simpleLooksParseable(List("version development-1.1"), List("#"))(content) + override def looksParsable(content: String): Boolean = + LanguageFactoryUtil.simpleLooksParseable(List("version development-1.1"), List("#"))(content) } diff --git a/languageFactories/wdl-cascades/src/main/scala/languages/wdl/cascades/WdlCascadesLanguageFactory.scala b/languageFactories/wdl-cascades/src/main/scala/languages/wdl/cascades/WdlCascadesLanguageFactory.scala index d7757e83e71..5954f964fc5 100644 --- a/languageFactories/wdl-cascades/src/main/scala/languages/wdl/cascades/WdlCascadesLanguageFactory.scala +++ b/languageFactories/wdl-cascades/src/main/scala/languages/wdl/cascades/WdlCascadesLanguageFactory.scala @@ -38,13 +38,19 @@ class WdlCascadesLanguageFactory(override val config: Config) extends LanguageFa importLocalFilesystem: Boolean, workflowIdForLogging: WorkflowId, ioFunctions: IoFunctionSet, - importResolvers: List[ImportResolver]): IOChecked[ValidatedWomNamespace] = { + importResolvers: List[ImportResolver] + ): IOChecked[ValidatedWomNamespace] = { val factories: List[LanguageFactory] = List(this) val checked: Checked[ValidatedWomNamespace] = for { _ <- enabledCheck - bundle <- getWomBundle(workflowSource, workflowSourceOrigin = None, source.workflowOptions.asPrettyJson, importResolvers, factories) + bundle <- getWomBundle(workflowSource, + workflowSourceOrigin = None, + source.workflowOptions.asPrettyJson, + importResolvers, + factories + ) executable <- createExecutable(bundle, source.inputsJson, ioFunctions) } yield executable @@ -57,17 +63,31 @@ class WdlCascadesLanguageFactory(override val config: Config) extends LanguageFa workflowOptionsJson: WorkflowOptionsJson, importResolvers: List[ImportResolver], languageFactories: List[LanguageFactory], - convertNestedScatterToSubworkflow : Boolean = true): Checked[WomBundle] = { - - val converter: CheckedAtoB[FileStringParserInput, WomBundle] = stringToAst andThen wrapAst andThen astToFileElement.map(FileElementToWomBundleInputs(_, workflowOptionsJson, convertNestedScatterToSubworkflow, importResolvers, languageFactories, workflowDefinitionElementToWomWorkflowDefinition, taskDefinitionElementToWomTaskDefinition)) andThen fileElementToWomBundle + convertNestedScatterToSubworkflow: Boolean = true + ): Checked[WomBundle] = { + + val converter: CheckedAtoB[FileStringParserInput, WomBundle] = + stringToAst andThen wrapAst andThen astToFileElement.map( + FileElementToWomBundleInputs( + _, + workflowOptionsJson, + convertNestedScatterToSubworkflow, + importResolvers, + languageFactories, + workflowDefinitionElementToWomWorkflowDefinition, + taskDefinitionElementToWomTaskDefinition + ) + ) andThen fileElementToWomBundle lazy val validationCallable = new Callable[ErrorOr[WomBundle]] { def call: ErrorOr[WomBundle] = converter .run(FileStringParserInput(workflowSource, workflowSourceOrigin.map(_.importPath).getOrElse("input.wdl"))) - .map(b => b.copyResolvedImportRecord(b, workflowSourceOrigin)).toValidated + .map(b => b.copyResolvedImportRecord(b, workflowSourceOrigin)) + .toValidated } - lazy val parserCacheInputs = 
ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), None, importResolvers) + lazy val parserCacheInputs = + ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), None, importResolvers) for { _ <- enabledCheck @@ -75,13 +95,16 @@ class WdlCascadesLanguageFactory(override val config: Config) extends LanguageFa } yield womBundle } - override def createExecutable(womBundle: WomBundle, inputsJson: WorkflowJson, ioFunctions: IoFunctionSet): Checked[ValidatedWomNamespace] = { + override def createExecutable(womBundle: WomBundle, + inputsJson: WorkflowJson, + ioFunctions: IoFunctionSet + ): Checked[ValidatedWomNamespace] = for { _ <- enabledCheck executable <- womBundle.toWomExecutable(Option(inputsJson), ioFunctions, strictValidation) validated <- LanguageFactoryUtil.validateWomNamespace(executable, ioFunctions) } yield validated - } - override def looksParsable(content: String): Boolean = LanguageFactoryUtil.simpleLooksParseable(List("version development"), List("#"))(content) + override def looksParsable(content: String): Boolean = + LanguageFactoryUtil.simpleLooksParseable(List("version development"), List("#"))(content) } diff --git a/languageFactories/wdl-draft2/src/main/scala/languages/wdl/draft2/WdlDraft2LanguageFactory.scala b/languageFactories/wdl-draft2/src/main/scala/languages/wdl/draft2/WdlDraft2LanguageFactory.scala index a2cbc5e90f4..a1b4927517a 100644 --- a/languageFactories/wdl-draft2/src/main/scala/languages/wdl/draft2/WdlDraft2LanguageFactory.scala +++ b/languageFactories/wdl-draft2/src/main/scala/languages/wdl/draft2/WdlDraft2LanguageFactory.scala @@ -24,7 +24,7 @@ import languages.wdl.draft2.WdlDraft2LanguageFactory._ import mouse.all._ import net.ceedubs.ficus.Ficus._ import wdl.draft2.Draft2ResolvedImportBundle -import wdl.draft2.model.{Draft2ImportResolver, WdlNamespace, WdlNamespaceWithWorkflow, WdlNamespaceWithoutWorkflow} +import wdl.draft2.model.{Draft2ImportResolver, WdlNamespace, WdlNamespaceWithoutWorkflow, WdlNamespaceWithWorkflow} import wdl.shared.transforms.wdlom2wom.WdlSharedInputParsing import wdl.transforms.draft2.wdlom2wom.WdlDraft2WomBundleMakers._ import wdl.transforms.draft2.wdlom2wom.WdlDraft2WomExecutableMakers._ @@ -40,7 +40,10 @@ import wom.values._ import scala.concurrent.duration._ import scala.language.postfixOps -class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFactory with ParserCache[WdlNamespace] with StrictLogging { +class WdlDraft2LanguageFactory(override val config: Config) + extends LanguageFactory + with ParserCache[WdlNamespace] + with StrictLogging { override val languageName: String = "WDL" override val languageVersionName: String = "draft-2" @@ -51,19 +54,22 @@ class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFact importLocalFilesystem: Boolean, workflowIdForLogging: WorkflowId, ioFunctions: IoFunctionSet, - importResolvers: List[ImportResolver]): IOChecked[ValidatedWomNamespace] = { + importResolvers: List[ImportResolver] + ): IOChecked[ValidatedWomNamespace] = { def checkTypes(namespace: WdlNamespace, inputs: Map[OutputPort, WomValue]): Checked[Unit] = namespace match { case namespaceWithWorkflow: WdlNamespaceWithWorkflow => - val allDeclarations = namespaceWithWorkflow.workflow.declarations ++ namespaceWithWorkflow.workflow.calls.flatMap(_.declarations) - val list: List[Checked[Unit]] = inputs.map({ case (k, v) => + val allDeclarations = + namespaceWithWorkflow.workflow.declarations ++ 
namespaceWithWorkflow.workflow.calls.flatMap(_.declarations) + val list: List[Checked[Unit]] = inputs.map { case (k, v) => allDeclarations.find(_.fullyQualifiedName == k) match { case Some(decl) if decl.womType.coerceRawValue(v).isFailure => - s"Invalid right-side type of '$k'. Expecting ${decl.womType.stableName}, got ${v.womType.stableName}".invalidNelCheck[Unit] + s"Invalid right-side type of '$k'. Expecting ${decl.womType.stableName}, got ${v.womType.stableName}" + .invalidNelCheck[Unit] case _ => ().validNelCheck } - }).toList + }.toList list.sequence[Checked, Unit].void @@ -73,14 +79,16 @@ class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFact } def validationCallable = new Callable[ErrorOr[WdlNamespace]] { - def call: ErrorOr[WdlNamespace] = WdlNamespaceWithWorkflow.load(workflowSource, importResolvers map resolverConverter).toErrorOr + def call: ErrorOr[WdlNamespace] = + WdlNamespaceWithWorkflow.load(workflowSource, importResolvers map resolverConverter).toErrorOr } - lazy val wdlNamespaceValidation: ErrorOr[WdlNamespace] = retrieveOrCalculate(ParserCacheInputs(Option(workflowSource), None, None, importResolvers), validationCallable) + lazy val wdlNamespaceValidation: ErrorOr[WdlNamespace] = + retrieveOrCalculate(ParserCacheInputs(Option(workflowSource), None, None, importResolvers), validationCallable) def evaluateImports(wdlNamespace: WdlNamespace): Map[String, String] = { // Descend the namespace looking for imports and construct `MetadataEvent`s for them. - def collectImportEvents: Map[String, String] = { + def collectImportEvents: Map[String, String] = (wdlNamespace.allNamespacesRecursively flatMap { ns => ns.importUri.toList collect { // Do not publish import events for URIs which correspond to literal strings as these are the top-level @@ -88,7 +96,6 @@ class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFact case uri if uri != WdlNamespace.WorkflowResourceString => uri -> ns.sourceString } }).toMap - } collectImportEvents } @@ -108,7 +115,8 @@ class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFact private def validateWorkflowNameLengths(namespace: WdlNamespace): Checked[Unit] = { import common.validation.Checked._ - def allWorkflowNames(n: WdlNamespace): Seq[String] = n.workflows.map(_.unqualifiedName) ++ n.namespaces.flatMap(allWorkflowNames) + def allWorkflowNames(n: WdlNamespace): Seq[String] = + n.workflows.map(_.unqualifiedName) ++ n.namespaces.flatMap(allWorkflowNames) val tooLong = allWorkflowNames(namespace).filter(_.length >= 100) if (tooLong.nonEmpty) { ("Workflow names must be shorter than 100 characters: " + tooLong.mkString(" ")).invalidNelCheck @@ -122,12 +130,15 @@ class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFact workflowOptionsJson: WorkflowOptionsJson, importResolvers: List[ImportResolver], languageFactories: List[LanguageFactory], - convertNestedScatterToSubworkflow : Boolean = true): Checked[WomBundle] = { + convertNestedScatterToSubworkflow: Boolean = true + ): Checked[WomBundle] = { lazy val validationCallable = new Callable[ErrorOr[WdlNamespace]] { - def call: ErrorOr[WdlNamespace] = WdlNamespace.loadUsingSource(workflowSource, None, Some(importResolvers map resolverConverter)).toErrorOr + def call: ErrorOr[WdlNamespace] = + WdlNamespace.loadUsingSource(workflowSource, None, Some(importResolvers map resolverConverter)).toErrorOr } - lazy val parserCacheInputs = ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), 
None, importResolvers) + lazy val parserCacheInputs = + ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), None, importResolvers) for { _ <- enabledCheck @@ -136,7 +147,10 @@ class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFact } yield womBundle.copyResolvedImportRecord(womBundle, workflowSourceOrigin) } - override def createExecutable(womBundle: WomBundle, inputs: WorkflowJson, ioFunctions: IoFunctionSet): Checked[ValidatedWomNamespace] = for { + override def createExecutable(womBundle: WomBundle, + inputs: WorkflowJson, + ioFunctions: IoFunctionSet + ): Checked[ValidatedWomNamespace] = for { _ <- enabledCheck executable <- WdlSharedInputParsing.buildWomExecutable(womBundle, Option(inputs), ioFunctions, strictValidation) validatedNamespace <- LanguageFactoryUtil.validateWomNamespace(executable, ioFunctions) @@ -145,22 +159,25 @@ class WdlDraft2LanguageFactory(override val config: Config) extends LanguageFact // Commentary: we'll set this as the default in the reference.conf, so most people will get WDL draft 2 if nothing else looks parsable. override def looksParsable(content: String): Boolean = false - private[draft2] lazy val cacheConfig: Option[CacheConfig] = { + private[draft2] lazy val cacheConfig: Option[CacheConfig] = // WDL version 2 namespace caching is now opt-in. for { _ <- enabled.option(()) caching <- config.as[Option[Config]]("caching") cc <- CacheConfig.optionalConfig(caching, defaultConcurrency = 2, defaultSize = 1000L, defaultTtl = 20 minutes) } yield cc - } } object WdlDraft2LanguageFactory { - private def resolverConverter(importResolver: ImportResolver): Draft2ImportResolver = str => importResolver.resolver.run(ImportResolutionRequest(str, List.empty)) match { - case Right(imported) => Draft2ResolvedImportBundle(imported.source, imported.resolvedImportRecord) - case Left(errors) => throw new RuntimeException(s"Bad import $str: ${errors.toList.mkString(System.lineSeparator)}") - } + private def resolverConverter(importResolver: ImportResolver): Draft2ImportResolver = str => + importResolver.resolver.run(ImportResolutionRequest(str, List.empty)) match { + case Right(imported) => Draft2ResolvedImportBundle(imported.source, imported.resolvedImportRecord) + case Left(errors) => + throw new RuntimeException(s"Bad import $str: ${errors.toList.mkString(System.lineSeparator)}") + } val httpResolver = resolverConverter(ImportResolver.HttpResolver()) - def httpResolverWithHeaders(headers: Map[String, String]) = resolverConverter(ImportResolver.HttpResolver(headers = headers)) + def httpResolverWithHeaders(headers: Map[String, String]) = resolverConverter( + ImportResolver.HttpResolver(headers = headers) + ) } diff --git a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/ArrayCoercionsSpec.scala b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/ArrayCoercionsSpec.scala index 08cd91f3432..a44f34f5af1 100644 --- a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/ArrayCoercionsSpec.scala +++ b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/ArrayCoercionsSpec.scala @@ -14,22 +14,25 @@ import wom.expression.EmptyIoFunctionSet import wom.types.{WomArrayType, WomSingleFileType, WomStringType} import wom.values.{WomArray, WomSingleFile, WomString} - class ArrayCoercionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { var factory: WdlDraft2LanguageFactory = new WdlDraft2LanguageFactory(ConfigFactory.parseString(ConfigString)) - val 
arrayLiteralNamespace: WdlNamespaceWithWorkflow = WdlNamespaceWithWorkflow.load(ArrayDeclarationWorkflow, List.empty).get + val arrayLiteralNamespace: WdlNamespaceWithWorkflow = + WdlNamespaceWithWorkflow.load(ArrayDeclarationWorkflow, List.empty).get "A static Array[File] declaration" should "be a valid declaration" in { - val declaration = arrayLiteralNamespace.workflow.declarations.find {_.unqualifiedName == "arr"}.getOrElse { + val declaration = arrayLiteralNamespace.workflow.declarations.find(_.unqualifiedName == "arr").getOrElse { fail("Expected declaration 'arr' to be found") } val expression = declaration.expression.getOrElse { fail("Expected an expression for declaration 'arr'") } - expression.evaluate((_: String) => fail("No lookups"), NoFunctions).toChecked.shouldBeValid( - WomArray(WomArrayType(WomStringType), Seq(WomString("f1"), WomString("f2"), WomString("f3"))) - ) + expression + .evaluate((_: String) => fail("No lookups"), NoFunctions) + .toChecked + .shouldBeValid( + WomArray(WomArrayType(WomStringType), Seq(WomString("f1"), WomString("f2"), WomString("f3"))) + ) } "An Array[File]" should "be usable as an input" in { @@ -40,9 +43,10 @@ class ArrayCoercionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match val catTask = arrayLiteralNamespace.findTask("cat").getOrElse { fail("Expected to find task 'cat'") } - val command = catTask.instantiateCommand(catTask.inputsFromMap(Map("cat.files" -> expectedArray)), NoFunctions).getOrElse { - fail("Expected instantiation to work") - } + val command = + catTask.instantiateCommand(catTask.inputsFromMap(Map("cat.files" -> expectedArray)), NoFunctions).getOrElse { + fail("Expected instantiation to work") + } command.head.commandString shouldEqual "cat -s f1 f2 f3" } diff --git a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/MapWorkflowSpec.scala b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/MapWorkflowSpec.scala index 981ad1380c4..dbba026fcbb 100644 --- a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/MapWorkflowSpec.scala +++ b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/MapWorkflowSpec.scala @@ -11,21 +11,25 @@ import wom.values.{WomMap, WomSingleFile, WomString, WomValue} import scala.util.{Success, Try} - class MapWorkflowSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val namespace = WdlNamespaceWithWorkflow.load(WorkflowSource, Seq.empty[Draft2ImportResolver]).get - val expectedMap = WomMap(WomMapType(WomSingleFileType, WomStringType), Map( - WomSingleFile("f1") -> WomString("alice"), - WomSingleFile("f2") -> WomString("bob"), - WomSingleFile("f3") -> WomString("chuck") - )) + val expectedMap = WomMap( + WomMapType(WomSingleFileType, WomStringType), + Map( + WomSingleFile("f1") -> WomString("alice"), + WomSingleFile("f2") -> WomString("bob"), + WomSingleFile("f3") -> WomString("chuck") + ) + ) "A static Map[File, String] declaration" should "be a valid declaration" in { - val declaration = namespace.workflow.declarations.find { - _.unqualifiedName == "map" - }.getOrElse { - fail("Expected declaration 'map' to be found") - } + val declaration = namespace.workflow.declarations + .find { + _.unqualifiedName == "map" + } + .getOrElse { + fail("Expected declaration 'map' to be found") + } val expression = declaration.expression.getOrElse { fail("Expected an expression for declaration 'map'") } @@ -47,9 +51,11 @@ class MapWorkflowSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers case _ => throw new 
UnsupportedOperationException("Only write_map should be called") } } - val command = writeMapTask.instantiateCommand(writeMapTask.inputsFromMap(Map("file_to_name" -> expectedMap)), new CannedFunctions).getOrElse { - fail("Expected instantiation to work") - } + val command = writeMapTask + .instantiateCommand(writeMapTask.inputsFromMap(Map("file_to_name" -> expectedMap)), new CannedFunctions) + .getOrElse { + fail("Expected instantiation to work") + } command.head.commandString shouldEqual "cat /test/map/path" } } diff --git a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala index b96cfa5c2f6..eab4163410d 100644 --- a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala +++ b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala @@ -69,14 +69,18 @@ class NamespaceCacheSpec extends AnyFlatSpec with CromwellTimeoutSpec with Befor } def validate = { - val futureNamespace = factory.validateNamespace( - source = collection, - workflowSource = ThreeStep, - workflowOptions = WorkflowOptions(new spray.json.JsObject(Map.empty)), - importLocalFilesystem = false, - workflowIdForLogging = WorkflowId.randomId(), - ioFunctions = NoIoFunctionSet, - importResolvers = List(countingResolver)).value.unsafeToFuture() + val futureNamespace = factory + .validateNamespace( + source = collection, + workflowSource = ThreeStep, + workflowOptions = WorkflowOptions(new spray.json.JsObject(Map.empty)), + importLocalFilesystem = false, + workflowIdForLogging = WorkflowId.randomId(), + ioFunctions = NoIoFunctionSet, + importResolvers = List(countingResolver) + ) + .value + .unsafeToFuture() Await.result(futureNamespace, Duration.Inf).toOption.get } diff --git a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/WdlWorkflowHttpImportSpec.scala b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/WdlWorkflowHttpImportSpec.scala index a31df438f3a..1d1d96c07e8 100644 --- a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/WdlWorkflowHttpImportSpec.scala +++ b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/WdlWorkflowHttpImportSpec.scala @@ -11,8 +11,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import wdl.draft2.model._ - -class WdlWorkflowHttpImportSpec extends AnyFlatSpec with CromwellTimeoutSpec with BeforeAndAfterAll with Matchers { +class WdlWorkflowHttpImportSpec extends AnyFlatSpec with CromwellTimeoutSpec with BeforeAndAfterAll with Matchers { val tinyImport = s""" |task hello { @@ -27,7 +26,7 @@ class WdlWorkflowHttpImportSpec extends AnyFlatSpec with CromwellTimeoutSpec wit | """.stripMargin - def tinyWorkflow(imp:String) = + def tinyWorkflow(imp: String) = s""" | import "$imp" as imp | @@ -45,29 +44,35 @@ class WdlWorkflowHttpImportSpec extends AnyFlatSpec with CromwellTimeoutSpec wit mockServer = startClientAndServer(PortFactory.findFreePort()) host = "http://localhost:" + mockServer.getLocalPort - mockServer.when( - request().withPath("/hello.wdl") - ).respond( - response().withStatusCode(200).withBody(tinyImport) - ) - - mockServer.when( - request().withPath("/protected.wdl").withHeader("Authorization", "Bearer my-token-value") - ). 
- respond( - response().withStatusCode(200).withBody(tinyImport) - ) - - mockServer.when( - request().withPath("/redirect.wdl") - ).respond( - response().withStatusCode(301).withHeader("Location","/hello.wdl")) - - mockServer.when( - request().withPath("/none.wdl") - ).respond( - response().withStatusCode(404) - ) + mockServer + .when( + request().withPath("/hello.wdl") + ) + .respond( + response().withStatusCode(200).withBody(tinyImport) + ) + + mockServer + .when( + request().withPath("/protected.wdl").withHeader("Authorization", "Bearer my-token-value") + ) + .respond( + response().withStatusCode(200).withBody(tinyImport) + ) + + mockServer + .when( + request().withPath("/redirect.wdl") + ) + .respond(response().withStatusCode(301).withHeader("Location", "/hello.wdl")) + + mockServer + .when( + request().withPath("/none.wdl") + ) + .respond( + response().withStatusCode(404) + ) () // explicitly return unit } @@ -84,28 +89,28 @@ class WdlWorkflowHttpImportSpec extends AnyFlatSpec with CromwellTimeoutSpec wit } it should "resolve an http URL" in { - val wf = tinyWorkflow( s"$host/hello.wdl") + val wf = tinyWorkflow(s"$host/hello.wdl") val ns = WdlNamespaceWithWorkflow.load(wf, httpResolver) ns.isFailure shouldBe false } it should "fail with a 404" in { - val wf = tinyWorkflow( s"$host/none.wdl") + val wf = tinyWorkflow(s"$host/none.wdl") val ns = WdlNamespaceWithWorkflow.load(wf, httpResolver) ns.isFailure shouldBe true } it should "follow a redirect" in { - val wf = tinyWorkflow( s"$host/redirect.wdl") + val wf = tinyWorkflow(s"$host/redirect.wdl") val ns = WdlNamespaceWithWorkflow.load(wf, httpResolver) ns.isFailure shouldBe false } it should "be able to supply a bearer token to a protected resource" in { val auth = Map("Authorization" -> "Bearer my-token-value") - val authHttpResolver : Seq[Draft2ImportResolver] = Seq(WdlDraft2LanguageFactory.httpResolverWithHeaders(auth)) + val authHttpResolver: Seq[Draft2ImportResolver] = Seq(WdlDraft2LanguageFactory.httpResolverWithHeaders(auth)) - val wf = tinyWorkflow( s"$host/protected.wdl") + val wf = tinyWorkflow(s"$host/protected.wdl") val ns = WdlNamespaceWithWorkflow.load(wf, authHttpResolver) ns.isFailure shouldBe false } diff --git a/languageFactories/wdl-draft3/src/main/scala/languages/wdl/draft3/WdlDraft3LanguageFactory.scala b/languageFactories/wdl-draft3/src/main/scala/languages/wdl/draft3/WdlDraft3LanguageFactory.scala index 56337b0010e..d6eb4ab056d 100644 --- a/languageFactories/wdl-draft3/src/main/scala/languages/wdl/draft3/WdlDraft3LanguageFactory.scala +++ b/languageFactories/wdl-draft3/src/main/scala/languages/wdl/draft3/WdlDraft3LanguageFactory.scala @@ -32,40 +32,57 @@ class WdlDraft3LanguageFactory(override val config: Config) extends LanguageFact override val languageName: String = "WDL" override val languageVersionName: String = "1.0" - override def validateNamespace(source: WorkflowSourceFilesCollection, workflowSource: WorkflowSource, workflowOptions: WorkflowOptions, importLocalFilesystem: Boolean, workflowIdForLogging: WorkflowId, ioFunctions: IoFunctionSet, - importResolvers: List[ImportResolver]): IOChecked[ValidatedWomNamespace] = { + importResolvers: List[ImportResolver] + ): IOChecked[ValidatedWomNamespace] = { val factories: List[LanguageFactory] = List(this) val checked: Checked[ValidatedWomNamespace] = for { _ <- enabledCheck - bundle <- getWomBundle(workflowSource, workflowSourceOrigin = None, source.workflowOptions.asPrettyJson, importResolvers, factories) + bundle <- getWomBundle(workflowSource, + 
workflowSourceOrigin = None, + source.workflowOptions.asPrettyJson, + importResolvers, + factories + ) executable <- createExecutable(bundle, source.inputsJson, ioFunctions) } yield executable fromEither[IO](checked) } - // The only reason this isn't a sub-def inside 'getWomBundle' is that it gets overridden in test cases: protected def makeWomBundle(workflowSource: WorkflowSource, - workflowSourceOrigin: Option[ResolvedImportRecord], - workflowOptionsJson: WorkflowOptionsJson, - importResolvers: List[ImportResolver], - languageFactories: List[LanguageFactory], - convertNestedScatterToSubworkflow : Boolean = true): ErrorOr[WomBundle] = { - - val converter: CheckedAtoB[FileStringParserInput, WomBundle] = stringToAst andThen wrapAst andThen astToFileElement.map(FileElementToWomBundleInputs(_, workflowOptionsJson, convertNestedScatterToSubworkflow, importResolvers, languageFactories, workflowDefinitionElementToWomWorkflowDefinition, taskDefinitionElementToWomTaskDefinition)) andThen fileElementToWomBundle + workflowSourceOrigin: Option[ResolvedImportRecord], + workflowOptionsJson: WorkflowOptionsJson, + importResolvers: List[ImportResolver], + languageFactories: List[LanguageFactory], + convertNestedScatterToSubworkflow: Boolean = true + ): ErrorOr[WomBundle] = { + + val converter: CheckedAtoB[FileStringParserInput, WomBundle] = + stringToAst andThen wrapAst andThen astToFileElement.map( + FileElementToWomBundleInputs( + _, + workflowOptionsJson, + convertNestedScatterToSubworkflow, + importResolvers, + languageFactories, + workflowDefinitionElementToWomWorkflowDefinition, + taskDefinitionElementToWomTaskDefinition + ) + ) andThen fileElementToWomBundle converter .run(FileStringParserInput(workflowSource, workflowSourceOrigin.map(_.importPath).getOrElse("input.wdl"))) - .map(b => b.copyResolvedImportRecord(b, workflowSourceOrigin)).toValidated + .map(b => b.copyResolvedImportRecord(b, workflowSourceOrigin)) + .toValidated } override def getWomBundle(workflowSource: WorkflowSource, @@ -73,13 +90,21 @@ class WdlDraft3LanguageFactory(override val config: Config) extends LanguageFact workflowOptionsJson: WorkflowOptionsJson, importResolvers: List[ImportResolver], languageFactories: List[LanguageFactory], - convertNestedScatterToSubworkflow : Boolean = true): Checked[WomBundle] = { + convertNestedScatterToSubworkflow: Boolean = true + ): Checked[WomBundle] = { lazy val validationCallable = new Callable[ErrorOr[WomBundle]] { - def call: ErrorOr[WomBundle] = makeWomBundle(workflowSource, workflowSourceOrigin, workflowOptionsJson, importResolvers, languageFactories, convertNestedScatterToSubworkflow) + def call: ErrorOr[WomBundle] = makeWomBundle(workflowSource, + workflowSourceOrigin, + workflowOptionsJson, + importResolvers, + languageFactories, + convertNestedScatterToSubworkflow + ) } - lazy val parserCacheInputs = ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), None, importResolvers) + lazy val parserCacheInputs = + ParserCacheInputs(Option(workflowSource), workflowSourceOrigin.map(_.importPath), None, importResolvers) for { _ <- enabledCheck @@ -87,13 +112,16 @@ class WdlDraft3LanguageFactory(override val config: Config) extends LanguageFact } yield womBundle } - override def createExecutable(womBundle: WomBundle, inputsJson: WorkflowJson, ioFunctions: IoFunctionSet): Checked[ValidatedWomNamespace] = { + override def createExecutable(womBundle: WomBundle, + inputsJson: WorkflowJson, + ioFunctions: IoFunctionSet + ): Checked[ValidatedWomNamespace] = for { _ <- 
enabledCheck executable <- womBundle.toWomExecutable(Option(inputsJson), ioFunctions, strictValidation) validated <- LanguageFactoryUtil.validateWomNamespace(executable, ioFunctions) } yield validated - } - override def looksParsable(content: String): Boolean = LanguageFactoryUtil.simpleLooksParseable(List("version 1.0"), List("#"))(content) + override def looksParsable(content: String): Boolean = + LanguageFactoryUtil.simpleLooksParseable(List("version 1.0"), List("#"))(content) } diff --git a/languageFactories/wdl-draft3/src/test/scala/languages/wdl/draft3/WdlDraft3CachingSpec.scala b/languageFactories/wdl-draft3/src/test/scala/languages/wdl/draft3/WdlDraft3CachingSpec.scala index bca467e89c6..9ed41458025 100644 --- a/languageFactories/wdl-draft3/src/test/scala/languages/wdl/draft3/WdlDraft3CachingSpec.scala +++ b/languageFactories/wdl-draft3/src/test/scala/languages/wdl/draft3/WdlDraft3CachingSpec.scala @@ -14,7 +14,6 @@ import wom.core.{WorkflowOptionsJson, WorkflowSource} import wom.executable.WomBundle import wom.expression.NoIoFunctionSet - class WdlDraft3CachingSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val languageConfig = ConfigFactory.parseString( @@ -29,11 +28,8 @@ class WdlDraft3CachingSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat | } |} |""".stripMargin - ) - - it should "only evaluate files once" in { val invalidWorkflowSource = """ @@ -124,9 +120,21 @@ object WdlDraft3CachingSpec { var evaluationCount = 0 - override protected def makeWomBundle(workflowSource: WorkflowSource, workflowSourceOrigin: Option[ResolvedImportRecord], workflowOptionsJson: WorkflowOptionsJson, importResolvers: List[ImportResolver.ImportResolver], languageFactories: List[LanguageFactory], convertNestedScatterToSubworkflow: Boolean): ErrorOr[WomBundle] = { + override protected def makeWomBundle(workflowSource: WorkflowSource, + workflowSourceOrigin: Option[ResolvedImportRecord], + workflowOptionsJson: WorkflowOptionsJson, + importResolvers: List[ImportResolver.ImportResolver], + languageFactories: List[LanguageFactory], + convertNestedScatterToSubworkflow: Boolean + ): ErrorOr[WomBundle] = { evaluationCount = evaluationCount + 1 - super.makeWomBundle(workflowSource, workflowSourceOrigin, workflowOptionsJson, importResolvers, languageFactories, convertNestedScatterToSubworkflow) + super.makeWomBundle(workflowSource, + workflowSourceOrigin, + workflowOptionsJson, + importResolvers, + languageFactories, + convertNestedScatterToSubworkflow + ) } } diff --git a/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClient.scala b/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClient.scala index 3fa11515118..9590b9f405f 100644 --- a/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClient.scala +++ b/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClient.scala @@ -57,19 +57,19 @@ class DrsHubClientImpl[F[_]: Concurrent](client: Client[F], baseUrl: Uri) extend "localizationPath", "bondProvider" )(ResourceMetadata.apply) - implicit val resourceMetadataRequestEncoder: Encoder[ResourceMetadataRequest] = Encoder.forProduct2("url", "fields")(x => - (x.url, x.fields) - ) - implicit val resourceMetadataRequestEntityEncoder: EntityEncoder[F, ResourceMetadataRequest] = circeEntityEncoder[F, ResourceMetadataRequest] + implicit val resourceMetadataRequestEncoder: Encoder[ResourceMetadataRequest] = + Encoder.forProduct2("url", "fields")(x => (x.url, x.fields)) + 
implicit val resourceMetadataRequestEntityEncoder: EntityEncoder[F, ResourceMetadataRequest] = + circeEntityEncoder[F, ResourceMetadataRequest] override def fetchSystemStatus(): F[Boolean] = { val request = Request[F](uri = baseUrl / "status").withHeaders( org.http4s.headers.Accept(MediaType.application.json) ) client.run(request).use { resp => resp.status match { - case Status.Ok => true.pure[F] + case Status.Ok => true.pure[F] case Status.InternalServerError => false.pure[F] - case _ => UnknownError.raiseError + case _ => UnknownError.raiseError } } } @@ -77,17 +77,17 @@ class DrsHubClientImpl[F[_]: Concurrent](client: Client[F], baseUrl: Uri) extend override def resolveDrsObject(drsPath: String, fields: List[String]): F[ResourceMetadata] = { val body = ResourceMetadataRequest(url = drsPath, fields = fields) val entityBody: EntityBody[F] = EntityEncoder[F, ResourceMetadataRequest].toEntity(body).body - val request = Request[F](uri = baseUrl / "api" / apiVersion / "drs" / "resolve", method=Method.POST, body=entityBody).withHeaders( - org.http4s.headers.`Content-Type`(MediaType.application.json) - ) + val request = + Request[F](uri = baseUrl / "api" / apiVersion / "drs" / "resolve", method = Method.POST, body = entityBody) + .withHeaders( + org.http4s.headers.`Content-Type`(MediaType.application.json) + ) client.run(request).use { resp => resp.status match { - case Status.Ok => resp.as[ResourceMetadata] - case _ => UnknownError.raiseError + case Status.Ok => resp.as[ResourceMetadata] + case _ => UnknownError.raiseError } } } } - - diff --git a/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/Helper.scala b/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/Helper.scala index ceeefc7479f..2a63324247a 100644 --- a/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/Helper.scala +++ b/pact4s/src/main/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/Helper.scala @@ -14,7 +14,7 @@ object PactHelper { status: Int, responseHeaders: Seq[(String, String)], body: DslPart - ): PactDslResponse = + ): PactDslResponse = builder .`given`(state) .uponReceiving(uponReceiving) @@ -33,8 +33,8 @@ object PactHelper { path: String, requestHeaders: Seq[(String, String)], status: Int, - responseHeaders: Seq[(String, String)], - ): PactDslResponse = + responseHeaders: Seq[(String, String)] + ): PactDslResponse = builder .`given`(state) .uponReceiving(uponReceiving) @@ -52,7 +52,7 @@ object PactHelper { path: String, requestHeaders: Seq[(String, String)], status: Int - ): PactDslResponse = + ): PactDslResponse = builder .`given`(state) .uponReceiving(uponReceiving) @@ -68,7 +68,7 @@ object PactHelper { path: String, requestHeaders: Seq[(String, String)], status: Int - ): PactDslResponse = + ): PactDslResponse = builder .uponReceiving(uponReceiving) .method(method) @@ -86,7 +86,7 @@ object PactHelper { status: Int, responseHeaders: Seq[(String, String)], body: DslPart - ): PactDslResponse = + ): PactDslResponse = builder .`given`(state) .uponReceiving(uponReceiving) @@ -109,7 +109,7 @@ object PactHelper { status: Int, responseHeaders: Seq[(String, String)], responsBody: DslPart - ): PactDslResponse = + ): PactDslResponse = builder .`given`(state, scala.jdk.CollectionConverters.MapHasAsJava(stateParams).asJava) .uponReceiving(uponReceiving) @@ -132,7 +132,7 @@ object PactHelper { status: Int, responseHeaders: Seq[(String, String)], body: DslPart - ): PactDslResponse = + ): PactDslResponse = builder .`given`(state, 
scala.jdk.CollectionConverters.MapHasAsJava(stateParams).asJava)
       .uponReceiving(uponReceiving)
@@ -154,7 +154,7 @@ object PactHelper {
     status: Int,
     responseHeaders: Seq[(String, String)],
     body: A
-  )(implicit ev: PactBodyJsonEncoder[A]): PactDslResponse =
+  )(implicit ev: PactBodyJsonEncoder[A]): PactDslResponse =
     builder
       .`given`(state, scala.jdk.CollectionConverters.MapHasAsJava(stateParams).asJava)
       .uponReceiving(uponReceiving)
@@ -174,7 +174,7 @@ object PactHelper {
     requestHeaders: Seq[(String, String)],
     requestBody: A,
     status: Int
-  )(implicit ev: PactBodyJsonEncoder[A]): PactDslResponse =
+  )(implicit ev: PactBodyJsonEncoder[A]): PactDslResponse =
     builder
       .`given`(state)
       .uponReceiving(uponReceiving)
@@ -194,7 +194,7 @@ object PactHelper {
     requestHeaders: Seq[(String, String)],
     requestBody: A,
     status: Int
-  )(implicit ev: PactBodyJsonEncoder[A]): PactDslResponse =
+  )(implicit ev: PactBodyJsonEncoder[A]): PactDslResponse =
     builder
       .`given`(state, scala.jdk.CollectionConverters.MapHasAsJava(stateParams).asJava)
       .uponReceiving(uponReceiving)
diff --git a/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/BlobFileSystemContractSpec.scala b/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/BlobFileSystemContractSpec.scala
index f023257fef9..0e003b5d3fe 100644
--- a/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/BlobFileSystemContractSpec.scala
+++ b/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/BlobFileSystemContractSpec.scala
@@ -13,6 +13,7 @@ class BlobFileSystemContractSpec extends AnyFlatSpec with Matchers with RequestR
   val resourceId = "";
   val workspaceId = "";
+
   /**
    * we can define the folder that the pact contracts get written to upon completion of this test suite.
*/ @@ -27,24 +28,27 @@ class BlobFileSystemContractSpec extends AnyFlatSpec with Matchers with RequestR * scala tests on build and if the tests pass when run a pact file will be generated locally */ override def pact: RequestResponsePact = ConsumerPactBuilder - .consumer("cromwell-blob-filesystem-consumer") - .hasPactWith("wsm-provider") - .`given`( - "resource exists", - Map("id" -> resourceId.asJson, "value" -> 123.asJson) // we can use parameters to specify details about the provider state - ) - .`given`( - "workspace exists", - Map("id" -> workspaceId, "value" -> 123) // we can use parameters to specify details about the provider state - ) - .uponReceiving("Request to fetch SAS Token") - .method("POST") - .path(s"/api/workspaces/v1/${workspaceId}/resources/controlled/azure/storageContainer/${resourceId}/getSasToken") - .headers("Authorization" -> "sampleToken") - .willRespondWith() - .status(200) - .body( - Json.obj("id" -> "".asJson, "value" -> 123.asJson) - ).toPact() + .consumer("cromwell-blob-filesystem-consumer") + .hasPactWith("wsm-provider") + .`given`( + "resource exists", + Map("id" -> resourceId.asJson, + "value" -> 123.asJson + ) // we can use parameters to specify details about the provider state + ) + .`given`( + "workspace exists", + Map("id" -> workspaceId, "value" -> 123) // we can use parameters to specify details about the provider state + ) + .uponReceiving("Request to fetch SAS Token") + .method("POST") + .path(s"/api/workspaces/v1/${workspaceId}/resources/controlled/azure/storageContainer/${resourceId}/getSasToken") + .headers("Authorization" -> "sampleToken") + .willRespondWith() + .status(200) + .body( + Json.obj("id" -> "".asJson, "value" -> 123.asJson) + ) + .toPact() } diff --git a/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClientSpec.scala b/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClientSpec.scala index a4ebe56d76a..ead29c29d4f 100644 --- a/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClientSpec.scala +++ b/pact4s/src/test/scala/org/broadinstitute/dsde/workbench/cromwell/consumer/DrsHubClientSpec.scala @@ -28,19 +28,19 @@ class DrsHubClientSpec extends AnyFlatSpec with Matchers with RequestResponsePac ) private val requestFields = List( - "bucket", - "accessUrl", - "googleServiceAccount", - "fileName", - "hashes", - "localizationPath", - "bondProvider", - "name", - "size", - "timeCreated", - "timeUpdated", - "gsUri", - "contentType", + "bucket", + "accessUrl", + "googleServiceAccount", + "fileName", + "hashes", + "localizationPath", + "bondProvider", + "name", + "size", + "timeCreated", + "timeUpdated", + "gsUri", + "contentType" ) val filesize = 123L @@ -51,7 +51,6 @@ class DrsHubClientSpec extends AnyFlatSpec with Matchers with RequestResponsePac val fileHash = "a2317edbd2eb6cf6b0ee49cb81e3a556" val accessUrl = f"gs://${bucket}/${filename}" - val drsResourceResponsePlaceholder: ResourceMetadata = ResourceMetadata( "application/octet-stream", filesize, @@ -70,27 +69,33 @@ class DrsHubClientSpec extends AnyFlatSpec with Matchers with RequestResponsePac val resourceMetadataResponseDsl: DslPart = newJsonBody { o => o.stringType("contentType", "application/octet-stream") - o.numberType("size", filesize) - o.stringType("timeCreated", timeCreated) - o.stringType("timeUpdated", timeCreated) - o.nullValue("gsUri") - o.nullValue("googleServiceAccount") - o.nullValue("fileName") - o.`object`("accessUrl" , { a => - a.stringType("url", accessUrl) - 
a.`array`("headers", { h => - h.stringType("Header") - h.stringType("Example") - () - }) - () - }) - o.`object`("hashes", { o => - o.stringType("md5", fileHash) - () - }) - o.nullValue("localizationPath") - o.stringType("bondProvider", bondProvider) + o.numberType("size", filesize) + o.stringType("timeCreated", timeCreated) + o.stringType("timeUpdated", timeCreated) + o.nullValue("gsUri") + o.nullValue("googleServiceAccount") + o.nullValue("fileName") + o.`object`("accessUrl", + { a => + a.stringType("url", accessUrl) + a.`array`("headers", + { h => + h.stringType("Header") + h.stringType("Example") + () + } + ) + () + } + ) + o.`object`("hashes", + { o => + o.stringType("md5", fileHash) + () + } + ) + o.nullValue("localizationPath") + o.stringType("bondProvider", bondProvider) () }.build @@ -98,10 +103,12 @@ class DrsHubClientSpec extends AnyFlatSpec with Matchers with RequestResponsePac val resourceRequestDsl = newJsonBody { o => o.stringType("url", f"drs://test.theanvil.io/${fileId}") - o.array("fields", { a => - requestFields.map(a.stringType) - () - }) + o.array("fields", + { a => + requestFields.map(a.stringType) + () + } + ) () }.build @@ -147,9 +154,8 @@ class DrsHubClientSpec extends AnyFlatSpec with Matchers with RequestResponsePac override val pact: RequestResponsePact = pactDslResponse.toPact - val client: Client[IO] = { + val client: Client[IO] = BlazeClientBuilder[IO](ExecutionContext.global).resource.allocated.unsafeRunSync()._1 - } /* we should use these tests to ensure that our client class correctly handles responses from the provider - i.e. decoding, error mapping, validation diff --git a/perf/src/main/scala/cromwell/perf/Call.scala b/perf/src/main/scala/cromwell/perf/Call.scala index 3849cb4093d..b8e91589126 100644 --- a/perf/src/main/scala/cromwell/perf/Call.scala +++ b/perf/src/main/scala/cromwell/perf/Call.scala @@ -6,21 +6,21 @@ import io.circe.JsonObject case class CallCaching(hit: Option[Boolean], result: Option[String], - hitFailures: Option[Seq[Map[String, Seq[JsonObject]]]]) - - -case class ExecutionEvents(startTime: OffsetDateTime, - description: String, - endTime: OffsetDateTime) + hitFailures: Option[Seq[Map[String, Seq[JsonObject]]]] +) +case class ExecutionEvents(startTime: OffsetDateTime, description: String, endTime: OffsetDateTime) case class Call(shardIndex: Int, start: OffsetDateTime, end: OffsetDateTime, callCaching: Option[CallCaching], - executionEvents: Seq[ExecutionEvents]) { - val callCachingEventStates = List("CheckingCallCache", "FetchingCachedOutputsFromDatabase", "BackendIsCopyingCachedOutputs") - val jobPreparationEventStates = List("Pending", "RequestingExecutionToken", "WaitingForValueStore", "PreparingJob", "CheckingJobStore") + executionEvents: Seq[ExecutionEvents] +) { + val callCachingEventStates = + List("CheckingCallCache", "FetchingCachedOutputsFromDatabase", "BackendIsCopyingCachedOutputs") + val jobPreparationEventStates = + List("Pending", "RequestingExecutionToken", "WaitingForValueStore", "PreparingJob", "CheckingJobStore") val cacheCopyingEventStates = List("FetchingCachedOutputsFromDatabase", "BackendIsCopyingCachedOutputs") /** @@ -40,23 +40,24 @@ case class Call(shardIndex: Int, */ val timeInCheckingCallCacheState: Duration = { val eventsRelatedToCC = executionEvents.collect { - case event if event.description.equalsIgnoreCase("CheckingCallCache") => Duration.between(event.startTime, event.endTime) + case event if event.description.equalsIgnoreCase("CheckingCallCache") => + Duration.between(event.startTime, event.endTime) 
}
 
-    if(eventsRelatedToCC.nonEmpty) eventsRelatedToCC.reduce(_ plus _) else Duration.ZERO
+    if (eventsRelatedToCC.nonEmpty) eventsRelatedToCC.reduce(_ plus _) else Duration.ZERO
   }
 
   /**
    * @return Time (in Duration) job spent in Call Caching states
    */
   val timeInCallCachingState: Duration = {
-    val eventsRelatedToCC = executionEvents.filter(event => callCachingEventStates.exists(state => state.equalsIgnoreCase(event.description)))
+    val eventsRelatedToCC =
+      executionEvents.filter(event => callCachingEventStates.exists(state => state.equalsIgnoreCase(event.description)))
 
     if (eventsRelatedToCC.nonEmpty) {
       val eventsSortedByStartTime = eventsRelatedToCC.sortBy(_.startTime)
       Duration.between(eventsSortedByStartTime.head.startTime, eventsSortedByStartTime.last.endTime)
-    }
-    else Duration.ZERO
+    } else Duration.ZERO
   }
 
   /**
@@ -65,10 +66,12 @@ case class Call(shardIndex: Int,
    */
   val timeInJobPreparation: Duration = {
     val durationOfEventsInPreparationState = executionEvents.collect {
-      case event if jobPreparationEventStates.exists(_.equalsIgnoreCase(event.description)) => Duration.between(event.startTime, event.endTime)
+      case event if jobPreparationEventStates.exists(_.equalsIgnoreCase(event.description)) =>
+        Duration.between(event.startTime, event.endTime)
     }
 
-    if(durationOfEventsInPreparationState.nonEmpty) durationOfEventsInPreparationState.reduce(_ plus _) else Duration.ZERO
+    if (durationOfEventsInPreparationState.nonEmpty) durationOfEventsInPreparationState.reduce(_ plus _)
+    else Duration.ZERO
   }
 
   /**
@@ -77,9 +80,11 @@ case class Call(shardIndex: Int,
    */
   val timeForFetchingAndCopyingCacheHit: Duration = {
     val durationOfEventsInCacheCopyingState = executionEvents.collect {
-      case event if cacheCopyingEventStates.exists(_.equalsIgnoreCase(event.description)) => Duration.between(event.startTime, event.endTime)
+      case event if cacheCopyingEventStates.exists(_.equalsIgnoreCase(event.description)) =>
+        Duration.between(event.startTime, event.endTime)
     }
-    if(durationOfEventsInCacheCopyingState.nonEmpty) durationOfEventsInCacheCopyingState.reduce(_ plus _) else Duration.ZERO
+    if (durationOfEventsInCacheCopyingState.nonEmpty) durationOfEventsInCacheCopyingState.reduce(_ plus _)
+    else Duration.ZERO
   }
 }
diff --git a/perf/src/main/scala/cromwell/perf/CompareMetadata.scala b/perf/src/main/scala/cromwell/perf/CompareMetadata.scala
index 600bd38aba0..64e49d93807 100644
--- a/perf/src/main/scala/cromwell/perf/CompareMetadata.scala
+++ b/perf/src/main/scala/cromwell/perf/CompareMetadata.scala
@@ -15,7 +15,7 @@ import io.circe._
 import io.circe.generic.semiauto._
 import io.circe.parser._
 
-object CompareMetadata extends App with StrictLogging{
+object CompareMetadata extends App with StrictLogging {
   private val REGRESSION_CONST = 1.1
 
   // If removed, IntelliJ (Oct '18) thinks the import isn't used.
@@ -33,7 +33,6 @@ object CompareMetadata extends App with StrictLogging{ decode[Metadata](metadataFileContent) } - def parseMetadataFromGcsFile(gcsUrl: String, pathToServiceAccount: String): Either[Error, Metadata] = { val gcsUrlArray = gcsUrl.replace("gs://", "").split("/", 2) val Array(gcsBucket, fileToBeLocalized) = gcsUrlArray @@ -46,13 +45,10 @@ object CompareMetadata extends App with StrictLogging{ decode[Metadata](metadataFileContent) } - - def parseMetadata(inputFile: String, pathToServiceAccount: String): Either[Error, Metadata] = { + def parseMetadata(inputFile: String, pathToServiceAccount: String): Either[Error, Metadata] = if (inputFile.startsWith("gs://")) parseMetadataFromGcsFile(inputFile, pathToServiceAccount) else parseMetadataFromLocalFile(inputFile) - } - def displayComputedMetrics(metadata: Metadata, displayMsg: String): Unit = { logger.info(displayMsg) @@ -64,59 +60,65 @@ object CompareMetadata extends App with StrictLogging{ logger.info(s"Avg time job spent in Call Caching state: ${metadata.avgTimeInCallCachingState}") logger.info(s"Avg time job spent in just CheckingCallCache state: ${metadata.avgTimeInCheckingCallCacheState}") logger.info(s"Avg time job spent in Job Preparation state: ${metadata.avgTimeInJobPreparation}") - logger.info(s"Avg time job spent in fetching and copying cache hit(s) state: ${metadata.avgTimeForFetchingAndCopyingCacheHit}") + logger.info( + s"Avg time job spent in fetching and copying cache hit(s) state: ${metadata.avgTimeForFetchingAndCopyingCacheHit}" + ) } - /*** * Compares the metrics in Metadata which have type 'Duration' (mostly the ones related to time metrics) */ - def compareDurationMetrics(metadataOld: Metadata, metadataNew: Metadata, metricFunc: Metadata => Duration, metricName: String): ErrorOr[String] = { - if(metricFunc(metadataNew).toMillis > REGRESSION_CONST * metricFunc(metadataOld).toMillis){ + def compareDurationMetrics(metadataOld: Metadata, + metadataNew: Metadata, + metricFunc: Metadata => Duration, + metricName: String + ): ErrorOr[String] = + if (metricFunc(metadataNew).toMillis > REGRESSION_CONST * metricFunc(metadataOld).toMillis) { (s"$metricName of new metadata is greater than 10% of old metadata. " + s"New metric value: ${metricFunc(metadataNew)}. " + s"Old metric value: ${metricFunc(metadataOld)}.").invalidNel[String] - } - else s"$metricName hasn't regressed yet".valid - } - + } else s"$metricName hasn't regressed yet".valid /*** * Compares the metrics in Metadata which have type 'Int' (mostly the ones related to size metrics) */ - def compareIntMetrics(metadataOld: Metadata, metadataNew: Metadata, metricFunc: Metadata => Int, metricName: String): ErrorOr[String] = { - if(metricFunc(metadataNew) > REGRESSION_CONST * metricFunc(metadataOld)){ + def compareIntMetrics(metadataOld: Metadata, + metadataNew: Metadata, + metricFunc: Metadata => Int, + metricName: String + ): ErrorOr[String] = + if (metricFunc(metadataNew) > REGRESSION_CONST * metricFunc(metadataOld)) { (s"$metricName of new metadata is greater than 10% of old metadata. " + s"New metric value: ${metricFunc(metadataNew)}. 
" + s"Old metric value: ${metricFunc(metadataOld)}.").invalidNel[String] - } - else s"$metricName hasn't regressed yet".valid - } - + } else s"$metricName hasn't regressed yet".valid def compareMetadataMetrics(metadataOld: Metadata, metadataNew: Metadata): ErrorOr[List[String]] = { - val durationFuncList: List[(Metadata => Duration, String)] = List((_.workflowStartedAfter, "workflowStartedAfter"), + val durationFuncList: List[(Metadata => Duration, String)] = List( + (_.workflowStartedAfter, "workflowStartedAfter"), (_.workflowRunningTime, "workflowRunningTime"), (_.avgTimeInCallCachingState, "avgTimeInCallCachingState"), (_.avgTimeInCheckingCallCacheState, "avgTimeInCheckingCallCacheState"), (_.avgTimeInJobPreparation, "avgTimeInJobPreparation"), - (_.avgTimeForFetchingAndCopyingCacheHit, "avgTimeForFetchingAndCopyingCacheHit")) + (_.avgTimeForFetchingAndCopyingCacheHit, "avgTimeForFetchingAndCopyingCacheHit") + ) - val intFuncList: List[(Metadata => Int, String)] = List((_.totalJobsPerRootWf, "totalJobsPerRootWf"), (_.avgCacheRetries, "avgCacheRetries")) + val intFuncList: List[(Metadata => Int, String)] = + List((_.totalJobsPerRootWf, "totalJobsPerRootWf"), (_.avgCacheRetries, "avgCacheRetries")) val durationMetricsComp = durationFuncList.map(x => compareDurationMetrics(metadataOld, metadataNew, x._1, x._2)) val intMetricsComp = intFuncList.map(x => compareIntMetrics(metadataOld, metadataNew, x._1, x._2)) (durationMetricsComp ::: intMetricsComp).sequence[ErrorOr, String] } - def printParseErrorToConsoleAndExit(metadataFile: String, error: Error, systemExit: Boolean): Unit = { logger.error(s"Something went wrong while parsing $metadataFile. Error: ${error.getLocalizedMessage}") if (systemExit) System.exit(1) } - - def generateAndCompareMetrics(metadataOldEither: Either[Error, Metadata], metadataNewEither: Either[Error, Metadata]): Unit = { + def generateAndCompareMetrics(metadataOldEither: Either[Error, Metadata], + metadataNewEither: Either[Error, Metadata] + ): Unit = (metadataOldEither, metadataNewEither) match { case (Right(metadataOld), Right(metadataNew)) => val metadataOldMsg = s"Metrics for metadata generated from ${args(0)}" @@ -136,16 +138,13 @@ object CompareMetadata extends App with StrictLogging{ printParseErrorToConsoleAndExit(args(0), e1, systemExit = false) printParseErrorToConsoleAndExit(args(1), e2, systemExit = true) } - } - args.length match { case 2 => if (args(0).startsWith("gs://") || args(1).startsWith("gs://")) { logger.error("Path to service account is needed to download GCS file. 
Please pass it as 3rd argument.") System.exit(1) - } - else generateAndCompareMetrics(parseMetadataFromLocalFile(args(0)), parseMetadataFromLocalFile(args(1))) + } else generateAndCompareMetrics(parseMetadataFromLocalFile(args(0)), parseMetadataFromLocalFile(args(1))) case 3 => generateAndCompareMetrics(parseMetadata(args(0), args(2)), parseMetadata(args(1), args(2))) case _ => logger.error("Please pass in 2 file paths!") diff --git a/perf/src/main/scala/cromwell/perf/Metadata.scala b/perf/src/main/scala/cromwell/perf/Metadata.scala index 190063e48b3..b61e88c795a 100644 --- a/perf/src/main/scala/cromwell/perf/Metadata.scala +++ b/perf/src/main/scala/cromwell/perf/Metadata.scala @@ -8,18 +8,18 @@ case class Metadata(id: String, start: OffsetDateTime, end: OffsetDateTime, status: String, - calls: Option[Map[String, Seq[Call]]]) { + calls: Option[Map[String, Seq[Call]]] +) { private def addInt(x: Int, y: Int): Int = x + y private def addDuration(x: Duration, y: Duration): Duration = x.plus(y) - private def sumElementsInOptionSeq[A](listOption: Option[Iterable[A]], op: (A, A) => A, default: A): A = { + private def sumElementsInOptionSeq[A](listOption: Option[Iterable[A]], op: (A, A) => A, default: A): A = listOption match { - case Some(list) => list.reduce[A]((a, b) => op(a,b)) + case Some(list) => list.reduce[A]((a, b) => op(a, b)) case None => default } - } /** * @return Time between the submission and start of workflow @@ -28,47 +28,47 @@ case class Metadata(id: String, val workflowRunningTime: Duration = Duration.between(start, end) - val totalJobsPerRootWf: Int = sumElementsInOptionSeq(calls.map(taskMap => taskMap.map(callsPerTask => callsPerTask._2.size)), addInt, 0) + val totalJobsPerRootWf: Int = + sumElementsInOptionSeq(calls.map(taskMap => taskMap.map(callsPerTask => callsPerTask._2.size)), addInt, 0) - val avgCacheRetries: Int = { + val avgCacheRetries: Int = if (totalJobsPerRootWf > 0) { - val cacheRetriesList = calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.cacheCopyRetries))) + val cacheRetriesList = + calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.cacheCopyRetries))) sumElementsInOptionSeq(cacheRetriesList, addInt, 0) / totalJobsPerRootWf - } - else 0 - } + } else 0 - val avgTimeInCallCachingState: Duration = { + val avgTimeInCallCachingState: Duration = if (totalJobsPerRootWf > 0) { - val timeInCallCachingStateList = calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.timeInCallCachingState))) - sumElementsInOptionSeq(timeInCallCachingStateList, addDuration, Duration.ZERO).dividedBy(totalJobsPerRootWf.toLong) - } - else Duration.ZERO - } + val timeInCallCachingStateList = + calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.timeInCallCachingState))) + sumElementsInOptionSeq(timeInCallCachingStateList, addDuration, Duration.ZERO).dividedBy( + totalJobsPerRootWf.toLong + ) + } else Duration.ZERO - val avgTimeInCheckingCallCacheState: Duration = { + val avgTimeInCheckingCallCacheState: Duration = if (totalJobsPerRootWf > 0) { - val timeInCheckingCallCacheStateList = calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.timeInCheckingCallCacheState))) - sumElementsInOptionSeq(timeInCheckingCallCacheStateList, addDuration, Duration.ZERO).dividedBy(totalJobsPerRootWf.toLong) - } - else Duration.ZERO - } + val timeInCheckingCallCacheStateList = calls.map(taskMap => + taskMap.flatMap(callsPerTask => 
callsPerTask._2.map(call => call.timeInCheckingCallCacheState)) + ) + sumElementsInOptionSeq(timeInCheckingCallCacheStateList, addDuration, Duration.ZERO).dividedBy( + totalJobsPerRootWf.toLong + ) + } else Duration.ZERO - val avgTimeInJobPreparation: Duration = { + val avgTimeInJobPreparation: Duration = if (totalJobsPerRootWf > 0) { - val timeInJobPreparationList = calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.timeInJobPreparation))) + val timeInJobPreparationList = + calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.timeInJobPreparation))) sumElementsInOptionSeq(timeInJobPreparationList, addDuration, Duration.ZERO).dividedBy(totalJobsPerRootWf.toLong) - } - else Duration.ZERO - } + } else Duration.ZERO - val avgTimeForFetchingAndCopyingCacheHit: Duration = { + val avgTimeForFetchingAndCopyingCacheHit: Duration = if (totalJobsPerRootWf > 0) { - val timeInCopyingList = calls.map(taskMap => taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.timeForFetchingAndCopyingCacheHit))) + val timeInCopyingList = calls.map(taskMap => + taskMap.flatMap(callsPerTask => callsPerTask._2.map(call => call.timeForFetchingAndCopyingCacheHit)) + ) sumElementsInOptionSeq(timeInCopyingList, addDuration, Duration.ZERO).dividedBy(totalJobsPerRootWf.toLong) - } - else Duration.ZERO - } + } else Duration.ZERO } - - diff --git a/project/ContinuousIntegration.scala b/project/ContinuousIntegration.scala index e2b61994c22..d5dd3262f87 100644 --- a/project/ContinuousIntegration.scala +++ b/project/ContinuousIntegration.scala @@ -30,13 +30,20 @@ object ContinuousIntegration { "docker", "run", "--rm", - "-v", s"${vaultToken.value}:/root/.vault-token", - "-v", s"${srcCiResources.value}:${srcCiResources.value}", - "-v", s"${targetCiResources.value}:${targetCiResources.value}", - "-e", "ENVIRONMENT=not_used", - "-e", s"INPUT_PATH=${srcCiResources.value}", - "-e", s"OUT_PATH=${targetCiResources.value}", - "broadinstitute/dsde-toolbox:dev", "render-templates.sh" + "-v", + s"${vaultToken.value}:/root/.vault-token", + "-v", + s"${srcCiResources.value}:${srcCiResources.value}", + "-v", + s"${targetCiResources.value}:${targetCiResources.value}", + "-e", + "ENVIRONMENT=not_used", + "-e", + s"INPUT_PATH=${srcCiResources.value}", + "-e", + s"OUT_PATH=${targetCiResources.value}", + "broadinstitute/dsde-toolbox:dev", + "render-templates.sh" ) val result = cmd ! 
log
     if (result != 0) {
@@ -45,7 +52,7 @@ object ContinuousIntegration {
         "https://hub.docker.com/r/broadinstitute/dsde-toolbox/"
       )
     }
-  },
+  }
   )
 
   def aggregateSettings(rootProject: Project): Seq[Setting[_]] = List(
@@ -54,7 +61,7 @@ object ContinuousIntegration {
       streams.value.log // make sure logger is loaded
       validateAggregatedProjects(rootProject, state.value)
       (Compile / compile).value
-    },
+    }
   )
 
   private val copyCiResources: TaskKey[Unit] = taskKey[Unit](s"Copy CI resources.")
@@ -74,9 +81,9 @@ object ContinuousIntegration {
    */
   private def getBuildSbtNames(rootProject: Project, state: State): Set[String] = {
     val extracted = Project.extract(state)
-    extracted.structure.units.flatMap({
-      case (_, loadedBuildUnit) => loadedBuildUnit.defined.keys
-    }).toSet - rootProject.id
+    extracted.structure.units.flatMap { case (_, loadedBuildUnit) =>
+      loadedBuildUnit.defined.keys
+    }.toSet - rootProject.id
   }
 
   /**
@@ -85,8 +92,8 @@ object ContinuousIntegration {
   private def validateAggregatedProjects(rootProject: Project, state: State): Unit = {
     // Get the list of projects explicitly aggregated
     val projectReferences: Seq[ProjectReference] = rootProject.aggregate
-    val localProjectReferences = projectReferences collect {
-      case localProject: LocalProject => localProject
+    val localProjectReferences = projectReferences collect { case localProject: LocalProject =>
+      localProject
     }
     val aggregatedNames = localProjectReferences.map(_.project).toSet
 
@@ -98,7 +105,7 @@ object ContinuousIntegration {
     val falseNames = unaggregatedProjects.filterKeys(aggregatedNames.contains)
 
     if (falseNames.nonEmpty) {
-      val reasons = falseNames.map({case (name, reason) => s"  ${name}: ${reason}"}).mkString("\n")
+      val reasons = falseNames.map { case (name, reason) => s"  ${name}: ${reason}" }.mkString("\n")
       sys.error(s"There are projects aggregated in build.sbt that shouldn't be:\n$reasons")
     }
   }
diff --git a/project/GenerateRestApiDocs.scala b/project/GenerateRestApiDocs.scala
index 9173a25c3c5..3e0297a09f5 100644
--- a/project/GenerateRestApiDocs.scala
+++ b/project/GenerateRestApiDocs.scala
@@ -65,7 +65,7 @@ object GenerateRestApiDocs {
    * @param content The original contents of the RESTAPI.md.
    * @return The contents with updated paths.
    */
-  private def replacePaths(content: String): String = {
+  private def replacePaths(content: String): String =
     content match {
       case PathsRegex(start, paths, end) =>
         val replacedPaths = paths.linesWithSeparators map {
@@ -73,12 +73,13 @@ object GenerateRestApiDocs {
           case other => other
         }
         replacedPaths.mkString(start, "", end)
-      case _ => throw new IllegalArgumentException(
-        "Content did not match expected regex. " +
-          "Did the swagger2markdown format change significantly? " +
-          "If so, a new regex may be required.")
+      case _ =>
+        throw new IllegalArgumentException(
+          "Content did not match expected regex. " +
+            "Did the swagger2markdown format change significantly? " +
+            "If so, a new regex may be required."
+        )
     }
-  }
 
   /**
    * Replaces generic strings in the generated RESTAPI.md.
@@ -86,9 +87,8 @@ object GenerateRestApiDocs {
    * @param content The contents of the RESTAPI.md.
    * @return The contents with generic replacements.
    */
-  private def replaceGenerics(content: String): String = {
+  private def replaceGenerics(content: String): String =
     GenericReplacements.foldRight(content)(replaceGeneric)
-  }
 
   /**
    * Replaces a single generic string in the generated RESTAPI.md.
@@ -118,12 +118,11 @@ object GenerateRestApiDocs { try { currentThread.setContextClassLoader(classUtilsClassLoader) block - } finally { + } finally currentThread.setContextClassLoader(originalThreadClassLoader) - } } - private def getModifiedMarkdown: String = { + private def getModifiedMarkdown: String = withPatchedClassLoader { val config = new Swagger2MarkupConfigBuilder() .withMarkupLanguage(MarkupLanguage.MARKDOWN) @@ -135,7 +134,6 @@ object GenerateRestApiDocs { val contents = converter.toString replaceGenerics(replacePaths(contents)) } - } /** * Generates the markdown from the swagger YAML, with some Cromwell customizations. @@ -164,6 +162,6 @@ object GenerateRestApiDocs { // Returns a settings including the `generateRestApiDocs` task. val generateRestApiDocsSettings: Seq[Setting[_]] = List( generateRestApiDocs := writeModifiedMarkdown(), - checkRestApiDocs := checkModifiedMarkdown(streams.value.log), + checkRestApiDocs := checkModifiedMarkdown(streams.value.log) ) } diff --git a/project/Merging.scala b/project/Merging.scala index 91b135e59eb..4ed133b82ac 100644 --- a/project/Merging.scala +++ b/project/Merging.scala @@ -4,23 +4,30 @@ import sbtassembly.{MergeStrategy, PathList} object Merging { val customMergeStrategy: Def.Initialize[String => MergeStrategy] = Def.setting { - case PathList(ps@_*) if Set("project.properties", "execution.interceptors").contains(ps.last) => + case PathList(ps @ _*) if Set("project.properties", "execution.interceptors").contains(ps.last) => // Merge/Filter files from AWS/Google jars that otherwise collide at merge time. MergeStrategy.filterDistinctLines - case PathList(ps@_*) if ps.last == "logback.xml" => + case PathList(ps @ _*) if ps.last == "logback.xml" => MergeStrategy.first // Merge mozilla/public-suffix-list.txt if duplicated - case PathList(ps@_*) if ps.last == "public-suffix-list.txt" => + case PathList(ps @ _*) if ps.last == "public-suffix-list.txt" => MergeStrategy.last // Merge kotlin modules if duplicated - case PathList(ps@_*) if ps.last == "kotlin-stdlib-common.kotlin_module" => + case PathList(ps @ _*) if ps.last == "kotlin-stdlib-common.kotlin_module" => MergeStrategy.last - case PathList(ps@_*) if ps.last == "kotlin-stdlib.kotlin_module" => + case PathList(ps @ _*) if ps.last == "kotlin-stdlib.kotlin_module" => MergeStrategy.last // AWS SDK v2 configuration files - can be discarded - case PathList(ps@_*) if Set("codegen.config" , "service-2.json" , "waiters-2.json" , "customization.config" , "examples-1.json" , "paginators-1.json").contains(ps.last) => + case PathList(ps @ _*) + if Set("codegen.config", + "service-2.json", + "waiters-2.json", + "customization.config", + "examples-1.json", + "paginators-1.json" + ).contains(ps.last) => MergeStrategy.discard - case x@PathList("META-INF", path@_*) => + case x @ PathList("META-INF", path @ _*) => path map { _.toLowerCase } match { @@ -51,7 +58,7 @@ object Merging { val oldStrategy = (assembly / assemblyMergeStrategy).value oldStrategy(x) } - case x@PathList("OSGI-INF", path@_*) => + case x @ PathList("OSGI-INF", path @ _*) => path map { _.toLowerCase } match { @@ -61,10 +68,11 @@ object Merging { val oldStrategy = (assembly / assemblyMergeStrategy).value oldStrategy(x) } - case "asm-license.txt" | "module-info.class" | "overview.html" | "cobertura.properties" | "grammar.hgr" | "CHANGELOG.txt" => + case "asm-license.txt" | "module-info.class" | "overview.html" | "cobertura.properties" | "grammar.hgr" | + "CHANGELOG.txt" => MergeStrategy.discard // inspired by 
https://github.com/ergoplatform/explorer-backend/blob/7364ecfdeabeb691f0f25525e577d6c48240c672/build.sbt#L14-L15
-    case other if other.contains("scala/annotation/nowarn.class") => MergeStrategy.discard
+    case other if other.contains("scala/annotation/nowarn.class") => MergeStrategy.discard
     case other if other.contains("scala/annotation/nowarn$.class") => MergeStrategy.discard
 
     case PathList("mime.types") =>
       MergeStrategy.last
diff --git a/project/Publishing.scala b/project/Publishing.scala
index 1d4143fc13e..0af27e77147 100644
--- a/project/Publishing.scala
+++ b/project/Publishing.scala
@@ -34,7 +34,7 @@ object Publishing {
     `CROMWELL_SBT_DOCKER_TAGS=dev,develop sbt 'show docker::imageNames'` returns:
     ArrayBuffer(broadinstitute/womtool:dev, broadinstitute/womtool:develop)
     ArrayBuffer(broadinstitute/cromwell:dev, broadinstitute/cromwell:develop)
-   */
+   */
   dockerTags := {
     val versionsCsv = if (Version.isSnapshot) {
       // Tag looks like `85-443a6fc-SNAP`
@@ -113,19 +113,19 @@ object Publishing {
     docker / buildOptions := BuildOptions(
       cache = false,
       removeIntermediateContainers = BuildOptions.Remove.Always
-    ),
+    )
   )
 
-  def dockerPushSettings(pushEnabled: Boolean): Seq[Setting[_]] = {
+  def dockerPushSettings(pushEnabled: Boolean): Seq[Setting[_]] =
     if (pushEnabled) {
       List(
         dockerPushCheck := {
           val projectName = name.value
           val repositoryName = s"broadinstitute/$projectName"
           val repositoryUrl = s"https://registry.hub.docker.com/v2/repositories/$repositoryName/"
-          try {
+          try
             url(repositoryUrl).cat.lineStream
-          } catch {
+          catch {
             case exception: Exception =>
               throw new IllegalStateException(
                 s"""|Verify that public repository https://hub.docker.com/r/$repositoryName exists.
@@ -144,7 +144,6 @@ object Publishing {
         }
       )
     }
-  }
 
   private val broadArtifactoryResolver: Resolver =
     "Broad Artifactory" at
@@ -170,12 +169,11 @@ object Publishing {
   private val artifactoryCredentialsFile =
     file("target/ci/resources/artifactory_credentials.properties").getAbsoluteFile
 
-  private val artifactoryCredentials: Seq[Credentials] = {
+  private val artifactoryCredentials: Seq[Credentials] =
     if (artifactoryCredentialsFile.exists)
       List(Credentials(artifactoryCredentialsFile))
     else
       Nil
-  }
 
   // BT-250 Check if publishing will fail due to already published artifacts
   val checkAlreadyPublished = taskKey[Boolean]("Verifies if publishing has already occurred")
@@ -183,7 +181,8 @@ object Publishing {
 
   private case class CromwellMDArtifactType(artifactType: String,
                                             artifactExtension: String,
-                                            classifierOption: Option[String])
+                                            classifierOption: Option[String]
+  )
 
   /**
    * The types of MDArtifacts published by this sbt build.
@@ -201,18 +200,18 @@ object Publishing {
   /**
    * Retrieve the IBiblioResolver from sbt's Ivy setup.
    */
-  private def getIBiblioResolver(ivy: Ivy): IBiblioResolver = {
+  private def getIBiblioResolver(ivy: Ivy): IBiblioResolver =
     ivy.getSettings.getResolver(broadArtifactoryResolver.name) match {
       case iBiblioResolver: IBiblioResolver => iBiblioResolver
       case other => sys.error(s"Expected an IBiblioResolver, got $other")
     }
-  }
 
   /**
   * Maps an sbt artifact to the Apache Ivy artifact type.
   */
-  private def makeMDArtifact(moduleDescriptor: DefaultModuleDescriptor)
-                            (cromwellMDArtifactType: CromwellMDArtifactType): MDArtifact = {
+  private def makeMDArtifact(moduleDescriptor: DefaultModuleDescriptor)(
+    cromwellMDArtifactType: CromwellMDArtifactType
+  ): MDArtifact =
     new MDArtifact(
       moduleDescriptor,
       moduleDescriptor.getModuleRevisionId.getName,
@@ -221,7 +220,6 @@ object Publishing {
       null,
       cromwellMDArtifactType.classifierOption.map("classifier" -> _).toMap.asJava
     )
-  }
 
   /**
    * Returns true and prints out an error if an artifact already exists.
@@ -243,20 +241,19 @@ object Publishing {
     val module = ivyModule.value
     val log = streams.value.log
 
-    module.withModule(log) {
-      case (ivy, moduleDescriptor, _) =>
-        val resolver = getIBiblioResolver(ivy)
-        cromwellMDArtifactTypes
-          .map(makeMDArtifact(moduleDescriptor))
-          .map(existsMDArtifact(resolver, log))
-          .exists(identity)
+    module.withModule(log) { case (ivy, moduleDescriptor, _) =>
+      val resolver = getIBiblioResolver(ivy)
+      cromwellMDArtifactTypes
+        .map(makeMDArtifact(moduleDescriptor))
+        .map(existsMDArtifact(resolver, log))
+        .exists(identity)
     }
   },
   errorIfAlreadyPublished := {
     if (checkAlreadyPublished.value) {
       sys.error(
         s"Some ${version.value} artifacts were already published and will need to be manually deleted. " +
-          "See the errors above for the list of published artifacts."
+        "See the errors above for the list of published artifacts."
       )
     }
   }
diff --git a/project/Testing.scala b/project/Testing.scala
index 54ddc458353..6997e31960c 100644
--- a/project/Testing.scala
+++ b/project/Testing.scala
@@ -62,7 +62,7 @@ object Testing {
     spanScaleFactor,
     "-W",
     "300",
-    "300",
+    "300"
   )
 
   /** Run minnie-kenny only once per sbt invocation. */
@@ -71,7 +71,7 @@ object Testing {
     private var resultOption: Option[Int] = None
 
     /** Run using the logger, throwing an exception only on the first failure. */
-    def runOnce(log: Logger, args: Seq[String]): Unit = {
+    def runOnce(log: Logger, args: Seq[String]): Unit =
       mutex synchronized {
        if (resultOption.isEmpty) {
          log.debug(s"Running minnie-kenny.sh${args.mkString(" ", " ", "")}")
@@ -83,7 +83,6 @@ object Testing {
          sys.error("Running minnie-kenny.sh failed. Please double check for errors above.")
        }
      }
-    }
   }
 
   // Only run one minnie-kenny.sh at a time!
@@ -116,24 +115,24 @@ object Testing {
     Test / test := {
       minnieKenny.toTask("").value
       (Test / test).value
-    },
+    }
   )
 
   private val integrationTestSettings = List(
     libraryDependencies ++= testDependencies.map(_ % IntegrationTest)
   ) ++ itSettings
 
-  def addTestSettings(project: Project) = {
+  def addTestSettings(project: Project) =
     project
       .settings(testSettings)
-      .configs(AllTests).settings(inConfig(AllTests)(Defaults.testTasks): _*)
-      .configs(CromwellBenchmarkTest).settings(inConfig(CromwellBenchmarkTest)(Defaults.testTasks): _*)
-  }
+      .configs(AllTests)
+      .settings(inConfig(AllTests)(Defaults.testTasks): _*)
+      .configs(CromwellBenchmarkTest)
+      .settings(inConfig(CromwellBenchmarkTest)(Defaults.testTasks): _*)
 
-  def addIntegrationTestSettings(project: Project) = {
+  def addIntegrationTestSettings(project: Project) =
     project
       .settings(integrationTestSettings)
       .configs(IntegrationTest)
-  }
 }
diff --git a/project/Version.scala b/project/Version.scala
index 4de639d9d41..e530e54e62e 100644
--- a/project/Version.scala
+++ b/project/Version.scala
@@ -33,10 +33,10 @@ object Version {
     ThisBuild / git.versionProperty := "project.version",
     ThisBuild / git.baseVersion := cromwellVersion,
     ThisBuild / version :=
-      makeVersion(
-        versionProperty = git.versionProperty.value,
-        baseVersion = git.baseVersion.?.value,
-        headCommit = git.gitHeadCommit.value),
+      makeVersion(versionProperty = git.versionProperty.value,
+                  baseVersion = git.baseVersion.?.value,
+                  headCommit = git.gitHeadCommit.value
+      ),
     ThisBuild / shellPrompt := { state => "%s| %s> ".format(GitCommand.prompt.apply(state), cromwellVersion) }
   )
 
@@ -74,9 +74,7 @@ object Version {
     List(file)
   }
 
-  private def makeVersion(versionProperty: String,
-                          baseVersion: Option[String],
-                          headCommit: Option[String]): String = {
+  private def makeVersion(versionProperty: String, baseVersion: Option[String], headCommit: Option[String]): String = {
     // The version string passed in via command line settings, if desired.
     def overrideVersion = Option(sys.props(versionProperty))
 
@@ -88,7 +86,7 @@ object Version {
     // Version string fallback.
     val unknownVersion = basePrefix + "unknown"
 
-    //Now we fall through the potential version numbers...
+    // Now we fall through the potential version numbers...
     val version = overrideVersion orElse commitVersion getOrElse unknownVersion
 
     // For now, obfuscate SNAPSHOTs from sbt's developers: https://github.com/sbt/sbt/issues/2687#issuecomment-236586241
diff --git a/server/src/main/scala/cromwell/CommandLineArguments.scala b/server/src/main/scala/cromwell/CommandLineArguments.scala
index 986a33a4dc2..7cebb1b01ba 100644
--- a/server/src/main/scala/cromwell/CommandLineArguments.scala
+++ b/server/src/main/scala/cromwell/CommandLineArguments.scala
@@ -25,7 +25,8 @@ object CommandLineArguments {
                          worflowInputs: String,
                          workflowOptions: WorkflowOptions,
                          workflowLabels: String,
-                         dependencies: Option[File])
+                         dependencies: Option[File]
+  )
 
   case class WorkflowSourceOrUrl(source: Option[String], url: Option[String])
 }
@@ -41,52 +42,52 @@ case class CommandLineArguments(command: Option[Command] = None,
                                 imports: Option[Path] = None,
                                 metadataOutput: Option[Path] = None,
                                 host: URL = CommandLineArguments.DefaultCromwellHost
-                               ) {
+) {
   def validateSubmission(logger: Logger): ErrorOr[ValidSubmission] = {
-    def getWorkflowSourceFromPath(workflowPath: Path): ErrorOr[WorkflowSourceOrUrl] = {
+    def getWorkflowSourceFromPath(workflowPath: Path): ErrorOr[WorkflowSourceOrUrl] =
       WorkflowSourceOrUrl(None, Option(workflowPath.pathAsString)).validNel
-    }
 
     val workflowSourceAndUrl: ErrorOr[WorkflowSourceOrUrl] = DefaultPathBuilder.build(workflowSource.get) match {
-      case Success(workflowPath) => {
+      case Success(workflowPath) =>
         if (!workflowPath.exists) s"Workflow source path does not exist: $workflowPath".invalidNel
-        else if(!workflowPath.isReadable) s"Workflow source path is not readable: $workflowPath".invalidNel
+        else if (!workflowPath.isReadable) s"Workflow source path is not readable: $workflowPath".invalidNel
         else getWorkflowSourceFromPath(workflowPath)
-      }
       case Failure(e: InvalidPathException) => s"Invalid file path. Error: ${e.getMessage}".invalidNel
-      case Failure(_) => PartialWorkflowSources.validateWorkflowUrl(workflowSource.get).map(validUrl => WorkflowSourceOrUrl(None, Option(validUrl)))
+      case Failure(_) =>
+        PartialWorkflowSources
+          .validateWorkflowUrl(workflowSource.get)
+          .map(validUrl => WorkflowSourceOrUrl(None, Option(validUrl)))
     }
 
     val inputsJson: ErrorOr[String] = readOptionContent("Workflow inputs", workflowInputs)
 
     import common.validation.ErrorOr.ShortCircuitingFlatMap
-    val optionsJson = readOptionContent("Workflow options", workflowOptions).flatMap { WorkflowOptions.fromJsonString(_).toErrorOr }
+    val optionsJson = readOptionContent("Workflow options", workflowOptions).flatMap {
+      WorkflowOptions.fromJsonString(_).toErrorOr
+    }
     val labelsJson = readOptionContent("Workflow labels", workflowLabels)
 
     val workflowImports: Option[File] = imports.map(p => File(p.pathAsString))
 
-    (workflowSourceAndUrl, inputsJson, optionsJson, labelsJson) mapN {
-      case (srcOrUrl, i, o, l) =>
-        ValidSubmission(srcOrUrl.source, srcOrUrl.url, workflowRoot, i, o, l, workflowImports)
+    (workflowSourceAndUrl, inputsJson, optionsJson, labelsJson) mapN { case (srcOrUrl, i, o, l) =>
+      ValidSubmission(srcOrUrl.source, srcOrUrl.url, workflowRoot, i, o, l, workflowImports)
     }
   }
 
   /** Read the path to a string.
*/ - private def readContent(inputDescription: String, path: Path): ErrorOr[String] = { + private def readContent(inputDescription: String, path: Path): ErrorOr[String] = if (!path.exists) { s"$inputDescription does not exist: $path".invalidNel } else if (!path.isReadable) { s"$inputDescription is not readable: $path".invalidNel } else path.contentAsString.validNel - } /** Read the path to a string, unless the path is None, in which case returns "{}". */ - private def readOptionContent(inputDescription: String, pathOption: Option[Path]): ErrorOr[String] = { + private def readOptionContent(inputDescription: String, pathOption: Option[Path]): ErrorOr[String] = pathOption match { case Some(path) => readContent(inputDescription, path) case None => "{}".validNel } - } } diff --git a/server/src/main/scala/cromwell/CommandLineParser.scala b/server/src/main/scala/cromwell/CommandLineParser.scala index e8b30d5e008..60127905f6f 100644 --- a/server/src/main/scala/cromwell/CommandLineParser.scala +++ b/server/src/main/scala/cromwell/CommandLineParser.scala @@ -33,33 +33,29 @@ object CommandLineParser { // An optional JSON file path to output metadata. // -h, --host Cromwell server URL class CommandLineParser extends scopt.OptionParser[CommandLineArguments]("java -jar /path/to/cromwell.jar") { - + private def commonSubmissionArguments = List( - arg[String]("workflow-source").text("Workflow source file or workflow url.").required(). - action((s, c) => - c.copy(workflowSource = Option(s))), - opt[String]("workflow-root").text("Workflow root."). - action((s, c) => - c.copy(workflowRoot = Option(s))), - opt[String]('i', "inputs").text("Workflow inputs file."). - action((s, c) => - c.copy(workflowInputs = Option(DefaultPathBuilder.get(s)))), - opt[String]('o', "options").text("Workflow options file."). - action((s, c) => - c.copy(workflowOptions = Option(DefaultPathBuilder.get(s)))), - opt[String]('t', "type").text("Workflow type."). - action((s, c) => - c.copy(workflowType = Option(s))), - opt[String]('v', "type-version").text("Workflow type version."). - action((s, c) => - c.copy(workflowTypeVersion = Option(s))), - opt[String]('l', "labels").text("Workflow labels file."). - action((s, c) => - c.copy(workflowLabels = Option(DefaultPathBuilder.get(s)))), - opt[String]('p', "imports").text( - "A zip file to search for workflow imports."). 
- action((s, c) => - c.copy(imports = Option(DefaultPathBuilder.get(s)))) + arg[String]("workflow-source") + .text("Workflow source file or workflow url.") + .required() + .action((s, c) => c.copy(workflowSource = Option(s))), + opt[String]("workflow-root").text("Workflow root.").action((s, c) => c.copy(workflowRoot = Option(s))), + opt[String]('i', "inputs") + .text("Workflow inputs file.") + .action((s, c) => c.copy(workflowInputs = Option(DefaultPathBuilder.get(s)))), + opt[String]('o', "options") + .text("Workflow options file.") + .action((s, c) => c.copy(workflowOptions = Option(DefaultPathBuilder.get(s)))), + opt[String]('t', "type").text("Workflow type.").action((s, c) => c.copy(workflowType = Option(s))), + opt[String]('v', "type-version") + .text("Workflow type version.") + .action((s, c) => c.copy(workflowTypeVersion = Option(s))), + opt[String]('l', "labels") + .text("Workflow labels file.") + .action((s, c) => c.copy(workflowLabels = Option(DefaultPathBuilder.get(s)))), + opt[String]('p', "imports") + .text("A zip file to search for workflow imports.") + .action((s, c) => c.copy(imports = Option(DefaultPathBuilder.get(s)))) ) head("cromwell", cromwellVersion) @@ -68,29 +64,29 @@ class CommandLineParser extends scopt.OptionParser[CommandLineArguments]("java - version("version") - cmd("server").action((_, c) => c.copy(command = Option(Server))).text( - "Starts a web server on port 8000. See the web server documentation for more details about the API endpoints.") + cmd("server") + .action((_, c) => c.copy(command = Option(Server))) + .text( + "Starts a web server on port 8000. See the web server documentation for more details about the API endpoints." + ) - cmd("run"). - action((_, c) => c.copy(command = Option(Run))). - text("Run the workflow and print out the outputs in JSON format."). - children( + cmd("run") + .action((_, c) => c.copy(command = Option(Run))) + .text("Run the workflow and print out the outputs in JSON format.") + .children( commonSubmissionArguments ++ List( - opt[String]('m', "metadata-output").text( - "An optional JSON file path to output metadata."). - action((s, c) => - c.copy(metadataOutput = Option(DefaultPathBuilder.get(s)))) + opt[String]('m', "metadata-output") + .text("An optional JSON file path to output metadata.") + .action((s, c) => c.copy(metadataOutput = Option(DefaultPathBuilder.get(s)))) ): _* ) cmd("submit") - .action((_, c) => c.copy(command = Option(Submit))). - text("Submit the workflow to a Cromwell server."). - children( + .action((_, c) => c.copy(command = Option(Submit))) + .text("Submit the workflow to a Cromwell server.") + .children( commonSubmissionArguments ++ List( - opt[String]('h', "host").text("Cromwell server URL."). 
- action((h, c) =>
- c.copy(host = new URL(h))
+ opt[String]('h', "host").text("Cromwell server URL.").action((h, c) => c.copy(host = new URL(h)))
): _*
)
}
diff --git a/server/src/main/scala/cromwell/CromwellApp.scala b/server/src/main/scala/cromwell/CromwellApp.scala
index 9c814c4aeb9..6b843069493 100644
--- a/server/src/main/scala/cromwell/CromwellApp.scala
+++ b/server/src/main/scala/cromwell/CromwellApp.scala
@@ -6,17 +6,16 @@ object CromwellApp extends App {
 case object Run extends Command
 case object Server extends Command
 case object Submit extends Command
-
+
 def buildParser(): scopt.OptionParser[CommandLineArguments] = new CommandLineParser()
- def runCromwell(args: CommandLineArguments): Unit = {
+ def runCromwell(args: CommandLineArguments): Unit =
 args.command match {
 case Some(Run) => CromwellEntryPoint.runSingle(args)
 case Some(Server) => CromwellEntryPoint.runServer()
 case Some(Submit) => CromwellEntryPoint.submitToServer(args)
 case None => showUsageAndExitWithError()
 }
- }
 val parser = buildParser()
diff --git a/server/src/main/scala/cromwell/CromwellEntryPoint.scala b/server/src/main/scala/cromwell/CromwellEntryPoint.scala
index d712ea71b28..cd3c3dd0458 100644
--- a/server/src/main/scala/cromwell/CromwellEntryPoint.scala
+++ b/server/src/main/scala/cromwell/CromwellEntryPoint.scala
@@ -18,7 +18,11 @@ import cromwell.api.CromwellClient
 import cromwell.api.model.{Label, LabelsJsonFormatter, WorkflowSingleSubmission}
 import cromwell.core.logging.JavaLoggingBridge
 import cromwell.core.path.{DefaultPathBuilder, Path}
-import cromwell.core.{WorkflowSourceFilesCollection, WorkflowSourceFilesWithDependenciesZip, WorkflowSourceFilesWithoutImports}
+import cromwell.core.{
+ WorkflowSourceFilesCollection,
+ WorkflowSourceFilesWithDependenciesZip,
+ WorkflowSourceFilesWithoutImports
+}
 import cromwell.engine.workflow.SingleWorkflowRunnerActor
 import cromwell.engine.workflow.SingleWorkflowRunnerActor.RunWorkflow
 import cromwell.server.{CromwellServer, CromwellShutdown, CromwellSystem}
@@ -78,7 +82,9 @@ object CromwellEntryPoint extends GracefulStopSupport {
 val runner = cromwellSystem.actorSystem.actorOf(runnerProps, "SingleWorkflowRunnerActor")
 import cromwell.util.PromiseActor.EnhancedActorRef
- waitAndExit(() => runner.askNoTimeout(RunWorkflow), () => CromwellShutdown.instance(cromwellSystem.actorSystem).run(JvmExitReason))
+ waitAndExit(() => runner.askNoTimeout(RunWorkflow),
+ () => CromwellShutdown.instance(cromwellSystem.actorSystem).run(JvmExitReason)
+ )
 }
 def submitToServer(args: CommandLineArguments): Unit = {
@@ -123,11 +129,10 @@ object CromwellEntryPoint extends GracefulStopSupport {
 new CromwellSystem {
 override lazy val config: Config = CromwellEntryPoint.config
 }
- } recoverWith {
- case t: Throwable =>
- Log.error(s"Failed to instantiate Cromwell System. Shutting down Cromwell.", t)
- System.exit(1)
- Failure(t)
+ } recoverWith { case t: Throwable =>
+ Log.error(s"Failed to instantiate Cromwell System. 
Shutting down Cromwell.", t) + System.exit(1) + Failure(t) } get } @@ -184,11 +189,12 @@ object CromwellEntryPoint extends GracefulStopSupport { val futureResult = operation() Await.ready(futureResult, Duration.Inf) - try { + try Await.ready(shutdown(), 30.seconds) - } catch { + catch { case _: TimeoutException => Console.err.println("Timed out trying to shutdown actor system") - case other: Exception => Console.err.println(s"Unexpected error trying to shutdown actor system: ${other.getMessage}") + case other: Exception => + Console.err.println(s"Unexpected error trying to shutdown actor system: ${other.getMessage}") } val returnCode = futureResult.value.get match { @@ -201,34 +207,33 @@ object CromwellEntryPoint extends GracefulStopSupport { sys.exit(returnCode) } - private def waitAndExit(runner: CromwellSystem => Future[Any], workflowManagerSystem: CromwellSystem): Unit = { + private def waitAndExit(runner: CromwellSystem => Future[Any], workflowManagerSystem: CromwellSystem): Unit = waitAndExit(() => runner(workflowManagerSystem), () => workflowManagerSystem.shutdownActorSystem()) - } def validateSubmitArguments(args: CommandLineArguments): WorkflowSingleSubmission = { import LabelsJsonFormatter._ import spray.json._ - val validation = args.validateSubmission(EntryPointLogger) map { - case ValidSubmission(s, u, r, i, o, l, z) => - val finalWorkflowSourceAndUrl: WorkflowSourceOrUrl = - (s, u) match { - case (None, Some(url)) if !url.startsWith("http") => //case where url is a WDL/CWL file - WorkflowSourceOrUrl(Option(DefaultPathBuilder.get(url).contentAsString), None) - case _ => - WorkflowSourceOrUrl(s, u) - } - - WorkflowSingleSubmission( - workflowSource = finalWorkflowSourceAndUrl.source, - workflowUrl = finalWorkflowSourceAndUrl.url, - workflowRoot = r, - workflowType = args.workflowType, - workflowTypeVersion = args.workflowTypeVersion, - inputsJson = Option(i), - options = Option(o.asPrettyJson), - labels = Option(l.parseJson.convertTo[List[Label]]), - zippedImports = z) + val validation = args.validateSubmission(EntryPointLogger) map { case ValidSubmission(s, u, r, i, o, l, z) => + val finalWorkflowSourceAndUrl: WorkflowSourceOrUrl = + (s, u) match { + case (None, Some(url)) if !url.startsWith("http") => // case where url is a WDL/CWL file + WorkflowSourceOrUrl(Option(DefaultPathBuilder.get(url).contentAsString), None) + case _ => + WorkflowSourceOrUrl(s, u) + } + + WorkflowSingleSubmission( + workflowSource = finalWorkflowSourceAndUrl.source, + workflowUrl = finalWorkflowSourceAndUrl.url, + workflowRoot = r, + workflowType = args.workflowType, + workflowTypeVersion = args.workflowTypeVersion, + inputsJson = Option(i), + options = Option(o.asPrettyJson), + labels = Option(l.parseJson.convertTo[List[Label]]), + zippedImports = z + ) } validOrFailSubmission(validation) @@ -236,37 +241,40 @@ object CromwellEntryPoint extends GracefulStopSupport { def validateRunArguments(args: CommandLineArguments): WorkflowSourceFilesCollection = { - val sourceFileCollection = (args.validateSubmission(EntryPointLogger), writeableMetadataPath(args.metadataOutput)) mapN { - case (ValidSubmission(s, u, r, i, o, l, Some(z)), _) => - //noinspection RedundantDefaultArgument - WorkflowSourceFilesWithDependenciesZip.apply( - workflowSource = s, - workflowUrl = u, - workflowRoot = r, - workflowType = args.workflowType, - workflowTypeVersion = args.workflowTypeVersion, - inputsJson = i, - workflowOptions = o, - labelsJson = l, - importsZip = z.loadBytes, - warnings = Vector.empty, - workflowOnHold = false, 
- requestedWorkflowId = None) - case (ValidSubmission(s, u, r, i, o, l, None), _) => - //noinspection RedundantDefaultArgument - WorkflowSourceFilesWithoutImports.apply( - workflowSource = s, - workflowUrl = u, - workflowRoot = r, - workflowType = args.workflowType, - workflowTypeVersion = args.workflowTypeVersion, - inputsJson = i, - workflowOptions = o, - labelsJson = l, - warnings = Vector.empty, - workflowOnHold = false, - requestedWorkflowId = None) - } + val sourceFileCollection = + (args.validateSubmission(EntryPointLogger), writeableMetadataPath(args.metadataOutput)) mapN { + case (ValidSubmission(s, u, r, i, o, l, Some(z)), _) => + // noinspection RedundantDefaultArgument + WorkflowSourceFilesWithDependenciesZip.apply( + workflowSource = s, + workflowUrl = u, + workflowRoot = r, + workflowType = args.workflowType, + workflowTypeVersion = args.workflowTypeVersion, + inputsJson = i, + workflowOptions = o, + labelsJson = l, + importsZip = z.loadBytes, + warnings = Vector.empty, + workflowOnHold = false, + requestedWorkflowId = None + ) + case (ValidSubmission(s, u, r, i, o, l, None), _) => + // noinspection RedundantDefaultArgument + WorkflowSourceFilesWithoutImports.apply( + workflowSource = s, + workflowUrl = u, + workflowRoot = r, + workflowType = args.workflowType, + workflowTypeVersion = args.workflowTypeVersion, + inputsJson = i, + workflowOptions = o, + labelsJson = l, + warnings = Vector.empty, + workflowOnHold = false, + requestedWorkflowId = None + ) + } val sourceFiles = for { sources <- sourceFileCollection @@ -276,19 +284,19 @@ object CromwellEntryPoint extends GracefulStopSupport { validOrFailSubmission(sourceFiles) } - def validOrFailSubmission[A](validation: ErrorOr[A]): A = { - validation.valueOr(errors => throw new RuntimeException with MessageAggregation { - override def exceptionContext: String = "ERROR: Unable to submit workflow to Cromwell:" - override def errorMessages: Iterable[String] = errors.toList - }) - } + def validOrFailSubmission[A](validation: ErrorOr[A]): A = + validation.valueOr(errors => + throw new RuntimeException with MessageAggregation { + override def exceptionContext: String = "ERROR: Unable to submit workflow to Cromwell:" + override def errorMessages: Iterable[String] = errors.toList + } + ) - private def writeableMetadataPath(path: Option[Path]): ErrorOr[Unit] = { + private def writeableMetadataPath(path: Option[Path]): ErrorOr[Unit] = path match { case Some(p) if !metadataPathIsWriteable(p) => s"Unable to write to metadata directory: $p".invalidNel case _ => ().validNel } - } private def metadataPathIsWriteable(metadataPath: Path): Boolean = Try(metadataPath.createIfNotExists(createParents = true).append("")).isSuccess diff --git a/server/src/test/scala/cromwell/CromwellCommandLineSpec.scala b/server/src/test/scala/cromwell/CromwellCommandLineSpec.scala index 41b1edccb36..00ff1f7cdc4 100644 --- a/server/src/test/scala/cromwell/CromwellCommandLineSpec.scala +++ b/server/src/test/scala/cromwell/CromwellCommandLineSpec.scala @@ -51,12 +51,14 @@ class CromwellCommandLineSpec extends AnyFlatSpec with CromwellTimeoutSpec with it should "run single when supplying wdl and inputs" in { val threeStep = WdlAndInputs(ThreeStep) - val optionsLast = parser.parse(Array("run", threeStep.wdl, "--inputs", threeStep.inputs), CommandLineArguments()).get + val optionsLast = + parser.parse(Array("run", threeStep.wdl, "--inputs", threeStep.inputs), CommandLineArguments()).get optionsLast.command shouldBe Some(Run) optionsLast.workflowSource.get shouldBe 
threeStep.wdl optionsLast.workflowInputs.get.pathAsString shouldBe threeStep.inputs - val optionsFirst = parser.parse(Array("run", "--inputs", threeStep.inputs, threeStep.wdl), CommandLineArguments()).get + val optionsFirst = + parser.parse(Array("run", "--inputs", threeStep.inputs, threeStep.wdl), CommandLineArguments()).get optionsFirst.command shouldBe Some(Run) optionsFirst.workflowSource.get shouldBe threeStep.wdl optionsFirst.workflowInputs.get.pathAsString shouldBe threeStep.inputs @@ -96,13 +98,21 @@ class CromwellCommandLineSpec extends AnyFlatSpec with CromwellTimeoutSpec with } it should "run single when supplying wdl and inputs and options" in { - val optionsLast = parser.parse(Array("run", "3step.wdl", "--inputs", "3step.inputs", "--options", "3step.options"), CommandLineArguments()).get + val optionsLast = parser + .parse(Array("run", "3step.wdl", "--inputs", "3step.inputs", "--options", "3step.options"), + CommandLineArguments() + ) + .get optionsLast.command shouldBe Some(Run) optionsLast.workflowSource.get shouldBe "3step.wdl" optionsLast.workflowInputs.get.pathAsString shouldBe "3step.inputs" optionsLast.workflowOptions.get.pathAsString shouldBe "3step.options" - val optionsFirst = parser.parse(Array("run", "--inputs", "3step.inputs", "--options", "3step.options", "3step.wdl"), CommandLineArguments()).get + val optionsFirst = parser + .parse(Array("run", "--inputs", "3step.inputs", "--options", "3step.options", "3step.wdl"), + CommandLineArguments() + ) + .get optionsFirst.command shouldBe Some(Run) optionsFirst.workflowSource.get shouldBe "3step.wdl" optionsFirst.workflowInputs.get.pathAsString shouldBe "3step.inputs" @@ -118,16 +128,29 @@ class CromwellCommandLineSpec extends AnyFlatSpec with CromwellTimeoutSpec with } it should "fail if workflow url length is more than 2000 characters" in { - val veryLongUrl = 
"https://this_url_has_more_than_2000_characters/why_would_someone_have_such_long_urls_one_would_ask/beats_me/now_starts_lorem_ipsum/At_vero_eos_et_accusamus_et_iusto_odio_dignissimos_ducimus_qui_blanditiis_praesentium_voluptatum_deleniti_atque_corrupti_quos_dolores_et_quas_molestias_excepturi_sint_occaecati_cupiditate_non_provident,_similique_sunt_in_culpa_qui_officia_deserunt_mollitia_animi,_id_est_laborum_et_dolorum_fuga._Et_harum_quidem_rerum_facilis_est_et_expedita_distinctio._Nam_libero_tempore,_cum_soluta_nobis_est_eligendi_optio_cumque_nihil_impedit_quo_minus_id_quod_maxime_placeat_facere_possimus,_omnis_voluptas_assumenda_est,_omnis_dolor_repellendus._Temporibus_autem_quibusdam_et_aut_officiis_debitis_aut_rerum_necessitatibus_saepe_eveniet_ut_et_voluptates_repudiandae_sint_et_molestiae_non_recusandae._Itaque_earum_rerum_hic_tenetur_a_sapiente_delectus,_ut_aut_reiciendis_voluptatibus_maiores_alias_consequatur_aut_perferendis_doloribus_asperiores_repellat/Sed_ut_perspiciatis_unde_omnis_iste_natus_error_sit_voluptatem_accusantium_doloremque_laudantium,_totam_rem_aperiam,_eaque_ipsa_quae_ab_illo_inventore_veritatis_et_quasi_architecto_beatae_vitae_dicta_sunt_explicabo._Nemo_enim_ipsam_voluptatem_quia_voluptas_sit_aspernatur_aut_odit_aut_fugit,_sed_quia_consequuntur_magni_dolores_eos_qui_ratione_voluptatem_sequi_nesciunt._Neque_porro_quisquam_est,_qui_dolorem_ipsum_quia_dolor_sit_amet,_consectetur,_adipisci_velit,_sed_quia_non_numquam_eius_modi_tempora_incidunt_ut_labore_et_dolore_magnam_aliquam_quaerat_voluptatem._Ut_enim_ad_minima_veniam,_quis_nostrum_exercitationem_ullam_corporis_suscipit_laboriosam,_nisi_ut_aliquid_ex_ea_commodi_consequatur?_Quis_autem_vel_eum_iure_reprehenderit_qui_in_ea_voluptate_velit_esse_quam_nihil_molestiae_consequatur,_vel_illum_qui_dolorem_eum_fugiat_quo_voluptas_nulla_pariatur?/Lorem_ipsum_dolor_sit_amet,_consectetur_adipiscing_elit,_sed_do_eiusmod_tempor_incididunt_ut_labore_et_dolore_magna_aliqua._Ut_enim_ad_minim_veniam,_quis_nostrud_exercitation_ullamco_laboris_nisi_ut_aliquip_ex_ea_commodo_consequat._Duis_aute_irure_dolor_in_reprehenderit_in_voluptate_velit_esse_cillum_dolore_eu_fugiat_nulla_pariatur._Excepteur_sint_occaecat_cupidatat_non_proident,_sunt_in_culpa_qui_officia_deserunt_mollit_anim_id_est_laborum/hello.wdl/hello.wdl" + val veryLongUrl = + 
"https://this_url_has_more_than_2000_characters/why_would_someone_have_such_long_urls_one_would_ask/beats_me/now_starts_lorem_ipsum/At_vero_eos_et_accusamus_et_iusto_odio_dignissimos_ducimus_qui_blanditiis_praesentium_voluptatum_deleniti_atque_corrupti_quos_dolores_et_quas_molestias_excepturi_sint_occaecati_cupiditate_non_provident,_similique_sunt_in_culpa_qui_officia_deserunt_mollitia_animi,_id_est_laborum_et_dolorum_fuga._Et_harum_quidem_rerum_facilis_est_et_expedita_distinctio._Nam_libero_tempore,_cum_soluta_nobis_est_eligendi_optio_cumque_nihil_impedit_quo_minus_id_quod_maxime_placeat_facere_possimus,_omnis_voluptas_assumenda_est,_omnis_dolor_repellendus._Temporibus_autem_quibusdam_et_aut_officiis_debitis_aut_rerum_necessitatibus_saepe_eveniet_ut_et_voluptates_repudiandae_sint_et_molestiae_non_recusandae._Itaque_earum_rerum_hic_tenetur_a_sapiente_delectus,_ut_aut_reiciendis_voluptatibus_maiores_alias_consequatur_aut_perferendis_doloribus_asperiores_repellat/Sed_ut_perspiciatis_unde_omnis_iste_natus_error_sit_voluptatem_accusantium_doloremque_laudantium,_totam_rem_aperiam,_eaque_ipsa_quae_ab_illo_inventore_veritatis_et_quasi_architecto_beatae_vitae_dicta_sunt_explicabo._Nemo_enim_ipsam_voluptatem_quia_voluptas_sit_aspernatur_aut_odit_aut_fugit,_sed_quia_consequuntur_magni_dolores_eos_qui_ratione_voluptatem_sequi_nesciunt._Neque_porro_quisquam_est,_qui_dolorem_ipsum_quia_dolor_sit_amet,_consectetur,_adipisci_velit,_sed_quia_non_numquam_eius_modi_tempora_incidunt_ut_labore_et_dolore_magnam_aliquam_quaerat_voluptatem._Ut_enim_ad_minima_veniam,_quis_nostrum_exercitationem_ullam_corporis_suscipit_laboriosam,_nisi_ut_aliquid_ex_ea_commodi_consequatur?_Quis_autem_vel_eum_iure_reprehenderit_qui_in_ea_voluptate_velit_esse_quam_nihil_molestiae_consequatur,_vel_illum_qui_dolorem_eum_fugiat_quo_voluptas_nulla_pariatur?/Lorem_ipsum_dolor_sit_amet,_consectetur_adipiscing_elit,_sed_do_eiusmod_tempor_incididunt_ut_labore_et_dolore_magna_aliqua._Ut_enim_ad_minim_veniam,_quis_nostrud_exercitation_ullamco_laboris_nisi_ut_aliquip_ex_ea_commodo_consequat._Duis_aute_irure_dolor_in_reprehenderit_in_voluptate_velit_esse_cillum_dolore_eu_fugiat_nulla_pariatur._Excepteur_sint_occaecat_cupidatat_non_proident,_sunt_in_culpa_qui_officia_deserunt_mollit_anim_id_est_laborum/hello.wdl/hello.wdl" val command = parser.parse(Array("run", veryLongUrl), CommandLineArguments()).get val validation = Try(CromwellEntryPoint.validateRunArguments(command)) validation.isFailure shouldBe true - validation.failed.get.getMessage should include("Invalid workflow url: url has length 2305, longer than the maximum allowed 2000 characters") + validation.failed.get.getMessage should include( + "Invalid workflow url: url has length 2305, longer than the maximum allowed 2000 characters" + ) } it should "fail if input files do not exist" in { - val parsedArgs = parser.parse(Array("run", "xyzshouldnotexist.wdl", "--inputs", "xyzshouldnotexist.inputs", "--options", "xyzshouldnotexist.options"), CommandLineArguments()).get + val parsedArgs = parser + .parse(Array("run", + "xyzshouldnotexist.wdl", + "--inputs", + "xyzshouldnotexist.inputs", + "--options", + "xyzshouldnotexist.options" + ), + CommandLineArguments() + ) + .get val validation = Try(CromwellEntryPoint.validateRunArguments(parsedArgs)) validation.isFailure shouldBe true @@ -147,7 +170,11 @@ class CromwellCommandLineSpec extends AnyFlatSpec with CromwellTimeoutSpec with it should "fail if metadata output path is not writeable" in { val threeStep = WdlAndInputs(ThreeStep) - val 
parsedArgs = parser.parse(Array("run", threeStep.wdl, "--inputs", threeStep.inputs, "--metadata-output", threeStep.metadata), CommandLineArguments()).get
+ val parsedArgs = parser
+ .parse(Array("run", threeStep.wdl, "--inputs", threeStep.inputs, "--metadata-output", threeStep.metadata),
+ CommandLineArguments()
+ )
+ .get
 threeStep.metadataFile write "foo"
 threeStep.metadataFile setPermissions Set.empty
 val ccl = Try(CromwellEntryPoint.validateRunArguments(parsedArgs))
@@ -166,7 +193,8 @@ class CromwellCommandLineSpec extends AnyFlatSpec with CromwellTimeoutSpec with
 val zippedDir = wdlDir.zip()
 val zippedPath = zippedDir.pathAsString
- val parsedArgs = parser.parse(Array("run", filePassing.pathAsString, "--imports", zippedPath), CommandLineArguments()).get
+ val parsedArgs =
+ parser.parse(Array("run", filePassing.pathAsString, "--imports", zippedPath), CommandLineArguments()).get
 val ccl = Try(CromwellEntryPoint.validateRunArguments(parsedArgs))
 ccl.isFailure shouldBe false
diff --git a/server/src/test/scala/cromwell/CromwellTestKitSpec.scala b/server/src/test/scala/cromwell/CromwellTestKitSpec.scala
index 319b417e861..09688408552 100644
--- a/server/src/test/scala/cromwell/CromwellTestKitSpec.scala
+++ b/server/src/test/scala/cromwell/CromwellTestKitSpec.scala
@@ -9,21 +9,29 @@ import cromwell.CromwellTestKitSpec._
 import cromwell.core._
 import cromwell.core.path.BetterFileMethods.Cmds
 import cromwell.core.path.DefaultPathBuilder
-import cromwell.docker.DockerInfoActor.{DockerInfoSuccessResponse, DockerInformation}
+import cromwell.docker.DockerInfoActor.{DockerInformation, DockerInfoSuccessResponse}
 import cromwell.docker.{DockerHashResult, DockerInfoRequest}
 import cromwell.engine.MockCromwellTerminator
 import cromwell.engine.backend.{BackendConfiguration, CromwellBackends}
 import cromwell.engine.workflow.WorkflowManagerActor.RetrieveNewWorkflows
-import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadActor.{CacheLookupNoHit, CacheLookupRequest}
+import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadActor.{
+ CacheLookupNoHit,
+ CacheLookupRequest
+}
 import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheWriteActor.SaveCallCacheHashes
 import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheWriteSuccess
 import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.WorkflowSubmittedToStore
 import cromwell.engine.workflow.workflowstore.{InMemorySubWorkflowStore, InMemoryWorkflowStore, WorkflowStoreActor}
-import cromwell.jobstore.JobStoreActor.{JobStoreWriteSuccess, JobStoreWriterCommand}
+import cromwell.jobstore.JobStoreActor.{JobStoreWriterCommand, JobStoreWriteSuccess}
 import cromwell.languages.config.{CromwellLanguages, LanguageConfiguration}
 import cromwell.server.{CromwellRootActor, CromwellSystem}
 import cromwell.services.metadata.MetadataService._
-import cromwell.services.{FailedMetadataJsonResponse, MetadataJsonResponse, ServiceRegistryActor, SuccessfulMetadataJsonResponse}
+import cromwell.services.{
+ FailedMetadataJsonResponse,
+ MetadataJsonResponse,
+ ServiceRegistryActor,
+ SuccessfulMetadataJsonResponse
+}
 import cromwell.subworkflowstore.EmptySubWorkflowStoreActor
 import cromwell.util.SampleWdl
 import org.scalactic.Equality
@@ -43,7 +51,8 @@ import scala.concurrent.duration._
 import scala.concurrent.{Await, ExecutionContext, Future}
 import scala.util.matching.Regex
-case class OutputNotFoundException(outputFqn: String, actualOutputs: String) extends RuntimeException(s"Expected 
output $outputFqn was not found in: '$actualOutputs'") +case class OutputNotFoundException(outputFqn: String, actualOutputs: String) + extends RuntimeException(s"Expected output $outputFqn was not found in: '$actualOutputs'") case class LogNotFoundException(log: String) extends RuntimeException(s"Expected log $log was not found") object CromwellTestKitSpec { @@ -108,33 +117,32 @@ object CromwellTestKitSpec { class TestWorkflowManagerSystem(testActorSystem: ActorSystem, override val config: Config) extends CromwellSystem { override protected def systemName: String = testActorSystem.name override protected def newActorSystem(): ActorSystem = testActorSystem + /** * Do NOT shut down the test actor system inside the normal flow. * The actor system will be externally shutdown outside the block. */ // -Ywarn-value-discard - override def shutdownActorSystem(): Future[Terminated] = { Future.successful(null) } + override def shutdownActorSystem(): Future[Terminated] = Future.successful(null) } /** * Wait for exactly one occurrence of the specified info pattern in the specified block. The block is in its own * parameter list for usage syntax reasons. */ - def waitForInfo[T](pattern: String, occurrences: Int = 1)(block: => T)(implicit system: ActorSystem): T = { + def waitForInfo[T](pattern: String, occurrences: Int = 1)(block: => T)(implicit system: ActorSystem): T = EventFilter.info(pattern = pattern, occurrences = occurrences).intercept { block } - } /** * Wait for occurrence(s) of the specified warning pattern in the specified block. The block is in its own parameter * list for usage syntax reasons. */ - def waitForWarning[T](pattern: String, occurrences: Int = 1)(block: => T)(implicit system: ActorSystem): T = { + def waitForWarning[T](pattern: String, occurrences: Int = 1)(block: => T)(implicit system: ActorSystem): T = EventFilter.warning(pattern = pattern, occurrences = occurrences).intercept { block } - } /** * Akka TestKit appears to be unable to match errors generated by `log.error(Throwable, String)` with the normal @@ -144,40 +152,40 @@ object CromwellTestKitSpec { */ def waitForErrorWithException[T](pattern: String, throwableClass: Class[_ <: Throwable] = classOf[Throwable], - occurrences: Int = 1) - (block: => T) - (implicit system: ActorSystem): T = { + occurrences: Int = 1 + )(block: => T)(implicit system: ActorSystem): T = { val regex = Right[String, Regex](pattern.r) ErrorFilter(throwableClass, source = None, message = regex, complete = false)(occurrences = occurrences).intercept { block } } - /** * Special case for validating outputs. Used when the test wants to check that an output exists, but doesn't care what * the actual value was. */ - lazy val AnyValueIsFine: WomValue = WomString("Today you are you! That is truer than true! There is no one alive who is you-er than you!") + lazy val AnyValueIsFine: WomValue = WomString( + "Today you are you! That is truer than true! There is no one alive who is you-er than you!" 
+ ) - def replaceVariables(womValue: WomValue, workflowId: WorkflowId): WomValue = { + def replaceVariables(womValue: WomValue, workflowId: WorkflowId): WomValue = womValue match { case WomString(value) => WomString(replaceVariables(value, workflowId)) case _ => womValue } - } def replaceVariables(value: String, workflowId: WorkflowId): String = { val variables = Map("PWD" -> Cmds.pwd.toString, "UUID" -> workflowId.toString) - variables.foldLeft(value) { - case (result, (variableName, variableValue)) => result.replace(s"<<$variableName>>", s"$variableValue") + variables.foldLeft(value) { case (result, (variableName, variableValue)) => + result.replace(s"<<$variableName>>", s"$variableValue") } } lazy val DefaultConfig: Config = ConfigFactory.load lazy val NooPServiceActorConfig: Config = DefaultConfig.withValue( - "services.LoadController.class", ConfigValueFactory.fromAnyRef("cromwell.services.NooPServiceActor") + "services.LoadController.class", + ConfigValueFactory.fromAnyRef("cromwell.services.NooPServiceActor") ) lazy val JesBackendConfig: Config = ConfigFactory.parseString( @@ -215,7 +223,8 @@ object CromwellTestKitSpec { | } | } |} - """.stripMargin) + """.stripMargin + ) /** * It turns out that tests using the metadata refresh actor running in parallel don't work so well if there are more @@ -229,19 +238,23 @@ object CromwellTestKitSpec { */ private val ServiceRegistryActorSystem = akka.actor.ActorSystem("cromwell-service-registry-system") - val ServiceRegistryActorInstance: ActorRef = { - ServiceRegistryActorSystem.actorOf(ServiceRegistryActor.props(CromwellTestKitSpec.DefaultConfig), "ServiceRegistryActor") - } + val ServiceRegistryActorInstance: ActorRef = + ServiceRegistryActorSystem.actorOf(ServiceRegistryActor.props(CromwellTestKitSpec.DefaultConfig), + "ServiceRegistryActor" + ) class TestCromwellRootActor(config: Config)(implicit materializer: ActorMaterializer) - extends CromwellRootActor(MockCromwellTerminator, false, false, serverMode = true, config = config) { + extends CromwellRootActor(MockCromwellTerminator, false, false, serverMode = true, config = config) { override lazy val serviceRegistryActor: ActorRef = ServiceRegistryActorInstance override lazy val workflowStore = new InMemoryWorkflowStore override lazy val subWorkflowStore = new InMemorySubWorkflowStore(workflowStore) def submitWorkflow(sources: WorkflowSourceFilesCollection): WorkflowId = { val submitMessage = WorkflowStoreActor.SubmitWorkflow(sources) - val result = Await.result(workflowStoreActor.ask(submitMessage)(TimeoutDuration), Duration.Inf).asInstanceOf[WorkflowSubmittedToStore].workflowId + val result = Await + .result(workflowStoreActor.ask(submitMessage)(TimeoutDuration), Duration.Inf) + .asInstanceOf[WorkflowSubmittedToStore] + .workflowId workflowManagerActor ! 
RetrieveNewWorkflows result } @@ -249,8 +262,14 @@ object CromwellTestKitSpec { } abstract class CromwellTestKitWordSpec extends CromwellTestKitSpec with AnyWordSpecLike -abstract class CromwellTestKitSpec extends TestKitSuite - with DefaultTimeout with ImplicitSender with Matchers with ScalaFutures with Eventually with OneInstancePerTest { +abstract class CromwellTestKitSpec + extends TestKitSuite + with DefaultTimeout + with ImplicitSender + with Matchers + with ScalaFutures + with Eventually + with OneInstancePerTest { override protected lazy val actorSystemConfig: Config = ConfigFactory.parseString(CromwellTestKitSpec.ConfigText) @@ -273,7 +292,7 @@ abstract class CromwellTestKitSpec extends TestKitSuite lazy val dummyServiceRegistryActor: ActorRef = system.actorOf(Props.empty, "dummyServiceRegistryActor") lazy val dummyLogCopyRouter: ActorRef = system.actorOf(Props.empty, "dummyLogCopyRouter") - //noinspection ConvertExpressionToSAM + // noinspection ConvertExpressionToSAM // Allow to use shouldEqual between 2 WdlTypes while acknowledging for edge cases implicit val wdlTypeSoftEquality: Equality[WomType] = new Equality[WomType] { @tailrec @@ -301,10 +320,10 @@ abstract class CromwellTestKitSpec extends TestKitSuite case (_: WomString, expectedFile: WomFile) => fileEquality(a.valueString, expectedFile.valueString) case (array: WomArray, expectedArray: WomArray) => (array.value.length == expectedArray.value.length) && - array.value.zip(expectedArray.value).map(Function.tupled(areEqual)).forall(identity) + array.value.zip(expectedArray.value).map(Function.tupled(areEqual)).forall(identity) case (map: WomMap, expectedMap: WomMap) => - val mapped = map.value.map { - case (k, v) => expectedMap.value.contains(k) && areEqual(v, expectedMap.value(k)) + val mapped = map.value.map { case (k, v) => + expectedMap.value.contains(k) && areEqual(v, expectedMap.value(k)) } (map.value.size == expectedMap.value.size) && mapped.forall(identity) @@ -315,9 +334,8 @@ abstract class CromwellTestKitSpec extends TestKitSuite } } - protected def buildCromwellRootActor(config: Config, actorName: String): TestActorRef[TestCromwellRootActor] = { + protected def buildCromwellRootActor(config: Config, actorName: String): TestActorRef[TestCromwellRootActor] = TestActorRef(new TestCromwellRootActor(config), actorName) - } def runWdl(sampleWdl: SampleWdl, runtime: String = "", @@ -326,8 +344,8 @@ abstract class CromwellTestKitSpec extends TestKitSuite terminalState: WorkflowState = WorkflowSucceeded, config: Config = NooPServiceActorConfig, patienceConfig: PatienceConfig = defaultPatience, - testActorName: String, - )(implicit ec: ExecutionContext): Map[FullyQualifiedName, WomValue] = { + testActorName: String + )(implicit ec: ExecutionContext): Map[FullyQualifiedName, WomValue] = { val rootActor = buildCromwellRootActor(config, testActorName) val sources = sampleWdl.asWorkflowSources( @@ -338,7 +356,11 @@ abstract class CromwellTestKitSpec extends TestKitSuite labels = customLabels ) val workflowId = rootActor.underlyingActor.submitWorkflow(sources) - eventually { verifyWorkflowComplete(rootActor.underlyingActor.serviceRegistryActor, workflowId) } (config = patienceConfig, pos = implicitly[org.scalactic.source.Position], retrying = implicitly[Retrying[Unit]]) + eventually(verifyWorkflowComplete(rootActor.underlyingActor.serviceRegistryActor, workflowId))( + config = patienceConfig, + pos = implicitly[org.scalactic.source.Position], + retrying = implicitly[Retrying[Unit]] + ) 
verifyWorkflowState(rootActor.underlyingActor.serviceRegistryActor, workflowId, terminalState) val outcome = getWorkflowOutputsFromMetadata(workflowId, rootActor.underlyingActor.serviceRegistryActor) @@ -354,14 +376,17 @@ abstract class CromwellTestKitSpec extends TestKitSuite allowOtherOutputs: Boolean = true, config: Config = NooPServiceActorConfig, patienceConfig: PatienceConfig = defaultPatience, - testActorName: String, - ) - (implicit ec: ExecutionContext): WorkflowId = { + testActorName: String + )(implicit ec: ExecutionContext): WorkflowId = { val rootActor = buildCromwellRootActor(config, testActorName) val sources = sampleWdl.asWorkflowSources(runtime, workflowOptions) val workflowId = rootActor.underlyingActor.submitWorkflow(sources) - eventually { verifyWorkflowComplete(rootActor.underlyingActor.serviceRegistryActor, workflowId) } (config = patienceConfig, pos = implicitly[org.scalactic.source.Position], retrying = implicitly[Retrying[Unit]]) + eventually(verifyWorkflowComplete(rootActor.underlyingActor.serviceRegistryActor, workflowId))( + config = patienceConfig, + pos = implicitly[org.scalactic.source.Position], + retrying = implicitly[Retrying[Unit]] + ) verifyWorkflowState(rootActor.underlyingActor.serviceRegistryActor, workflowId, WorkflowSucceeded) val outputs = getWorkflowOutputsFromMetadata(workflowId, rootActor.underlyingActor.serviceRegistryActor) @@ -374,7 +399,10 @@ abstract class CromwellTestKitSpec extends TestKitSuite } if (!allowOtherOutputs) { outputs foreach { case (actualFqn, actualValue) => - val expectedValue = expectedOutputs.getOrElse(actualFqn, throw new RuntimeException(s"Actual output $actualFqn was not wanted in '$expectedOutputNames'")) + val expectedValue = expectedOutputs.getOrElse( + actualFqn, + throw new RuntimeException(s"Actual output $actualFqn was not wanted in '$expectedOutputNames'") + ) if (expectedValue != AnyValueIsFine) actualValue shouldEqual expectedValue } } @@ -383,9 +411,12 @@ abstract class CromwellTestKitSpec extends TestKitSuite workflowId } - private def getWorkflowState(workflowId: WorkflowId, serviceRegistryActor: ActorRef)(implicit ec: ExecutionContext): WorkflowState = { + private def getWorkflowState(workflowId: WorkflowId, serviceRegistryActor: ActorRef)(implicit + ec: ExecutionContext + ): WorkflowState = { val statusResponse = serviceRegistryActor.ask(GetStatus(workflowId))(TimeoutDuration).collect { - case SuccessfulMetadataJsonResponse(_, jsObject) => WorkflowState.withName(jsObject.fields("status").asInstanceOf[JsString].value) + case SuccessfulMetadataJsonResponse(_, jsObject) => + WorkflowState.withName(jsObject.fields("status").asInstanceOf[JsString].value) case f => throw new RuntimeException(s"Unexpected status response for $workflowId: $f") } Await.result(statusResponse, TimeoutDuration) @@ -394,20 +425,29 @@ abstract class CromwellTestKitSpec extends TestKitSuite /** * Verifies that a workflow is complete */ - protected def verifyWorkflowComplete(serviceRegistryActor: ActorRef, workflowId: WorkflowId)(implicit ec: ExecutionContext): Unit = { - List(WorkflowSucceeded, WorkflowFailed, WorkflowAborted) should contain(getWorkflowState(workflowId, serviceRegistryActor)) + protected def verifyWorkflowComplete(serviceRegistryActor: ActorRef, workflowId: WorkflowId)(implicit + ec: ExecutionContext + ): Unit = { + List(WorkflowSucceeded, WorkflowFailed, WorkflowAborted) should contain( + getWorkflowState(workflowId, serviceRegistryActor) + ) () } /** * Verifies that a state is correct. 
*/
- protected def verifyWorkflowState(serviceRegistryActor: ActorRef, workflowId: WorkflowId, expectedState: WorkflowState)(implicit ec: ExecutionContext): Unit = {
- getWorkflowState(workflowId, serviceRegistryActor) should equal (expectedState)
+ protected def verifyWorkflowState(serviceRegistryActor: ActorRef,
+ workflowId: WorkflowId,
+ expectedState: WorkflowState
+ )(implicit ec: ExecutionContext): Unit = {
+ getWorkflowState(workflowId, serviceRegistryActor) should equal(expectedState)
 ()
 }
- private def getWorkflowOutputsFromMetadata(id: WorkflowId, serviceRegistryActor: ActorRef): Map[FullyQualifiedName, WomValue] = {
+ private def getWorkflowOutputsFromMetadata(id: WorkflowId,
+ serviceRegistryActor: ActorRef
+ ): Map[FullyQualifiedName, WomValue] = {
 val response = serviceRegistryActor.ask(WorkflowOutputs(id)).mapTo[MetadataJsonResponse] collect {
 case SuccessfulMetadataJsonResponse(_, r) => r
@@ -416,12 +456,12 @@ abstract class CromwellTestKitSpec extends TestKitSuite
 val jsObject = Await.result(response, TimeoutDuration)
 jsObject.getFields(WorkflowMetadataKeys.Outputs).toList match {
- case head::_ => head.asInstanceOf[JsObject].fields.map( x => (x._1, jsValueToWdlValue(x._2)))
+ case head :: _ => head.asInstanceOf[JsObject].fields.map(x => (x._1, jsValueToWdlValue(x._2)))
 case _ => Map.empty
 }
 }
- private def jsValueToWdlValue(jsValue: JsValue): WomValue = {
+ private def jsValueToWdlValue(jsValue: JsValue): WomValue =
 jsValue match {
 case str: JsString => WomString(str.value)
 case JsNumber(number) if number.scale == 0 => WomInteger(number.intValue)
@@ -433,17 +473,18 @@ abstract class CromwellTestKitSpec extends TestKitSuite
 else WomArray(WomArrayType(valuesArray.head.womType), valuesArray)
 case map: JsObject =>
 // TODO: currently assuming all keys are String. But that's not WDL-complete...
- val valuesMap: Map[WomValue, WomValue] = map.fields.map { case (fieldName, fieldValue) => (WomString(fieldName), jsValueToWdlValue(fieldValue)) }
+ val valuesMap: Map[WomValue, WomValue] = map.fields.map { case (fieldName, fieldValue) =>
+ (WomString(fieldName), jsValueToWdlValue(fieldValue))
+ }
 if (valuesMap.isEmpty) WomMap(WomMapType(WomStringType, WomStringType), Map.empty)
 else WomMap(WomMapType(WomStringType, valuesMap.head._2.womType), valuesMap)
 case oh => throw new RuntimeException(s"Programmer Error! Unexpected case match: $oh")
 }
- }
}
class AlwaysHappyJobStoreActor extends Actor {
- override def receive: Receive = {
- case x: JobStoreWriterCommand => sender() ! JobStoreWriteSuccess(x)
+ override def receive: Receive = { case x: JobStoreWriterCommand =>
+ sender() ! JobStoreWriteSuccess(x)
 }
}
@@ -456,14 +497,14 @@ object AlwaysHappyJobStoreActor {
}
class EmptyCallCacheReadActor extends Actor {
- override def receive: Receive = {
- case _: CacheLookupRequest => sender() ! CacheLookupNoHit
+ override def receive: Receive = { case _: CacheLookupRequest =>
+ sender() ! CacheLookupNoHit
 }
}
class EmptyCallCacheWriteActor extends Actor {
- override def receive: Receive = {
- case SaveCallCacheHashes => sender() ! CallCacheWriteSuccess
+ override def receive: Receive = { case SaveCallCacheHashes =>
+ sender() ! CallCacheWriteSuccess
 }
}
@@ -476,8 +517,8 @@ object EmptyCallCacheWriteActor {
}
class EmptyDockerHashActor extends Actor {
- override def receive: Receive = {
- case request: DockerInfoRequest => sender() ! 
DockerInfoSuccessResponse(DockerInformation(DockerHashResult("alg", "hash"), None), request)
+ override def receive: Receive = { case request: DockerInfoRequest =>
+ sender() ! DockerInfoSuccessResponse(DockerInformation(DockerHashResult("alg", "hash"), None), request)
 }
}
diff --git a/server/src/test/scala/cromwell/DeclarationWorkflowSpec.scala b/server/src/test/scala/cromwell/DeclarationWorkflowSpec.scala
index a2b8a4c8a8a..00b06994bcd 100644
--- a/server/src/test/scala/cromwell/DeclarationWorkflowSpec.scala
+++ b/server/src/test/scala/cromwell/DeclarationWorkflowSpec.scala
@@ -21,7 +21,11 @@ class DeclarationWorkflowSpec extends Matchers with AnyWordSpecLike {
 /*
 * WARNING: be aware that `workflow.inputs` is used by projects external to Cromwell (eg FC's input enumerator).
 */
- val actualInputs = WdlNamespaceWithWorkflow.load(SampleWdl.DeclarationsWorkflow.workflowSource(), Seq.empty[Draft2ImportResolver]).get.workflow.inputs
+ val actualInputs = WdlNamespaceWithWorkflow
+ .load(SampleWdl.DeclarationsWorkflow.workflowSource(), Seq.empty[Draft2ImportResolver])
+ .get
+ .workflow
+ .inputs
 actualInputs foreach {
 case (inputName: FullyQualifiedName, actualRequiredInputDefinition: RequiredInputDefinition) =>
@@ -34,10 +38,13 @@ class DeclarationWorkflowSpec extends Matchers with AnyWordSpecLike {
 inputName should be("two_step.cat.flags2")
 inputDefinition.womType should be(WomOptionalType(WomStringType))
 case (inputName: FullyQualifiedName, inputDefinition: OverridableInputDefinitionWithDefault) =>
- inputDefinition.localName.value should be ("two_step.static_string")
+ inputDefinition.localName.value should be("two_step.static_string")
 inputName should be("two_step.static_string")
 inputDefinition.womType should be(WomStringType)
- case other => throw new RuntimeException(s"Programmer Error! Draft 2 isn't set up to produce or handle ${other.getClass.getSimpleName}")
+ case other =>
+ throw new RuntimeException(
+ s"Programmer Error! 
Draft 2 isn't set up to produce or handle ${other.getClass.getSimpleName}" + ) } } } diff --git a/server/src/test/scala/cromwell/FilePassingWorkflowSpec.scala b/server/src/test/scala/cromwell/FilePassingWorkflowSpec.scala index 6a29981a44d..2690671e98d 100644 --- a/server/src/test/scala/cromwell/FilePassingWorkflowSpec.scala +++ b/server/src/test/scala/cromwell/FilePassingWorkflowSpec.scala @@ -21,7 +21,7 @@ class FilePassingWorkflowSpec extends CromwellTestKitWordSpec { "file_passing.b.contents" -> WomString("foo bar baz") ), patienceConfig = PatienceConfig(2.minutes.dilated), - testActorName = "TestCromwellRootActor-files", + testActorName = "TestCromwellRootActor-files" ) } } diff --git a/server/src/test/scala/cromwell/MultipleFilesWithSameNameWorkflowSpec.scala b/server/src/test/scala/cromwell/MultipleFilesWithSameNameWorkflowSpec.scala index 0e64170f02d..63f184b4eea 100644 --- a/server/src/test/scala/cromwell/MultipleFilesWithSameNameWorkflowSpec.scala +++ b/server/src/test/scala/cromwell/MultipleFilesWithSameNameWorkflowSpec.scala @@ -12,7 +12,7 @@ class MultipleFilesWithSameNameWorkflowSpec extends CromwellTestKitWordSpec { "two.x.out" -> WomString("first file.txt"), "two.y.out" -> WomString("second file.txt") ), - testActorName = "TestCromwellRootActor-clobber", + testActorName = "TestCromwellRootActor-clobber" ) } } diff --git a/server/src/test/scala/cromwell/OptionalParamWorkflowSpec.scala b/server/src/test/scala/cromwell/OptionalParamWorkflowSpec.scala index 7bee2c4cf7f..9c6081dae6e 100644 --- a/server/src/test/scala/cromwell/OptionalParamWorkflowSpec.scala +++ b/server/src/test/scala/cromwell/OptionalParamWorkflowSpec.scala @@ -11,17 +11,17 @@ class OptionalParamWorkflowSpec extends Matchers with AnyWordSpecLike { "A workflow with an optional parameter that has a prefix inside the tag" should { "not include that prefix if no value is specified" in { val wf = s""" - |task find { - | String? pattern - | File root - | command { - | find $${root} $${"-name " + pattern} - | } - |} - | - |workflow wf { - | call find - |} + |task find { + | String? 
pattern + | File root + | command { + | find $${root} $${"-name " + pattern} + | } + |} + | + |workflow wf { + | call find + |} """.stripMargin val ns = WdlNamespace.loadUsingSource(wf, None, None).get val findTask = ns.findTask("find") getOrElse { @@ -34,10 +34,16 @@ class OptionalParamWorkflowSpec extends Matchers with AnyWordSpecLike { ) instantiateWithoutValue.toTry.get.head.commandString shouldEqual "find src" - val instantiateWithValue = findTask.instantiateCommand(findTask.inputsFromMap(Map( - "find.root" -> WomSingleFile("src"), - "find.pattern" -> WomString("*.java") - )), NoFunctions).getOrElse {fail("Expected instantiation to work")} + val instantiateWithValue = findTask + .instantiateCommand(findTask.inputsFromMap( + Map( + "find.root" -> WomSingleFile("src"), + "find.pattern" -> WomString("*.java") + ) + ), + NoFunctions + ) + .getOrElse(fail("Expected instantiation to work")) instantiateWithValue.head.commandString shouldEqual "find src -name *.java" } } diff --git a/server/src/test/scala/cromwell/ReferenceConfSpec.scala b/server/src/test/scala/cromwell/ReferenceConfSpec.scala index 993ba4da0e6..69e3349e4c7 100644 --- a/server/src/test/scala/cromwell/ReferenceConfSpec.scala +++ b/server/src/test/scala/cromwell/ReferenceConfSpec.scala @@ -40,12 +40,11 @@ class ReferenceConfSpec extends Suite with AnyFlatSpecLike with Matchers { val overlappingKeys = fileKeys.intersect(nonFileKeys).toSeq.sorted if (overlappingKeys.nonEmpty) { - fail( - s"""|Key(s) overlapping - |=> ${overlappingKeys.mkString("=> ")} - |between $fileUrl - | and $nonFileUrl - |""".stripMargin) + fail(s"""|Key(s) overlapping + |=> ${overlappingKeys.mkString("=> ")} + |between $fileUrl + | and $nonFileUrl + |""".stripMargin) } } } diff --git a/server/src/test/scala/cromwell/SimpleWorkflowActorSpec.scala b/server/src/test/scala/cromwell/SimpleWorkflowActorSpec.scala index b3f4df64d07..4e67ac984a1 100644 --- a/server/src/test/scala/cromwell/SimpleWorkflowActorSpec.scala +++ b/server/src/test/scala/cromwell/SimpleWorkflowActorSpec.scala @@ -1,4 +1,3 @@ - package cromwell import java.time.OffsetDateTime @@ -27,11 +26,11 @@ import scala.concurrent.{Await, Promise} object SimpleWorkflowActorSpec { - case class TestableWorkflowActorAndMetadataPromise - ( + case class TestableWorkflowActorAndMetadataPromise( workflowActor: TestFSMRef[WorkflowActorState, WorkflowActorData, WorkflowActor], supervisor: TestProbe, - promise: Promise[Unit]) + promise: Promise[Unit] + ) } class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfter { @@ -40,7 +39,8 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte private def buildWorkflowActor(sampleWdl: SampleWdl, rawInputsOverride: String, workflowId: WorkflowId, - matchers: Matcher*): TestableWorkflowActorAndMetadataPromise = { + matchers: Matcher* + ): TestableWorkflowActorAndMetadataPromise = { val workflowSources = WorkflowSourceFilesWithoutImports( workflowSource = Option(sampleWdl.workflowSource()), workflowUrl = None, @@ -55,34 +55,44 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte ) val promise = Promise[Unit]() - val watchActor = system.actorOf(MetadataWatchActor.props(promise, matchers: _*), s"service-registry-$workflowId-${UUID.randomUUID()}") + val watchActor = system.actorOf(MetadataWatchActor.props(promise, matchers: _*), + s"service-registry-$workflowId-${UUID.randomUUID()}" + ) val supervisor = TestProbe() val config = ConfigFactory.load() val workflowToStart = 
WorkflowToStart(workflowId, OffsetDateTime.now(), workflowSources, Submitted, HogGroup("foo")) - val callCachingEnabled =false - val invalidateBadCacheResults =false + val callCachingEnabled = false + val invalidateBadCacheResults = false val workflowActor = TestFSMRef( - factory = new WorkflowActor(workflowToStart, config, + factory = new WorkflowActor( + workflowToStart, + config, ioActor = system.actorOf(SimpleIoActor.props), callCachingEnabled = callCachingEnabled, invalidateBadCacheResults = invalidateBadCacheResults, serviceRegistryActor = watchActor, - workflowLogCopyRouter = system.actorOf(Props.empty, s"workflow-copy-log-router-$workflowId-${UUID.randomUUID()}"), + workflowLogCopyRouter = + system.actorOf(Props.empty, s"workflow-copy-log-router-$workflowId-${UUID.randomUUID()}"), workflowCallbackActor = None, jobStoreActor = system.actorOf(AlwaysHappyJobStoreActor.props), subWorkflowStoreActor = system.actorOf(AlwaysHappySubWorkflowStoreActor.props), callCacheReadActor = system.actorOf(EmptyCallCacheReadActor.props), callCacheWriteActor = system.actorOf(EmptyCallCacheWriteActor.props), dockerHashActor = system.actorOf(EmptyDockerHashActor.props), - jobRestartCheckTokenDispenserActor = system.actorOf(JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running")), - jobExecutionTokenDispenserActor = system.actorOf(JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running")), + jobRestartCheckTokenDispenserActor = system.actorOf( + JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running") + ), + jobExecutionTokenDispenserActor = system.actorOf( + JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running") + ), backendSingletonCollection = BackendSingletonCollection(Map("Local" -> None)), serverMode = true, workflowStoreActor = system.actorOf(Props.empty), workflowHeartbeatConfig = WorkflowHeartbeatConfig(config), totalJobsByRootWf = new AtomicInteger(), fileHashCacheActorProps = None, - blacklistCache = None), + blacklistCache = None + ), supervisor = supervisor.ref, name = s"workflow-actor-$workflowId" ) @@ -96,16 +106,19 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte workflowId = WorkflowId.randomId() } - def workflowManagerActorAwaitsSingleWorkCompleteMessage(workflowManagerActor: TestProbe, finalState: WorkflowState): Unit = { - workflowManagerActor.expectMsgPF(TestExecutionTimeout) { - case WorkflowActorWorkComplete(_, _, finalState) => finalState should be(finalState) + def workflowManagerActorAwaitsSingleWorkCompleteMessage(workflowManagerActor: TestProbe, + finalState: WorkflowState + ): Unit = { + workflowManagerActor.expectMsgPF(TestExecutionTimeout) { case WorkflowActorWorkComplete(_, _, finalState) => + finalState should be(finalState) } workflowManagerActor.expectNoMessage(AwaitAlmostNothing) } "A WorkflowActor" should { "start, run, succeed and die" in { - val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, _) = buildWorkflowActor(SampleWdl.HelloWorld, SampleWdl.HelloWorld.workflowJson, workflowId) + val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, _) = + buildWorkflowActor(SampleWdl.HelloWorld, SampleWdl.HelloWorld.workflowJson, workflowId) val probe = TestProbe() probe watch workflowActor startingCallsFilter("wf_hello.hello") { @@ -120,7 +133,8 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte "fail to construct 
with missing inputs" in { val expectedError = "Required workflow input 'wf_hello.hello.addressee' not specified" val failureMatcher = FailureMatcher(expectedError) - val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = buildWorkflowActor(SampleWdl.HelloWorld, "{}", workflowId, failureMatcher) + val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = + buildWorkflowActor(SampleWdl.HelloWorld, "{}", workflowId, failureMatcher) val probe = TestProbe() probe watch workflowActor workflowActor ! StartWorkflowCommand @@ -140,18 +154,20 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte // TODO WOM: restore offending offensive input name val expectedError = "No coercion defined from '3' of type 'spray.json.JsNumber' to 'String'." val failureMatcher = FailureMatcher(expectedError) - val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = buildWorkflowActor(SampleWdl.HelloWorld, s""" { "$Addressee" : 3} """, - workflowId, failureMatcher) + val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = + buildWorkflowActor(SampleWdl.HelloWorld, s""" { "$Addressee" : 3} """, workflowId, failureMatcher) val probe = TestProbe() probe watch workflowActor workflowActor ! StartWorkflowCommand - try { + try Await.result(promise.future, TestExecutionTimeout) - } catch { + catch { case _: Throwable => val info = failureMatcher.nearMissInformation - fail(s"We didn't see the expected error message $expectedError within $TestExecutionTimeout. ${info.mkString(", ")}") + fail( + s"We didn't see the expected error message $expectedError within $TestExecutionTimeout. ${info.mkString(", ")}" + ) } probe.expectTerminated(workflowActor, AwaitAlmostNothing) supervisor.expectMsgPF(AwaitAlmostNothing, "parent should get a failed response") { @@ -165,9 +181,11 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte } "fail when a call fails" in { - val expectedError = "Job wf_goodbye.goodbye:NA:1 exited with return code 1 which has not been declared as a valid return code. See 'continueOnReturnCode' runtime attribute for more details." + val expectedError = + "Job wf_goodbye.goodbye:NA:1 exited with return code 1 which has not been declared as a valid return code. See 'continueOnReturnCode' runtime attribute for more details." 
val failureMatcher = FailureMatcher(expectedError) - val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = buildWorkflowActor(SampleWdl.GoodbyeWorld, SampleWdl.GoodbyeWorld.workflowJson, workflowId, failureMatcher) + val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = + buildWorkflowActor(SampleWdl.GoodbyeWorld, SampleWdl.GoodbyeWorld.workflowJson, workflowId, failureMatcher) val probe = TestProbe() probe watch workflowActor startingCallsFilter("wf_goodbye.goodbye") { @@ -188,18 +206,24 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte "gracefully handle malformed WDL" in { val expectedError = "No input bfile found evaluating inputs for expression bfile" val failureMatcher = FailureMatcher(expectedError) - val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = buildWorkflowActor(SampleWdl.CoercionNotDefined, SampleWdl.CoercionNotDefined.workflowJson, workflowId, failureMatcher) + val TestableWorkflowActorAndMetadataPromise(workflowActor, supervisor, promise) = + buildWorkflowActor(SampleWdl.CoercionNotDefined, + SampleWdl.CoercionNotDefined.workflowJson, + workflowId, + failureMatcher + ) val probe = TestProbe() probe watch workflowActor workflowActor ! StartWorkflowCommand - try { + try Await.result(promise.future, TestExecutionTimeout) - } catch { + catch { case _: Throwable => val info = failureMatcher.nearMissInformation val errorString = if (info.nonEmpty) "We had a near miss: " + info.mkString(", ") - else s"The expected key was never seen. We saw: [\n ${failureMatcher.fullEventList.map(e => s"${e.key} -> ${e.value}").mkString("\n ")}\n]." + else + s"The expected key was never seen. We saw: [\n ${failureMatcher.fullEventList.map(e => s"${e.key} -> ${e.value}").mkString("\n ")}\n]." fail(s"We didn't see the expected error message '$expectedError' within $TestExecutionTimeout. $errorString}") } probe.expectTerminated(workflowActor, AwaitAlmostNothing) @@ -223,4 +247,3 @@ class SimpleWorkflowActorSpec extends CromwellTestKitWordSpec with BeforeAndAfte } } } - diff --git a/server/src/test/scala/cromwell/WorkflowFailSlowSpec.scala b/server/src/test/scala/cromwell/WorkflowFailSlowSpec.scala index 165a6bcea20..8b173b28979 100644 --- a/server/src/test/scala/cromwell/WorkflowFailSlowSpec.scala +++ b/server/src/test/scala/cromwell/WorkflowFailSlowSpec.scala @@ -3,7 +3,6 @@ package cromwell import cromwell.core.WorkflowFailed import cromwell.util.SampleWdl - // TODO: These tests are (and were) somewhat unsatisfactory. They'd be much better if we use TestFSMRefs and TestProbes to simulate job completions against the WorkflowActor and make sure it only completes the workflow at the appropriate time. 
class WorkflowFailSlowSpec extends CromwellTestKitWordSpec { val FailFastOptions: String = @@ -19,7 +18,7 @@ class WorkflowFailSlowSpec extends CromwellTestKitWordSpec { sampleWdl = SampleWdl.WorkflowFailSlow, workflowOptions = FailFastOptions, terminalState = WorkflowFailed, - testActorName = "TestCromwellRootActor-not-complete", + testActorName = "TestCromwellRootActor-not-complete" ) outputs.size should be(0) } @@ -30,7 +29,7 @@ class WorkflowFailSlowSpec extends CromwellTestKitWordSpec { val outputs = runWdl( sampleWdl = SampleWdl.WorkflowFailSlow, terminalState = WorkflowFailed, - testActorName = "TestCromwellRootActor-workflowFailureMode", + testActorName = "TestCromwellRootActor-workflowFailureMode" ) outputs.size should be(0) } diff --git a/server/src/test/scala/cromwell/WorkflowOutputsSpec.scala b/server/src/test/scala/cromwell/WorkflowOutputsSpec.scala index f20b4d29c6c..2c164b51d51 100644 --- a/server/src/test/scala/cromwell/WorkflowOutputsSpec.scala +++ b/server/src/test/scala/cromwell/WorkflowOutputsSpec.scala @@ -14,7 +14,7 @@ class WorkflowOutputsSpec extends CromwellTestKitWordSpec { "three_step.wc.count" -> AnyValueIsFine ), allowOtherOutputs = false, - testActorName = "TestCromwellRootActor-use-all", + testActorName = "TestCromwellRootActor-use-all" ) } @@ -26,7 +26,7 @@ class WorkflowOutputsSpec extends CromwellTestKitWordSpec { "three_step.wc.count" -> AnyValueIsFine ), allowOtherOutputs = false, - testActorName = "TestCromwellRootActor-output", + testActorName = "TestCromwellRootActor-output" ) } @@ -38,7 +38,7 @@ class WorkflowOutputsSpec extends CromwellTestKitWordSpec { "scatter0.inside_scatter.out" -> AnyValueIsFine ), allowOtherOutputs = false, - testActorName = "TestCromwellRootActor-shards", + testActorName = "TestCromwellRootActor-shards" ) } @@ -49,7 +49,7 @@ class WorkflowOutputsSpec extends CromwellTestKitWordSpec { "scatter0.inside_scatter.out" -> AnyValueIsFine ), allowOtherOutputs = false, - testActorName = "TestCromwellRootActor-wildcards", + testActorName = "TestCromwellRootActor-wildcards" ) } } diff --git a/server/src/test/scala/cromwell/engine/WorkflowManagerActorSpec.scala b/server/src/test/scala/cromwell/engine/WorkflowManagerActorSpec.scala index c1bcaf083b4..07645654b63 100644 --- a/server/src/test/scala/cromwell/engine/WorkflowManagerActorSpec.scala +++ b/server/src/test/scala/cromwell/engine/WorkflowManagerActorSpec.scala @@ -1,4 +1,3 @@ - package cromwell.engine import akka.actor.ActorSystem @@ -14,7 +13,7 @@ import scala.concurrent.duration._ import scala.language.postfixOps class WorkflowManagerActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorBuilderForSpecs { - override implicit val actorSystem: ActorSystem = system + implicit override val actorSystem: ActorSystem = system "A WorkflowManagerActor" should { @@ -80,18 +79,21 @@ class WorkflowManagerActorSpec extends CromwellTestKitWordSpec with WorkflowDesc ) } - val config = CromwellTestKitSpec.NooPServiceActorConfig. - withValue("system.max-concurrent-workflows", ConfigValueFactory.fromAnyRef(2)). 
- withValue("system.new-workflow-poll-rate", ConfigValueFactory.fromAnyRef(1)) + val config = CromwellTestKitSpec.NooPServiceActorConfig + .withValue("system.max-concurrent-workflows", ConfigValueFactory.fromAnyRef(2)) + .withValue("system.new-workflow-poll-rate", ConfigValueFactory.fromAnyRef(1)) val rootActor = buildCromwellRootActor(config = config, actorName = "TestCromwellRootActor-pickup") val serviceRegistryActor = rootActor.underlyingActor.serviceRegistryActor val firstSources = SubWorkflows(naptime = 60 seconds).asWorkflowSources() - def waitForState(workflowId: WorkflowId, state: WorkflowState): Unit = { - eventually { verifyWorkflowState(serviceRegistryActor, workflowId, state) } (config = defaultPatience, pos = implicitly[org.scalactic.source.Position], retrying = implicitly[Retrying[Unit]]) - } + def waitForState(workflowId: WorkflowId, state: WorkflowState): Unit = + eventually(verifyWorkflowState(serviceRegistryActor, workflowId, state))( + config = defaultPatience, + pos = implicitly[org.scalactic.source.Position], + retrying = implicitly[Retrying[Unit]] + ) val firstWorkflowId = rootActor.underlyingActor.submitWorkflow(firstSources) waitForState(firstWorkflowId, WorkflowRunning) diff --git a/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala b/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala index d0f39fe8889..e9d5e6f1041 100644 --- a/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala +++ b/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala @@ -7,14 +7,22 @@ import akka.testkit._ import cats.data.{NonEmptyList, NonEmptyVector} import common.assertion.CromwellTimeoutSpec import cromwell.core._ -import cromwell.core.abort.{AbortResponse, WorkflowAbortFailureResponse, WorkflowAbortRequestedResponse, WorkflowAbortedResponse} +import cromwell.core.abort.{ + AbortResponse, + WorkflowAbortedResponse, + WorkflowAbortFailureResponse, + WorkflowAbortRequestedResponse +} import cromwell.engine.workflow.{CoordinatedWorkflowStoreActorBuilder, SqlWorkflowStoreBuilder} import cromwell.engine.workflow.WorkflowManagerActor.WorkflowNotFoundException import cromwell.engine.workflow.workflowstore.SqlWorkflowStore.WorkflowStoreState import cromwell.engine.workflow.workflowstore.WorkflowStoreActor._ import cromwell.engine.workflow.workflowstore.WorkflowStoreCoordinatedAccessActor.WriteHeartbeats import cromwell.engine.workflow.workflowstore.WorkflowStoreEngineActor.{NewWorkflowsToStart, NoNewWorkflowsToStart} -import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.{WorkflowSubmittedToStore, WorkflowsBatchSubmittedToStore} +import cromwell.engine.workflow.workflowstore.WorkflowStoreSubmitActor.{ + WorkflowsBatchSubmittedToStore, + WorkflowSubmittedToStore +} import cromwell.engine.workflow.workflowstore._ import cromwell.services.metadata.MetadataQuery import cromwell.services.metadata.MetadataService.{GetMetadataAction, MetadataLookupResponse} @@ -31,8 +39,14 @@ import scala.concurrent.Await import scala.concurrent.duration._ import scala.language.postfixOps -class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWorkflowStoreActorBuilder - with SqlWorkflowStoreBuilder with Matchers with BeforeAndAfter with Eventually with CromwellTimeoutSpec { +class WorkflowStoreActorSpec + extends CromwellTestKitWordSpec + with CoordinatedWorkflowStoreActorBuilder + with SqlWorkflowStoreBuilder + with Matchers + with BeforeAndAfter + with Eventually + with CromwellTimeoutSpec { private val 
helloWorldSourceFiles = HelloWorld.asWorkflowSources().asInstanceOf[WorkflowSourceFilesWithoutImports] private val helloWorldSourceFilesOnHold = HelloWorld.asWorkflowSources(workflowOnHold = true) private val helloCwlWorldSourceFiles = @@ -46,8 +60,7 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor val (list, distinct) = knownDistinct if (!distinct) { (list :+ next, false) - } - else { + } else { (list :+ next, !list.map(_.id).contains(next.id)) } } @@ -89,8 +102,8 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor "WorkflowStoreActor-CheckOnHold" ) storeActor ! SubmitWorkflow(helloWorldSourceFilesOnHold) - expectMsgPF(10 seconds) { - case submit: WorkflowSubmittedToStore => submit.state shouldBe WorkflowOnHold + expectMsgPF(10 seconds) { case submit: WorkflowSubmittedToStore => + submit.state shouldBe WorkflowOnHold } } @@ -107,9 +120,11 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor ), "WorkflowStoreActor-ReturnIdsForBatch" ) - storeActor ! BatchSubmitWorkflows(NonEmptyList.of(helloWorldSourceFiles, helloWorldSourceFiles, helloWorldSourceFiles)) - expectMsgPF(10 seconds) { - case WorkflowsBatchSubmittedToStore(ids, WorkflowSubmitted) => ids.toList.size shouldBe 3 + storeActor ! BatchSubmitWorkflows( + NonEmptyList.of(helloWorldSourceFiles, helloWorldSourceFiles, helloWorldSourceFiles) + ) + expectMsgPF(10 seconds) { case WorkflowsBatchSubmittedToStore(ids, WorkflowSubmitted) => + ids.toList.size shouldBe 3 } } @@ -126,46 +141,42 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor ), "WorkflowStoreActor-FetchExactlyN" ) - storeActor ! BatchSubmitWorkflows(NonEmptyList.of(helloWorldSourceFiles, helloWorldSourceFiles, helloCwlWorldSourceFiles)) + storeActor ! BatchSubmitWorkflows( + NonEmptyList.of(helloWorldSourceFiles, helloWorldSourceFiles, helloCwlWorldSourceFiles) + ) val insertedIds = expectMsgType[WorkflowsBatchSubmittedToStore](10 seconds).workflowIds.toList storeActor ! FetchRunnableWorkflows(2, Set.empty) - expectMsgPF(10 seconds) { - case NewWorkflowsToStart(workflowNel) => - workflowNel.toList.size shouldBe 2 - checkDistinctIds(workflowNel.toList) shouldBe true - workflowNel map { - case WorkflowToStart(id, _, sources, state, _) => - insertedIds.contains(id) shouldBe true - sources shouldBe helloWorldSourceFiles - state shouldBe Submitted - } + expectMsgPF(10 seconds) { case NewWorkflowsToStart(workflowNel) => + workflowNel.toList.size shouldBe 2 + checkDistinctIds(workflowNel.toList) shouldBe true + workflowNel map { case WorkflowToStart(id, _, sources, state, _) => + insertedIds.contains(id) shouldBe true + sources shouldBe helloWorldSourceFiles + state shouldBe Submitted + } } storeActor ! 
FetchRunnableWorkflows(1, Set.empty) - expectMsgPF(10 seconds) { - case NewWorkflowsToStart(workflowNel) => - workflowNel.toList.size shouldBe 1 - checkDistinctIds(workflowNel.toList) shouldBe true - workflowNel map { - case WorkflowToStart(id, _, sources, state, _) => - insertedIds.contains(id) shouldBe true - sources shouldBe helloCwlWorldSourceFiles - state shouldBe Submitted - } + expectMsgPF(10 seconds) { case NewWorkflowsToStart(workflowNel) => + workflowNel.toList.size shouldBe 1 + checkDistinctIds(workflowNel.toList) shouldBe true + workflowNel map { case WorkflowToStart(id, _, sources, state, _) => + insertedIds.contains(id) shouldBe true + sources shouldBe helloCwlWorldSourceFiles + state shouldBe Submitted + } } } "fetch encrypted and cleared workflow options" in { EncryptionSpec.assumeAes256Cbc() - val optionedSourceFiles = HelloWorld.asWorkflowSources(workflowOptions = - s"""|{ - | "key": "value", - | "refresh_token": "it's a secret" - |} - |""".stripMargin) - + val optionedSourceFiles = HelloWorld.asWorkflowSources(workflowOptions = s"""|{ + | "key": "value", + | "refresh_token": "it's a secret" + |} + |""".stripMargin) val store = new InMemoryWorkflowStore val storeActor = system.actorOf( @@ -183,36 +194,35 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor val insertedIds = expectMsgType[WorkflowsBatchSubmittedToStore](10 seconds).workflowIds.toList storeActor ! FetchRunnableWorkflows(1, Set.empty) - expectMsgPF(10 seconds) { - case NewWorkflowsToStart(workflowNel) => - workflowNel.toList.size should be(1) - checkDistinctIds(workflowNel.toList) should be(true) - workflowNel.toList.foreach { - case WorkflowToStart(id, _, sources, state, _) => - insertedIds.contains(id) should be(true) - sources.workflowSource should be(optionedSourceFiles.workflowSource) - sources.inputsJson should be(optionedSourceFiles.inputsJson) - state should be(Submitted) - - import spray.json._ - - // We need to wait for workflow metadata to be flushed before we can successfully query for it - eventually(timeout(15.seconds.dilated), interval(500.millis.dilated)) { - val actorNameUniquificationString = UUID.randomUUID().toString.take(7) - val readMetadataActor = system.actorOf( - ReadDatabaseMetadataWorkerActor.props(metadataReadTimeout = 30 seconds, metadataReadRowNumberSafetyThreshold = 20000), - s"ReadMetadataActor-FetchEncryptedOptions-$actorNameUniquificationString" - ) - - readMetadataActor ! 
GetMetadataAction(MetadataQuery.forWorkflow(id)) - expectMsgPF(10 seconds) { - case MetadataLookupResponse(_, eventList) => - val optionsEvent = eventList.find(_.key.key == "submittedFiles:options").get - val clearedJsObject = optionsEvent.value.get.value.parseJson.asJsObject - clearedJsObject.fields("key") should be(JsString("value")) - } - } + expectMsgPF(10 seconds) { case NewWorkflowsToStart(workflowNel) => + workflowNel.toList.size should be(1) + checkDistinctIds(workflowNel.toList) should be(true) + workflowNel.toList.foreach { case WorkflowToStart(id, _, sources, state, _) => + insertedIds.contains(id) should be(true) + sources.workflowSource should be(optionedSourceFiles.workflowSource) + sources.inputsJson should be(optionedSourceFiles.inputsJson) + state should be(Submitted) + + import spray.json._ + + // We need to wait for workflow metadata to be flushed before we can successfully query for it + eventually(timeout(15.seconds.dilated), interval(500.millis.dilated)) { + val actorNameUniquificationString = UUID.randomUUID().toString.take(7) + val readMetadataActor = system.actorOf( + ReadDatabaseMetadataWorkerActor.props(metadataReadTimeout = 30 seconds, + metadataReadRowNumberSafetyThreshold = 20000 + ), + s"ReadMetadataActor-FetchEncryptedOptions-$actorNameUniquificationString" + ) + + readMetadataActor ! GetMetadataAction(MetadataQuery.forWorkflow(id)) + expectMsgPF(10 seconds) { case MetadataLookupResponse(_, eventList) => + val optionsEvent = eventList.find(_.key.key == "submittedFiles:options").get + val clearedJsObject = optionsEvent.value.get.value.parseJson.asJsObject + clearedJsObject.fields("key") should be(JsString("value")) + } } + } } } @@ -229,20 +239,20 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor ), "WorkflowStoreActor-ReturnOnlyRemaining" ) - storeActor ! BatchSubmitWorkflows(NonEmptyList.of(helloWorldSourceFiles, helloWorldSourceFiles, helloWorldSourceFiles)) + storeActor ! BatchSubmitWorkflows( + NonEmptyList.of(helloWorldSourceFiles, helloWorldSourceFiles, helloWorldSourceFiles) + ) val insertedIds = expectMsgType[WorkflowsBatchSubmittedToStore](10 seconds).workflowIds.toList storeActor ! 
FetchRunnableWorkflows(100, Set.empty) - expectMsgPF(10 seconds) { - case NewWorkflowsToStart(workflowNel) => - workflowNel.toList.size shouldBe 3 - checkDistinctIds(workflowNel.toList) shouldBe true - workflowNel map { - case WorkflowToStart(id, _, sources, state, _) => - insertedIds.contains(id) shouldBe true - sources shouldBe helloWorldSourceFiles - state shouldBe Submitted - } + expectMsgPF(10 seconds) { case NewWorkflowsToStart(workflowNel) => + workflowNel.toList.size shouldBe 3 + checkDistinctIds(workflowNel.toList) shouldBe true + workflowNel map { case WorkflowToStart(id, _, sources, state, _) => + insertedIds.contains(id) shouldBe true + sources shouldBe helloWorldSourceFiles + state shouldBe Submitted + } } } @@ -426,7 +436,8 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor abortResponse.asInstanceOf[WorkflowAbortFailureResponse].workflowId should be(notFoundWorkflowId) abortResponse.asInstanceOf[WorkflowAbortFailureResponse].failure should be(a[WorkflowNotFoundException]) abortResponse.asInstanceOf[WorkflowAbortFailureResponse].failure.getMessage should be( - s"Couldn't abort 7ff8dff3-bc80-4500-af3b-57dbe7a6ecbb because no workflow with that ID is in progress") + s"Couldn't abort 7ff8dff3-bc80-4500-af3b-57dbe7a6ecbb because no workflow with that ID is in progress" + ) } } diff --git a/server/src/test/scala/cromwell/engine/workflow/CoordinatedWorkflowStoreActorBuilder.scala b/server/src/test/scala/cromwell/engine/workflow/CoordinatedWorkflowStoreActorBuilder.scala index 33703ef0ea8..7bce743a166 100644 --- a/server/src/test/scala/cromwell/engine/workflow/CoordinatedWorkflowStoreActorBuilder.scala +++ b/server/src/test/scala/cromwell/engine/workflow/CoordinatedWorkflowStoreActorBuilder.scala @@ -1,7 +1,11 @@ package cromwell.engine.workflow import akka.testkit.TestKitBase -import cromwell.engine.workflow.workflowstore.{CoordinatedWorkflowStoreAccess, WorkflowStore, WorkflowStoreCoordinatedAccessActor} +import cromwell.engine.workflow.workflowstore.{ + CoordinatedWorkflowStoreAccess, + WorkflowStore, + WorkflowStoreCoordinatedAccessActor +} trait CoordinatedWorkflowStoreActorBuilder { testKit: TestKitBase => def access(coordinatedAccessActorName: String)(store: WorkflowStore): CoordinatedWorkflowStoreAccess = { diff --git a/server/src/test/scala/cromwell/engine/workflow/SqlWorkflowStoreBuilder.scala b/server/src/test/scala/cromwell/engine/workflow/SqlWorkflowStoreBuilder.scala index 0b772789da7..59bdae414cc 100644 --- a/server/src/test/scala/cromwell/engine/workflow/SqlWorkflowStoreBuilder.scala +++ b/server/src/test/scala/cromwell/engine/workflow/SqlWorkflowStoreBuilder.scala @@ -16,10 +16,11 @@ trait SqlWorkflowStoreBuilder { def runWithDatabase[T](databaseConfig: Config)(block: SqlWorkflowStore => T): T = { val database = new EngineSlickDatabase(databaseConfig).initialized(EngineServicesStore.EngineLiquibaseSettings) - val metadataDatabase = new MetadataSlickDatabase(databaseConfig).initialized(MetadataServicesStore.MetadataLiquibaseSettings) - try { + val metadataDatabase = + new MetadataSlickDatabase(databaseConfig).initialized(MetadataServicesStore.MetadataLiquibaseSettings) + try block(SqlWorkflowStore(database, metadataDatabase)) - } finally { + finally { Try(database.close()) () } diff --git a/server/src/test/scala/cromwell/engine/workflow/WorkflowActorSpec.scala b/server/src/test/scala/cromwell/engine/workflow/WorkflowActorSpec.scala index 6f7c57ae985..9d7eb2ad59b 100644 --- 
a/server/src/test/scala/cromwell/engine/workflow/WorkflowActorSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/WorkflowActorSpec.scala @@ -14,10 +14,24 @@ import cromwell.engine.backend.BackendSingletonCollection import cromwell.engine.workflow.WorkflowActor._ import cromwell.engine.workflow.WorkflowManagerActor.WorkflowActorWorkComplete import cromwell.engine.workflow.lifecycle.EngineLifecycleActorAbortCommand -import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActor.{ExecuteWorkflowCommand, WorkflowExecutionAbortedResponse, WorkflowExecutionFailedResponse, WorkflowExecutionSucceededResponse} +import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActor.{ + ExecuteWorkflowCommand, + WorkflowExecutionAbortedResponse, + WorkflowExecutionFailedResponse, + WorkflowExecutionSucceededResponse +} import cromwell.engine.workflow.lifecycle.finalization.{CopyWorkflowLogsActor, WorkflowCallbackActor} -import cromwell.engine.workflow.lifecycle.finalization.WorkflowFinalizationActor.{StartFinalizationCommand, WorkflowFinalizationFailedResponse, WorkflowFinalizationSucceededResponse} -import cromwell.engine.workflow.lifecycle.initialization.WorkflowInitializationActor.{StartInitializationCommand, WorkflowInitializationAbortedResponse, WorkflowInitializationFailedResponse, WorkflowInitializationSucceededResponse} +import cromwell.engine.workflow.lifecycle.finalization.WorkflowFinalizationActor.{ + StartFinalizationCommand, + WorkflowFinalizationFailedResponse, + WorkflowFinalizationSucceededResponse +} +import cromwell.engine.workflow.lifecycle.initialization.WorkflowInitializationActor.{ + StartInitializationCommand, + WorkflowInitializationAbortedResponse, + WorkflowInitializationFailedResponse, + WorkflowInitializationSucceededResponse +} import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor.MaterializeWorkflowDescriptorFailureResponse import cromwell.engine.workflow.workflowstore.{StartableState, Submitted, WorkflowHeartbeatConfig, WorkflowToStart} import cromwell.engine.{EngineFilesystems, EngineWorkflowDescriptor} @@ -31,13 +45,18 @@ import wom.values.WomString import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} -class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorBuilderForSpecs with BeforeAndAfter with Eventually with StrictLogging { +class WorkflowActorSpec + extends CromwellTestKitWordSpec + with WorkflowDescriptorBuilderForSpecs + with BeforeAndAfter + with Eventually + with StrictLogging { override protected lazy val actorSystemConfig: Config = ConfigFactory.parseString("""akka.loggers = ["akka.testkit.TestEventListener"]""") // https://doc.akka.io/docs/akka/current/testing.html#expecting-log-messages - override implicit lazy val actorSystem: ActorSystem = system + implicit override lazy val actorSystem: ActorSystem = system val mockServiceRegistryActor: TestActorRef[Actor] = TestActorRef( @@ -48,7 +67,7 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB case _ => // No action } }, - "mockServiceRegistryActor", + "mockServiceRegistryActor" ) val mockDir: Path = DefaultPathBuilder.get("/where/to/copy/wf/logs") @@ -57,7 +76,8 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB var currentWorkflowId: WorkflowId = _ val currentLifecycleActor: TestProbe = TestProbe("currentLifecycleActor") - val workflowSources: WorkflowSourceFilesCollection = ThreeStep.asWorkflowSources(workflowOptions = 
mockWorkflowOptions) + val workflowSources: WorkflowSourceFilesCollection = + ThreeStep.asWorkflowSources(workflowOptions = mockWorkflowOptions) lazy val descriptor: EngineWorkflowDescriptor = createMaterializedEngineWorkflowDescriptor(WorkflowId.randomId(), workflowSources = workflowSources) val supervisorProbe: TestProbe = TestProbe("supervisorProbe") @@ -76,8 +96,8 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB currentWorkflowId = WorkflowId.randomId() copyWorkflowLogsProbe = TestProbe(s"copyWorkflowLogsProbe-$currentWorkflowId") // Clear the supervisor probe of anything remaining from previous runs: - supervisorProbe.receiveWhile(max = 1.second, idle = 1.second) { - case _ => println("Ignoring excess message to WMA: ") + supervisorProbe.receiveWhile(max = 1.second, idle = 1.second) { case _ => + println("Ignoring excess message to WMA: ") } } @@ -87,7 +107,8 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB extraPathBuilderFactory: Option[PathBuilderFactory] = None, workflowCallbackActor: Option[ActorRef] = None, initializationMaxRetries: Int = 3, - initializationInterval: FiniteDuration = 10.millis) = { + initializationInterval: FiniteDuration = 10.millis + ) = { val actor = TestFSMRef( factory = new WorkflowActorWithTestAddons( finalizationProbe = finalizationProbe, @@ -119,10 +140,17 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB workflowExecutionActorProbe = executionProbe ), supervisor = supervisorProbe.ref, - name = s"workflowActor-$currentWorkflowId", + name = s"workflowActor-$currentWorkflowId" + ) + actor.setState( + stateName = state, + stateData = WorkflowActorData(Option(currentLifecycleActor.ref), + Option(descriptor), + AllBackendInitializationData.empty, + StateCheckpoint(InitializingWorkflowState), + Submitted + ) ) - actor.setState(stateName = state, stateData = WorkflowActorData(Option(currentLifecycleActor.ref), Option(descriptor), - AllBackendInitializationData.empty, StateCheckpoint(InitializingWorkflowState), Submitted)) actor } @@ -152,7 +180,9 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB finalizationProbe.expectMsg(StartFinalizationCommand) actor.stateName should be(FinalizingWorkflowState) actor ! WorkflowFinalizationSucceededResponse - supervisorProbe.expectMsgPF(TimeoutDuration) { case x: WorkflowFailedResponse => x.workflowId should be(currentWorkflowId) } + supervisorProbe.expectMsgPF(TimeoutDuration) { case x: WorkflowFailedResponse => + x.workflowId should be(currentWorkflowId) + } workflowManagerActorExpectsSingleWorkCompleteNotification(WorkflowFailed) deathwatch.expectTerminated(actor) } @@ -179,9 +209,9 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB val actor = createWorkflowActor(InitializingWorkflowState) deathwatch watch actor actor ! AbortWorkflowCommand - eventually { actor.stateName should be(WorkflowAbortingState) } - currentLifecycleActor.expectMsgPF(TimeoutDuration) { - case EngineLifecycleActorAbortCommand => actor ! WorkflowInitializationAbortedResponse + eventually(actor.stateName should be(WorkflowAbortingState)) + currentLifecycleActor.expectMsgPF(TimeoutDuration) { case EngineLifecycleActorAbortCommand => + actor ! 
WorkflowInitializationAbortedResponse } finalizationProbe.expectMsg(StartFinalizationCommand) actor.stateName should be(FinalizingWorkflowState) @@ -201,11 +231,11 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB initializationProbe.expectMsg(StartInitializationCommand) actor ! AbortWorkflowCommand - eventually { actor.stateName should be(WorkflowAbortingState) } + eventually(actor.stateName should be(WorkflowAbortingState)) // Because we failed a few times, the actor test's initial "currentLifecycleActor will have been replaced by this // new initializationProbe: - initializationProbe.expectMsgPF(TimeoutDuration) { - case EngineLifecycleActorAbortCommand => actor ! WorkflowInitializationAbortedResponse + initializationProbe.expectMsgPF(TimeoutDuration) { case EngineLifecycleActorAbortCommand => + actor ! WorkflowInitializationAbortedResponse } finalizationProbe.expectMsg(StartFinalizationCommand) @@ -222,12 +252,12 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB // Set the stage with a few unfortunate retries: actor ! WorkflowInitializationFailedResponse(Seq(new Exception("Initialization Failed (1)"))) - eventually { actor.stateData.currentLifecycleStateActor should be(None) } + eventually(actor.stateData.currentLifecycleStateActor should be(None)) actor ! AbortWorkflowCommand // Because there are no active lifecycle actors, this actor should jump to Finalizing without // needing any further input: - eventually { actor.stateName should be(FinalizingWorkflowState) } + eventually(actor.stateName should be(FinalizingWorkflowState)) // Expect the mailboxes for the initialization actor to be empty (and check "currentLifecycleActor" for good measure) currentLifecycleActor.expectNoMessage(10.millis) @@ -247,7 +277,9 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB finalizationProbe.expectMsg(StartFinalizationCommand) actor.stateName should be(FinalizingWorkflowState) actor ! WorkflowFinalizationSucceededResponse - supervisorProbe.expectMsgPF(TimeoutDuration) { case x: WorkflowFailedResponse => x.workflowId should be(currentWorkflowId) } + supervisorProbe.expectMsgPF(TimeoutDuration) { case x: WorkflowFailedResponse => + x.workflowId should be(currentWorkflowId) + } workflowManagerActorExpectsSingleWorkCompleteNotification(WorkflowFailed) deathwatch.expectTerminated(actor) } @@ -256,10 +288,9 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB val actor = createWorkflowActor(ExecutingWorkflowState) deathwatch watch actor actor ! AbortWorkflowCommand - eventually { actor.stateName should be(WorkflowAbortingState) } - currentLifecycleActor.expectMsgPF(CromwellTestKitSpec.TimeoutDuration) { - case EngineLifecycleActorAbortCommand => - actor ! WorkflowExecutionAbortedResponse(Map.empty) + eventually(actor.stateName should be(WorkflowAbortingState)) + currentLifecycleActor.expectMsgPF(CromwellTestKitSpec.TimeoutDuration) { case EngineLifecycleActorAbortCommand => + actor ! WorkflowExecutionAbortedResponse(Map.empty) } finalizationProbe.expectMsg(StartFinalizationCommand) actor.stateName should be(FinalizingWorkflowState) @@ -300,7 +331,9 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB deathwatch watch actor copyWorkflowLogsProbe.expectNoMessage(AwaitAlmostNothing) - actor ! MaterializeWorkflowDescriptorFailureResponse(new Exception("Intentionally failing workflow materialization to test log copying")) + actor ! 
MaterializeWorkflowDescriptorFailureResponse( + new Exception("Intentionally failing workflow materialization to test log copying") + ) copyWorkflowLogsProbe.expectMsg(CopyWorkflowLogsActor.Copy(currentWorkflowId, mockDir)) supervisorProbe.expectMsgPF(TimeoutDuration) { case _: WorkflowFailedResponse => /* success! */ } workflowManagerActorExpectsSingleWorkCompleteNotification(WorkflowFailed) @@ -313,10 +346,9 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB workflowActor.children.head ! Kill - eventually { workflowActor.stateName should be(WorkflowAbortingState) } - currentLifecycleActor.expectMsgPF(TimeoutDuration) { - case EngineLifecycleActorAbortCommand => - workflowActor ! WorkflowExecutionAbortedResponse(Map.empty) + eventually(workflowActor.stateName should be(WorkflowAbortingState)) + currentLifecycleActor.expectMsgPF(TimeoutDuration) { case EngineLifecycleActorAbortCommand => + workflowActor ! WorkflowExecutionAbortedResponse(Map.empty) } finalizationProbe.expectMsg(StartFinalizationCommand) workflowActor.stateName should be(FinalizingWorkflowState) @@ -327,22 +359,31 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB } "log an error when a path builder factory initialization fails" in { - EventFilter.error(start = "Failed to copy workflow log", pattern = ".*Failing as requested.*", occurrences = 1).intercept { - val _ = createWorkflowActor(WorkflowSucceededState, Option(new FailingPathBuilderFactory())) - } + EventFilter + .error(start = "Failed to copy workflow log", pattern = ".*Failing as requested.*", occurrences = 1) + .intercept { + val _ = createWorkflowActor(WorkflowSucceededState, Option(new FailingPathBuilderFactory())) + } } "log an error when a path builder factory initialization throws" in { - EventFilter.error(start = "Failed to copy workflow log", pattern = ".*Throwing as requested.*", occurrences = 1).intercept { - val _ = createWorkflowActor(WorkflowSucceededState, Option(new ThrowingPathBuilderFactory())) - } + EventFilter + .error(start = "Failed to copy workflow log", pattern = ".*Throwing as requested.*", occurrences = 1) + .intercept { + val _ = createWorkflowActor(WorkflowSucceededState, Option(new ThrowingPathBuilderFactory())) + } } "send a workflow callback message" in { val actor = createWorkflowActor(ExecutingWorkflowState, workflowCallbackActor = Option(workflowCallbackProbe.ref)) deathwatch watch actor val mockOutputs = WomMocks.mockOutputExpectations(Map("foo" -> WomString("bar"))) - val msg = WorkflowCallbackActor.PerformCallbackCommand(currentWorkflowId, Some(mockUri), WorkflowSucceeded, mockOutputs, List.empty) + val msg = WorkflowCallbackActor.PerformCallbackCommand(currentWorkflowId, + Some(mockUri), + WorkflowSucceeded, + mockOutputs, + List.empty + ) workflowCallbackProbe.expectNoMessage(AwaitAlmostNothing) actor ! 
WorkflowExecutionSucceededResponse(Map.empty, Set(currentWorkflowId), mockOutputs, Set.empty) @@ -352,10 +393,16 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB } "send a workflow callback message for a failing workflow" in { - val actor = createWorkflowActor(FinalizingWorkflowState, workflowCallbackActor = Option(workflowCallbackProbe.ref)) + val actor = + createWorkflowActor(FinalizingWorkflowState, workflowCallbackActor = Option(workflowCallbackProbe.ref)) deathwatch watch actor val errorText = "oh nooo :(" - val msg = WorkflowCallbackActor.PerformCallbackCommand(currentWorkflowId, Some(mockUri), WorkflowFailed, CallOutputs.empty, List(errorText)) + val msg = WorkflowCallbackActor.PerformCallbackCommand(currentWorkflowId, + Some(mockUri), + WorkflowFailed, + CallOutputs.empty, + List(errorText) + ) workflowCallbackProbe.expectNoMessage(AwaitAlmostNothing) actor ! WorkflowFinalizationFailedResponse(Seq(new RuntimeException(errorText))) @@ -366,15 +413,17 @@ class WorkflowActorSpec extends CromwellTestKitWordSpec with WorkflowDescriptorB } class FailingPathBuilderFactory() extends PathBuilderFactory { - override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = { + override def withOptions( + options: WorkflowOptions + )(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = Future(throw new Exception("Failing as requested")) - } } class ThrowingPathBuilderFactory() extends PathBuilderFactory { - override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = { + override def withOptions( + options: WorkflowOptions + )(implicit as: ActorSystem, ec: ExecutionContext): Future[PathBuilder] = throw new Exception("Throwing as requested") - } } class WorkflowActorWithTestAddons(val finalizationProbe: TestProbe, @@ -402,36 +451,42 @@ class WorkflowActorWithTestAddons(val finalizationProbe: TestProbe, initializationMaxRetries: Int, initializationInterval: FiniteDuration, workflowInitializationActorProbe: TestProbe, - workflowExecutionActorProbe: TestProbe) extends WorkflowActor( - workflowToStart = WorkflowToStart(id = workflowId, - submissionTime = OffsetDateTime.now, - state = startState, - sources = workflowSources, - hogGroup = HogGroup("foo")), - conf = conf, - callCachingEnabled = callCachingEnabled, - invalidateBadCacheResults = invalidateBadCacheResults, - ioActor = ioActor, - serviceRegistryActor = serviceRegistryActor, - workflowLogCopyRouter = workflowLogCopyRouter, - workflowCallbackActor, - jobStoreActor = jobStoreActor, - subWorkflowStoreActor = subWorkflowStoreActor, - callCacheReadActor = callCacheReadActor, - callCacheWriteActor = callCacheWriteActor, - dockerHashActor = dockerHashActor, - jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, - jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, - backendSingletonCollection = BackendSingletonCollection(Map.empty), - workflowStoreActor = workflowStoreActor, - serverMode = true, - workflowHeartbeatConfig = workflowHeartbeatConfig, - totalJobsByRootWf = totalJobsByRootWf, - fileHashCacheActorProps = None, - blacklistCache = None) { - - override def createInitializationActor(workflowDescriptor: EngineWorkflowDescriptor, name: String): ActorRef = workflowInitializationActorProbe.ref - override def createWorkflowExecutionActor(workflowDescriptor: EngineWorkflowDescriptor, data: WorkflowActorData): ActorRef = 
workflowExecutionActorProbe.ref + workflowExecutionActorProbe: TestProbe +) extends WorkflowActor( + workflowToStart = WorkflowToStart(id = workflowId, + submissionTime = OffsetDateTime.now, + state = startState, + sources = workflowSources, + hogGroup = HogGroup("foo") + ), + conf = conf, + callCachingEnabled = callCachingEnabled, + invalidateBadCacheResults = invalidateBadCacheResults, + ioActor = ioActor, + serviceRegistryActor = serviceRegistryActor, + workflowLogCopyRouter = workflowLogCopyRouter, + workflowCallbackActor, + jobStoreActor = jobStoreActor, + subWorkflowStoreActor = subWorkflowStoreActor, + callCacheReadActor = callCacheReadActor, + callCacheWriteActor = callCacheWriteActor, + dockerHashActor = dockerHashActor, + jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, + jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, + backendSingletonCollection = BackendSingletonCollection(Map.empty), + workflowStoreActor = workflowStoreActor, + serverMode = true, + workflowHeartbeatConfig = workflowHeartbeatConfig, + totalJobsByRootWf = totalJobsByRootWf, + fileHashCacheActorProps = None, + blacklistCache = None + ) { + + override def createInitializationActor(workflowDescriptor: EngineWorkflowDescriptor, name: String): ActorRef = + workflowInitializationActorProbe.ref + override def createWorkflowExecutionActor(workflowDescriptor: EngineWorkflowDescriptor, + data: WorkflowActorData + ): ActorRef = workflowExecutionActorProbe.ref override val initializationRetryInterval: FiniteDuration = initializationInterval override val maxInitializationAttempts: Int = initializationMaxRetries @@ -443,6 +498,6 @@ class WorkflowActorWithTestAddons(val finalizationProbe: TestProbe, override def makeFinalizationActor(workflowDescriptor: EngineWorkflowDescriptor, jobExecutionMap: JobExecutionMap, - workflowOutputs: CallOutputs, - ): ActorRef = finalizationProbe.ref + workflowOutputs: CallOutputs + ): ActorRef = finalizationProbe.ref } diff --git a/server/src/test/scala/cromwell/engine/workflow/WorkflowDescriptorBuilderForSpecs.scala b/server/src/test/scala/cromwell/engine/workflow/WorkflowDescriptorBuilderForSpecs.scala index ccec86872c8..86f7d1b0b48 100644 --- a/server/src/test/scala/cromwell/engine/workflow/WorkflowDescriptorBuilderForSpecs.scala +++ b/server/src/test/scala/cromwell/engine/workflow/WorkflowDescriptorBuilderForSpecs.scala @@ -6,7 +6,12 @@ import cromwell.CromwellTestKitSpec import cromwell.core.{HogGroup, SimpleIoActor, WorkflowId, WorkflowSourceFilesCollection} import cromwell.engine.EngineWorkflowDescriptor import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor -import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor.{MaterializeWorkflowDescriptorCommand, MaterializeWorkflowDescriptorFailureResponse, MaterializeWorkflowDescriptorSuccessResponse, WorkflowDescriptorMaterializationResult} +import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor.{ + MaterializeWorkflowDescriptorCommand, + MaterializeWorkflowDescriptorFailureResponse, + MaterializeWorkflowDescriptorSuccessResponse, + WorkflowDescriptorMaterializationResult +} import scala.concurrent.Await @@ -16,32 +21,40 @@ trait WorkflowDescriptorBuilderForSpecs { implicit val actorSystem: ActorSystem lazy val ioActor = actorSystem.actorOf(SimpleIoActor.props) - def createMaterializedEngineWorkflowDescriptor(id: WorkflowId, workflowSources: WorkflowSourceFilesCollection): 
EngineWorkflowDescriptor = { + def createMaterializedEngineWorkflowDescriptor(id: WorkflowId, + workflowSources: WorkflowSourceFilesCollection + ): EngineWorkflowDescriptor = { import akka.pattern.ask implicit val timeout = akka.util.Timeout(awaitTimeout) implicit val ec = actorSystem.dispatcher val callCachingEnabled = true val invalidateBadCacheResults = true val serviceRegistryIgnorer = actorSystem.actorOf(Props.empty) - val actor = actorSystem.actorOf(MaterializeWorkflowDescriptorActor.props( - serviceRegistryIgnorer, - id, - importLocalFilesystem = false, - ioActorProxy = ioActor, - hogGroup = HogGroup("testcase")), "MaterializeWorkflowDescriptorActor-" + id.id) - val workflowDescriptorFuture = actor.ask( - MaterializeWorkflowDescriptorCommand( - workflowSources, - ConfigFactory.load, - callCachingEnabled, - invalidateBadCacheResults) - ).mapTo[WorkflowDescriptorMaterializationResult] - - Await.result(workflowDescriptorFuture map { - case MaterializeWorkflowDescriptorSuccessResponse(workflowDescriptor) => workflowDescriptor - case MaterializeWorkflowDescriptorFailureResponse(reason) => throw reason - }, awaitTimeout) + val actor = actorSystem.actorOf( + MaterializeWorkflowDescriptorActor.props(serviceRegistryIgnorer, + id, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = HogGroup("testcase") + ), + "MaterializeWorkflowDescriptorActor-" + id.id + ) + val workflowDescriptorFuture = actor + .ask( + MaterializeWorkflowDescriptorCommand(workflowSources, + ConfigFactory.load, + callCachingEnabled, + invalidateBadCacheResults + ) + ) + .mapTo[WorkflowDescriptorMaterializationResult] + + Await.result( + workflowDescriptorFuture map { + case MaterializeWorkflowDescriptorSuccessResponse(workflowDescriptor) => workflowDescriptor + case MaterializeWorkflowDescriptorFailureResponse(reason) => throw reason + }, + awaitTimeout + ) } } - - diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala index fb44db3003c..216afa6ee4f 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala @@ -9,7 +9,11 @@ import cromwell.core._ import cromwell.core.labels.Labels import cromwell.engine.backend.{BackendConfigurationEntry, CromwellBackends} import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor -import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor.{MaterializeWorkflowDescriptorCommand, MaterializeWorkflowDescriptorFailureResponse, MaterializeWorkflowDescriptorSuccessResponse} +import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDescriptorActor.{ + MaterializeWorkflowDescriptorCommand, + MaterializeWorkflowDescriptorFailureResponse, + MaterializeWorkflowDescriptorSuccessResponse +} import cromwell.util.SampleWdl.HelloWorld import cromwell.{CromwellTestKitSpec, CromwellTestKitWordSpec} import org.scalatest.BeforeAndAfter @@ -23,29 +27,34 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit private val ioActor = system.actorOf(SimpleIoActor.props) private val workflowId = WorkflowId.randomId() - private val minimumConf = ConfigFactory.parseString( - """ - |backend { - | default = "Local" - |} - |""".stripMargin - 
).withFallback(CromwellTestKitSpec.DefaultConfig) - private val differentDefaultBackendConf = ConfigFactory.parseString( - """ - |backend { - | default = "DefaultBackend" - | // These providers are empty here because the MaterializeWorkflowDescriptorActor won't introspect them: - | providers { - | DefaultBackend {} - | SpecifiedBackend {} - | } - |} - |""".stripMargin - ).withFallback(CromwellTestKitSpec.DefaultConfig) + private val minimumConf = ConfigFactory + .parseString( + """ + |backend { + | default = "Local" + |} + |""".stripMargin + ) + .withFallback(CromwellTestKitSpec.DefaultConfig) + private val differentDefaultBackendConf = ConfigFactory + .parseString( + """ + |backend { + | default = "DefaultBackend" + | // These providers are empty here because the MaterializeWorkflowDescriptorActor won't introspect them: + | providers { + | DefaultBackend {} + | SpecifiedBackend {} + | } + |} + |""".stripMargin + ) + .withFallback(CromwellTestKitSpec.DefaultConfig) val unstructuredFile = "fubar badness!" private val validOptions = WorkflowOptions.fromJsonString(""" { "write_to_cache": true } """).get - val validCustomLabelsFile="""{ "label1": "value1", "label2": "value2", "Label1": "valu£1" }""" - val badCustomLabelsFile="""{ "key with characters more than 255-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpas": "value with characters more than 255-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa" }""" + val validCustomLabelsFile = """{ "label1": "value1", "label2": "value2", "Label1": "valu£1" }""" + val badCustomLabelsFile = + """{ "key with characters more than 255-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpas": "value with characters more than 255-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa" }""" private val validInputsJson = HelloWorld.rawInputs.toJson.toString() private val workflowSourceNoDocker = HelloWorld.workflowSource(""" runtime { } """) @@ -71,8 +80,7 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit private val invalidMemoryRetryOptions5 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": true } """).get - before { - } + before {} after { system.stop(NoBehaviorActor) @@ -83,7 +91,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit "MaterializeWorkflowDescriptorActor" should { "accept valid WDL, inputs and options files" in { - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = 
Option(workflowSourceNoDocker), workflowUrl = None, @@ -94,9 +109,13 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = validOptions, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { @@ -126,9 +145,18 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit } "accept valid workflowUrl" in { - val workflowUrl = Option("https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl") + val workflowUrl = Option( + "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl" + ) val inputs = Map("my_workflow.i" -> 5) - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = None, workflowUrl = workflowUrl, @@ -139,9 +167,13 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = WorkflowOptions.empty, labelsJson = "{}", warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! 
MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { @@ -181,11 +213,20 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit val placeholdingActorFactoryClass = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory" val fauxBackendEntries = List( BackendConfigurationEntry("SpecifiedBackend", placeholdingActorFactoryClass, ConfigFactory.parseString("")), - BackendConfigurationEntry("DefaultBackend", placeholdingActorFactoryClass, ConfigFactory.parseString(""))) + BackendConfigurationEntry("DefaultBackend", placeholdingActorFactoryClass, ConfigFactory.parseString("")) + ) val cromwellBackends = CromwellBackends(fauxBackendEntries) // Run the test: - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, cromwellBackends, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + cromwellBackends, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(wdl), workflowUrl = None, @@ -196,9 +237,13 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = WorkflowOptions.empty, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, differentDefaultBackendConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, + differentDefaultBackendConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { @@ -210,7 +255,8 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit wfDesc.backendAssignments(call) shouldBe "DefaultBackend" case call => fail(s"Unexpected task: ${call.callable.name}") } - case MaterializeWorkflowDescriptorFailureResponse(reason) => fail(s"Materialization unexpectedly failed ($reason)") + case MaterializeWorkflowDescriptorFailureResponse(reason) => + fail(s"Materialization unexpectedly failed ($reason)") case unknown => fail(s"Unexpected materialization response: $unknown") } @@ -231,7 +277,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit |} """.stripMargin - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(wdl), workflowUrl = None, @@ -242,16 +295,25 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = WorkflowOptions.empty, labelsJson = "{}", warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, differentDefaultBackendConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! 
MaterializeWorkflowDescriptorCommand(sources, + differentDefaultBackendConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => - if (!reason.getMessage.contains("Backend for call foo.a ('NoSuchBackend') not registered in configuration file")) + if ( + !reason.getMessage.contains( + "Backend for call foo.a ('NoSuchBackend') not registered in configuration file" + ) + ) fail(s"Unexpected failure message from MaterializeWorkflowDescriptorActor: ${reason.getMessage}") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } @@ -261,7 +323,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit } "reject an invalid WDL source" in { - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(unstructuredFile), workflowUrl = None, @@ -272,15 +341,22 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = validOptions, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => - reason.getMessage should startWith("Workflow input processing failed:\nERROR: Finished parsing without consuming all tokens.") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + reason.getMessage should startWith( + "Workflow input processing failed:\nERROR: Finished parsing without consuming all tokens." 
+ ) + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } @@ -292,7 +368,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit "reject workflow with invalid URL" in { val workflowUrl = Option("https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow") val inputs = Map("my_workflow.i" -> 5) - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = None, workflowUrl = workflowUrl, @@ -303,18 +386,23 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = WorkflowOptions.empty, labelsJson = "{}", warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => reason.getMessage should startWith( """Workflow input processing failed: - |Failed to resolve 'https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow (reason 1 of 1): 404: Not Found""" - .stripMargin) - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + |Failed to resolve 'https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow (reason 1 of 1): 404: Not Found""".stripMargin + ) + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } @@ -330,7 +418,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit | |# no workflow foo { ... } block!! """.stripMargin - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(noWorkflowWdl), workflowUrl = None, @@ -341,15 +436,22 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = validOptions, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! 
MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => - reason.getMessage should startWith("Workflow input processing failed:\nNamespace does not have a local workflow to run") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + reason.getMessage should startWith( + "Workflow input processing failed:\nNamespace does not have a local workflow to run" + ) + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } @@ -359,7 +461,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit } "reject an invalid workflow inputs file" in { - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(workflowSourceNoDocker), workflowUrl = None, @@ -370,15 +479,20 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = validOptions, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => reason.getMessage should startWith("Workflow input processing failed:\n") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } @@ -388,7 +502,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit } "reject requests if any required inputs are missing" in { - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val noInputsJson = "{}" val badOptionsSources = WorkflowSourceFilesWithoutImports( workflowSource = Option(workflowSourceNoDocker), @@ -400,15 +521,22 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = validOptions, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! 
MaterializeWorkflowDescriptorCommand(badOptionsSources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(badOptionsSources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => - reason.getMessage should startWith("Workflow input processing failed:\nRequired workflow input 'wf_hello.hello.addressee' not specified") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + reason.getMessage should startWith( + "Workflow input processing failed:\nRequired workflow input 'wf_hello.hello.addressee' not specified" + ) + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } @@ -426,7 +554,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit | call bar |} """.stripMargin - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(wdl), workflowUrl = None, @@ -437,15 +572,22 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = validOptions, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! 
MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => - reason.getMessage should startWith("Workflow input processing failed:\nERROR: Value 'j' is declared as a 'Int' but the expression evaluates to 'String'") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + reason.getMessage should startWith( + "Workflow input processing failed:\nERROR: Value 'j' is declared as a 'Int' but the expression evaluates to 'String'" + ) + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } } @@ -472,7 +614,14 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit "foo.bad_two" -> "\"gs://another/bad/gcs/path.txt", "foo.bad_three" -> "" ).toJson.toString - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(wdl), workflowUrl = None, @@ -483,18 +632,27 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = validOptions, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! 
MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => reason.getMessage should startWith("Workflow input processing failed:\n") - reason.getMessage should include("Invalid value for File input 'foo.bad_one': \"gs://this/is/a/bad/gcs/path.txt starts with a '\"'") - reason.getMessage should include("Invalid value for File input 'foo.bad_two': \"gs://another/bad/gcs/path.txt starts with a '\"'") + reason.getMessage should include( + "Invalid value for File input 'foo.bad_one': \"gs://this/is/a/bad/gcs/path.txt starts with a '\"'" + ) + reason.getMessage should include( + "Invalid value for File input 'foo.bad_two': \"gs://another/bad/gcs/path.txt starts with a '\"'" + ) reason.getMessage should include("Invalid value for File input 'foo.bad_three': empty value") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } } @@ -514,23 +672,36 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit "reject invalid memory_retry_multiplier" in { List(invalidMemoryRetryOptions1, invalidMemoryRetryOptions2, invalidMemoryRetryOptions3) map { options => MaterializeWorkflowDescriptorActor.validateMemoryRetryMultiplier(options) match { - case Invalid(errorsList) => errorsList.head should be("Workflow option 'memory_retry_multiplier' is invalid. " + - "It should be in the range 1.0 ≤ n ≤ 99.0") + case Invalid(errorsList) => + errorsList.head should be( + "Workflow option 'memory_retry_multiplier' is invalid. " + + "It should be in the range 1.0 ≤ n ≤ 99.0" + ) case Valid(_) => fail(s"memory_retry_multiplier validation for $options succeeded but should have failed!") } } List(invalidMemoryRetryOptions4, invalidMemoryRetryOptions5) map { options => MaterializeWorkflowDescriptorActor.validateMemoryRetryMultiplier(options) match { - case Invalid(errorsList) => errorsList.head should startWith(s"Workflow option 'memory_retry_multiplier' is invalid. " + - "It should be of type Double and in the range 1.0 ≤ n ≤ 99.0. Error: NumberFormatException:") + case Invalid(errorsList) => + errorsList.head should startWith( + s"Workflow option 'memory_retry_multiplier' is invalid. " + + "It should be of type Double and in the range 1.0 ≤ n ≤ 99.0. 
Error: NumberFormatException:" + ) case Valid(_) => fail(s"memory_retry_multiplier validation for $options succeeded but should have failed!") } } } "fail materialization if memory_retry_multiplier is invalid" in { - val materializeWfActor = system.actorOf(MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, workflowId, importLocalFilesystem = false, ioActorProxy = ioActor, hogGroup = fooHogGroup)) + val materializeWfActor = system.actorOf( + MaterializeWorkflowDescriptorActor.props(NoBehaviorActor, + workflowId, + importLocalFilesystem = false, + ioActorProxy = ioActor, + hogGroup = fooHogGroup + ) + ) val sources = WorkflowSourceFilesWithoutImports( workflowSource = Option(workflowSourceNoDocker), workflowUrl = None, @@ -541,16 +712,23 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit workflowOptions = invalidMemoryRetryOptions1, labelsJson = validCustomLabelsFile, warnings = Vector.empty, - requestedWorkflowId = None) - materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, minimumConf, callCachingEnabled, - invalidateBadCacheResults) + requestedWorkflowId = None + ) + materializeWfActor ! MaterializeWorkflowDescriptorCommand(sources, + minimumConf, + callCachingEnabled, + invalidateBadCacheResults + ) within(Timeout) { expectMsgPF() { case MaterializeWorkflowDescriptorFailureResponse(reason) => reason.getMessage should startWith("Workflow input processing failed:\n") - reason.getMessage should include("Workflow option 'memory_retry_multiplier' is invalid. It should be in the range 1.0 ≤ n ≤ 99.0") - case _: MaterializeWorkflowDescriptorSuccessResponse => fail("This materialization should not have succeeded!") + reason.getMessage should include( + "Workflow option 'memory_retry_multiplier' is invalid. 
It should be in the range 1.0 ≤ n ≤ 99.0" + ) + case _: MaterializeWorkflowDescriptorSuccessResponse => + fail("This materialization should not have succeeded!") case unknown => fail(s"Unexpected materialization response: $unknown") } } @@ -559,4 +737,3 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit } } } - diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorSpec.scala index 5edba1ff413..06ad805e6f6 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorSpec.scala @@ -10,7 +10,10 @@ import cromwell.backend.AllBackendInitializationData import cromwell.core.{SimpleIoActor, WorkflowId} import cromwell.engine.backend.{BackendConfigurationEntry, BackendSingletonCollection, CromwellBackends} import cromwell.engine.workflow.WorkflowDescriptorBuilderForSpecs -import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActor.{ExecuteWorkflowCommand, WorkflowExecutionFailedResponse} +import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActor.{ + ExecuteWorkflowCommand, + WorkflowExecutionFailedResponse +} import cromwell.engine.workflow.tokens.DynamicRateLimiter.Rate import cromwell.engine.workflow.tokens.JobTokenDispenserActor import cromwell.engine.workflow.workflowstore.Submitted @@ -24,14 +27,18 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ import scala.concurrent.{Await, Promise} -class WorkflowExecutionActorSpec extends CromwellTestKitSpec with AnyFlatSpecLike with Matchers with BeforeAndAfter with WorkflowDescriptorBuilderForSpecs { +class WorkflowExecutionActorSpec + extends CromwellTestKitSpec + with AnyFlatSpecLike + with Matchers + with BeforeAndAfter + with WorkflowDescriptorBuilderForSpecs { - override implicit val actorSystem = system + implicit override val actorSystem = system implicit val DefaultDuration = 60.seconds.dilated def mockServiceRegistryActor = TestActorRef(new Actor { - override def receive = { - case _ => // No action + override def receive = { case _ => // No action } }) @@ -57,17 +64,33 @@ class WorkflowExecutionActorSpec extends CromwellTestKitSpec with AnyFlatSpecLik import MetadataWatchActor.metadataKeyAttemptChecker val metadataSuccessPromise = Promise[Unit]() val requiredMetadataMatchers: Seq[MetadataWatchActor.Matcher] = List( - MetadataWatchActor.JobKeyMetadataKeyAndValueContainStringMatcher(metadataKeyAttemptChecker(1), "executionStatus", "RetryableFailure"), - MetadataWatchActor.JobKeyMetadataKeyAndValueContainStringMatcher(metadataKeyAttemptChecker(2), "executionStatus", "RetryableFailure"), - MetadataWatchActor.JobKeyMetadataKeyAndValueContainStringMatcher(metadataKeyAttemptChecker(3), "executionStatus", "Failed") + MetadataWatchActor.JobKeyMetadataKeyAndValueContainStringMatcher(metadataKeyAttemptChecker(1), + "executionStatus", + "RetryableFailure" + ), + MetadataWatchActor.JobKeyMetadataKeyAndValueContainStringMatcher(metadataKeyAttemptChecker(2), + "executionStatus", + "RetryableFailure" + ), + MetadataWatchActor.JobKeyMetadataKeyAndValueContainStringMatcher(metadataKeyAttemptChecker(3), + "executionStatus", + "Failed" + ) + ) + val metadataWatcherActor = + TestActorRef[MetadataWatchActor](Props(MetadataWatchActor(metadataSuccessPromise, 
requiredMetadataMatchers: _*))) + val serviceRegistryActor = system.actorOf( + ServiceRegistryActor.props(ConfigFactory.load(), + overrides = Map(MetadataService.MetadataServiceName -> metadataWatcherActor.props) + ) ) - val metadataWatcherActor = TestActorRef[MetadataWatchActor](Props(MetadataWatchActor(metadataSuccessPromise, requiredMetadataMatchers: _*))) - val serviceRegistryActor = system.actorOf(ServiceRegistryActor.props(ConfigFactory.load(), overrides = Map(MetadataService.MetadataServiceName -> metadataWatcherActor.props))) val jobStoreActor = system.actorOf(AlwaysHappyJobStoreActor.props) val ioActor = system.actorOf(SimpleIoActor.props) val subWorkflowStoreActor = system.actorOf(AlwaysHappySubWorkflowStoreActor.props) - val jobRestartCheckTokenDispenserActor = system.actorOf(JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running")) - val jobExecutionTokenDispenserActor = system.actorOf(JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running")) + val jobRestartCheckTokenDispenserActor = + system.actorOf(JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running")) + val jobExecutionTokenDispenserActor = + system.actorOf(JobTokenDispenserActor.props(serviceRegistry, Rate(100, 1.second), None, "execution", "Running")) val MockBackendConfigEntry = BackendConfigurationEntry( name = "Mock", lifecycleActorFactoryClass = "cromwell.engine.backend.mock.RetryableBackendLifecycleActorFactory", @@ -76,18 +99,38 @@ class WorkflowExecutionActorSpec extends CromwellTestKitSpec with AnyFlatSpecLik CromwellBackends.initBackends(List(MockBackendConfigEntry)) val workflowId = WorkflowId.randomId() - val engineWorkflowDescriptor = createMaterializedEngineWorkflowDescriptor(workflowId, SampleWdl.HelloWorld.asWorkflowSources(runtime = runtimeSection)) + val engineWorkflowDescriptor = + createMaterializedEngineWorkflowDescriptor(workflowId, + SampleWdl.HelloWorld.asWorkflowSources(runtime = runtimeSection) + ) val callCacheReadActor = TestProbe() val callCacheWriteActor = TestProbe() val dockerHashActor = TestProbe() val weaSupervisor = TestProbe() val workflowExecutionActor = TestActorRef( - props = WorkflowExecutionActor.props(engineWorkflowDescriptor, ioActor, serviceRegistryActor, jobStoreActor, subWorkflowStoreActor, - callCacheReadActor.ref, callCacheWriteActor.ref, dockerHashActor.ref, jobRestartCheckTokenDispenserActor, jobExecutionTokenDispenserActor, MockBackendSingletonCollection, - AllBackendInitializationData.empty, startState = Submitted, rootConfig, new AtomicInteger(), fileHashCacheActor = None, blacklistCache = None), + props = WorkflowExecutionActor.props( + engineWorkflowDescriptor, + ioActor, + serviceRegistryActor, + jobStoreActor, + subWorkflowStoreActor, + callCacheReadActor.ref, + callCacheWriteActor.ref, + dockerHashActor.ref, + jobRestartCheckTokenDispenserActor, + jobExecutionTokenDispenserActor, + MockBackendSingletonCollection, + AllBackendInitializationData.empty, + startState = Submitted, + rootConfig, + new AtomicInteger(), + fileHashCacheActor = None, + blacklistCache = None + ), name = "WorkflowExecutionActor", - supervisor = weaSupervisor.ref) + supervisor = weaSupervisor.ref + ) EventFilter.info(pattern = "Starting wf_hello.hello", occurrences = 3).intercept { workflowExecutionActor ! 
ExecuteWorkflowCommand @@ -96,10 +139,12 @@ class WorkflowExecutionActorSpec extends CromwellTestKitSpec with AnyFlatSpecLik weaSupervisor.expectMsgClass(classOf[WorkflowExecutionFailedResponse]) // Super-helpful debug in case the metadata watcher is still unhappy: - if(metadataWatcherActor.underlyingActor.unsatisfiedMatchers.nonEmpty) { + if (metadataWatcherActor.underlyingActor.unsatisfiedMatchers.nonEmpty) { requiredMetadataMatchers foreach { matcher => matcher.nearMissInformation.foreach { info => - System.out.println("A matcher had a near miss (it might still get a matching value later!): " + info.replace("\n", "...")) + System.out.println( + "A matcher had a near miss (it might still get a matching value later!): " + info.replace("\n", "...") + ) } } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaBackendIsCopyingCachedOutputsSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaBackendIsCopyingCachedOutputsSpec.scala index 8171eefeb34..e05d5958c1a 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaBackendIsCopyingCachedOutputsSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaBackendIsCopyingCachedOutputsSpec.scala @@ -4,7 +4,12 @@ import cats.data.NonEmptyList import cromwell.core.callcaching._ import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor.NextHit -import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{CacheHit, CallCacheHashes, EJHAResponse, HashError} +import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{ + CacheHit, + CallCacheHashes, + EJHAResponse, + HashError +} import cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec._ import cromwell.engine.workflow.lifecycle.execution.ejea.HasJobSuccessResponse.SuccessfulCallCacheHashes import cromwell.services.CallCaching.CallCachingEntryId @@ -15,9 +20,16 @@ import scala.concurrent.duration._ import scala.util.control.NoStackTrace import scala.util.{Failure, Success, Try} -class EjeaBackendIsCopyingCachedOutputsSpec extends EngineJobExecutionActorSpec with HasJobSuccessResponse with HasCopyFailureResponses with HasJobFailureResponses with CanExpectJobStoreWrites with CanExpectCacheWrites with CanExpectCacheInvalidation { +class EjeaBackendIsCopyingCachedOutputsSpec + extends EngineJobExecutionActorSpec + with HasJobSuccessResponse + with HasCopyFailureResponses + with HasJobFailureResponses + with CanExpectJobStoreWrites + with CanExpectCacheWrites + with CanExpectCacheInvalidation { - override implicit val stateUnderTest = BackendIsCopyingCachedOutputs + implicit override val stateUnderTest = BackendIsCopyingCachedOutputs "An EJEA in BackendIsCopyingCachedOutputs state" should { @@ -32,179 +44,231 @@ class EjeaBackendIsCopyingCachedOutputsSpec extends EngineJobExecutionActorSpec initialHashData: Option[Try[CallCacheHashes]], ejhaResponse: Option[EJHAResponse], expectedFinalHashData: Option[Try[CallCacheHashes]], - validForMode: CallCachingMode => Boolean) { - def hashingCompletedSuccessfully = ejhaResponse == hashResultsEjhaResponse || initialHashData == hashResultsDataValue + validForMode: CallCachingMode => Boolean + ) { + def hashingCompletedSuccessfully = + ejhaResponse == hashResultsEjhaResponse || initialHashData == hashResultsDataValue } val 
initialHashDataAndEjhaResponseCombinations = List( // The mode will NEVER have "writeToCache" in these scenarios: InitialHashDataAndEjhaResponseCombination("no hashes (ever)", None, None, None, mode => !mode.writeToCache), - InitialHashDataAndEjhaResponseCombination("unsuccessful hashes in initial data", hashErrorDataValue, None, hashErrorDataValue, mode => !mode.writeToCache), + InitialHashDataAndEjhaResponseCombination("unsuccessful hashes in initial data", + hashErrorDataValue, + None, + hashErrorDataValue, + mode => !mode.writeToCache + ), // The mode will ALWAYS have "writeToCache" in these scenarios: - InitialHashDataAndEjhaResponseCombination("unsuccessful hashes from EJHA", None, hashErrorEjhaResponse, hashErrorDataValue, mode => mode.writeToCache), - InitialHashDataAndEjhaResponseCombination("successful hashes from EJHA", None, hashResultsEjhaResponse, hashResultsDataValue, mode => mode.writeToCache), - InitialHashDataAndEjhaResponseCombination("successful hashes in initial data", hashResultsDataValue, None, hashResultsDataValue, mode => mode.writeToCache) + InitialHashDataAndEjhaResponseCombination("unsuccessful hashes from EJHA", + None, + hashErrorEjhaResponse, + hashErrorDataValue, + mode => mode.writeToCache + ), + InitialHashDataAndEjhaResponseCombination("successful hashes from EJHA", + None, + hashResultsEjhaResponse, + hashResultsDataValue, + mode => mode.writeToCache + ), + InitialHashDataAndEjhaResponseCombination("successful hashes in initial data", + hashResultsDataValue, + None, + hashResultsDataValue, + mode => mode.writeToCache + ) ) CallCachingModes.filter(_.readFromCache) foreach { mode => - initialHashDataAndEjhaResponseCombinations filter { _.validForMode(mode) } foreach { case combo @ InitialHashDataAndEjhaResponseCombination(hashComboName, initialHashData, ejhaResponse, finalHashData, _) => - - val cacheUpdateRequired = combo.hashingCompletedSuccessfully && mode.writeToCache - val nextStepName = if (cacheUpdateRequired) "Update call cache" else "Update job store" - s"$nextStepName when call caching is $mode, the EJEA has $hashComboName and then gets a success result" in { - ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, mode) - // Send the response from the EJHA (if there was one!): - ejhaResponse foreach { ejea ! _ } - - // Nothing should happen here: - helper.jobStoreProbe.expectNoMessage(awaitAlmostNothing) - - // Send the response from the copying actor - ejea ! successResponse - - if (cacheUpdateRequired) { - expectCacheWrite(successResponse, finalHashData.get.get) - } else { - expectJobStoreWrite(SucceededResponseData(successResponse, finalHashData)) - } - // A separate check of the final effective call caching mode: - if (ejhaResponse == hashErrorEjhaResponse || initialHashData == hashErrorDataValue) { - ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode.withoutWrite) - } else { - ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode) - } - } - - s"$nextStepName when it gets a success result and it then gets $hashComboName, if call caching is $mode" in { - ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, mode) - // Send the response from the copying actor - ejea ! 
successResponse + initialHashDataAndEjhaResponseCombinations filter { _.validForMode(mode) } foreach { + case combo @ InitialHashDataAndEjhaResponseCombination(hashComboName, + initialHashData, + ejhaResponse, + finalHashData, + _ + ) => + val cacheUpdateRequired = combo.hashingCompletedSuccessfully && mode.writeToCache + val nextStepName = if (cacheUpdateRequired) "Update call cache" else "Update job store" + s"$nextStepName when call caching is $mode, the EJEA has $hashComboName and then gets a success result" in { + ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, mode) + // Send the response from the EJHA (if there was one!): + ejhaResponse foreach { ejea ! _ } - ejhaResponse foreach { resp => - // Nothing should have happened yet: + // Nothing should happen here: helper.jobStoreProbe.expectNoMessage(awaitAlmostNothing) - // Ok, now send the response from the EJHA (if there was one!): - ejea ! resp - } + // Send the response from the copying actor + ejea ! successResponse - if (cacheUpdateRequired) { - expectCacheWrite(successResponse, finalHashData.get.get) - } else { - expectJobStoreWrite(SucceededResponseData(successResponse, finalHashData)) - } - // A separate check of the final effective call caching mode: - if (ejhaResponse == hashErrorEjhaResponse || initialHashData == hashErrorDataValue) { - ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode.withoutWrite) - } else { - ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode) + if (cacheUpdateRequired) { + expectCacheWrite(successResponse, finalHashData.get.get) + } else { + expectJobStoreWrite(SucceededResponseData(successResponse, finalHashData)) + } + // A separate check of the final effective call caching mode: + if (ejhaResponse == hashErrorEjhaResponse || initialHashData == hashErrorDataValue) { + ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode.withoutWrite) + } else { + ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode) + } } - } - - if (mode.readFromCache) { - s"invalidate a call for caching if backend copying failed when it was going to receive $hashComboName, if call caching is $mode" in { + s"$nextStepName when it gets a success result and it then gets $hashComboName, if call caching is $mode" in { ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, mode) - - ejea.stateData should be(ResponsePendingData( - helper.backendJobDescriptor, - helper.bjeaProps, - initialHashData, - Option(helper.ejhaProbe.ref), - cacheHit, - None, - cacheHitFailureCount = 0, - failedCopyAttempts = 0 - )) - // Send the response from the copying actor - ejea ! copyAttemptFailedResponse(cacheHitNumber) + ejea ! 
successResponse - expectInvalidateCallCacheActor(cacheId) - eventually { - ejea.stateName should be(InvalidatingCacheEntry) - } - ejea.stateData should be(ResponsePendingData( - helper.backendJobDescriptor, - helper.bjeaProps, - initialHashData, - Option(helper.ejhaProbe.ref), - cacheHit, - None, - cacheHitFailureCount = 1, - failedCopyAttempts = 1 - )) - } + ejhaResponse foreach { resp => + // Nothing should have happened yet: + helper.jobStoreProbe.expectNoMessage(awaitAlmostNothing) - s"not invalidate a call for caching if backend copying failed when invalidation is disabled, when it was going to receive $hashComboName, if call caching is $mode" in { - val invalidationDisabledOptions = CallCachingOptions(invalidateBadCacheResults = false, workflowOptionCallCachePrefixes = None) - val cacheInvalidationDisabledMode = mode match { - case CallCachingActivity(rw, _) => CallCachingActivity(rw, invalidationDisabledOptions) - case _ => fail(s"Mode $mode not appropriate for cache invalidation tests") + // Ok, now send the response from the EJHA (if there was one!): + ejea ! resp } - ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, cacheInvalidationDisabledMode) - // Send the response from the copying actor - ejea ! copyAttemptFailedResponse(cacheHitNumber) - - helper.ejhaProbe.expectMsg(NextHit) - eventually { - ejea.stateName should be(CheckingCallCache) + if (cacheUpdateRequired) { + expectCacheWrite(successResponse, finalHashData.get.get) + } else { + expectJobStoreWrite(SucceededResponseData(successResponse, finalHashData)) + } + // A separate check of the final effective call caching mode: + if (ejhaResponse == hashErrorEjhaResponse || initialHashData == hashErrorDataValue) { + ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode.withoutWrite) + } else { + ejea.underlyingActor.checkEffectiveCallCachingMode should be(mode) } - // Make sure we didn't start invalidating anything: - helper.invalidateCacheActorCreations.hasExactlyOne should be(false) - ejea.stateData should be(ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, initialHashData, Option(helper.ejhaProbe.ref), cacheHit, cacheHitFailureCount = 1, failedCopyAttempts = 1)) } - def checkInvalidateOnCopyFailure(expectMetric: Boolean) = { - val metricsExpectationString = (if (expectMetric) "and " else "but not ") + "generate a metric" - s"invalidate a call for caching $metricsExpectationString if backend copying failed (preserving any received hashes) when call caching is $mode, the EJEA has $hashComboName and then gets a success result" in { + if (mode.readFromCache) { + s"invalidate a call for caching if backend copying failed when it was going to receive $hashComboName, if call caching is $mode" in { + ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, mode) - // Send the response from the EJHA (if there was one!): - ejhaResponse foreach { - ejea ! _ - } - // Nothing should happen here: - helper.jobStoreProbe.expectNoMessage(awaitAlmostNothing) + ejea.stateData should be( + ResponsePendingData( + helper.backendJobDescriptor, + helper.bjeaProps, + initialHashData, + Option(helper.ejhaProbe.ref), + cacheHit, + None, + cacheHitFailureCount = 0, + failedCopyAttempts = 0 + ) + ) // Send the response from the copying actor - val copyFailureMessage = if (expectMetric) cacheHitBlacklistedResponse(cacheHitNumber) else copyAttemptFailedResponse(cacheHitNumber) - ejea ! copyFailureMessage + ejea ! 
copyAttemptFailedResponse(cacheHitNumber) expectInvalidateCallCacheActor(cacheId) eventually { ejea.stateName should be(InvalidatingCacheEntry) } - ejea.stateData should be(ResponsePendingData( - helper.backendJobDescriptor, - helper.bjeaProps, - finalHashData, - Option(helper.ejhaProbe.ref), - cacheHit, - None, - cacheHitFailureCount = 1, - failedCopyAttempts = if (expectMetric) 0 else 1 - // In this context `expectMetric` means "was blacklisted". - // If the cache hit was blacklisted there should have been no additional copy attempt so no failed copy attempt. - )) - - if (expectMetric) { - helper.serviceRegistryProbe.fishForSpecificMessage(2.seconds) { - case InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(prefix, path))) => - prefix should be(List.empty[String]) - path should be(NonEmptyList.of( - "job", - "callcaching", "read", "error", "bucketblacklisted", helper.taskName, helper.backendWorkflowDescriptor.hogGroup.value - )) + ejea.stateData should be( + ResponsePendingData( + helper.backendJobDescriptor, + helper.bjeaProps, + initialHashData, + Option(helper.ejhaProbe.ref), + cacheHit, + None, + cacheHitFailureCount = 1, + failedCopyAttempts = 1 + ) + ) + } + + s"not invalidate a call for caching if backend copying failed when invalidation is disabled, when it was going to receive $hashComboName, if call caching is $mode" in { + val invalidationDisabledOptions = + CallCachingOptions(invalidateBadCacheResults = false, workflowOptionCallCachePrefixes = None) + val cacheInvalidationDisabledMode = mode match { + case CallCachingActivity(rw, _) => CallCachingActivity(rw, invalidationDisabledOptions) + case _ => fail(s"Mode $mode not appropriate for cache invalidation tests") + } + ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, cacheInvalidationDisabledMode) + // Send the response from the copying actor + ejea ! copyAttemptFailedResponse(cacheHitNumber) + + helper.ejhaProbe.expectMsg(NextHit) + + eventually { + ejea.stateName should be(CheckingCallCache) + } + // Make sure we didn't start invalidating anything: + helper.invalidateCacheActorCreations.hasExactlyOne should be(false) + ejea.stateData should be( + ResponsePendingData(helper.backendJobDescriptor, + helper.bjeaProps, + initialHashData, + Option(helper.ejhaProbe.ref), + cacheHit, + cacheHitFailureCount = 1, + failedCopyAttempts = 1 + ) + ) + } + + def checkInvalidateOnCopyFailure(expectMetric: Boolean) = { + val metricsExpectationString = (if (expectMetric) "and " else "but not ") + "generate a metric" + s"invalidate a call for caching $metricsExpectationString if backend copying failed (preserving any received hashes) when call caching is $mode, the EJEA has $hashComboName and then gets a success result" in { + ejea = ejeaInBackendIsCopyingCachedOutputsState(initialHashData, mode) + // Send the response from the EJHA (if there was one!): + ejhaResponse foreach { + ejea ! _ + } + + // Nothing should happen here: + helper.jobStoreProbe.expectNoMessage(awaitAlmostNothing) + + // Send the response from the copying actor + val copyFailureMessage = + if (expectMetric) cacheHitBlacklistedResponse(cacheHitNumber) + else copyAttemptFailedResponse(cacheHitNumber) + ejea ! 
copyFailureMessage + + expectInvalidateCallCacheActor(cacheId) + eventually { + ejea.stateName should be(InvalidatingCacheEntry) + } + ejea.stateData should be( + ResponsePendingData( + helper.backendJobDescriptor, + helper.bjeaProps, + finalHashData, + Option(helper.ejhaProbe.ref), + cacheHit, + None, + cacheHitFailureCount = 1, + failedCopyAttempts = if (expectMetric) 0 else 1 + // In this context `expectMetric` means "was blacklisted". + // If the cache hit was blacklisted there should have been no additional copy attempt so no failed copy attempt. + ) + ) + + if (expectMetric) { + helper.serviceRegistryProbe.fishForSpecificMessage(2.seconds) { + case InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(prefix, path))) => + prefix should be(List.empty[String]) + path should be( + NonEmptyList.of( + "job", + "callcaching", + "read", + "error", + "bucketblacklisted", + helper.taskName, + helper.backendWorkflowDescriptor.hogGroup.value + ) + ) + } } } } - } - checkInvalidateOnCopyFailure(expectMetric = false) - checkInvalidateOnCopyFailure(expectMetric = true) - } + checkInvalidateOnCopyFailure(expectMetric = false) + checkInvalidateOnCopyFailure(expectMetric = true) + } } } } @@ -212,10 +276,13 @@ class EjeaBackendIsCopyingCachedOutputsSpec extends EngineJobExecutionActorSpec private val cacheId: CallCachingEntryId = CallCachingEntryId(74) private val cacheHitNumber = 3 private val cacheHit = Option(EJEACacheHit(CacheHit(cacheId), hitNumber = cacheHitNumber, details = None)) - def standardResponsePendingData(hashes: Option[Try[CallCacheHashes]]) = ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, hashes, Option(helper.ejhaProbe.ref), cacheHit) - def ejeaInBackendIsCopyingCachedOutputsState(initialHashes: Option[Try[CallCacheHashes]], callCachingMode: CallCachingMode, restarting: Boolean = false) = { + def standardResponsePendingData(hashes: Option[Try[CallCacheHashes]]) = + ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, hashes, Option(helper.ejhaProbe.ref), cacheHit) + def ejeaInBackendIsCopyingCachedOutputsState(initialHashes: Option[Try[CallCacheHashes]], + callCachingMode: CallCachingMode, + restarting: Boolean = false + ) = helper .buildEJEA(restarting = restarting, callCachingMode = callCachingMode) .setStateInline(data = standardResponsePendingData(initialHashes)) - } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCacheEntryExistenceSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCacheEntryExistenceSpec.scala index 922c3b0f90a..af208492ce1 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCacheEntryExistenceSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCacheEntryExistenceSpec.scala @@ -13,20 +13,23 @@ import scala.util.control.NoStackTrace class EjeaCheckingCacheEntryExistenceSpec extends EngineJobExecutionActorSpec { - override implicit val stateUnderTest = CheckingJobStore + implicit override val stateUnderTest = CheckingJobStore "An EJEA in EjeaCheckingCacheEntryExistence state should" should { "re-use the results from the cache hit" in { createCheckingCacheEntryExistenceEjea() - ejea ! CallCachingJoin(CallCachingEntry(helper.workflowId.toString, helper.jobFqn, 0, None, None, allowResultReuse = true), + ejea ! 
CallCachingJoin( + CallCachingEntry(helper.workflowId.toString, helper.jobFqn, 0, None, None, allowResultReuse = true), List(CallCachingHashEntry("runtime attribute: docker", "HASHVALUE")), None, List.empty, List.empty ) - helper.serviceRegistryProbe.expectMsgPF(awaitTimeout) { - case put: PutMetadataAction => put.events.find(_.key.key.endsWith("runtime attribute:docker"))flatMap(_.value.map(_.value)) shouldBe Option("HASHVALUE") + helper.serviceRegistryProbe.expectMsgPF(awaitTimeout) { case put: PutMetadataAction => + put.events.find(_.key.key.endsWith("runtime attribute:docker")) flatMap (_.value.map(_.value)) shouldBe Option( + "HASHVALUE" + ) } helper.jobStoreProbe.expectMsgClass(classOf[RegisterJobCompleted]) ejea.stateName should be(UpdatingJobStore) @@ -49,7 +52,6 @@ class EjeaCheckingCacheEntryExistenceSpec extends EngineJobExecutionActorSpec { } } - private def createCheckingCacheEntryExistenceEjea(): Unit = { + private def createCheckingCacheEntryExistenceEjea(): Unit = ejea = helper.buildEJEA().setStateInline(state = CheckingCacheEntryExistence, data = NoData) - } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCallCacheSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCallCacheSpec.scala index 6ba6141daeb..a915e62b388 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCallCacheSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingCallCacheSpec.scala @@ -1,7 +1,12 @@ package cromwell.engine.workflow.lifecycle.execution.ejea import cromwell.core.callcaching.{CallCachingActivity, CallCachingOff, ReadCache} -import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{CheckingCallCache, FetchingCachedOutputsFromDatabase, ResponsePendingData, RunningJob} +import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{ + CheckingCallCache, + FetchingCachedOutputsFromDatabase, + ResponsePendingData, + RunningJob +} import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{CacheHit, CacheMiss, HashError} import cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec.EnhancedTestEJEA import cromwell.services.CallCaching.CallCachingEntryId @@ -11,7 +16,7 @@ import scala.util.control.NoStackTrace class EjeaCheckingCallCacheSpec extends EngineJobExecutionActorSpec with Eventually with CanExpectFetchCachedResults { - override implicit val stateUnderTest = CheckingCallCache + implicit override val stateUnderTest = CheckingCallCache "An EJEA in CheckingCallCache mode" should { "Try to fetch the call cache outputs if it gets a CacheHit" in { @@ -21,29 +26,32 @@ class EjeaCheckingCallCacheSpec extends EngineJobExecutionActorSpec with Eventua ejea.stateName should be(FetchingCachedOutputsFromDatabase) } - RestartOrExecuteCommandTuples foreach { case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => - s"$operationName the job if it receives a cache miss and restarting is $restarting" in { - createCheckingCallCacheEjea(restarting) - ejea ! CacheMiss - helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) - ejea.stateName should be(RunningJob) - } - - s"Disabling call caching and $operationName the job if it receives a HashError and restarting is $restarting" in { - createCheckingCallCacheEjea(restarting) - ejea ! HashError(new Exception("Anticipated exception. 
Part of test-flow") with NoStackTrace) - eventually { - ejea.underlyingActor.checkEffectiveCallCachingMode should be(CallCachingOff) + RestartOrExecuteCommandTuples foreach { + case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => + s"$operationName the job if it receives a cache miss and restarting is $restarting" in { + createCheckingCallCacheEjea(restarting) + ejea ! CacheMiss + helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) + ejea.stateName should be(RunningJob) + } + + s"Disabling call caching and $operationName the job if it receives a HashError and restarting is $restarting" in { + createCheckingCallCacheEjea(restarting) + ejea ! HashError(new Exception("Anticipated exception. Part of test-flow") with NoStackTrace) + eventually { + ejea.underlyingActor.checkEffectiveCallCachingMode should be(CallCachingOff) + } + helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) + ejea.stateName should be(RunningJob) } - helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) - ejea.stateName should be(RunningJob) - } } } private def createCheckingCallCacheEjea(restarting: Boolean = false): Unit = { ejea = helper.buildEJEA(restarting = restarting, callCachingMode = CallCachingActivity(ReadCache)) - ejea.setStateInline(state = CheckingCallCache, data = ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None)) + ejea.setStateInline(state = CheckingCallCache, + data = ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None) + ) () } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingJobStoreSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingJobStoreSpec.scala index e1f1a244b38..a9d3fdd8727 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingJobStoreSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaCheckingJobStoreSpec.scala @@ -1,16 +1,24 @@ package cromwell.engine.workflow.lifecycle.execution.ejea -import cromwell.backend.BackendJobExecutionActor.{JobFailedNonRetryableResponse, JobFailedRetryableResponse, JobSucceededResponse} +import cromwell.backend.BackendJobExecutionActor.{ + JobFailedNonRetryableResponse, + JobFailedRetryableResponse, + JobSucceededResponse +} import cromwell.core.CallOutputs import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadActor.CallCacheEntryForCall import cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec.EnhancedTestEJEA -import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{CheckingCacheEntryExistence, CheckingJobStore, NoData} +import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{ + CheckingCacheEntryExistence, + CheckingJobStore, + NoData +} import cromwell.jobstore.JobStoreActor.{JobComplete, JobNotComplete} import cromwell.jobstore.{JobResultFailure, JobResultSuccess} class EjeaCheckingJobStoreSpec extends EngineJobExecutionActorSpec { - override implicit val stateUnderTest = CheckingJobStore + implicit override val stateUnderTest = CheckingJobStore "An EJEA in CheckingJobStore state should" should { "send a Job SucceededResponse if the job is already complete and successful" in { @@ -21,17 +29,15 @@ class EjeaCheckingJobStoreSpec extends EngineJobExecutionActorSpec { ejea ! 
JobComplete(JobResultSuccess(returnCode, jobOutputs)) - helper.replyToProbe.expectMsgPF(awaitTimeout) { - case response: JobSucceededResponse => - response.returnCode shouldBe returnCode - response.jobOutputs shouldBe jobOutputs + helper.replyToProbe.expectMsgPF(awaitTimeout) { case response: JobSucceededResponse => + response.returnCode shouldBe returnCode + response.jobOutputs shouldBe jobOutputs } helper.deathwatch.expectTerminated(ejea) } List(("FailedNonRetryableResponse", false), ("FailedRetryableResponse", true)) foreach { case (name, retryable) => - s"send a $name if the job is already complete and failed" in { createCheckingJobStoreEjea() val returnCode: Option[Int] = Option(1) @@ -59,12 +65,16 @@ class EjeaCheckingJobStoreSpec extends EngineJobExecutionActorSpec { ejea.setState(CheckingJobStore) ejea ! JobNotComplete - helper.callCacheReadActorProbe.expectMsg(awaitTimeout, "expecting CallCacheEntryForCall", CallCacheEntryForCall(helper.workflowId, helper.jobDescriptorKey)) + helper.callCacheReadActorProbe.expectMsg(awaitTimeout, + "expecting CallCacheEntryForCall", + CallCacheEntryForCall(helper.workflowId, helper.jobDescriptorKey) + ) ejea.stateName should be(CheckingCacheEntryExistence) ejea.stop() } } - private def createCheckingJobStoreEjea(): Unit = { ejea = helper.buildEJEA(restarting = true).setStateInline(state = CheckingJobStore, data = NoData) } + private def createCheckingJobStoreEjea(): Unit = ejea = + helper.buildEJEA(restarting = true).setStateInline(state = CheckingJobStore, data = NoData) } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaFetchingCachedOutputsFromDatabaseSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaFetchingCachedOutputsFromDatabaseSpec.scala index fab5cfa0b65..bc3f56dfe66 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaFetchingCachedOutputsFromDatabaseSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaFetchingCachedOutputsFromDatabaseSpec.scala @@ -6,7 +6,10 @@ import cromwell.core.callcaching.{CallCachingActivity, ReadAndWriteCache} import cromwell.core.simpleton.WomValueSimpleton import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{CacheHit, HashError} -import cromwell.engine.workflow.lifecycle.execution.callcaching.FetchCachedResultsActor.{CachedOutputLookupFailed, CachedOutputLookupSucceeded} +import cromwell.engine.workflow.lifecycle.execution.callcaching.FetchCachedResultsActor.{ + CachedOutputLookupFailed, + CachedOutputLookupSucceeded +} import cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec._ import cromwell.engine.workflow.lifecycle.execution.ejea.HasJobSuccessResponse.SuccessfulCallCacheHashes import wom.values.WomString @@ -21,10 +24,10 @@ class EjeaFetchingCachedOutputsFromDatabaseSpec extends EngineJobExecutionActorS "An EJEA in FetchingCachedOutputsFromDatabase state" should { - val possibleEjhaResponses = List( - ("no hashes", None), - ("hash results", Some(SuccessfulCallCacheHashes)), - ("hash error", Some(HashError(new Exception("blah"))))) + val possibleEjhaResponses = List(("no hashes", None), + ("hash results", Some(SuccessfulCallCacheHashes)), + ("hash error", Some(HashError(new Exception("blah")))) + ) possibleEjhaResponses foreach { case (name, ejhaResponse) => s"Correctly receive $name then begin the 
backend-specific output copying actor when it gets a result-fetch success" in { @@ -35,12 +38,20 @@ class EjeaFetchingCachedOutputsFromDatabaseSpec extends EngineJobExecutionActorS } // Send the response from the "Fetch" actor - val cachedSimpletons = Seq(WomValueSimpleton("a", WomString("hullo")), WomValueSimpleton("b", WomString("cheerio"))) + val cachedSimpletons = + Seq(WomValueSimpleton("a", WomString("hullo")), WomValueSimpleton("b", WomString("cheerio"))) val detritusMap = Map("stdout" -> "//somePath") val cachedReturnCode = Some(17) val sourceCacheDetails = s"${WorkflowId.randomId()}:call-someTask:1" - ejea ! CachedOutputLookupSucceeded(cachedSimpletons, detritusMap, cachedReturnCode, callCachingEntryId, sourceCacheDetails) - helper.callCacheHitCopyingProbe.expectMsg(CopyOutputsCommand(cachedSimpletons, detritusMap, callCachingEntryId, cachedReturnCode)) + ejea ! CachedOutputLookupSucceeded(cachedSimpletons, + detritusMap, + cachedReturnCode, + callCachingEntryId, + sourceCacheDetails + ) + helper.callCacheHitCopyingProbe.expectMsg( + CopyOutputsCommand(cachedSimpletons, detritusMap, callCachingEntryId, cachedReturnCode) + ) // Check we end up in the right state: ejea.stateName should be(BackendIsCopyingCachedOutputs) @@ -60,40 +71,49 @@ class EjeaFetchingCachedOutputsFromDatabaseSpec extends EngineJobExecutionActorS ejea.stateData.asInstanceOf[ResponsePendingData] shouldEqualFieldwise expectedData } - RestartOrExecuteCommandTuples foreach { case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => - // Send the response from the "Fetch" actor - val failureReason = new Exception("You can't handle the truth!") - val lookupFailedMsg = CachedOutputLookupFailed(CallCachingEntryId(90210), failureReason) - - s"Correctly receive $name then $operationName the job when it gets a ${lookupFailedMsg.getClass.getSimpleName} result-fetch failure" in { - ejea = ejeaInFetchingCachedOutputsFromDatabaseState(restarting) - // Send the response from the EJHA (if there was one!): - ejhaResponse foreach { - ejea ! _ + RestartOrExecuteCommandTuples foreach { + case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => + // Send the response from the "Fetch" actor + val failureReason = new Exception("You can't handle the truth!") + val lookupFailedMsg = CachedOutputLookupFailed(CallCachingEntryId(90210), failureReason) + + s"Correctly receive $name then $operationName the job when it gets a ${lookupFailedMsg.getClass.getSimpleName} result-fetch failure" in { + ejea = ejeaInFetchingCachedOutputsFromDatabaseState(restarting) + // Send the response from the EJHA (if there was one!): + ejhaResponse foreach { + ejea ! _ + } + + ejea ! lookupFailedMsg + + helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) + + // Check we end up in the right state: + ejea.stateName should be(RunningJob) + // Check we end up with the right data: + val expectedData = initialData.copy( + hashes = ejhaResponse map { + case SuccessfulCallCacheHashes => Success(SuccessfulCallCacheHashes) + case HashError(t) => Failure(t) + case _ => fail(s"Bad test wiring. We didn't expect $ejhaResponse here") + }, + backendJobActor = Option(helper.bjeaProbe.ref) + ) + ejea.stateData should be(expectedData) } - - ejea ! 
lookupFailedMsg - - helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) - - // Check we end up in the right state: - ejea.stateName should be(RunningJob) - // Check we end up with the right data: - val expectedData = initialData.copy( - hashes = ejhaResponse map { - case SuccessfulCallCacheHashes => Success(SuccessfulCallCacheHashes) - case HashError(t) => Failure(t) - case _ => fail(s"Bad test wiring. We didn't expect $ejhaResponse here") - }, - backendJobActor = Option(helper.bjeaProbe.ref) - ) - ejea.stateData should be(expectedData) - } } } } val callCachingEntryId = CallCachingEntryId(75) - def initialData = ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, None, Some(EJEACacheHit(CacheHit(callCachingEntryId), 2, None)), None) - def ejeaInFetchingCachedOutputsFromDatabaseState(restarting: Boolean = false) = helper.buildEJEA(restarting = restarting, callCachingMode = CallCachingActivity(ReadAndWriteCache)).setStateInline(data = initialData) + def initialData = ResponsePendingData(helper.backendJobDescriptor, + helper.bjeaProps, + None, + None, + Some(EJEACacheHit(CacheHit(callCachingEntryId), 2, None)), + None + ) + def ejeaInFetchingCachedOutputsFromDatabaseState(restarting: Boolean = false) = helper + .buildEJEA(restarting = restarting, callCachingMode = CallCachingActivity(ReadAndWriteCache)) + .setStateInline(data = initialData) } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaInvalidatingCacheEntrySpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaInvalidatingCacheEntrySpec.scala index bc860b8a7cc..f52e74017b4 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaInvalidatingCacheEntrySpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaInvalidatingCacheEntrySpec.scala @@ -4,13 +4,16 @@ import akka.actor.ActorRef import cromwell.core.callcaching.{CallCachingActivity, ReadCache} import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor.NextHit -import cromwell.engine.workflow.lifecycle.execution.callcaching.{CallCacheInvalidatedFailure, CallCacheInvalidatedSuccess} +import cromwell.engine.workflow.lifecycle.execution.callcaching.{ + CallCacheInvalidatedFailure, + CallCacheInvalidatedSuccess +} import cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec._ import cromwell.services.CallCaching.CallCachingEntryId class EjeaInvalidatingCacheEntrySpec extends EngineJobExecutionActorSpec { - override implicit val stateUnderTest = InvalidatingCacheEntry + implicit override val stateUnderTest = InvalidatingCacheEntry "An EJEA in InvalidatingCacheEntry state" should { @@ -27,24 +30,38 @@ class EjeaInvalidatingCacheEntrySpec extends EngineJobExecutionActorSpec { helper.bjeaProbe.expectNoMessage(awaitAlmostNothing) helper.ejhaProbe.expectMsg(NextHit) - eventually { ejea.stateName should be(CheckingCallCache) } - ejea.stateData should be(ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, Option(helper.ejhaProbe.ref), None)) + eventually(ejea.stateName should be(CheckingCallCache)) + ejea.stateData should be( + ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, Option(helper.ejhaProbe.ref), None) + ) } - RestartOrExecuteCommandTuples foreach { case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => - 
s"$operationName a job if there is no ejha when invalidate response is $invalidateActorResponse" in { - ejea = ejeaInvalidatingCacheEntryState(None, restarting = restarting) - // Send the response from the invalidate actor - ejea ! invalidateActorResponse + RestartOrExecuteCommandTuples foreach { + case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => + s"$operationName a job if there is no ejha when invalidate response is $invalidateActorResponse" in { + ejea = ejeaInvalidatingCacheEntryState(None, restarting = restarting) + // Send the response from the invalidate actor + ejea ! invalidateActorResponse - helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) - eventually { ejea.stateName should be(RunningJob) } - ejea.stateData should be(ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, None, None, Option(helper.bjeaProbe.ref))) - } + helper.bjeaProbe.expectMsg(awaitTimeout, expectedMessage) + eventually(ejea.stateName should be(RunningJob)) + ejea.stateData should be( + ResponsePendingData(helper.backendJobDescriptor, + helper.bjeaProps, + None, + None, + None, + Option(helper.bjeaProbe.ref) + ) + ) + } } } } - def standardResponsePendingData(ejha: Option[ActorRef]) = ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, ejha, None, None) - def ejeaInvalidatingCacheEntryState(ejha: Option[ActorRef], restarting: Boolean = false) = helper.buildEJEA(restarting = restarting, callCachingMode = CallCachingActivity(ReadCache)).setStateInline(state = InvalidatingCacheEntry, data = standardResponsePendingData(ejha)) + def standardResponsePendingData(ejha: Option[ActorRef]) = + ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, ejha, None, None) + def ejeaInvalidatingCacheEntryState(ejha: Option[ActorRef], restarting: Boolean = false) = helper + .buildEJEA(restarting = restarting, callCachingMode = CallCachingActivity(ReadCache)) + .setStateInline(state = InvalidatingCacheEntry, data = standardResponsePendingData(ejha)) } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala index cd13165370a..f19bd3b42e2 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala @@ -16,14 +16,14 @@ import EjeaMultipleCallCacheCopyAttemptsSpec.bt140Debug import akka.testkit.TestFSMRef class EjeaMultipleCallCacheCopyAttemptsSpec - extends EngineJobExecutionActorSpec + extends EngineJobExecutionActorSpec with HasJobSuccessResponse with HasCopyFailureResponses with HasJobFailureResponses with CanExpectJobStoreWrites with CanExpectFetchCachedResults { - override implicit val stateUnderTest: EngineJobExecutionActorState = BackendIsCopyingCachedOutputs + implicit override val stateUnderTest: EngineJobExecutionActorState = BackendIsCopyingCachedOutputs override val allowMultipleCacheCycles: Boolean = true "An EJEA attempting to call cache copy" should { @@ -45,19 +45,29 @@ class EjeaMultipleCallCacheCopyAttemptsSpec def fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts(copyAttemptNumber: Int) = { val callCachingEntryId = cacheEntryIdFromCopyAttempt(copyAttemptNumber) - val cachedSimpletons = Seq(WomValueSimpleton("a", WomString("hullo")), 
WomValueSimpleton("b", WomString("cheerio"))) + val cachedSimpletons = + Seq(WomValueSimpleton("a", WomString("hullo")), WomValueSimpleton("b", WomString("cheerio"))) val detritusMap = Map("stdout" -> "//somePath") val cachedReturnCode = Some(17) val sourceCacheDetails = s"${WorkflowId.randomId()}:call-someTask:1" - ejea ! CachedOutputLookupSucceeded(cachedSimpletons, detritusMap, cachedReturnCode, callCachingEntryId, sourceCacheDetails) - helper.callCacheHitCopyingProbe.expectMsg(CopyOutputsCommand(cachedSimpletons, detritusMap, callCachingEntryId, cachedReturnCode)) + ejea ! CachedOutputLookupSucceeded(cachedSimpletons, + detritusMap, + cachedReturnCode, + callCachingEntryId, + sourceCacheDetails + ) + helper.callCacheHitCopyingProbe.expectMsg( + CopyOutputsCommand(cachedSimpletons, detritusMap, callCachingEntryId, cachedReturnCode) + ) eventually { ejea.stateName should be(BackendIsCopyingCachedOutputs) } } def copyAttemptFailsAndEjeaLooksForNextHit(becauseBlacklisted: Boolean, copyAttemptNumber: Int) = { - val response = if (becauseBlacklisted) cacheHitBlacklistedResponse(copyAttemptNumber) else copyAttemptFailedResponse(copyAttemptNumber) + val response = + if (becauseBlacklisted) cacheHitBlacklistedResponse(copyAttemptNumber) + else copyAttemptFailedResponse(copyAttemptNumber) ejea ! response helper.ejhaProbe.expectMsg(NextHit) @@ -95,14 +105,15 @@ class EjeaMultipleCallCacheCopyAttemptsSpec // First: A long series of copy failure: bt140Debug("'keep waiting' running 'series of copy failures'") val initialCopyFailures = maxFailedCopyAttempts - 1 - 0.until(initialCopyFailures). foreach { currentCopyAttemptNumber => + 0.until(initialCopyFailures).foreach { currentCopyAttemptNumber => ejhaSendsHitIdToEjeaAndEjeaReacts(currentCopyAttemptNumber) fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts(currentCopyAttemptNumber) copyAttemptFailsAndEjeaLooksForNextHit(becauseBlacklisted = false, currentCopyAttemptNumber) } // Then: A success: - val currentCopyAttemptNumber = initialCopyFailures // because the initial 0.until(...) is non-inclusive of the argument + val currentCopyAttemptNumber = + initialCopyFailures // because the initial 0.until(...) is non-inclusive of the argument bt140Debug("'keep waiting' running 'ejhaSendsHitIdToEjeaAndEjeaReacts'") ejhaSendsHitIdToEjeaAndEjeaReacts(copyAttemptNumber = currentCopyAttemptNumber) bt140Debug("'keep waiting' running 'fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts'") @@ -120,7 +131,7 @@ class EjeaMultipleCallCacheCopyAttemptsSpec // First: A long series of copy failure: bt140Debug("'fail fast' running 'longer series of (genuine) copy failures'") val initialCopyFailures = maxFailedCopyAttempts - 1 - 0.until(initialCopyFailures). foreach { currentCopyAttemptNumber => + 0.until(initialCopyFailures).foreach { currentCopyAttemptNumber => ejhaSendsHitIdToEjeaAndEjeaReacts(currentCopyAttemptNumber) fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts(currentCopyAttemptNumber) copyAttemptFailsAndEjeaLooksForNextHit(becauseBlacklisted = false, currentCopyAttemptNumber) @@ -128,7 +139,8 @@ class EjeaMultipleCallCacheCopyAttemptsSpec // Then: Another failure: bt140Debug("'fail fast' running 'ejhaSendsHitIdToEjeaAndEjeaReacts'") - val currentCopyAttemptNumber = initialCopyFailures // because the initial 0.until(...) is non-inclusive of the argument + val currentCopyAttemptNumber = + initialCopyFailures // because the initial 0.until(...) 
is non-inclusive of the argument ejhaSendsHitIdToEjeaAndEjeaReacts(copyAttemptNumber = currentCopyAttemptNumber) bt140Debug("'fail fast' running 'fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts'") fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts(currentCopyAttemptNumber) @@ -145,7 +157,7 @@ class EjeaMultipleCallCacheCopyAttemptsSpec // First: A long series of (genuine) copy failures: bt140Debug("'disregard' running 'longer series of (genuine) copy failures'") val initialCopyFailures = maxFailedCopyAttempts - 1 - 0.until(initialCopyFailures). foreach { currentCopyAttemptNumber => + 0.until(initialCopyFailures).foreach { currentCopyAttemptNumber => ejhaSendsHitIdToEjeaAndEjeaReacts(currentCopyAttemptNumber) fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts(currentCopyAttemptNumber) copyAttemptFailsAndEjeaLooksForNextHit(becauseBlacklisted = false, currentCopyAttemptNumber) @@ -154,7 +166,7 @@ class EjeaMultipleCallCacheCopyAttemptsSpec // Second: An even longer series of (blacklist) copy failures: bt140Debug("'disregard' running 'longer series of (exclude list) copy failures'") val blacklistCopyFailures = maxFailedCopyAttempts + 2 - initialCopyFailures.until(initialCopyFailures + blacklistCopyFailures). foreach { currentCopyAttemptNumber => + initialCopyFailures.until(initialCopyFailures + blacklistCopyFailures).foreach { currentCopyAttemptNumber => ejhaSendsHitIdToEjeaAndEjeaReacts(currentCopyAttemptNumber) fetchCachedResultsActorSendsResultSetToEjeaAndEjeaReacts(currentCopyAttemptNumber) copyAttemptFailsAndEjeaLooksForNextHit(becauseBlacklisted = true, currentCopyAttemptNumber) @@ -171,18 +183,21 @@ class EjeaMultipleCallCacheCopyAttemptsSpec bt140Debug("'disregard' done") } - } def buildEjea(maxFailedCopyAttempts: Int): TestFSMRef[EngineJobExecutionActorState, EJEAData, MockEjea] = helper .buildEJEA( restarting = false, callCachingMode = CallCachingActivity(ReadCache, CallCachingOptions(invalidateBadCacheResults = false)), - callCachingMaxFailedCopyAttempts = maxFailedCopyAttempts) - .setStateInline(state = CheckingCallCache, data = ResponsePendingData( - jobDescriptor = helper.backendJobDescriptor, - bjeaProps = helper.bjeaProps, - ejha = Some(helper.ejhaProbe.ref))) + callCachingMaxFailedCopyAttempts = maxFailedCopyAttempts + ) + .setStateInline( + state = CheckingCallCache, + data = ResponsePendingData(jobDescriptor = helper.backendJobDescriptor, + bjeaProps = helper.bjeaProps, + ejha = Some(helper.ejhaProbe.ref) + ) + ) } @@ -194,7 +209,6 @@ object EjeaMultipleCallCacheCopyAttemptsSpec extends StrictLogging { at any point someone decides that this is not actually helping, feel free to delete all calls to this method and the debug method itself. 
*/ - private def bt140Debug(message: String): Unit = { + private def bt140Debug(message: String): Unit = logger.info("BT-140 debug: " + message) - } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPendingSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPendingSpec.scala index 3d9ccdea718..4bbea403bfb 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPendingSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPendingSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.concurrent.Eventually class EjeaPendingSpec extends EngineJobExecutionActorSpec with CanValidateJobStoreKey with Eventually { - override implicit val stateUnderTest: EngineJobExecutionActorState = Pending + implicit override val stateUnderTest: EngineJobExecutionActorState = Pending "An EJEA in the Pending state" should { @@ -36,7 +36,8 @@ class EjeaPendingSpec extends EngineJobExecutionActorSpec with CanValidateJobSto if (restarting) { helper.jobRestartCheckTokenDispenserProbe.expectMsgClass(max = awaitTimeout, classOf[JobTokenRequest]) } else { - val tokenRequest = helper.jobExecutionTokenDispenserProbe.expectMsgClass(max = awaitTimeout, classOf[JobTokenRequest]) + val tokenRequest = + helper.jobExecutionTokenDispenserProbe.expectMsgClass(max = awaitTimeout, classOf[JobTokenRequest]) // 1 is the default hog-factor value defined in reference.conf tokenRequest.jobTokenType.hogFactor should be(1) } @@ -45,14 +46,20 @@ class EjeaPendingSpec extends EngineJobExecutionActorSpec with CanValidateJobSto s"should use hog-factor defined in backend configuration in token request (with restarting=$restarting)" in { val expectedHogFactorValue = 123 val overriddenHogFactorAttributeString = s"hog-factor: $expectedHogFactorValue" - val backendWithOverriddenHogFactorConfigDescriptor = BackendConfigurationDescriptor(backendConfig = ConfigFactory.parseString(overriddenHogFactorAttributeString), globalConfig) - ejea = helper.buildEJEA(restarting = restarting, backendConfigurationDescriptor = backendWithOverriddenHogFactorConfigDescriptor) + val backendWithOverriddenHogFactorConfigDescriptor = + BackendConfigurationDescriptor(backendConfig = ConfigFactory.parseString(overriddenHogFactorAttributeString), + globalConfig + ) + ejea = helper.buildEJEA(restarting = restarting, + backendConfigurationDescriptor = backendWithOverriddenHogFactorConfigDescriptor + ) ejea ! 
Execute if (restarting) { helper.jobRestartCheckTokenDispenserProbe.expectMsgClass(max = awaitTimeout, classOf[JobTokenRequest]) } else { - val tokenRequest = helper.jobExecutionTokenDispenserProbe.expectMsgClass(max = awaitTimeout, classOf[JobTokenRequest]) + val tokenRequest = + helper.jobExecutionTokenDispenserProbe.expectMsgClass(max = awaitTimeout, classOf[JobTokenRequest]) tokenRequest.jobTokenType.hogFactor should be(expectedHogFactorValue) } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPreparingJobSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPreparingJobSpec.scala index edb50af67bc..8bae0b275fe 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPreparingJobSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaPreparingJobSpec.scala @@ -5,12 +5,15 @@ import cromwell.backend.BackendJobExecutionActor.JobFailedNonRetryableResponse import cromwell.core.callcaching.{CallCachingMode, DockerWithHash} import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor._ import cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec._ -import cromwell.engine.workflow.lifecycle.execution.job.preparation.CallPreparation.{BackendJobPreparationSucceeded, CallPreparationFailed} +import cromwell.engine.workflow.lifecycle.execution.job.preparation.CallPreparation.{ + BackendJobPreparationSucceeded, + CallPreparationFailed +} import org.scalatest.concurrent.Eventually class EjeaPreparingJobSpec extends EngineJobExecutionActorSpec with CanExpectHashingInitialization with Eventually { - override implicit val stateUnderTest = PreparingJob + implicit override val stateUnderTest = PreparingJob "An EJEA in PreparingJob state" should { @@ -22,30 +25,47 @@ class EjeaPreparingJobSpec extends EngineJobExecutionActorSpec with CanExpectHas ejea ! jobPrepSuccessResponse(jobDescriptor) expectHashingActorInitialization(mode, jobDescriptor) ejea.stateName should be(CheckingCallCache) - ejea.stateData should be(ResponsePendingData(jobDescriptor, helper.bjeaProps, None, Option(helper.ejhaProbe.ref))) + ejea.stateData should be( + ResponsePendingData(jobDescriptor, helper.bjeaProps, None, Option(helper.ejhaProbe.ref)) + ) } s"Not check for a cache hit when job preparation succeeds and no docker hash is available ($mode)" in { ejea = ejeaInPreparingState(mode) ejea ! jobPrepSuccessResponse(helper.backendJobDescriptor) ejea.stateName should be(RunningJob) - ejea.stateData should be(ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, backendJobActor = Option(helper.bjeaProbe.ref))) + ejea.stateData should be( + ResponsePendingData(helper.backendJobDescriptor, + helper.bjeaProps, + None, + backendJobActor = Option(helper.bjeaProbe.ref) + ) + ) } } else { - RestartOrExecuteCommandTuples foreach { case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => - s"Send BJEA '$operationName' when job preparation succeeds ($mode)" in { - ejea = ejeaInPreparingState(mode = mode, restarting = restarting) - ejea ! 
jobPrepSuccessResponse(helper.backendJobDescriptor) - helper.bjeaProbe.expectMsg(awaitTimeout, "job preparation", expectedMessage) - ejea.stateName should be(RunningJob) - ejea.stateData should be(ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None, backendJobActor = Option(helper.bjeaProbe.ref))) - } + RestartOrExecuteCommandTuples foreach { + case RestartOrExecuteCommandTuple(operationName, restarting, expectedMessage) => + s"Send BJEA '$operationName' when job preparation succeeds ($mode)" in { + ejea = ejeaInPreparingState(mode = mode, restarting = restarting) + ejea ! jobPrepSuccessResponse(helper.backendJobDescriptor) + helper.bjeaProbe.expectMsg(awaitTimeout, "job preparation", expectedMessage) + ejea.stateName should be(RunningJob) + ejea.stateData should be( + ResponsePendingData(helper.backendJobDescriptor, + helper.bjeaProps, + None, + backendJobActor = Option(helper.bjeaProbe.ref) + ) + ) + } } } s"Not proceed if Job Preparation fails ($mode)" in { - val prepActorResponse = CallPreparationFailed(helper.jobDescriptorKey, new Exception("The goggles! They do nothing!")) - val prepFailedEjeaResponse = JobFailedNonRetryableResponse(helper.jobDescriptorKey, prepActorResponse.throwable, None) + val prepActorResponse = + CallPreparationFailed(helper.jobDescriptorKey, new Exception("The goggles! They do nothing!")) + val prepFailedEjeaResponse = + JobFailedNonRetryableResponse(helper.jobDescriptorKey, prepActorResponse.throwable, None) ejea = ejeaInPreparingState(mode) ejea ! prepActorResponse helper.replyToProbe.expectMsg(prepFailedEjeaResponse) @@ -54,8 +74,11 @@ class EjeaPreparingJobSpec extends EngineJobExecutionActorSpec with CanExpectHas } } - def jobPrepSuccessResponse(jobDescriptor: BackendJobDescriptor) = BackendJobPreparationSucceeded(jobDescriptor, helper.bjeaProps) + def jobPrepSuccessResponse(jobDescriptor: BackendJobDescriptor) = + BackendJobPreparationSucceeded(jobDescriptor, helper.bjeaProps) - def ejeaInPreparingState(mode: CallCachingMode, restarting: Boolean = false) = helper.buildEJEA(restarting = restarting, callCachingMode = mode).setStateInline(state = PreparingJob, data = NoData) + def ejeaInPreparingState(mode: CallCachingMode, restarting: Boolean = false) = helper + .buildEJEA(restarting = restarting, callCachingMode = mode) + .setStateInline(state = PreparingJob, data = NoData) } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingExecutionTokenSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingExecutionTokenSpec.scala index bc710d98b0f..66c6c0adf4c 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingExecutionTokenSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingExecutionTokenSpec.scala @@ -8,7 +8,7 @@ import org.scalatest.concurrent.Eventually class EjeaRequestingExecutionTokenSpec extends EngineJobExecutionActorSpec with CanValidateJobStoreKey with Eventually { - override implicit val stateUnderTest: EngineJobExecutionActorState = RequestingExecutionToken + implicit override val stateUnderTest: EngineJobExecutionActorState = RequestingExecutionToken "An EJEA in the RequestingExecutionToken state" should { @@ -27,8 +27,14 @@ class EjeaRequestingExecutionTokenSpec extends EngineJobExecutionActorSpec with ejea = helper.buildEJEA(restarting = false) ejea ! 
JobTokenDispensed - helper.replyToProbe.expectMsg(max = awaitTimeout, hint = "Awaiting JobStarting message", JobStarting(helper.jobDescriptorKey)) - helper.replyToProbe.expectMsg(max = awaitTimeout, hint = "Awaiting RequestValueStore message", RequestValueStore) + helper.replyToProbe.expectMsg(max = awaitTimeout, + hint = "Awaiting JobStarting message", + JobStarting(helper.jobDescriptorKey) + ) + helper.replyToProbe.expectMsg(max = awaitTimeout, + hint = "Awaiting RequestValueStore message", + RequestValueStore + ) ejea.stateName should be(WaitingForValueStore) } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingRestartCheckTokenSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingRestartCheckTokenSpec.scala index 186b67aa1af..968378414da 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingRestartCheckTokenSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRequestingRestartCheckTokenSpec.scala @@ -5,9 +5,12 @@ import cromwell.engine.workflow.tokens.JobTokenDispenserActor.JobTokenDispensed import cromwell.jobstore.JobStoreActor.QueryJobCompletion import org.scalatest.concurrent.Eventually -class EjeaRequestingRestartCheckTokenSpec extends EngineJobExecutionActorSpec with CanValidateJobStoreKey with Eventually { +class EjeaRequestingRestartCheckTokenSpec + extends EngineJobExecutionActorSpec + with CanValidateJobStoreKey + with Eventually { - override implicit val stateUnderTest: EngineJobExecutionActorState = RequestingRestartCheckToken + implicit override val stateUnderTest: EngineJobExecutionActorState = RequestingRestartCheckToken "An EJEA in the RequestingRestartTokenCheck state" should { diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala index 774c334d098..599d0fca6c6 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala @@ -4,15 +4,27 @@ import cromwell.core.callcaching._ import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.HashError import cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec.EnhancedTestEJEA import cromwell.engine.workflow.lifecycle.execution.ejea.HasJobSuccessResponse.SuccessfulCallCacheHashes -import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{FailedResponseData, ResponsePendingData, RunningJob, SucceededResponseData} +import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{ + FailedResponseData, + ResponsePendingData, + RunningJob, + SucceededResponseData +} import org.scalatest.concurrent.Eventually import scala.util.control.NoStackTrace import scala.util.{Failure, Success} -class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually with CanValidateJobStoreKey with CanExpectJobStoreWrites with CanExpectCacheWrites with HasJobSuccessResponse with HasJobFailureResponses { +class EjeaRunningJobSpec + extends EngineJobExecutionActorSpec + with Eventually + with CanValidateJobStoreKey + with CanExpectJobStoreWrites + with CanExpectCacheWrites + with HasJobSuccessResponse + with HasJobFailureResponses { - override implicit val stateUnderTest = 
RunningJob + implicit override val stateUnderTest = RunningJob val hashError = HashError(new Exception("ARGH!!!") with NoStackTrace) @@ -23,7 +35,6 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit "A 'RunningJob' EJEA" should { CallCachingModes foreach { mode => - /* *************************** */ /* JobAbortedResponse Handling */ /* *************************** */ @@ -43,7 +54,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving a JobAbortedResponse correctly in $mode mode with successful hashes" in { ejea = ejeaInRunningState(mode) ejea ! SuccessfulCallCacheHashes - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes))))) ejea.stateName should be(RunningJob) ejea ! abortedResponse @@ -59,7 +70,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving a JobAbortedResponse correctly in $mode mode with failed hashes" in { ejea = ejeaInRunningState(mode) ejea ! hashError - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Failure(hashError.reason)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Failure(hashError.reason))))) ejea.stateName should be(RunningJob) ejea ! abortedResponse @@ -85,7 +96,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving SuccessResponse then CallCacheHashes correctly in $mode mode" in { ejea = ejeaInRunningState(mode) ejea ! successResponse - eventually { ejea.stateData should be(SucceededResponseData(successResponse, None)) } + eventually(ejea.stateData should be(SucceededResponseData(successResponse, None))) ejea.stateName should be(RunningJob) ejea ! SuccessfulCallCacheHashes expectCacheWrite(successResponse, SuccessfulCallCacheHashes) @@ -94,7 +105,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving CallCacheHashes then SuccessResponse correctly in $mode mode" in { ejea = ejeaInRunningState(mode) ejea ! SuccessfulCallCacheHashes - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes))))) ejea.stateName should be(RunningJob) ejea ! successResponse expectCacheWrite(successResponse, SuccessfulCallCacheHashes) @@ -103,7 +114,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving SuccessResponse then HashError correctly in $mode mode" in { ejea = ejeaInRunningState(mode) ejea ! successResponse - eventually { ejea.stateData should be(SucceededResponseData(successResponse, None)) } + eventually(ejea.stateData should be(SucceededResponseData(successResponse, None))) ejea.stateName should be(RunningJob) ejea ! hashError expectJobStoreWrite(SucceededResponseData(successResponse, Some(Failure(hashError.reason)))) @@ -112,7 +123,11 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving HashError then SuccessResponse correctly in $mode mode" in { ejea = ejeaInRunningState(mode) ejea ! 
hashError - eventually { ejea.stateData should be(ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, Some(Failure(hashError.reason)))) } + eventually { + ejea.stateData should be( + ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, Some(Failure(hashError.reason))) + ) + } ejea.stateName should be(RunningJob) ejea ! successResponse expectJobStoreWrite(SucceededResponseData(successResponse, Some(Failure(hashError.reason)))) @@ -125,11 +140,13 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit val failedResponse = responseMaker() ejea = ejeaInRunningState(mode) ejea ! failedResponse - eventually { ejea.stateData should be(FailedResponseData(failedResponse, None)) } + eventually(ejea.stateData should be(FailedResponseData(failedResponse, None))) ejea.stateName should be(RunningJob) ejea ! SuccessfulCallCacheHashes // Don't expect cache write here (the job failed !) but do expect job store write - expectJobStoreWriteFailed(FailedResponseData(failedResponse, Option(Success(SuccessfulCallCacheHashes))), retryable) + expectJobStoreWriteFailed(FailedResponseData(failedResponse, Option(Success(SuccessfulCallCacheHashes))), + retryable + ) helper.callCacheWriteActorProbe.expectNoMessage(awaitAlmostNothing) } @@ -138,10 +155,12 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit ejea = ejeaInRunningState(mode) ejea.stateName should be(RunningJob) ejea ! SuccessfulCallCacheHashes - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes))))) ejea ! failedResponse // Don't expect cache write here (the job failed !) but do expect job store write - expectJobStoreWriteFailed(FailedResponseData(failedResponse, Option(Success(SuccessfulCallCacheHashes))), retryable) + expectJobStoreWriteFailed(FailedResponseData(failedResponse, Option(Success(SuccessfulCallCacheHashes))), + retryable + ) helper.callCacheWriteActorProbe.expectNoMessage(awaitAlmostNothing) } @@ -149,7 +168,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit val failedResponse = responseMaker() ejea = ejeaInRunningState(mode) ejea ! failedResponse - eventually { ejea.stateData should be(FailedResponseData(failedResponse, None)) } + eventually(ejea.stateData should be(FailedResponseData(failedResponse, None))) ejea.stateName should be(RunningJob) ejea ! hashError expectJobStoreWriteFailed(FailedResponseData(failedResponse, Some(Failure(hashError.reason))), retryable) @@ -160,7 +179,11 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit val failedResponse = responseMaker() ejea = ejeaInRunningState(mode) ejea ! hashError - eventually { ejea.stateData should be(ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, Some(Failure(hashError.reason)))) } + eventually { + ejea.stateData should be( + ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, Some(Failure(hashError.reason))) + ) + } ejea.stateName should be(RunningJob) ejea ! failedResponse expectJobStoreWriteFailed(FailedResponseData(failedResponse, Some(Failure(hashError.reason))), retryable) @@ -179,7 +202,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving a SucceededResponse correctly in $mode mode with successful hashes" in { ejea = ejeaInRunningState(mode) ejea ! 
SuccessfulCallCacheHashes - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes))))) ejea.stateName should be(RunningJob) ejea ! successResponse // Even if we received hashes, writeToCache is false so we go straight to job store and don't write them to the cache @@ -190,7 +213,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit s"Handle receiving a SucceededResponse correctly in $mode mode with failed hashes" in { ejea = ejeaInRunningState(mode) ejea ! hashError - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Failure(hashError.reason)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Failure(hashError.reason))))) ejea.stateName should be(RunningJob) ejea ! successResponse // Even if we received hashes, writeToCache is false so we go straight to job store and don't write them to the cache @@ -212,10 +235,12 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit val failedResponse = responseMaker() ejea = ejeaInRunningState(mode) ejea ! SuccessfulCallCacheHashes - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Success(SuccessfulCallCacheHashes))))) ejea.stateName should be(RunningJob) ejea ! failedResponse - expectJobStoreWriteFailed(FailedResponseData(failedResponse, Some(Success(SuccessfulCallCacheHashes))), retryable) + expectJobStoreWriteFailed(FailedResponseData(failedResponse, Some(Success(SuccessfulCallCacheHashes))), + retryable + ) helper.callCacheWriteActorProbe.expectNoMessage(awaitAlmostNothing) } @@ -223,7 +248,7 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit val failedResponse = responseMaker() ejea = ejeaInRunningState(mode) ejea ! hashError - eventually { ejea.stateData should be(initialData.copy(hashes = Some(Failure(hashError.reason)))) } + eventually(ejea.stateData should be(initialData.copy(hashes = Some(Failure(hashError.reason))))) ejea.stateName should be(RunningJob) ejea ! 
failedResponse expectJobStoreWriteFailed(FailedResponseData(failedResponse, Some(Failure(hashError.reason))), retryable) @@ -235,5 +260,6 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit } def initialData = ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, None) - def ejeaInRunningState(mode: CallCachingMode = CallCachingActivity(ReadAndWriteCache)) = helper.buildEJEA(callCachingMode = mode).setStateInline(state = RunningJob, data = initialData) + def ejeaInRunningState(mode: CallCachingMode = CallCachingActivity(ReadAndWriteCache)) = + helper.buildEJEA(callCachingMode = mode).setStateInline(state = RunningJob, data = initialData) } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingCallCacheSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingCallCacheSpec.scala index c12b3ffceeb..d9a273c4cc0 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingCallCacheSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingCallCacheSpec.scala @@ -8,9 +8,13 @@ import cromwell.engine.workflow.lifecycle.execution.ejea.HasJobSuccessResponse.S import scala.util.Success import scala.util.control.NoStackTrace -class EjeaUpdatingCallCacheSpec extends EngineJobExecutionActorSpec with HasJobSuccessResponse with CanExpectJobStoreWrites with HasJobFailureResponses { +class EjeaUpdatingCallCacheSpec + extends EngineJobExecutionActorSpec + with HasJobSuccessResponse + with CanExpectJobStoreWrites + with HasJobFailureResponses { - override implicit val stateUnderTest = UpdatingCallCache + implicit override val stateUnderTest = UpdatingCallCache "An EJEA in UpdatingCallCache state" should { diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala index 61cb2eb693e..cd585df22b8 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala @@ -8,9 +8,12 @@ import cromwell.engine.workflow.lifecycle.execution.ejea.HasJobSuccessResponse.S import scala.util.Success -class EjeaUpdatingJobStoreSpec extends EngineJobExecutionActorSpec with HasJobSuccessResponse with HasJobFailureResponses { +class EjeaUpdatingJobStoreSpec + extends EngineJobExecutionActorSpec + with HasJobSuccessResponse + with HasJobFailureResponses { - override implicit val stateUnderTest = UpdatingJobStore + implicit override val stateUnderTest = UpdatingJobStore "An EJEA in UpdatingJobStoreSpec" should { @@ -31,7 +34,9 @@ class EjeaUpdatingJobStoreSpec extends EngineJobExecutionActorSpec with HasJobSu s"Create a suitable failure if the JobStore write fails" in { val response = successResponse ejea = ejeaInUpdatingJobStoreState(response) - val exception = new Exception("I loved Ophelia: forty thousand brothers\\ Could not, with all their quantity of love,\\ Make up my sum. What wilt thou do for her?") + val exception = new Exception( + "I loved Ophelia: forty thousand brothers\\ Could not, with all their quantity of love,\\ Make up my sum. What wilt thou do for her?" + ) ejea ! 
JobStoreWriteFailure(exception) helper.replyToProbe.expectMsgPF(awaitTimeout) { case JobFailedNonRetryableResponse(jobDescriptorKey, reason, None) => @@ -43,7 +48,8 @@ class EjeaUpdatingJobStoreSpec extends EngineJobExecutionActorSpec with HasJobSu } def ejeaInUpdatingJobStoreState(response: BackendJobExecutionResponse) = { - val pendingResponseData = ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, Some(Success(SuccessfulCallCacheHashes))) + val pendingResponseData = + ResponsePendingData(helper.backendJobDescriptor, helper.bjeaProps, Some(Success(SuccessfulCallCacheHashes))) val newData = response match { case success: JobSucceededResponse => pendingResponseData.withSuccessResponse(success) case failed: BackendJobFailedResponse => pendingResponseData.withFailedResponse(failed) diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaWaitingForValueStoreSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaWaitingForValueStoreSpec.scala index 983f622a22e..304d94303f7 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaWaitingForValueStoreSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaWaitingForValueStoreSpec.scala @@ -7,17 +7,21 @@ import cromwell.engine.workflow.lifecycle.execution.stores.ValueStore class EjeaWaitingForValueStoreSpec extends EngineJobExecutionActorSpec { - override implicit val stateUnderTest = CheckingJobStore + implicit override val stateUnderTest = CheckingJobStore "An EJEA in EjeaWaitingForValueStore state should" should { "prepare the job when receiving the output store" in { createWaitingForValueStoreEjea() val valueStore = ValueStore.empty ejea ! valueStore - helper.jobPreparationProbe.expectMsg(awaitTimeout, "expecting CallPreparation Start", CallPreparation.Start(valueStore)) + helper.jobPreparationProbe.expectMsg(awaitTimeout, + "expecting CallPreparation Start", + CallPreparation.Start(valueStore) + ) ejea.stateName should be(PreparingJob) } } - private def createWaitingForValueStoreEjea(): Unit = { ejea = helper.buildEJEA(restarting = true).setStateInline(state = WaitingForValueStore, data = NoData) } + private def createWaitingForValueStoreEjea(): Unit = ejea = + helper.buildEJEA(restarting = true).setStateInline(state = WaitingForValueStore, data = NoData) } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala index e1e613a6189..24a6be01ffc 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala @@ -16,8 +16,11 @@ import org.scalatest.wordspec.AnyWordSpecLike import scala.concurrent.duration._ import scala.language.postfixOps -trait EngineJobExecutionActorSpec extends AbstractEngineJobExecutionActorSpec - with Matchers with BeforeAndAfterAll with BeforeAndAfter { +trait EngineJobExecutionActorSpec + extends AbstractEngineJobExecutionActorSpec + with Matchers + with BeforeAndAfterAll + with BeforeAndAfter { // If we WANT something to happen, make sure it happens within this window: val awaitTimeout: FiniteDuration = 10 seconds @@ -28,7 +31,8 @@ trait EngineJobExecutionActorSpec extends AbstractEngineJobExecutionActorSpec val allowMultipleCacheCycles = 
false - implicit override val patienceConfig: PatienceConfig = PatienceConfig(timeout = scaled(awaitTimeout), interval = scaled(awaitAlmostNothing)) + implicit override val patienceConfig: PatienceConfig = + PatienceConfig(timeout = scaled(awaitTimeout), interval = scaled(awaitAlmostNothing)) // The default values for these are "null". The helper is created in "before", the ejea is up to the test cases private[ejea] var helper: PerTestHelper = _ @@ -42,7 +46,8 @@ trait EngineJobExecutionActorSpec extends AbstractEngineJobExecutionActorSpec List( ("FetchCachedResultsActor", helper.fetchCachedResultsActorCreations), ("JobHashingActor", helper.jobHashingInitializations), - ("CallCacheInvalidateActor", helper.invalidateCacheActorCreations)) foreach { + ("CallCacheInvalidateActor", helper.invalidateCacheActorCreations) + ) foreach { case (name, GotTooMany(list)) if !allowMultipleCacheCycles => fail(s"Too many $name creations (${list.size})") case _ => // Fine. } @@ -51,15 +56,23 @@ trait EngineJobExecutionActorSpec extends AbstractEngineJobExecutionActorSpec } // Some helper lists - val CallCachingModes = List(CallCachingOff, CallCachingActivity(ReadCache), CallCachingActivity(WriteCache), CallCachingActivity(ReadAndWriteCache)) - case class RestartOrExecuteCommandTuple(operationName: String, restarting: Boolean, expectedMessageToBjea: BackendJobExecutionActorCommand) + val CallCachingModes = List(CallCachingOff, + CallCachingActivity(ReadCache), + CallCachingActivity(WriteCache), + CallCachingActivity(ReadAndWriteCache) + ) + case class RestartOrExecuteCommandTuple(operationName: String, + restarting: Boolean, + expectedMessageToBjea: BackendJobExecutionActorCommand + ) val RestartOrExecuteCommandTuples = List( RestartOrExecuteCommandTuple("execute", restarting = false, BackendJobExecutionActor.ExecuteJobCommand), - RestartOrExecuteCommandTuple("restart", restarting = true, BackendJobExecutionActor.RecoverJobCommand)) + RestartOrExecuteCommandTuple("restart", restarting = true, BackendJobExecutionActor.RecoverJobCommand) + ) } object EngineJobExecutionActorSpec { - implicit class EnhancedTestEJEA[S,D,T <: Actor](me: TestFSMRef[S, D, T]) { + implicit class EnhancedTestEJEA[S, D, T <: Actor](me: TestFSMRef[S, D, T]) { // Like setState, but mirrors back the EJEA (for easier inlining) def setStateInline(state: S = me.stateName, data: D = me.stateData): TestFSMRef[S, D, T] = { me.setState(state, data) @@ -127,8 +140,16 @@ object AbstractEngineJobExecutionActorSpec { private lazy val testConfig = ConfigFactory.parseString(ConfigText) } -abstract class AbstractEngineJobExecutionActorSpec extends TestKitSuite - with DefaultTimeout with ImplicitSender with Matchers with ScalaFutures with Eventually with Suite - with OneInstancePerTest with BeforeAndAfterAll with AnyWordSpecLike { +abstract class AbstractEngineJobExecutionActorSpec + extends TestKitSuite + with DefaultTimeout + with ImplicitSender + with Matchers + with ScalaFutures + with Eventually + with Suite + with OneInstancePerTest + with BeforeAndAfterAll + with AnyWordSpecLike { override protected lazy val actorSystemConfig: Config = AbstractEngineJobExecutionActorSpec.testConfig } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpecUtil.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpecUtil.scala index bf4722c6370..d9917f37404 100644 --- 
a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpecUtil.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpecUtil.scala @@ -1,11 +1,16 @@ package cromwell.engine.workflow.lifecycle.execution.ejea -import cromwell.backend.BackendCacheHitCopyingActor.{CopyingOutputsFailedResponse, CopyAttemptError, BlacklistSkip} +import cromwell.backend.BackendCacheHitCopyingActor.{BlacklistSkip, CopyAttemptError, CopyingOutputsFailedResponse} import cromwell.backend.{BackendJobDescriptor, MetricableCacheCopyErrorCategory} import cromwell.backend.BackendJobExecutionActor._ import cromwell.core.callcaching._ import cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{CallCacheHashes, FileHashes} -import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{EJEAData, SucceededResponseData, UpdatingCallCache, UpdatingJobStore} +import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{ + EJEAData, + SucceededResponseData, + UpdatingCallCache, + UpdatingJobStore +} import cromwell.jobstore.JobStoreActor.RegisterJobCompleted import cromwell.jobstore.{JobResultFailure, JobResultSuccess, JobStoreKey} import cromwell.services.CallCaching.CallCachingEntryId @@ -27,13 +32,14 @@ private[ejea] trait CanValidateJobStoreKey { self: EngineJobExecutionActorSpec = private[ejea] trait CanExpectCacheWrites extends Eventually { self: EngineJobExecutionActorSpec => def expectCacheWrite(expectedResponse: JobSucceededResponse, expectedCallCacheHashes: CallCacheHashes): Unit = { - eventually { ejea.stateName should be(UpdatingCallCache) } + eventually(ejea.stateName should be(UpdatingCallCache)) ejea.stateData should be(SucceededResponseData(expectedResponse, Some(Success(expectedCallCacheHashes)))) () } } -private[ejea] trait CanExpectJobStoreWrites extends CanValidateJobStoreKey { self: EngineJobExecutionActorSpec with HasJobSuccessResponse with HasJobFailureResponses => +private[ejea] trait CanExpectJobStoreWrites extends CanValidateJobStoreKey { + self: EngineJobExecutionActorSpec with HasJobSuccessResponse with HasJobFailureResponses => def expectJobStoreWrite(expectedData: EJEAData): Unit = { helper.jobStoreProbe.expectMsgPF(max = awaitTimeout, hint = "Job Store Write") { @@ -63,7 +69,7 @@ private[ejea] trait CanExpectJobStoreWrites extends CanValidateJobStoreKey { sel private[ejea] trait CanExpectHashingInitialization extends Eventually { self: EngineJobExecutionActorSpec => def expectHashingActorInitialization(mode: CallCachingMode, jobDescriptor: BackendJobDescriptor): Unit = { - eventually { helper.jobHashingInitializations.hasExactlyOne should be(true) } + eventually(helper.jobHashingInitializations.hasExactlyOne should be(true)) helper.jobHashingInitializations.checkIt { initialization => initialization._1 should be(jobDescriptor) initialization._2 should be(mode) @@ -86,7 +92,7 @@ private[ejea] trait CanExpectFetchCachedResults extends Eventually { self: Engin private[ejea] trait CanExpectCacheInvalidation extends Eventually { self: EngineJobExecutionActorSpec => def expectInvalidateCallCacheActor(expectedCacheId: CallCachingEntryId): Unit = { - eventually { helper.invalidateCacheActorCreations.hasExactlyOne should be(true) } + eventually(helper.invalidateCacheActorCreations.hasExactlyOne should be(true)) helper.invalidateCacheActorCreations.checkIt { cacheId => cacheId shouldBe expectedCacheId } @@ -96,16 +102,25 @@ private[ejea] trait 
CanExpectCacheInvalidation extends Eventually { self: Engine private[ejea] trait HasJobSuccessResponse { self: EngineJobExecutionActorSpec => val successRc = Option(171) val successOutputs = WomMocks.mockOutputExpectations(Map("a" -> WomInteger(3), "b" -> WomString("bee"))) - def successResponse = JobSucceededResponse(helper.jobDescriptorKey, successRc, successOutputs, None, Seq.empty, None, resultGenerationMode = RunOnBackend) + def successResponse = JobSucceededResponse(helper.jobDescriptorKey, + successRc, + successOutputs, + None, + Seq.empty, + None, + resultGenerationMode = RunOnBackend + ) } private[ejea] object HasJobSuccessResponse { val SuccessfulCallCacheHashes = CallCacheHashes( Set(HashResult(HashKey("whatever you want"), HashValue("whatever you need"))), "initialHash", - Option(FileHashes( - Set(HashResult(HashKey("whatever file you want"), HashValue("whatever file you need"))), - "fileHash" - )) + Option( + FileHashes( + Set(HashResult(HashKey("whatever file you want"), HashValue("whatever file you need"))), + "fileHash" + ) + ) ) } @@ -123,6 +138,11 @@ private[ejea] trait HasCopyFailureResponses { self: EngineJobExecutionActorSpec new Exception("Deliberate failure for test case: failed to copy cache outputs!") with NoStackTrace // Need to delay making the response because job descriptors come from the per-test "helper", which is null outside tests! - def copyAttemptFailedResponse(attemptNumber: Int) = CopyingOutputsFailedResponse(helper.jobDescriptorKey, attemptNumber, CopyAttemptError(copyFailureReason)) - def cacheHitBlacklistedResponse(attemptNumber: Int) = CopyingOutputsFailedResponse(helper.jobDescriptorKey, attemptNumber, BlacklistSkip(MetricableCacheCopyErrorCategory.BucketBlacklisted)) + def copyAttemptFailedResponse(attemptNumber: Int) = + CopyingOutputsFailedResponse(helper.jobDescriptorKey, attemptNumber, CopyAttemptError(copyFailureReason)) + def cacheHitBlacklistedResponse(attemptNumber: Int) = CopyingOutputsFailedResponse( + helper.jobDescriptorKey, + attemptNumber, + BlacklistSkip(MetricableCacheCopyErrorCategory.BucketBlacklisted) + ) } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorTransitionsSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorTransitionsSpec.scala index c4b532f9453..dad095c7002 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorTransitionsSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorTransitionsSpec.scala @@ -20,13 +20,13 @@ class EngineJobExecutionActorTransitionsSpec extends AnyFlatSpec with CromwellTi val cacheReadCycles = 5 - val longCallCachingCycleStateSequence = List( - Pending, - RequestingExecutionToken, - CheckingJobStore, - CheckingCallCache, - FetchingCachedOutputsFromDatabase, - CheckingCacheEntryExistence) ++ callCachingStateCycle * cacheReadCycles ++ List( + val longCallCachingCycleStateSequence = List(Pending, + RequestingExecutionToken, + CheckingJobStore, + CheckingCallCache, + FetchingCachedOutputsFromDatabase, + CheckingCacheEntryExistence + ) ++ callCachingStateCycle * cacheReadCycles ++ List( WaitingForValueStore, PreparingJob, RunningJob, @@ -39,26 +39,28 @@ class EngineJobExecutionActorTransitionsSpec extends AnyFlatSpec with CromwellTi val transitionSequence = longCallCachingCycleStateSequence.sliding(2) map { case fromState :: toState :: _ => 
EngineJobExecutionActorState.transitionEventString(fromState, toState) case _ => fail("Programmer blunder. This test writer had one job to do...") - } collect { - case Some(stateName) => stateName + } collect { case Some(stateName) => + stateName } - transitionSequence.toList should be(List( - // "Pending", <-- NB: There's no transition into "Pending" because that was the start state - "RequestingExecutionToken", - "CheckingJobStore", - "CallCacheReading", - "WaitingForValueStore", - "PreparingJob", - "RunningJob", - "UpdatingCallCache", - "UpdatingJobStore", - )) + transitionSequence.toList should be( + List( + // "Pending", <-- NB: There's no transition into "Pending" because that was the start state + "RequestingExecutionToken", + "CheckingJobStore", + "CallCacheReading", + "WaitingForValueStore", + "PreparingJob", + "RunningJob", + "UpdatingCallCache", + "UpdatingJobStore" + ) + ) } } object EngineJobExecutionActorTransitionsSpec { implicit class MultipliableList[A](val list: List[A]) extends AnyVal { - final def *(i: Int): List[A] = if (i == 0 ) List.empty else if (i == 1) list else list ++ (list * (i - 1)) + final def *(i: Int): List[A] = if (i == 0) List.empty else if (i == 1) list else list ++ (list * (i - 1)) } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/ExpectOne.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/ExpectOne.scala index efc5f5966fd..ff142e48a59 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/ExpectOne.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/ExpectOne.scala @@ -1,8 +1,12 @@ package cromwell.engine.workflow.lifecycle.execution.ejea -private[ejea] sealed trait ExpectOne[+A] { - def checkIt(block: A => Any): Unit = throw new IllegalStateException("This ExpectOne must have exactly one element for checkIt to work") - def checkLatest(block: A => Any): Unit = throw new IllegalStateException("This ExpectOne must have at least one element for checkLatest to work") +sealed private[ejea] trait ExpectOne[+A] { + def checkIt(block: A => Any): Unit = throw new IllegalStateException( + "This ExpectOne must have exactly one element for checkIt to work" + ) + def checkLatest(block: A => Any): Unit = throw new IllegalStateException( + "This ExpectOne must have at least one element for checkLatest to work" + ) def hasAtLeastOne: Boolean def hasExactlyOne: Boolean def foundOne[B >: A](theFoundOne: B) = this match { @@ -25,7 +29,7 @@ private[ejea] case class GotOne[+A](theOne: A) extends ExpectOne[A] { } private[ejea] case class GotTooMany[+A](theOnes: List[A]) extends ExpectOne[A] { - override def checkLatest(block: A => Any): Unit = { block(theOnes.last); () } + override def checkLatest(block: A => Any): Unit = { block(theOnes.last); () } override def hasAtLeastOne = true override def hasExactlyOne = false -} \ No newline at end of file +} diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala index e1176318fc6..e72c8a90a45 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala @@ -10,7 +10,12 @@ import cromwell.core.{CallOutputs, HogGroup, WorkflowId, WorkflowOptions} import cromwell.engine.EngineWorkflowDescriptor import 
cromwell.engine.workflow.lifecycle.execution.ejea.EngineJobExecutionActorSpec._ import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor -import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{CallCachingParameters, EJEAData, EngineJobExecutionActorState, ResponsePendingData} +import cromwell.engine.workflow.lifecycle.execution.job.EngineJobExecutionActor.{ + CallCachingParameters, + EJEAData, + EngineJobExecutionActorState, + ResponsePendingData +} import cromwell.engine.workflow.mocks.{DeclarationMock, TaskMock, WdlWomExpressionMock} import cromwell.services.CallCaching.CallCachingEntryId import cromwell.util.AkkaTestUtil._ @@ -26,7 +31,9 @@ import scala.concurrent.duration.FiniteDuration import scala.util.{Success, Try} private[ejea] class PerTestHelper(implicit val system: ActorSystem) - extends TaskMock with WdlWomExpressionMock with DeclarationMock { + extends TaskMock + with WdlWomExpressionMock + with DeclarationMock { val workflowId: WorkflowId = WorkflowId.randomId() val workflowName = "wf" @@ -35,22 +42,26 @@ private[ejea] class PerTestHelper(implicit val system: ActorSystem) val jobIndex: Option[Int] = Option(1) val jobAttempt = 1 - val task: CallableTaskDefinition = WomMocks.mockTaskDefinition(taskName).copy( - inputs = List(OverridableInputDefinitionWithDefault("inInt", WomIntegerType, mockIntExpression(543))), - outputs = List(OutputDefinition("outString", WomStringType, mockStringExpression("hello"))) - ) + val task: CallableTaskDefinition = WomMocks + .mockTaskDefinition(taskName) + .copy( + inputs = List(OverridableInputDefinitionWithDefault("inInt", WomIntegerType, mockIntExpression(543))), + outputs = List(OutputDefinition("outString", WomStringType, mockStringExpression("hello"))) + ) val call: CommandCallNode = WomMocks.mockTaskCall(WomIdentifier(taskName, jobFqn), task) val jobDescriptorKey: BackendJobDescriptorKey = BackendJobDescriptorKey(call, jobIndex, jobAttempt) val backendWorkflowDescriptor: BackendWorkflowDescriptor = BackendWorkflowDescriptor(id = workflowId, - callable = null, - knownValues = null, - workflowOptions = WorkflowOptions.empty, - customLabels = null, - hogGroup = HogGroup("foo"), - List.empty, - None) + callable = null, + knownValues = null, + workflowOptions = + WorkflowOptions.empty, + customLabels = null, + hogGroup = HogGroup("foo"), + List.empty, + None + ) val backendJobDescriptor: BackendJobDescriptor = BackendJobDescriptor( workflowDescriptor = backendWorkflowDescriptor, @@ -59,7 +70,7 @@ private[ejea] class PerTestHelper(implicit val system: ActorSystem) evaluatedTaskInputs = Map.empty, maybeCallCachingEligible = FloatingDockerTagWithoutHash("ubuntu:latest"), dockerSize = None, - prefetchedKvStoreEntries = Map.empty, + prefetchedKvStoreEntries = Map.empty ) var fetchCachedResultsActorCreations: ExpectOne[(CallCachingEntryId, Seq[OutputDefinition])] = NothingYet @@ -83,55 +94,75 @@ private[ejea] class PerTestHelper(implicit val system: ActorSystem) val jobExecutionTokenDispenserProbe: TestProbe = TestProbe() val ejhaProbe: TestProbe = TestProbe() - def buildFactory(backendConfigurationDescriptor: BackendConfigurationDescriptor): BackendLifecycleActorFactory = new BackendLifecycleActorFactory { + def buildFactory(backendConfigurationDescriptor: BackendConfigurationDescriptor): BackendLifecycleActorFactory = + new BackendLifecycleActorFactory { - override val name = "PerTestHelper" + override val name = "PerTestHelper" - override val configurationDescriptor: BackendConfigurationDescriptor = 
backendConfigurationDescriptor + override val configurationDescriptor: BackendConfigurationDescriptor = backendConfigurationDescriptor - override def jobExecutionActorProps(jobDescriptor: BackendJobDescriptor, - initializationData: Option[BackendInitializationData], - serviceRegistryActor: ActorRef, - ioActor: ActorRef, - backendSingletonActor: Option[ActorRef]): Props = bjeaProps + override def jobExecutionActorProps(jobDescriptor: BackendJobDescriptor, + initializationData: Option[BackendInitializationData], + serviceRegistryActor: ActorRef, + ioActor: ActorRef, + backendSingletonActor: Option[ActorRef] + ): Props = bjeaProps - override def cacheHitCopyingActorProps: Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Int, Option[BlacklistCache]) => Props] = Option((_, _, _, _, _, _) => callCacheHitCopyingProbe.props) + override def cacheHitCopyingActorProps: Option[ + (BackendJobDescriptor, + Option[BackendInitializationData], + ActorRef, + ActorRef, + Int, + Option[BlacklistCache] + ) => Props + ] = Option((_, _, _, _, _, _) => callCacheHitCopyingProbe.props) - override def expressionLanguageFunctions(workflowDescriptor: BackendWorkflowDescriptor, - jobKey: BackendJobDescriptorKey, - initializationData: Option[BackendInitializationData], - ioActorProxy: ActorRef, - ec: ExecutionContext): IoFunctionSet = { - NoIoFunctionSet - } + override def expressionLanguageFunctions(workflowDescriptor: BackendWorkflowDescriptor, + jobKey: BackendJobDescriptorKey, + initializationData: Option[BackendInitializationData], + ioActorProxy: ActorRef, + ec: ExecutionContext + ): IoFunctionSet = + NoIoFunctionSet - override def fileHashingActorProps: - Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Option[ActorRef]) => Props] = { - Option((_, _, _, _, _) => Props.empty) - } + override def fileHashingActorProps: Option[ + (BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Option[ActorRef]) => Props + ] = + Option((_, _, _, _, _) => Props.empty) - // These two factory methods should never be called from EJEA or any of its descendants: - override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, - ioActor: ActorRef, - calls: Set[CommandCallNode], - jobExecutionMap: JobExecutionMap, - workflowOutputs: CallOutputs, - initializationData: Option[BackendInitializationData]): Option[Props] = throw new UnsupportedOperationException("Unexpected finalization actor creation!") - override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, + // These two factory methods should never be called from EJEA or any of its descendants: + override def workflowFinalizationActorProps(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - serviceRegistryActor: ActorRef, - restarting: Boolean): Option[Props] = throw new UnsupportedOperationException("Unexpected finalization actor creation!") - } + jobExecutionMap: JobExecutionMap, + workflowOutputs: CallOutputs, + initializationData: Option[BackendInitializationData] + ): Option[Props] = throw new UnsupportedOperationException("Unexpected finalization actor creation!") + override def workflowInitializationActorProps(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + serviceRegistryActor: ActorRef, + restarting: Boolean + ): Option[Props] = throw new UnsupportedOperationException("Unexpected finalization actor creation!") + } def 
buildEJEA(restarting: Boolean = true, callCachingMode: CallCachingMode = CallCachingOff, callCachingMaxFailedCopyAttempts: Int = 1000000, - backendConfigurationDescriptor: BackendConfigurationDescriptor = TestConfig.emptyBackendConfigDescriptor) - (implicit startingState: EngineJobExecutionActorState): TestFSMRef[EngineJobExecutionActorState, EJEAData, MockEjea] = { + backendConfigurationDescriptor: BackendConfigurationDescriptor = TestConfig.emptyBackendConfigDescriptor + )(implicit + startingState: EngineJobExecutionActorState + ): TestFSMRef[EngineJobExecutionActorState, EJEAData, MockEjea] = { val factory: BackendLifecycleActorFactory = buildFactory(backendConfigurationDescriptor) - val descriptor = EngineWorkflowDescriptor(WomMocks.mockWorkflowDefinition(workflowName), backendWorkflowDescriptor, null, null, null, callCachingMode) + val descriptor = EngineWorkflowDescriptor(WomMocks.mockWorkflowDefinition(workflowName), + backendWorkflowDescriptor, + null, + null, + null, + callCachingMode + ) val callCachingParameters = CallCachingParameters( mode = callCachingMode, @@ -143,23 +174,30 @@ private[ejea] class PerTestHelper(implicit val system: ActorSystem) fileHashBatchSize = 100 ) - val myBrandNewEjea = new TestFSMRef[EngineJobExecutionActorState, EJEAData, MockEjea](system, Props(new MockEjea( - helper = this, - jobPreparationProbe = jobPreparationProbe, - replyTo = replyToProbe.ref, - jobDescriptorKey = jobDescriptorKey, - workflowDescriptor = descriptor, - factory = factory, - initializationData = None, - restarting = restarting, - serviceRegistryActor = serviceRegistryProbe.ref, - ioActor = ioActorProbe.ref, - jobStoreActor = jobStoreProbe.ref, - dockerHashActor = dockerHashActorProbe.ref, - jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserProbe.ref, - jobExecutionTokenDispenserActor = jobExecutionTokenDispenserProbe.ref, - callCachingParameters = callCachingParameters - )), parentProbe.ref, s"EngineJobExecutionActorSpec-$workflowId") + val myBrandNewEjea = new TestFSMRef[EngineJobExecutionActorState, EJEAData, MockEjea]( + system, + Props( + new MockEjea( + helper = this, + jobPreparationProbe = jobPreparationProbe, + replyTo = replyToProbe.ref, + jobDescriptorKey = jobDescriptorKey, + workflowDescriptor = descriptor, + factory = factory, + initializationData = None, + restarting = restarting, + serviceRegistryActor = serviceRegistryProbe.ref, + ioActor = ioActorProbe.ref, + jobStoreActor = jobStoreProbe.ref, + dockerHashActor = dockerHashActorProbe.ref, + jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserProbe.ref, + jobExecutionTokenDispenserActor = jobExecutionTokenDispenserProbe.ref, + callCachingParameters = callCachingParameters + ) + ), + parentProbe.ref, + s"EngineJobExecutionActorSpec-$workflowId" + ) deathwatch watch myBrandNewEjea myBrandNewEjea.setStateInline(state = startingState) @@ -180,38 +218,41 @@ private[ejea] class MockEjea(helper: PerTestHelper, dockerHashActor: ActorRef, jobRestartCheckTokenDispenserActor: ActorRef, jobExecutionTokenDispenserActor: ActorRef, - callCachingParameters: EngineJobExecutionActor.CallCachingParameters) extends EngineJobExecutionActor( - replyTo = replyTo, - jobDescriptorKey = jobDescriptorKey, - workflowDescriptor = workflowDescriptor, - backendLifecycleActorFactory = factory, - initializationData = initializationData, - restarting = restarting, - serviceRegistryActor = serviceRegistryActor, - ioActor = ioActor, - jobStoreActor = jobStoreActor, - workflowDockerLookupActor = dockerHashActor, - 
jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, - jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, - backendSingletonActor = None, - command = if (restarting) RecoverJobCommand else ExecuteJobCommand, - callCachingParameters = callCachingParameters) { + callCachingParameters: EngineJobExecutionActor.CallCachingParameters +) extends EngineJobExecutionActor( + replyTo = replyTo, + jobDescriptorKey = jobDescriptorKey, + workflowDescriptor = workflowDescriptor, + backendLifecycleActorFactory = factory, + initializationData = initializationData, + restarting = restarting, + serviceRegistryActor = serviceRegistryActor, + ioActor = ioActor, + jobStoreActor = jobStoreActor, + workflowDockerLookupActor = dockerHashActor, + jobRestartCheckTokenDispenserActor = jobRestartCheckTokenDispenserActor, + jobExecutionTokenDispenserActor = jobExecutionTokenDispenserActor, + backendSingletonActor = None, + command = if (restarting) RecoverJobCommand else ExecuteJobCommand, + callCachingParameters = callCachingParameters + ) { implicit val system: ActorSystem = context.system - override def makeFetchCachedResultsActor(cacheId: CallCachingEntryId): Unit = { - helper.fetchCachedResultsActorCreations = - helper.fetchCachedResultsActorCreations.foundOne((cacheId, null)) - } + override def makeFetchCachedResultsActor(cacheId: CallCachingEntryId): Unit = + helper.fetchCachedResultsActorCreations = helper.fetchCachedResultsActorCreations.foundOne((cacheId, null)) override def initializeJobHashing(jobDescriptor: BackendJobDescriptor, activity: CallCachingActivity, - callCachingEligible: CallCachingEligible): Try[ActorRef] = { + callCachingEligible: CallCachingEligible + ): Try[ActorRef] = { helper.jobHashingInitializations = helper.jobHashingInitializations.foundOne((jobDescriptor, activity)) Success(helper.ejhaProbe.ref) } override def createBackendJobExecutionActor(data: ResponsePendingData): ActorRef = helper.bjeaProbe.ref - override def invalidateCacheHit(cacheId: CallCachingEntryId): Unit = { helper.invalidateCacheActorCreations = helper.invalidateCacheActorCreations.foundOne(cacheId) } + override def invalidateCacheHit(cacheId: CallCachingEntryId): Unit = helper.invalidateCacheActorCreations = + helper.invalidateCacheActorCreations.foundOne(cacheId) override def createJobPreparationActor(jobPrepProps: Props, name: String): ActorRef = jobPreparationProbe.ref override def onTimedTransition(from: EngineJobExecutionActorState, to: EngineJobExecutionActorState, - duration: FiniteDuration): Unit = {} + duration: FiniteDuration + ): Unit = {} } diff --git a/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala b/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala index e2734b68fef..b1133d80e1d 100644 --- a/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala +++ b/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala @@ -24,8 +24,12 @@ object JobStoreServiceSpec { private val EmptyExpression = PlaceholderWomExpression(Set.empty, WomStringType) } -class JobStoreServiceSpec extends CromwellTestKitWordSpec with Matchers with CoordinatedWorkflowStoreActorBuilder - with SqlWorkflowStoreBuilder with CromwellTimeoutSpec { +class JobStoreServiceSpec + extends CromwellTestKitWordSpec + with Matchers + with CoordinatedWorkflowStoreActorBuilder + with SqlWorkflowStoreBuilder + with CromwellTimeoutSpec { "JobStoreService" should { "register Job and Workflow completions and read back (query) the result" in { @@ -33,18 +37,18 @@ class 
JobStoreServiceSpec extends CromwellTestKitWordSpec with Matchers with Coo lazy val jobStore: JobStore = new SqlJobStore(EngineServicesStore.engineDatabaseInterface) val jobStoreService = system.actorOf( - props = - JobStoreActor.props( - database = jobStore, - registryActor = dummyServiceRegistryActor, - workflowStoreAccess = access("coordinatedAccessActor-register")(workflowStore) - ), - name = "jobStoreService-register", + props = JobStoreActor.props( + database = jobStore, + registryActor = dummyServiceRegistryActor, + workflowStoreAccess = access("coordinatedAccessActor-register")(workflowStore) + ), + name = "jobStoreService-register" ) val workflowId = WorkflowId.randomId() - val mockTask = WomMocks.mockTaskDefinition("bar") - .copy(outputs = List(OutputDefinition("baz", WomStringType, EmptyExpression))) + val mockTask = WomMocks + .mockTaskDefinition("bar") + .copy(outputs = List(OutputDefinition("baz", WomStringType, EmptyExpression))) val successCall = WomMocks.mockTaskCall(WomIdentifier("bar"), definition = mockTask) val successKey = BackendJobDescriptorKey(successCall, None, 1).toJobStoreKey(workflowId) @@ -68,12 +72,16 @@ class JobStoreServiceSpec extends CromwellTestKitWordSpec with Matchers with Coo jobStoreService ! QueryJobCompletion(failureKey, mockTask.outputs map WomMocks.mockOutputPort) expectMsgType[JobNotComplete.type](MaxWait) - jobStoreService ! RegisterJobCompleted(failureKey, JobResultFailure(Option(11), new IllegalArgumentException("Insufficient funds"), retryable = false)) + jobStoreService ! RegisterJobCompleted(failureKey, + JobResultFailure(Option(11), + new IllegalArgumentException("Insufficient funds"), + retryable = false + ) + ) expectMsgType[JobStoreWriteSuccess](MaxWait) jobStoreService ! QueryJobCompletion(failureKey, mockTask.outputs map WomMocks.mockOutputPort) - expectMsgPF(MaxWait) { - case JobComplete(JobResultFailure(Some(11), _, false)) => + expectMsgPF(MaxWait) { case JobComplete(JobResultFailure(Some(11), _, false)) => } jobStoreService ! 
RegisterWorkflowCompleted(workflowId) diff --git a/server/src/test/scala/cromwell/jobstore/JobStoreWriterSpec.scala b/server/src/test/scala/cromwell/jobstore/JobStoreWriterSpec.scala index f8ba551dab5..50ee1c4281b 100644 --- a/server/src/test/scala/cromwell/jobstore/JobStoreWriterSpec.scala +++ b/server/src/test/scala/cromwell/jobstore/JobStoreWriterSpec.scala @@ -8,7 +8,12 @@ import cromwell.core.actor.BatchActor.{BatchActorState, CommandAndReplyTo, Proce import cromwell.core.{CallOutputs, WorkflowId} import cromwell.engine.workflow.{CoordinatedWorkflowStoreActorBuilder, SqlWorkflowStoreBuilder} import cromwell.jobstore.JobStore.{JobCompletion, WorkflowCompletion} -import cromwell.jobstore.JobStoreActor.{JobStoreWriteSuccess, JobStoreWriterCommand, RegisterJobCompleted, RegisterWorkflowCompleted} +import cromwell.jobstore.JobStoreActor.{ + JobStoreWriterCommand, + JobStoreWriteSuccess, + RegisterJobCompleted, + RegisterWorkflowCompleted +} import org.scalatest.BeforeAndAfter import org.scalatest.matchers.should.Matchers import wom.graph.GraphNodePort.OutputPort @@ -17,7 +22,13 @@ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future, Promise} import scala.language.postfixOps -class JobStoreWriterSpec extends CromwellTestKitWordSpec with SqlWorkflowStoreBuilder with CoordinatedWorkflowStoreActorBuilder with Matchers with BeforeAndAfter with CromwellTimeoutSpec { +class JobStoreWriterSpec + extends CromwellTestKitWordSpec + with SqlWorkflowStoreBuilder + with CoordinatedWorkflowStoreActorBuilder + with Matchers + with BeforeAndAfter + with CromwellTimeoutSpec { var database: WriteCountingJobStore = _ var workflowId: WorkflowId = _ @@ -29,9 +40,13 @@ class JobStoreWriterSpec extends CromwellTestKitWordSpec with SqlWorkflowStoreBu workflowId = WorkflowId.randomId() } - private def sendRegisterCompletion(jobStoreWriter: TestFSMRef[BatchActorState, WeightedQueue[CommandAndReplyTo[JobStoreWriterCommand], Int], JobStoreWriterActor])(attempt: Int): Unit = { + private def sendRegisterCompletion( + jobStoreWriter: TestFSMRef[BatchActorState, + WeightedQueue[CommandAndReplyTo[JobStoreWriterCommand], Int], + JobStoreWriterActor + ] + )(attempt: Int): Unit = jobStoreWriter ! 
RegisterJobCompleted(jobKey(attempt), successResult) - } private def jobKey(attempt: Int): JobStoreKey = JobStoreKey(workflowId, s"call.fqn", None, attempt) @@ -43,7 +58,7 @@ class JobStoreWriterSpec extends CromwellTestKitWordSpec with SqlWorkflowStoreBu () } - //noinspection SameParameterValue + // noinspection SameParameterValue private def assertDb(totalWritesCalled: Int, jobCompletionsRecorded: Int, workflowCompletionsRecorded: Int): Unit = { database.totalWritesCalled shouldBe totalWritesCalled database.jobCompletionsRecorded shouldBe jobCompletionsRecorded @@ -51,7 +66,7 @@ class JobStoreWriterSpec extends CromwellTestKitWordSpec with SqlWorkflowStoreBu () } - //noinspection SameParameterValue + // noinspection SameParameterValue private def assertReceived(expectedJobStoreWriteAcks: Int): Unit = { val received = receiveN(expectedJobStoreWriteAcks, 10 seconds) received foreach { @@ -73,9 +88,9 @@ class JobStoreWriterSpec extends CromwellTestKitWordSpec with SqlWorkflowStoreBu flushRate = flushFrequency, serviceRegistryActor = TestProbe("serviceRegistryActor-collapse").ref, threshold = 1000, - workflowStoreAccess = access("coordinatedAccessActor-collapse")(workflowStore), + workflowStoreAccess = access("coordinatedAccessActor-collapse")(workflowStore) ), - name = "jobStoreWriter-collapse", + name = "jobStoreWriter-collapse" ) // Send a job completion. The database will hang. @@ -111,9 +126,9 @@ class JobStoreWriterSpec extends CromwellTestKitWordSpec with SqlWorkflowStoreBu flushRate = flushFrequency, serviceRegistryActor = TestProbe("serviceRegistryActor-skip").ref, threshold = 1000, - workflowStoreAccess = access("coordinatedAccessActor-skip")(workflowStore), + workflowStoreAccess = access("coordinatedAccessActor-skip")(workflowStore) ), - "jobStoreWriter-skip", + "jobStoreWriter-skip" ) // Send a job completion. The database will hang. @@ -143,7 +158,10 @@ class JobStoreWriterSpec extends CromwellTestKitWordSpec with SqlWorkflowStoreBu } } -class WriteCountingJobStore(var totalWritesCalled: Int, var jobCompletionsRecorded: Int, var workflowCompletionsRecorded: Int) extends JobStore { +class WriteCountingJobStore(var totalWritesCalled: Int, + var jobCompletionsRecorded: Int, + var workflowCompletionsRecorded: Int +) extends JobStore { // A Promise so that the calling tests can hang the writer on the db write. Once the promise is completed the writer is // released and all further messages will be written immediately. 
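The promise-gated stub described in that comment is the crux of these writer tests: the first database write returns a Future that completes only when the test releases it, so later completions queue up behind the "hung" write and can be asserted on as a single collapsed batch. A minimal standalone sketch of the same gating pattern (GatedWriter and release are illustrative names, not part of the Cromwell codebase):

  import scala.concurrent.{Future, Promise}

  // Illustrative sketch, not Cromwell's API: every write returns the same
  // gate future, so callers hang until the test calls release().
  class GatedWriter {
    private val writeGate = Promise[Unit]()
    var writesCalled: Int = 0

    def write(batch: Seq[String]): Future[Unit] = {
      writesCalled += 1
      writeGate.future // completes only once the gate is released
    }

    def release(): Unit = { writeGate.trySuccess(()); () }
  }
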
@@ -154,8 +172,10 @@ class WriteCountingJobStore(var totalWritesCalled: Int, var jobCompletionsRecord () } - override def writeToDatabase(workflowCompletions: Seq[WorkflowCompletion], jobCompletions: Seq[JobCompletion], batchSize: Int) - (implicit ec: ExecutionContext): Future[Unit] = { + override def writeToDatabase(workflowCompletions: Seq[WorkflowCompletion], + jobCompletions: Seq[JobCompletion], + batchSize: Int + )(implicit ec: ExecutionContext): Future[Unit] = { totalWritesCalled += 1 jobCompletionsRecorded += jobCompletions.size @@ -163,7 +183,9 @@ class WriteCountingJobStore(var totalWritesCalled: Int, var jobCompletionsRecord writePromise.future } - override def readJobResult(jobStoreKey: JobStoreKey, taskOutputs: Seq[OutputPort])(implicit ec: ExecutionContext): Future[Option[JobResult]] = throw new UnsupportedOperationException() + override def readJobResult(jobStoreKey: JobStoreKey, taskOutputs: Seq[OutputPort])(implicit + ec: ExecutionContext + ): Future[Option[JobResult]] = throw new UnsupportedOperationException() } object WriteCountingJobStore { diff --git a/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala b/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala index c52f356c87c..0b8aca0e890 100644 --- a/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala +++ b/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala @@ -29,14 +29,18 @@ object SubWorkflowStoreSpec { val EmptyExpression = WdlExpression.fromString(""" "" """) } -class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkflowStoreActorBuilder - with CromwellTimeoutSpec with Matchers { +class SubWorkflowStoreSpec + extends CromwellTestKitWordSpec + with CoordinatedWorkflowStoreActorBuilder + with CromwellTimeoutSpec + with Matchers { "SubWorkflowStore" should { "work" in { lazy val subWorkflowStore = new SqlSubWorkflowStore(EngineServicesStore.engineDatabaseInterface) val subWorkflowStoreService = system.actorOf(SubWorkflowStoreActor.props(subWorkflowStore)) - lazy val workflowStore = SqlWorkflowStore(EngineServicesStore.engineDatabaseInterface, MetadataServicesStore.metadataDatabaseInterface) + lazy val workflowStore = + SqlWorkflowStore(EngineServicesStore.engineDatabaseInterface, MetadataServicesStore.metadataDatabaseInterface) val workflowHeartbeatConfig = WorkflowHeartbeatConfig(CromwellTestKitSpec.DefaultConfig) val workflowStoreService = system.actorOf( WorkflowStoreActor.props( @@ -64,17 +68,19 @@ class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkf override def tag: String = "foobar" } - workflowStoreService ! SubmitWorkflow(WorkflowSourceFilesWithoutImports( - workflowSource = Option(""), - workflowUrl = None, - workflowRoot = None, - workflowType = Option("WDL"), - workflowTypeVersion = None, - inputsJson = "{}", - workflowOptions = WorkflowOptions.empty, - labelsJson = "{}", - warnings = Vector.empty, - requestedWorkflowId = None) + workflowStoreService ! 
SubmitWorkflow( + WorkflowSourceFilesWithoutImports( + workflowSource = Option(""), + workflowUrl = None, + workflowRoot = None, + workflowType = Option("WDL"), + workflowTypeVersion = None, + inputsJson = "{}", + workflowOptions = WorkflowOptions.empty, + labelsJson = "{}", + warnings = Vector.empty, + requestedWorkflowId = None + ) ) val rootWorkflowId = expectMsgType[WorkflowSubmittedToStore](10 seconds).workflowId @@ -95,7 +101,7 @@ class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkf callIndex = jobKey.index.fromIndex, callAttempt = jobKey.attempt, subWorkflowExecutionUuid = subWorkflowId.toString, - subWorkflowStoreEntryId = Option(0), + subWorkflowStoreEntryId = Option(0) ) expectMsg[SubWorkflowFound](SubWorkflowFound(subWorkflowEntry)) @@ -112,7 +118,7 @@ class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkf callIndex = jobKey.index.fromIndex, callAttempt = jobKey.attempt, subWorkflowExecutionUuid = subSubWorkflowId.toString, - subWorkflowStoreEntryId = Option(1), + subWorkflowStoreEntryId = Option(1) ) expectMsg[SubWorkflowFound](SubWorkflowFound(subSubWorkflowEntry)) diff --git a/services/src/main/scala/cromwell/services/EnhancedBatchActor.scala b/services/src/main/scala/cromwell/services/EnhancedBatchActor.scala index 6e6815f3f82..771b4fc388d 100644 --- a/services/src/main/scala/cromwell/services/EnhancedBatchActor.scala +++ b/services/src/main/scala/cromwell/services/EnhancedBatchActor.scala @@ -10,7 +10,7 @@ import scala.concurrent.duration.FiniteDuration * A BatchActor with instrumentation and load control traits mixed in to remove some boilerplate */ abstract class EnhancedBatchActor[C](flushRate: FiniteDuration, batchSize: Int) - extends BatchActor[C](flushRate, batchSize) + extends BatchActor[C](flushRate, batchSize) with InstrumentedBatchActor[C] with CromwellInstrumentationActor with LoadControlledBatchActor[C] { diff --git a/services/src/main/scala/cromwell/services/EnhancedThrottlerActor.scala b/services/src/main/scala/cromwell/services/EnhancedThrottlerActor.scala index f0d127c93bc..2ae7ac5bcfe 100644 --- a/services/src/main/scala/cromwell/services/EnhancedThrottlerActor.scala +++ b/services/src/main/scala/cromwell/services/EnhancedThrottlerActor.scala @@ -8,9 +8,9 @@ import cromwell.services.loadcontroller.LoadControlledBatchActor * A ThrottlerActor with instrumentation and load control traits mixed in to remove some boilerplate */ abstract class EnhancedThrottlerActor[C] - extends ThrottlerActor[C] - with InstrumentedBatchActor[C] - with CromwellInstrumentationActor - with LoadControlledBatchActor[C] { + extends ThrottlerActor[C] + with InstrumentedBatchActor[C] + with CromwellInstrumentationActor + with LoadControlledBatchActor[C] { protected def enhancedReceive: Receive = loadControlReceive.orElse(instrumentationReceive).orElse(super.receive) } diff --git a/services/src/main/scala/cromwell/services/IoActorRequester.scala b/services/src/main/scala/cromwell/services/IoActorRequester.scala index 42c9d8eefb6..456c3ec392a 100644 --- a/services/src/main/scala/cromwell/services/IoActorRequester.scala +++ b/services/src/main/scala/cromwell/services/IoActorRequester.scala @@ -34,9 +34,10 @@ trait IoActorRequester extends StrictLogging { this: Actor => promise.complete(Success(actorRef)) case Success(NoIoActorRefAvailable) => logger.warn(s"No IoActorRef available for ${self.path} yet. 
Retrying in $backoffInterval.") - context.system.scheduler.scheduleOnce(backoffInterval) { requestIoActorInner(promise, backoffInterval) } + context.system.scheduler.scheduleOnce(backoffInterval)(requestIoActorInner(promise, backoffInterval)) case Success(other) => - val message = s"Programmer Error: Unexpected response to a RequestIoActor message in ${self.path}'s IoActorRequester: $other" + val message = + s"Programmer Error: Unexpected response to a RequestIoActor message in ${self.path}'s IoActorRequester: $other" logger.error(message) promise.failure(new Exception(message)) case Failure(reason) => diff --git a/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala b/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala index 2e43e651b2b..5cd6c84df7a 100644 --- a/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala +++ b/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala @@ -31,34 +31,47 @@ object ServiceRegistryActor { def props(config: Config) = Props(new ServiceRegistryActor(config)).withDispatcher(ServiceDispatcher) // To enable testing, this lets us override a config value with a Props of our choice: - def props(config: Config, overrides: Map[String, Props]) = { + def props(config: Config, overrides: Map[String, Props]) = Props(new ServiceRegistryActor(config) { override def serviceProps = super.serviceProps ++ overrides }).withDispatcher(ServiceDispatcher) - } def serviceNameToPropsMap(globalConfig: Config, registryActor: ActorRef): Map[String, Props] = { - val serviceNamesToConfigStanzas = globalConfig.getObject("services").entrySet.asScala.map(x => x.getKey -> x.getValue).toMap + val serviceNamesToConfigStanzas = + globalConfig.getObject("services").entrySet.asScala.map(x => x.getKey -> x.getValue).toMap serviceNamesToConfigStanzas map { - case (serviceName, config: ConfigObject) => serviceName -> serviceProps(serviceName, globalConfig, config.toConfig, registryActor) + case (serviceName, config: ConfigObject) => + serviceName -> serviceProps(serviceName, globalConfig, config.toConfig, registryActor) case (serviceName, _) => throw new Exception(s"Invalid configuration for service $serviceName") } } - private def serviceProps(serviceName: String, globalConfig: Config, serviceStanza: Config, registryActor: ActorRef): Props = { + private def serviceProps(serviceName: String, + globalConfig: Config, + serviceStanza: Config, + registryActor: ActorRef + ): Props = { val serviceConfigStanza = serviceStanza.as[Option[Config]]("config").getOrElse(ConfigFactory.parseString("")) val dispatcher = serviceStanza.as[Option[String]]("dispatcher").getOrElse(ServiceDispatcher) - val className = serviceStanza.as[Option[String]]("class").getOrElse( - throw new IllegalArgumentException(s"Invalid configuration for service $serviceName: missing 'class' definition") - ) - - try { - Props.create(Class.forName(className), serviceConfigStanza, globalConfig, registryActor).withDispatcher(dispatcher) - } catch { - case e: ClassNotFoundException => throw new RuntimeException( - s"Class $className for service $serviceName cannot be found in the class path.", e + val className = serviceStanza + .as[Option[String]]("class") + .getOrElse( + throw new IllegalArgumentException( + s"Invalid configuration for service $serviceName: missing 'class' definition" + ) ) + + try + Props + .create(Class.forName(className), serviceConfigStanza, globalConfig, registryActor) + .withDispatcher(dispatcher) + catch { + case e: ClassNotFoundException => + throw new 
RuntimeException( + s"Class $className for service $serviceName cannot be found in the class path.", + e + ) } } } @@ -71,8 +84,8 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging // When the IO actor starts up, it can register itself here for other service registry actors to make use of it var ioActor: Option[ActorRef] = None - val services: Map[String, ActorRef] = serviceProps map { - case (name, props) => name -> context.actorOf(props, name) + val services: Map[String, ActorRef] = serviceProps map { case (name, props) => + name -> context.actorOf(props, name) } private def transform(message: Any, from: ActorRef): Any = message match { @@ -87,18 +100,28 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging debugLogLoadMessages(msg, sender()) ref.tell(transform(msg, sender()), sender()) case None => - log.error("Received ServiceRegistryMessage requesting service '{}' for which no service is configured. Message: {}", msg.serviceName, msg) + log.error( + "Received ServiceRegistryMessage requesting service '{}' for which no service is configured. Message: {}", + msg.serviceName, + msg + ) sender() ! ServiceRegistryFailure(msg.serviceName) } - case meta: ServiceRegistryMetaRequest => meta match { - case RequestIoActorRef => ioActor match { - case Some(ref) => sender() ! IoActorRef(ref) - case None => sender() ! NoIoActorRefAvailable + case meta: ServiceRegistryMetaRequest => + meta match { + case RequestIoActorRef => + ioActor match { + case Some(ref) => sender() ! IoActorRef(ref) + case None => sender() ! NoIoActorRefAvailable + } + case IoActorRef(ref) => + if (ioActor.isEmpty) { ioActor = Option(ref) } + else { + log.error( + s"Programmer Error: More than one IoActor is trying to register itself in the service registry ($ref will *NOT* replace the existing $ioActor)" + ) + } } - case IoActorRef(ref) => - if (ioActor.isEmpty) { ioActor = Option(ref) } - else { log.error(s"Programmer Error: More than one IoActor is trying to register itself in the service registry ($ref will *NOT* replace the existing $ioActor)") } - } case ShutdownCommand => services.values.toList match { case Nil => context stop self @@ -110,14 +133,13 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging sender() ! 
ServiceRegistryFailure("Message is not a ServiceRegistryMessage: " + fool) } - private def debugLogLoadMessages(msg: ServiceRegistryMessage, sender: ActorRef): Unit = { + private def debugLogLoadMessages(msg: ServiceRegistryMessage, sender: ActorRef): Unit = msg match { case msg: LoadMetric => log.debug(s"Service Registry Actor receiving $msg message from $sender") case _ => () } - } /** * Set the supervision strategy such that any of the individual service actors fails to initialize that we'll pass diff --git a/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala b/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala index 4d4a12e77d0..39d020bb701 100644 --- a/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala +++ b/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala @@ -34,11 +34,18 @@ trait ProtoHealthMonitorServiceActor extends Actor with LazyLogging with Timers implicit val ec: ExecutionContext = context.system.dispatcher - lazy val sweepInterval = serviceConfig.as[Option[FiniteDuration]]("services.HealthMonitor.check-refresh-time").getOrElse(DefaultSweepTime) - lazy val futureTimeout: FiniteDuration = serviceConfig.as[Option[FiniteDuration]]("services.HealthMonitor.check-timeout").getOrElse(DefaultFutureTimeout) - lazy val staleThreshold: FiniteDuration = serviceConfig.as[Option[FiniteDuration]]("services.HealthMonitor.status-ttl").getOrElse(DefaultStaleThreshold) - lazy val failureRetryCount: Int = serviceConfig.as[Option[Int]]("services.HealthMonitor.check-failure-retry-count").getOrElse(DefaultFailureRetryCount) - lazy val failureRetryInterval: FiniteDuration = serviceConfig.as[Option[FiniteDuration]]("services.HealthMonitor.check-failure-retry-interval").getOrElse(DefaultFailureRetryInterval) + lazy val sweepInterval = + serviceConfig.as[Option[FiniteDuration]]("services.HealthMonitor.check-refresh-time").getOrElse(DefaultSweepTime) + lazy val futureTimeout: FiniteDuration = + serviceConfig.as[Option[FiniteDuration]]("services.HealthMonitor.check-timeout").getOrElse(DefaultFutureTimeout) + lazy val staleThreshold: FiniteDuration = + serviceConfig.as[Option[FiniteDuration]]("services.HealthMonitor.status-ttl").getOrElse(DefaultStaleThreshold) + lazy val failureRetryCount: Int = serviceConfig + .as[Option[Int]]("services.HealthMonitor.check-failure-retry-count") + .getOrElse(DefaultFailureRetryCount) + lazy val failureRetryInterval: FiniteDuration = serviceConfig + .as[Option[FiniteDuration]]("services.HealthMonitor.check-failure-retry-interval") + .getOrElse(DefaultFailureRetryInterval) /** * Contains each subsystem status along with a timestamp of when the entry was made so we know when the status @@ -49,24 +56,20 @@ trait ProtoHealthMonitorServiceActor extends Actor with LazyLogging with Timers subsystems.map((_, CachedSubsystemStatus(UnknownStatus, now))).toMap } - private[healthmonitor] def initialize(): Unit = { + private[healthmonitor] def initialize(): Unit = subsystems foreach { s => logger.info(s"Availability of '${s.name}' will be monitored and reported via the '/engine/v1/status' API") self ! 
Check(s, failureRetryCount) } - } - private def check(subsystem: MonitoredSubsystem, after: FiniteDuration, withRetriesLeft: Int): Unit = { + private def check(subsystem: MonitoredSubsystem, after: FiniteDuration, withRetriesLeft: Int): Unit = timers.startSingleTimer(UUID.randomUUID(), Check(subsystem, withRetriesLeft), after) - } - private[healthmonitor] def scheduleFailedRetryCheck(subsystem: MonitoredSubsystem, retriesLeft: Int): Unit = { + private[healthmonitor] def scheduleFailedRetryCheck(subsystem: MonitoredSubsystem, retriesLeft: Int): Unit = check(subsystem, after = failureRetryInterval, withRetriesLeft = retriesLeft - 1) - } - private[healthmonitor] def scheduleSweepCheck(subsystem: MonitoredSubsystem): Unit = { + private[healthmonitor] def scheduleSweepCheck(subsystem: MonitoredSubsystem): Unit = check(subsystem, after = sweepInterval, withRetriesLeft = failureRetryCount) - } initialize() @@ -138,15 +141,21 @@ object ProtoHealthMonitorServiceActor { final case class MonitoredSubsystem(name: String, check: () => Future[SubsystemStatus]) final case class SubsystemStatus(ok: Boolean, messages: Option[List[String]]) - final case class CachedSubsystemStatus(status: SubsystemStatus, created: Long) // created is time in millis when status was captured + final case class CachedSubsystemStatus(status: SubsystemStatus, + created: Long + ) // created is time in millis when status was captured sealed abstract class HealthMonitorServiceActorRequest case class Check(subsystem: MonitoredSubsystem, retriesLeft: Int) extends HealthMonitorServiceActorRequest - final case class Store(subsystem: MonitoredSubsystem, status: SubsystemStatus) extends HealthMonitorServiceActorRequest - case object GetCurrentStatus extends HealthMonitorServiceActorRequest with ServiceRegistryMessage { override val serviceName = "HealthMonitor" } + final case class Store(subsystem: MonitoredSubsystem, status: SubsystemStatus) + extends HealthMonitorServiceActorRequest + case object GetCurrentStatus extends HealthMonitorServiceActorRequest with ServiceRegistryMessage { + override val serviceName = "HealthMonitor" + } sealed abstract class HealthMonitorServiceActorResponse - final case class StatusCheckResponse(ok: Boolean, systems: Map[String, SubsystemStatus]) extends HealthMonitorServiceActorResponse + final case class StatusCheckResponse(ok: Boolean, systems: Map[String, SubsystemStatus]) + extends HealthMonitorServiceActorResponse /** * Adds non-blocking timeout support to futures. 
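The FutureWithTimeout enrichment reworked in the next hunk is a small but useful pattern: the wrapped future races a scheduler-driven failure via Future.firstCompletedOf, so the timeout never blocks a thread. A self-contained sketch of the same race, assuming an ActorSystem is in scope:

  import akka.actor.ActorSystem
  import akka.pattern.after
  import java.util.concurrent.TimeoutException
  import scala.concurrent.duration.FiniteDuration
  import scala.concurrent.{ExecutionContext, Future}

  object FutureTimeoutSketch {
    // Whichever future completes first wins; the loser is simply discarded.
    def withTimeout[A](f: Future[A], timeout: FiniteDuration, errMsg: String)
                      (implicit system: ActorSystem, ec: ExecutionContext): Future[A] =
      Future.firstCompletedOf(
        List(f, after(timeout, system.scheduler)(Future.failed(new TimeoutException(errMsg))))
      )
  }
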
@@ -157,8 +166,10 @@ object ProtoHealthMonitorServiceActor { * // returns in 5 seconds * }}} */ - private implicit class FutureWithTimeout[A](f: Future[A]) { - def withTimeout(duration: FiniteDuration, errMsg: String, scheduler: Scheduler)(implicit ec: ExecutionContext): Future[A] = + implicit private class FutureWithTimeout[A](f: Future[A]) { + def withTimeout(duration: FiniteDuration, errMsg: String, scheduler: Scheduler)(implicit + ec: ExecutionContext + ): Future[A] = Future.firstCompletedOf(List(f, after(duration, scheduler)(Future.failed(new TimeoutException(errMsg))))) } } diff --git a/services/src/main/scala/cromwell/services/healthmonitor/impl/HealthMonitorServiceActor.scala b/services/src/main/scala/cromwell/services/healthmonitor/impl/HealthMonitorServiceActor.scala index 908f0706551..5c5e13b0615 100644 --- a/services/src/main/scala/cromwell/services/healthmonitor/impl/HealthMonitorServiceActor.scala +++ b/services/src/main/scala/cromwell/services/healthmonitor/impl/HealthMonitorServiceActor.scala @@ -6,8 +6,8 @@ import cromwell.services.healthmonitor.ProtoHealthMonitorServiceActor import cromwell.services.healthmonitor.impl.workbench.WorkbenchHealthMonitorServiceActor final class HealthMonitorServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends WorkbenchHealthMonitorServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { - override implicit val system = context.system + extends WorkbenchHealthMonitorServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { + implicit override val system = context.system override lazy val subsystems: Set[ProtoHealthMonitorServiceActor.MonitoredSubsystem] = { @@ -18,7 +18,7 @@ final class HealthMonitorServiceActor(serviceConfig: Config, globalConfig: Confi Set( dockerHubSubsystemOption, engineDatabaseSubsystemOption, - gcsSubsystemOption, + gcsSubsystemOption ).flatten ++ PapiSubsystems } } diff --git a/services/src/main/scala/cromwell/services/healthmonitor/impl/common/DockerHubMonitor.scala b/services/src/main/scala/cromwell/services/healthmonitor/impl/common/DockerHubMonitor.scala index e56fbb98bdf..9170b6edd3a 100644 --- a/services/src/main/scala/cromwell/services/healthmonitor/impl/common/DockerHubMonitor.scala +++ b/services/src/main/scala/cromwell/services/healthmonitor/impl/common/DockerHubMonitor.scala @@ -24,7 +24,8 @@ trait DockerHubMonitor { implicit val timeout = Timeout(5.seconds) lazy val dockerHubFlow = List(new DockerHubRegistry(DockerRegistryConfig.default)) - lazy val dockerHashActor = system.actorOf(DockerInfoActor.props(dockerHubFlow, 500, 0.minutes, 0), "HealthMonitorDockerHashActor") + lazy val dockerHashActor = + system.actorOf(DockerInfoActor.props(dockerHubFlow, 500, 0.minutes, 0), "HealthMonitorDockerHashActor") lazy val DockerHub = MonitoredSubsystem("DockerHub", checkDockerhub _) @@ -32,13 +33,15 @@ trait DockerHubMonitor { * Demonstrates connectivity to Docker Hub by periodically pulling the hash of an image. If the hash is not returned * we assume that there is no connection to Docker Hub. 
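   *
   * In outline, the probe below boils down to an ask against the docker hash
   * actor (a sketch only; it relies on the implicit Timeout declared above):
   * {{{
   * dockerHashActor.ask(UbuntuLatestHashRequest).mapTo[DockerInfoResponse] map {
   *   case _: DockerInfoSuccessResponse => OkStatus
   *   case f: DockerInfoFailedResponse  => throw f.failure
   * }
   * }}}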
*/ - private def checkDockerhub(): Future[SubsystemStatus] = { + private def checkDockerhub(): Future[SubsystemStatus] = dockerHashActor.ask(UbuntuLatestHashRequest).mapTo[DockerInfoResponse] map { case _: DockerInfoSuccessResponse => OkStatus case f: DockerInfoFailedResponse => throw f.failure - case huh => throw new RuntimeException("Encountered unexpected error when trying to contact DockerHub: " + huh.getClass.getCanonicalName) + case huh => + throw new RuntimeException( + "Encountered unexpected error when trying to contact DockerHub: " + huh.getClass.getCanonicalName + ) } - } } object DockerHubMonitor { diff --git a/services/src/main/scala/cromwell/services/healthmonitor/impl/common/EngineDatabaseMonitor.scala b/services/src/main/scala/cromwell/services/healthmonitor/impl/common/EngineDatabaseMonitor.scala index be518bf001a..a7707bf86fd 100644 --- a/services/src/main/scala/cromwell/services/healthmonitor/impl/common/EngineDatabaseMonitor.scala +++ b/services/src/main/scala/cromwell/services/healthmonitor/impl/common/EngineDatabaseMonitor.scala @@ -1,7 +1,7 @@ package cromwell.services.healthmonitor.impl.common import cromwell.services.EngineServicesStore -import cromwell.services.healthmonitor.ProtoHealthMonitorServiceActor.{MonitoredSubsystem, SubsystemStatus, OkStatus} +import cromwell.services.healthmonitor.ProtoHealthMonitorServiceActor.{MonitoredSubsystem, OkStatus, SubsystemStatus} import cats.syntax.functor._ import cats.instances.future._ import scala.concurrent.{ExecutionContext, Future} @@ -19,7 +19,6 @@ trait EngineDatabaseMonitor { /** * Demonstrates connectivity to the engine database by periodically making a small query */ - private def checkEngineDb(): Future[SubsystemStatus] = { + private def checkEngineDb(): Future[SubsystemStatus] = EngineServicesStore.engineDatabaseInterface.queryDockerHashStoreEntries("DOESNOTEXIST") as OkStatus - } } diff --git a/services/src/main/scala/cromwell/services/healthmonitor/impl/workbench/WorkbenchHealthMonitorServiceActor.scala b/services/src/main/scala/cromwell/services/healthmonitor/impl/workbench/WorkbenchHealthMonitorServiceActor.scala index 6bbef784dfa..43a5c597c8a 100644 --- a/services/src/main/scala/cromwell/services/healthmonitor/impl/workbench/WorkbenchHealthMonitorServiceActor.scala +++ b/services/src/main/scala/cromwell/services/healthmonitor/impl/workbench/WorkbenchHealthMonitorServiceActor.scala @@ -31,17 +31,20 @@ import scala.concurrent.{ExecutionContext, Future} * as GCS and PAPI. This implementation makes some assumptions of Cromwell's configuration which will be true * in a Workbench scenario but YMMV otherwise. Caveat emptor and all of that fun stuff. 
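   *
   * In outline, each backend listed under `check-papi-backends` is turned into
   * its own monitored subsystem (a sketch of the wiring that follows; the real
   * code first resolves each name to a PapiConfiguration):
   * {{{
   * serviceConfig.as[Set[String]]("check-papi-backends") map { name =>
   *   MonitoredSubsystem(name, () => checkPapi(...))
   * }
   * }}}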
*/ -abstract class WorkbenchHealthMonitorServiceActor(val serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends ProtoHealthMonitorServiceActor +abstract class WorkbenchHealthMonitorServiceActor(val serviceConfig: Config, + globalConfig: Config, + serviceRegistryActor: ActorRef +) extends ProtoHealthMonitorServiceActor with DockerHubMonitor with EngineDatabaseMonitor { - override implicit val system = context.system + implicit override val system = context.system - private lazy val papiBackendConfigurations = serviceConfig.as[Set[String]]("check-papi-backends").map(WorkbenchHealthMonitorServiceActor.PapiConfiguration.fromBackendNameValue(_, serviceConfig, globalConfig)) + private lazy val papiBackendConfigurations = serviceConfig + .as[Set[String]]("check-papi-backends") + .map(WorkbenchHealthMonitorServiceActor.PapiConfiguration.fromBackendNameValue(_, serviceConfig, globalConfig)) - def papiMonitoredSubsystem(papiConfiguration: PapiConfiguration): MonitoredSubsystem = { + def papiMonitoredSubsystem(papiConfiguration: PapiConfiguration): MonitoredSubsystem = MonitoredSubsystem(papiConfiguration.backendName, () => checkPapi(papiConfiguration)) - } protected lazy val Gcs = MonitoredSubsystem("GCS", () => checkGcs()) protected lazy val PapiSubsystems = papiBackendConfigurations map papiMonitoredSubsystem @@ -54,7 +57,8 @@ abstract class WorkbenchHealthMonitorServiceActor(val serviceConfig: Config, glo private def getGoogleAuthConfigurationOrFail(googleAuthName: String): GoogleAuthMode = googleConfig.auth(googleAuthName) match { case Valid(a) => a - case Invalid(e) => throw new IllegalArgumentException("Unable to configure WorkbenchHealthMonitor: " + e.toList.mkString(", ")) + case Invalid(e) => + throw new IllegalArgumentException("Unable to configure WorkbenchHealthMonitor: " + e.toList.mkString(", ")) } /** @@ -80,12 +84,19 @@ abstract class WorkbenchHealthMonitorServiceActor(val serviceConfig: Config, glo val check = for { credentials <- Future(googleAuth.credentials(List(CloudLifeSciencesScopes.CLOUD_PLATFORM))) actorFactoryName = papiProviderConfig.as[String]("actor-factory") - genomicsChecker = if (actorFactoryName.contains("v2beta")) { - val location = papiConfig.as[String]("genomics.location") - GenomicsCheckerV2Beta(googleConfig.applicationName, googleAuth, endpointUrl, location, credentials, papiProjectId) - } else { - GenomicsCheckerV2Alpha1(googleConfig.applicationName, googleAuth, endpointUrl, credentials, papiProjectId) - } + genomicsChecker = + if (actorFactoryName.contains("v2beta")) { + val location = papiConfig.as[String]("genomics.location") + GenomicsCheckerV2Beta(googleConfig.applicationName, + googleAuth, + endpointUrl, + location, + credentials, + papiProjectId + ) + } else { + GenomicsCheckerV2Alpha1(googleConfig.applicationName, googleAuth, endpointUrl, credentials, papiProjectId) + } checked <- genomicsChecker.check } yield checked @@ -98,9 +109,8 @@ object WorkbenchHealthMonitorServiceActor { protected def httpInitializer(credentials: Credentials) = { val delegate = new HttpCredentialsAdapter(credentials) new HttpRequestInitializer() { - def initialize(httpRequest: HttpRequest) = { + def initialize(httpRequest: HttpRequest) = delegate.initialize(httpRequest) - } } } @@ -112,34 +122,42 @@ object WorkbenchHealthMonitorServiceActor { endpointUrl: URL, location: String, credentials: Credentials, - papiProjectId: String)(implicit val ec: ExecutionContext) extends GenomicsChecker { - val lifeSciences = new CloudLifeSciences.Builder( - 
GoogleAuthMode.httpTransport, - GoogleAuthMode.jsonFactory, - httpInitializer(credentials)) + papiProjectId: String + )(implicit val ec: ExecutionContext) + extends GenomicsChecker { + val lifeSciences = new CloudLifeSciences.Builder(GoogleAuthMode.httpTransport, + GoogleAuthMode.jsonFactory, + httpInitializer(credentials) + ) .setApplicationName(applicationName) .setRootUrl(endpointUrl.toString) .build override def check = Future { // https://cloud.google.com/life-sciences/docs/reference/rest/v2beta/projects.locations.operations - lifeSciences.projects().locations().operations().list(s"projects/$papiProjectId/locations/$location").setPageSize(1).execute() + lifeSciences + .projects() + .locations() + .operations() + .list(s"projects/$papiProjectId/locations/$location") + .setPageSize(1) + .execute() () } } case class GenomicsCheckerV2Alpha1(applicationName: String, - authMode: GoogleAuthMode, - endpointUrl: URL, - credentials: Credentials, - papiProjectId: String)(implicit val ec: ExecutionContext) extends GenomicsChecker { - val genomics = new Genomics.Builder( - GoogleAuthMode.httpTransport, - GoogleAuthMode.jsonFactory, - httpInitializer(credentials)) - .setApplicationName(applicationName) - .setRootUrl(endpointUrl.toString) - .build + authMode: GoogleAuthMode, + endpointUrl: URL, + credentials: Credentials, + papiProjectId: String + )(implicit val ec: ExecutionContext) + extends GenomicsChecker { + val genomics = + new Genomics.Builder(GoogleAuthMode.httpTransport, GoogleAuthMode.jsonFactory, httpInitializer(credentials)) + .setApplicationName(applicationName) + .setRootUrl(endpointUrl.toString) + .build override def check = Future { // https://cloud.google.com/genomics/reference/rest/#rest-resource-v2alpha1projectsoperations @@ -153,15 +171,16 @@ object WorkbenchHealthMonitorServiceActor { object PapiConfiguration { def fromBackendNameKey(backendNameKey: String, serviceConfig: Config, - globalConfig: Config): Option[PapiConfiguration] = { + globalConfig: Config + ): Option[PapiConfiguration] = serviceConfig.as[Option[String]](backendNameKey) map { fromBackendNameValue(_, serviceConfig, globalConfig) } - } def fromBackendNameValue(papiBackendName: String, serviceConfig: Config, - globalConfig: Config): PapiConfiguration = { + globalConfig: Config + ): PapiConfiguration = { val papiProviderConfig: Config = globalConfig.as[Config](s"backend.providers.$papiBackendName") val papiConfig: Config = papiProviderConfig.as[Config]("config") PapiConfiguration(papiBackendName, papiConfig, papiProviderConfig) diff --git a/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala b/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala index a079e1e7fb0..cf8df945532 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala @@ -9,8 +9,8 @@ import scala.concurrent.{ExecutionContext, Future} class AsynchronousThrottlingGaugeMetricActor(metricPath: NonEmptyList[String], instrumentationPrefix: Option[String], - override val serviceRegistryActor: ActorRef) - extends LoggingFSM[AsynchronousThrottlingGaugeMetricActorState, Unit] + override val serviceRegistryActor: ActorRef +) extends LoggingFSM[AsynchronousThrottlingGaugeMetricActorState, Unit] with CromwellInstrumentation { implicit val ec = context.dispatcher @@ -42,10 +42,11 @@ class 
AsynchronousThrottlingGaugeMetricActor(metricPath: NonEmptyList[String], goto(WaitingForMetricCalculationRequestOrMetricValue) } - whenUnhandled { - case Event(unexpected, _) => - log.warning(s"Programmer error: this actor should not receive message $unexpected from ${sender().path} while in state $stateName") - stay() + whenUnhandled { case Event(unexpected, _) => + log.warning( + s"Programmer error: this actor should not receive message $unexpected from ${sender().path} while in state $stateName" + ) + stay() } private def sendGaugeAndStay(metricValue: Long): State = { diff --git a/services/src/main/scala/cromwell/services/instrumentation/CromwellInstrumentation.scala b/services/src/main/scala/cromwell/services/instrumentation/CromwellInstrumentation.scala index 1c0bf4fd57f..cd7adef9c41 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/CromwellInstrumentation.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/CromwellInstrumentation.scala @@ -1,6 +1,6 @@ package cromwell.services.instrumentation -import java.time.{OffsetDateTime, Duration => JDuration} +import java.time.{Duration => JDuration, OffsetDateTime} import java.util.concurrent.TimeUnit import akka.actor.{Actor, ActorRef, Timers} @@ -16,7 +16,8 @@ import scala.language.implicitConversions object CromwellInstrumentation { - val InstrumentationRate = ConfigFactory.load() + val InstrumentationRate = ConfigFactory + .load() .getConfig("system") .as[Option[FiniteDuration]]("instrumentation-rate") .getOrElse(5.seconds) @@ -30,9 +31,8 @@ object CromwellInstrumentation { .map(c => path.concatNel(NonEmptyList.of(c.toString))) .getOrElse(path) - def withThrowable(failure: Throwable, statusCodeExtractor: Throwable => Option[Int]) = { + def withThrowable(failure: Throwable, statusCodeExtractor: Throwable => Option[Int]) = path.withStatusCodeFailure(statusCodeExtractor(failure)) - } } } @@ -50,70 +50,64 @@ trait CromwellInstrumentation { * The cromwell bucket prefix is always prepended: * cromwell.[prefix].path */ - final private def makeBucket(path: InstrumentationPath, prefix: Option[String]): CromwellBucket = { + final private def makeBucket(path: InstrumentationPath, prefix: Option[String]): CromwellBucket = CromwellBucket(prefix.toList, path) - } /** * Creates an increment message for the given bucket */ - private final def countMessage(path: InstrumentationPath, count: Long, prefix: Option[String]): InstrumentationServiceMessage = { + final private def countMessage(path: InstrumentationPath, + count: Long, + prefix: Option[String] + ): InstrumentationServiceMessage = InstrumentationServiceMessage(CromwellCount(makeBucket(path, prefix), count)) - } /** * Increment the counter for the given bucket */ - protected final def count(path: InstrumentationPath, count: Long, prefix: Option[String] = None): Unit = { + final protected def count(path: InstrumentationPath, count: Long, prefix: Option[String] = None): Unit = serviceRegistryActor.tell(countMessage(path, count, prefix), instrumentationSender) - } /** * Creates an increment message for the given bucket */ - private final def incrementMessage(path: InstrumentationPath, prefix: Option[String]): InstrumentationServiceMessage = { + final private def incrementMessage(path: InstrumentationPath, prefix: Option[String]): InstrumentationServiceMessage = InstrumentationServiceMessage(CromwellIncrement(makeBucket(path, prefix))) - } /** * Increment the counter for the given bucket */ - protected final def increment(path: InstrumentationPath, prefix: 
Option[String] = None): Unit = { + final protected def increment(path: InstrumentationPath, prefix: Option[String] = None): Unit = serviceRegistryActor.tell(incrementMessage(path, prefix), instrumentationSender) - } /** * Creates a gauge message for the given bucket */ - private final def gaugeMessage(path: InstrumentationPath, value: Long, prefix: Option[String]) = { + final private def gaugeMessage(path: InstrumentationPath, value: Long, prefix: Option[String]) = InstrumentationServiceMessage(CromwellGauge(makeBucket(path, prefix), value)) - } /** * Set the bucket to the gauge value */ - protected final def sendGauge(path: InstrumentationPath, value: Long, prefix: Option[String] = None): Unit = { + final protected def sendGauge(path: InstrumentationPath, value: Long, prefix: Option[String] = None): Unit = serviceRegistryActor.tell(gaugeMessage(path, value, prefix), instrumentationSender) - } /** * Creates a timing message for the given bucket and duration */ - private final def timingMessage(path: InstrumentationPath, duration: FiniteDuration, prefix: Option[String]) = { + final private def timingMessage(path: InstrumentationPath, duration: FiniteDuration, prefix: Option[String]) = InstrumentationServiceMessage(CromwellTiming(makeBucket(path, prefix), duration)) - } /** * Add a timing information for the given bucket */ - protected final def sendTiming(path: InstrumentationPath, duration: FiniteDuration, prefix: Option[String] = None) = { + final protected def sendTiming(path: InstrumentationPath, duration: FiniteDuration, prefix: Option[String] = None) = serviceRegistryActor.tell(timingMessage(path, duration, prefix), instrumentationSender) - } - def calculateTimeDifference(startTime: OffsetDateTime, endTime: OffsetDateTime): FiniteDuration = { + def calculateTimeDifference(startTime: OffsetDateTime, endTime: OffsetDateTime): FiniteDuration = FiniteDuration(JDuration.between(startTime, endTime).toMillis, TimeUnit.MILLISECONDS) - } - def calculateTimeSince(startTime: OffsetDateTime): FiniteDuration = calculateTimeDifference(startTime, OffsetDateTime.now()) + def calculateTimeSince(startTime: OffsetDateTime): FiniteDuration = + calculateTimeDifference(startTime, OffsetDateTime.now()) } /** @@ -123,9 +117,8 @@ trait CromwellInstrumentationScheduler { this: Actor with Timers => private case object InstrumentationTimerKey private case object InstrumentationTimerAction extends ControlMessage - def startInstrumentationTimer() = { + def startInstrumentationTimer() = timers.startSingleTimer(InstrumentationTimerKey, InstrumentationTimerAction, InstrumentationRate) - } protected def instrumentationReceive(instrumentationAction: () => Unit): Receive = { case InstrumentationTimerAction => diff --git a/services/src/main/scala/cromwell/services/instrumentation/CromwellMetric.scala b/services/src/main/scala/cromwell/services/instrumentation/CromwellMetric.scala index 21c9dba1cd8..dbdc4525feb 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/CromwellMetric.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/CromwellMetric.scala @@ -25,12 +25,15 @@ sealed trait CromwellMetric object CromwellCount { def apply(bucket: CromwellBucket, value: Long): CromwellCount = new CromwellCount(bucket, value) - def unapply(count: CromwellCount): Option[(CromwellBucket, Long, Double)] = Option((count.bucket, count.value, count.sampling)) + def unapply(count: CromwellCount): Option[(CromwellBucket, Long, Double)] = Option( + (count.bucket, count.value, count.sampling) + ) } + /** * 
Count occurrences of an event */ -class CromwellCount(val bucket: CromwellBucket, val value: Long, val sampling: Double = 1.0D) extends CromwellMetric { +class CromwellCount(val bucket: CromwellBucket, val value: Long, val sampling: Double = 1.0d) extends CromwellMetric { override def toString: String = s"CromwellCount($bucket, $value, $sampling)" } @@ -42,7 +45,7 @@ case class CromwellIncrement(override val bucket: CromwellBucket) extends Cromwe /** * Measures a time value */ -case class CromwellTiming(bucket: CromwellBucket, value: FiniteDuration, sampling: Double = 1.0D) extends CromwellMetric +case class CromwellTiming(bucket: CromwellBucket, value: FiniteDuration, sampling: Double = 1.0d) extends CromwellMetric /** * Measures a gauge value diff --git a/services/src/main/scala/cromwell/services/instrumentation/InstrumentedBatchActor.scala b/services/src/main/scala/cromwell/services/instrumentation/InstrumentedBatchActor.scala index 46ec07b1b33..0e08371829d 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/InstrumentedBatchActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/InstrumentedBatchActor.scala @@ -40,10 +40,9 @@ trait InstrumentedBatchActor[C] { this: BatchActor[C] with CromwellInstrumentati * Don't forget to chain this into your receive method to instrument the queue size: * override def receive = instrumentationReceive.orElse(super.receive) */ - protected def instrumentationReceive: Receive = { - case QueueSizeTimerAction => - sendGauge(queueSizePath, stateData.weight.toLong, instrumentationPrefix) - timers.startSingleTimer(QueueSizeTimerKey, QueueSizeTimerAction, CromwellInstrumentation.InstrumentationRate) + protected def instrumentationReceive: Receive = { case QueueSizeTimerAction => + sendGauge(queueSizePath, stateData.weight.toLong, instrumentationPrefix) + timers.startSingleTimer(QueueSizeTimerKey, QueueSizeTimerAction, CromwellInstrumentation.InstrumentationRate) } /** @@ -59,7 +58,10 @@ trait InstrumentedBatchActor[C] { this: BatchActor[C] with CromwellInstrumentati action onComplete { case Success(numProcessed) => count(processedPath, numProcessed.toLong, instrumentationPrefix) - sendTiming(durationPath, (OffsetDateTime.now().toEpochSecond - startTime.toEpochSecond).seconds, instrumentationPrefix) + sendTiming(durationPath, + (OffsetDateTime.now().toEpochSecond - startTime.toEpochSecond).seconds, + instrumentationPrefix + ) case Failure(_) => count(failurePath, 1L, instrumentationPrefix) } action diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/noop/NoopInstrumentationServiceActor.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/noop/NoopInstrumentationServiceActor.scala index e7385ed7991..5c8eb8c744a 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/noop/NoopInstrumentationServiceActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/noop/NoopInstrumentationServiceActor.scala @@ -6,13 +6,16 @@ import cromwell.services.instrumentation.InstrumentationService.InstrumentationS import cromwell.util.GracefulShutdownHelper.ShutdownCommand object NoopInstrumentationServiceActor { - def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = Props(new NoopInstrumentationServiceActor(serviceConfig, globalConfig, serviceRegistryActor)) + def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = Props( + new NoopInstrumentationServiceActor(serviceConfig, globalConfig, 
serviceRegistryActor) + ) } /** * Actor that ignores every InstrumentationServiceMessage - This is the default implementation of this service */ -class NoopInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends Actor { +class NoopInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) + extends Actor { override def receive = { case InstrumentationServiceMessage(_) => // Drop all messages case ShutdownCommand => context stop self diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/selectivetsv/SelectiveTsvInstrumentationServiceActor.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/selectivetsv/SelectiveTsvInstrumentationServiceActor.scala index 4df3f966db6..de991711689 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/selectivetsv/SelectiveTsvInstrumentationServiceActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/selectivetsv/SelectiveTsvInstrumentationServiceActor.scala @@ -9,7 +9,10 @@ import com.typesafe.config.Config import com.typesafe.scalalogging.StrictLogging import cromwell.services.instrumentation.{CromwellBucket, CromwellGauge, CromwellIncrement} import cromwell.services.instrumentation.InstrumentationService.InstrumentationServiceMessage -import cromwell.services.instrumentation.impl.selectivetsv.SelectiveTsvInstrumentationServiceActor.{SnapshotState, StateHistory} +import cromwell.services.instrumentation.impl.selectivetsv.SelectiveTsvInstrumentationServiceActor.{ + SnapshotState, + StateHistory +} import cromwell.util.GracefulShutdownHelper.ShutdownCommand import scala.concurrent.duration._ @@ -24,12 +27,14 @@ import scala.concurrent.ExecutionContext * * This is a test/demonstration version of the actor. I strongly discourage deploying with it in any production usage! */ -class SelectiveTsvInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends Actor +class SelectiveTsvInstrumentationServiceActor(serviceConfig: Config, + globalConfig: Config, + serviceRegistryActor: ActorRef +) extends Actor with StrictLogging { implicit val ec: ExecutionContext = context.dispatcher - context.system.scheduler.schedule(10.seconds, 1.seconds) { self ! SnapshotState } + context.system.scheduler.schedule(10.seconds, 1.seconds)(self ! 
SnapshotState) var stateHistory: StateHistory = StateHistory.empty @@ -59,14 +64,15 @@ class SelectiveTsvInstrumentationServiceActor(serviceConfig: Config, globalConfi case ShutdownCommand => context stop self } - private def checkTsvPrintout(): Boolean = { - if (stateHistory.stateHistory.size >= 2 && + private def checkTsvPrintout(): Boolean = + if ( + stateHistory.stateHistory.size >= 2 && stateHistory.stateHistory.last._2.get("jobs.ejea.executing").contains(0) && - stateHistory.stateHistory.init.last._2.get("jobs.ejea.executing").exists(_ != 0)) { + stateHistory.stateHistory.init.last._2.get("jobs.ejea.executing").exists(_ != 0) + ) { outputCountHistory() true } else false - } private def outputCountHistory(): Unit = { import java.io.BufferedWriter @@ -93,7 +99,11 @@ object SelectiveTsvInstrumentationServiceActor { case object SnapshotState - final case class StateHistory(fields: Vector[String], currentState: Map[String, Int], stateHistory: Vector[(Long, Map[String, Int])], firstEntryTimestampMillis: Option[Long]) { + final case class StateHistory(fields: Vector[String], + currentState: Map[String, Int], + stateHistory: Vector[(Long, Map[String, Int])], + firstEntryTimestampMillis: Option[Long] + ) { def increment(field: String): StateHistory = { val newState = currentState + (field -> (currentState.getOrElse(field, 0) + 1)) if (fields.contains(field)) { @@ -107,13 +117,12 @@ object SelectiveTsvInstrumentationServiceActor { ) } } - def decrement(field: String): StateHistory = { + def decrement(field: String): StateHistory = this.copy( currentState = currentState + (field -> (currentState(field) - 1)) ) - } - def set(field: String, value: Long): StateHistory = { + def set(field: String, value: Long): StateHistory = if (fields.contains(field)) { this.copy( currentState = currentState + (field -> value.intValue) @@ -124,22 +133,23 @@ object SelectiveTsvInstrumentationServiceActor { currentState = currentState + (field -> value.intValue) ) } - } - def snapshotState(): StateHistory = { + def snapshotState(): StateHistory = if (this == StateHistory.empty) this else { - val creationTimestampEpochMillis = firstEntryTimestampMillis.getOrElse(OffsetDateTime.now.toInstant.toEpochMilli) + val creationTimestampEpochMillis = + firstEntryTimestampMillis.getOrElse(OffsetDateTime.now.toInstant.toEpochMilli) this.copy( - stateHistory = stateHistory :+ ((OffsetDateTime.now.toInstant.toEpochMilli - creationTimestampEpochMillis) -> currentState), + stateHistory = + stateHistory :+ ((OffsetDateTime.now.toInstant.toEpochMilli - creationTimestampEpochMillis) -> currentState), firstEntryTimestampMillis = Option(creationTimestampEpochMillis) ) } - } def stateHistoryTsv(): Vector[String] = { - val interestingFields = fields.filter(field => stateHistory.exists(history => history._2.get(field).exists(_ != 0))) + val interestingFields = + fields.filter(field => stateHistory.exists(history => history._2.get(field).exists(_ != 0))) val header = (List("timestamp") ++ interestingFields).mkString("\t") val rows = stateHistory.map { case (timestamp, fieldMap) => diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfig.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfig.scala index 06fe0442da4..7fd6e79a708 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfig.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfig.scala @@ 
-18,50 +18,57 @@ case class StackdriverConfig(googleProject: String, flushRate: FiniteDuration, cromwellInstanceIdentifier: Option[String], cromwellInstanceRole: Option[String], - cromwellPerfTestCase: Option[String]) + cromwellPerfTestCase: Option[String] +) object StackdriverConfig { val CromwellInstanceIdentifier = "cromwell-instance-identifier" val CromwellInstanceRole = "cromwell-instance-role" val CromwellPerfTest = "cromwell-perf-test-case" - private def validateFlushRate(rateFunc: => FiniteDuration): ErrorOr[FiniteDuration] = { + private def validateFlushRate(rateFunc: => FiniteDuration): ErrorOr[FiniteDuration] = validate[FiniteDuration](rateFunc) match { - case Valid(duration) => duration match { - case _ if duration < 1.minute => (s"`flush-rate` must be 1 minute or longer, specified rate is `$duration`. " + - s"Google's Stackdriver API needs each metric to be sent not more than once per minute.").invalidNel - case _ => duration.validNel - } + case Valid(duration) => + duration match { + case _ if duration < 1.minute => + (s"`flush-rate` must be 1 minute or longer, specified rate is `$duration`. " + + s"Google's Stackdriver API needs each metric to be sent not more than once per minute.").invalidNel + case _ => duration.validNel + } case Invalid(e) => e.invalid } - } - - private def validateAuth(authSchemeFunc: => String, googleConfiguration: GoogleConfiguration): ErrorOr[GoogleAuthMode] = { + private def validateAuth(authSchemeFunc: => String, + googleConfiguration: GoogleConfiguration + ): ErrorOr[GoogleAuthMode] = validate[String](authSchemeFunc) match { - case Valid(schemeString) => googleConfiguration.auth(schemeString) match { - case Valid(auth @ (_:ApplicationDefaultMode | _:ServiceAccountMode)) => auth.valid - case Valid(auth @ (_: MockAuthMode)) => auth.valid // Allow mocking for tests - case Valid(_) => s"`auth` scheme: $schemeString is not allowed for Stackdriver instrumentation. Only `application_default` and `service_account` modes are valid.".invalidNel - case Invalid(error) => s"`auth` scheme is invalid. Errors: $error".invalidNel - } + case Valid(schemeString) => + googleConfiguration.auth(schemeString) match { + case Valid(auth @ (_: ApplicationDefaultMode | _: ServiceAccountMode)) => auth.valid + case Valid(auth @ (_: MockAuthMode)) => auth.valid // Allow mocking for tests + case Valid(_) => + s"`auth` scheme: $schemeString is not allowed for Stackdriver instrumentation. Only `application_default` and `service_account` modes are valid.".invalidNel + case Invalid(error) => s"`auth` scheme is invalid. 
Errors: $error".invalidNel + } case Invalid(e) => e.invalid } - } - def apply(serviceConfig: Config, globalConfig: Config): StackdriverConfig = { val googleConfiguration: GoogleConfiguration = GoogleConfiguration(globalConfig) val cromwellInstanceId: ErrorOr[Option[String]] = globalConfig.getAs[String]("system.cromwell_id").validNel - val googleProject: ErrorOr[String] = validate[String] { serviceConfig.as[String]("google-project") } + val googleProject: ErrorOr[String] = validate[String](serviceConfig.as[String]("google-project")) val authScheme: ErrorOr[GoogleAuthMode] = validateAuth(serviceConfig.as[String]("auth"), googleConfiguration) val flushRate: ErrorOr[FiniteDuration] = validateFlushRate(serviceConfig.as[FiniteDuration]("flush-rate")) val cromwellInstanceRole: ErrorOr[Option[String]] = serviceConfig.getAs[String](CromwellInstanceRole).validNel val cromwellPerfTestCase: ErrorOr[Option[String]] = serviceConfig.getAs[String](CromwellPerfTest).validNel - (googleProject, authScheme, flushRate, cromwellInstanceId, cromwellInstanceRole, cromwellPerfTestCase).mapN({ (p, a, f, i, r, t) => - new StackdriverConfig(p, a, f, i, r, t) - }).valueOr(errors => throw AggregatedMessageException("Stackdriver instrumentation config is invalid. Error(s)", errors.toList)) + (googleProject, authScheme, flushRate, cromwellInstanceId, cromwellInstanceRole, cromwellPerfTestCase) + .mapN { (p, a, f, i, r, t) => + new StackdriverConfig(p, a, f, i, r, t) + } + .valueOr(errors => + throw AggregatedMessageException("Stackdriver instrumentation config is invalid. Error(s)", errors.toList) + ) } } diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala index 02dce2b78ad..28048becea8 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala @@ -20,8 +20,11 @@ import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ import scala.util.Try - -class StackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends Actor with StrictLogging { +class StackdriverInstrumentationServiceActor(serviceConfig: Config, + globalConfig: Config, + serviceRegistryActor: ActorRef +) extends Actor + with StrictLogging { implicit lazy val executionContext = context.dispatcher val stackdriverConfig = StackdriverConfig(serviceConfig, globalConfig) @@ -33,7 +36,8 @@ class StackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig var metricsMap = Map.empty[StackdriverMetric, Vector[Double]] // Instantiates a client - val metricServiceSettings = MetricServiceSettings.newBuilder.setCredentialsProvider(FixedCredentialsProvider.create(credentials)).build + val metricServiceSettings = + MetricServiceSettings.newBuilder.setCredentialsProvider(FixedCredentialsProvider.create(credentials)).build final val metricServiceClient = MetricServiceClient.create(metricServiceSettings) // Prepares the monitored resource descriptor @@ -43,16 +47,16 @@ class StackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig // Give the actor time to warm up, then start sending the metrics to Stackdriver at an interval context.system.scheduler.schedule(InitialDelay, 
stackdriverConfig.flushRate, self, SendStackdriverMetricCommand) - override def receive = { case SendStackdriverMetricCommand => sendMetricData() - case InstrumentationServiceMessage(cromwellMetric) => cromwellMetric match { - case CromwellTiming(bucket, value, _) => updateMetricMap(bucket, value.toMillis.toDouble, StackdriverGauge) - case CromwellGauge(bucket, value) => updateMetricMap(bucket, value.toDouble, StackdriverGauge) - case CromwellCount(bucket, value, _) => updateMetricMap(bucket, value.toDouble, StackdriverCumulative) - case CromwellIncrement(bucket) => updateMetricMap(bucket, metricValue = 1D, metricKind = StackdriverCumulative) - case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") - } + case InstrumentationServiceMessage(cromwellMetric) => + cromwellMetric match { + case CromwellTiming(bucket, value, _) => updateMetricMap(bucket, value.toMillis.toDouble, StackdriverGauge) + case CromwellGauge(bucket, value) => updateMetricMap(bucket, value.toDouble, StackdriverGauge) + case CromwellCount(bucket, value, _) => updateMetricMap(bucket, value.toDouble, StackdriverCumulative) + case CromwellIncrement(bucket) => updateMetricMap(bucket, metricValue = 1d, metricKind = StackdriverCumulative) + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") + } case ShutdownCommand => // flush out metrics (if any) before shut down sendMetricData() @@ -60,29 +64,24 @@ class StackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig context stop self } - private def generateMetricLabels(): Map[String, String] = { - def labelFromConfig(op: StackdriverConfig => Option[String], key: String): Option[(String, String)] = { + def labelFromConfig(op: StackdriverConfig => Option[String], key: String): Option[(String, String)] = op(stackdriverConfig).map(v => (key.replace("-", "_"), v)) - } labelFromConfig(_.cromwellInstanceIdentifier, CromwellInstanceIdentifier).toMap ++ labelFromConfig(_.cromwellInstanceRole, CromwellInstanceRole) ++ labelFromConfig(_.cromwellPerfTestCase, CromwellPerfTest) } - private def updateMetricMap(bucket: CromwellBucket, metricValue: Double, metricKind: StackdriverMetricKind): Unit = { val metricObj = StackdriverMetric(bucket.toStackdriverString, metricKind) if (metricsMap.contains(metricObj)) { val valueVector: Vector[Double] = metricsMap(metricObj) :+ metricValue metricsMap += metricObj -> valueVector - } - else metricsMap += metricObj -> Vector(metricValue) + } else metricsMap += metricObj -> Vector(metricValue) } - private def sendMetricData(): Unit = { metricsMap.foreach { case (key, value: Vector[Double]) => val dataPointVectorSum = value.sum @@ -91,29 +90,42 @@ class StackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig case StackdriverCumulative => dataPointVectorSum } - writeMetrics(key, dataPoint) recover { - case e => logger.error(s"Failed to send metrics to Stackdriver API for metric ${key.name} with value $dataPoint.", e) + writeMetrics(key, dataPoint) recover { case e => + logger.error(s"Failed to send metrics to Stackdriver API for metric ${key.name} with value $dataPoint.", e) } } metricsMap = Map.empty[StackdriverMetric, Vector[Double]] } - private def writeMetrics(metricObj: StackdriverMetric, value: Double): Try[Unit] = { - def timeInterval(metricKind: StackdriverMetricKind): TimeInterval = { + def timeInterval(metricKind: StackdriverMetricKind): TimeInterval = metricKind match { - case StackdriverGauge => 
TimeInterval.newBuilder.setEndTime(Timestamps.fromMillis(System.currentTimeMillis)).build - case StackdriverCumulative => TimeInterval.newBuilder.setStartTime(ActorCreationTime).setEndTime(Timestamps.fromMillis(System.currentTimeMillis)).build + case StackdriverGauge => + TimeInterval.newBuilder.setEndTime(Timestamps.fromMillis(System.currentTimeMillis)).build + case StackdriverCumulative => + TimeInterval.newBuilder + .setStartTime(ActorCreationTime) + .setEndTime(Timestamps.fromMillis(System.currentTimeMillis)) + .build } - } - def createTimeSeries(metricKind: StackdriverMetricKind, metric: Metric, resource: MonitoredResource, dataPointList: util.List[Point]): TimeSeries = { + def createTimeSeries(metricKind: StackdriverMetricKind, + metric: Metric, + resource: MonitoredResource, + dataPointList: util.List[Point] + ): TimeSeries = metricKind match { - case StackdriverGauge => TimeSeries.newBuilder.setMetric(metric).setResource(resource).addAllPoints(dataPointList).build - case StackdriverCumulative => TimeSeries.newBuilder.setMetric(metric).setResource(resource).setMetricKind(MetricDescriptor.MetricKind.CUMULATIVE).addAllPoints(dataPointList).build + case StackdriverGauge => + TimeSeries.newBuilder.setMetric(metric).setResource(resource).addAllPoints(dataPointList).build + case StackdriverCumulative => + TimeSeries.newBuilder + .setMetric(metric) + .setResource(resource) + .setMetricKind(MetricDescriptor.MetricKind.CUMULATIVE) + .addAllPoints(dataPointList) + .build } - } // Prepares an individual data point val interval = timeInterval(metricObj.kind) @@ -122,26 +134,25 @@ class StackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig val dataPointList: List[Point] = List[Point](dataPoint) // Prepares the metric descriptor - val metric: Metric = Metric.newBuilder.setType(s"$CustomMetricDomain/${metricObj.name}").putAllLabels(metricLabelsMap.asJava).build + val metric: Metric = + Metric.newBuilder.setType(s"$CustomMetricDomain/${metricObj.name}").putAllLabels(metricLabelsMap.asJava).build // Prepares the time series request val timeSeries = createTimeSeries(metricObj.kind, metric, monitoredResource, dataPointList.asJava) val timeSeriesList = List[TimeSeries](timeSeries) - val timeSeriesRequest = CreateTimeSeriesRequest.newBuilder.setName(projectName.toString).addAllTimeSeries(timeSeriesList.asJava).build + val timeSeriesRequest = + CreateTimeSeriesRequest.newBuilder.setName(projectName.toString).addAllTimeSeries(timeSeriesList.asJava).build // Writes time series data Try(sendTimeSeriesToStackdriver(timeSeriesRequest)) } - // This single line of code is a separate function to help with StackdriverInstrumentationActor tests - def sendTimeSeriesToStackdriver(timeSeriesRequest: CreateTimeSeriesRequest): Unit = { + def sendTimeSeriesToStackdriver(timeSeriesRequest: CreateTimeSeriesRequest): Unit = metricServiceClient.createTimeSeries(timeSeriesRequest) - } } - object StackdriverInstrumentationServiceActor { val CromwellMetricPrefix = List("cromwell") @@ -160,24 +171,24 @@ object StackdriverInstrumentationServiceActor { val InitialDelay = 1.minute - def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = Props(new StackdriverInstrumentationServiceActor(serviceConfig, globalConfig, serviceRegistryActor)) + def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = Props( + new StackdriverInstrumentationServiceActor(serviceConfig, globalConfig, serviceRegistryActor) + ) implicit class CromwellBucketEnhanced(val 
cromwellBucket: CromwellBucket) extends AnyVal { + /** * Transforms a CromwellBucket to a Stackdriver path */ def toStackdriverString = (CromwellMetricPrefix ++ cromwellBucket.prefix ++ - cromwellBucket.path.toList).mkString("/").replaceAll(" ", "_").replaceAll("\\[|\\]", "") + cromwellBucket.path.toList).mkString("/").replaceAll(" ", "_").replaceAll("\\[|\\]", "") } } - sealed trait StackdriverMetricKind object StackdriverGauge extends StackdriverMetricKind object StackdriverCumulative extends StackdriverMetricKind - case class StackdriverMetric(name: String, kind: StackdriverMetricKind) - object SendStackdriverMetricCommand diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/CromwellStatsD.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/CromwellStatsD.scala index c9450c4efcd..2a7b559387e 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/CromwellStatsD.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/CromwellStatsD.scala @@ -12,10 +12,9 @@ object CromwellStatsD { */ case class CromwellStatsD(hostname: String, port: Int) extends StatsD(hostname, port) { val MetricSuffixesToFilter = Set("max", "min", "p50", "p75", "p98", "p99", "p999", "mean_rate", "m5_rate", "m15_rate") - - override def send(name: String, value: String): Unit = { + + override def send(name: String, value: String): Unit = if (MetricSuffixesToFilter.exists(suffix => name.endsWith(suffix))) CromwellStatsD.logger.debug(s"Filtering metric with name [$name] and value [$value]") else super.send(name, value) - } } diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfig.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfig.scala index d67eb348bdf..9dd063cf1cd 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfig.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfig.scala @@ -15,16 +15,20 @@ case class StatsDConfig(hostname: String, port: Int, prefix: Option[String], flu object StatsDConfig { def apply(serviceConfig: Config): StatsDConfig = { - val hostname: ErrorOr[String] = validate[String] { serviceConfig.as[String]("hostname") } - val port: ErrorOr[Int] = validate[Int] { serviceConfig.as[Int]("port") } - val prefix: ErrorOr[Option[String]] = validate { serviceConfig.as[Option[String]]("prefix") } - val flushRate: ErrorOr[FiniteDuration] = validate[FiniteDuration] { serviceConfig.as[FiniteDuration]("flush-rate") } + val hostname: ErrorOr[String] = validate[String](serviceConfig.as[String]("hostname")) + val port: ErrorOr[Int] = validate[Int](serviceConfig.as[Int]("port")) + val prefix: ErrorOr[Option[String]] = validate(serviceConfig.as[Option[String]]("prefix")) + val flushRate: ErrorOr[FiniteDuration] = validate[FiniteDuration](serviceConfig.as[FiniteDuration]("flush-rate")) - (hostname, port, prefix, flushRate).mapN({ (h, p, n, f) => - new StatsDConfig(h, p, n, f) - }).valueOr(errors => throw new IllegalArgumentException with MessageAggregation { - override val exceptionContext = "StatsD config is invalid" - override val errorMessages = errors.toList - }) + (hostname, port, prefix, flushRate) + .mapN { (h, p, n, f) => + new StatsDConfig(h, p, n, f) + } + .valueOr(errors => + throw new IllegalArgumentException with MessageAggregation { + override val exceptionContext = "StatsD config is invalid" + override val errorMessages = errors.toList + } + 
) } } diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala index a90c7f66a8c..06accb59cf4 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala @@ -22,6 +22,7 @@ object StatsDInstrumentationServiceActor { Props(new StatsDInstrumentationServiceActor(serviceConfig, globalConfig, serviceRegistryActor)) implicit class CromwellBucketEnhanced(val cromwellBucket: CromwellBucket) extends AnyVal { + /** * Transforms a CromwellBucket to a StatsD path, optionally inserting a value between prefix and path */ @@ -45,7 +46,9 @@ object StatsDInstrumentationServiceActor { * If performance or statistics accuracy becomes a problem one might implement a more efficient solution * by making use of downsampling and / or multi metrics packets: https://github.com/etsy/statsd/blob/master/docs/metric_types.md */ -class StatsDInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends Actor with DefaultInstrumented { +class StatsDInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) + extends Actor + with DefaultInstrumented { val statsDConfig: StatsDConfig = StatsDConfig(serviceConfig) val cromwellInstanceIdOption: Option[String] = globalConfig.getAs[String]("system.cromwell_id") @@ -63,13 +66,14 @@ class StatsDInstrumentationServiceActor(serviceConfig: Config, globalConfig: Con .start(statsDConfig.flushRate.toMillis, TimeUnit.MILLISECONDS) override def receive: Receive = { - case InstrumentationServiceMessage(cromwellMetric) => cromwellMetric match { - case CromwellIncrement(bucket) => increment(bucket) - case CromwellCount(bucket, value, _) => updateCounter(bucket, value) - case CromwellGauge(bucket, value) => updateGauge(bucket, value) - case CromwellTiming(bucket, value, _) => updateTiming(bucket, value) - case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") - } + case InstrumentationServiceMessage(cromwellMetric) => + cromwellMetric match { + case CromwellIncrement(bucket) => increment(bucket) + case CromwellCount(bucket, value, _) => updateCounter(bucket, value) + case CromwellGauge(bucket, value) => updateGauge(bucket, value) + case CromwellTiming(bucket, value, _) => updateTiming(bucket, value) + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") + } case ShutdownCommand => context stop self } @@ -105,7 +109,7 @@ class StatsDInstrumentationServiceActor(serviceConfig: Config, globalConfig: Con val newGauge = !gaugeFunctions.containsKey(bucket) gaugeFunctions.put(bucket, value) if (newGauge) { - metrics.gauge(bucket.toStatsDString()){ gaugeFunctions.get(bucket) } + metrics.gauge(bucket.toStatsDString())(gaugeFunctions.get(bucket)) } () } @@ -113,7 +117,6 @@ class StatsDInstrumentationServiceActor(serviceConfig: Config, globalConfig: Con /** * Adds a new timing value for this bucket */ - private def updateTiming(bucket: CromwellBucket, value: FiniteDuration): Unit = { + private def updateTiming(bucket: CromwellBucket, value: FiniteDuration): Unit = metrics.timer(bucket.toStatsDString()).update(value) - } } diff --git a/services/src/main/scala/cromwell/services/keyvalue/KeyValueReadActor.scala b/services/src/main/scala/cromwell/services/keyvalue/KeyValueReadActor.scala index 7f166f02ffe..3a03906ac6f 100644 --- a/services/src/main/scala/cromwell/services/keyvalue/KeyValueReadActor.scala +++ b/services/src/main/scala/cromwell/services/keyvalue/KeyValueReadActor.scala @@ -11,9 +11,9 @@ import scala.concurrent.Future import scala.util.{Failure, Success} abstract class KeyValueReadActor(override val threshold: Int, override val serviceRegistryActor: ActorRef) - extends EnhancedThrottlerActor[CommandAndReplyTo[KvGet]] { + extends EnhancedThrottlerActor[CommandAndReplyTo[KvGet]] { override def receive = enhancedReceive.orElse(super.receive) - + override def processHead(head: CommandAndReplyTo[KvGet]) = instrumentedProcess { val processed = processGet(head.command) processed onComplete { @@ -24,12 +24,13 @@ abstract class KeyValueReadActor(override val threshold: Int, override val servi // In this case it's 1 because we processed 1 KvGet processed.map(_ => 1) } - + def processGet(get: KvGet): Future[KvResponse] - override protected lazy val instrumentationPath = KeyValueServiceActor.InstrumentationPath.concatNel(NonEmptyList.one("read")) + override protected lazy val instrumentationPath = + KeyValueServiceActor.InstrumentationPath.concatNel(NonEmptyList.one("read")) override protected lazy val instrumentationPrefix = InstrumentationPrefixes.ServicesPrefix - override def commandToData(snd: ActorRef) = { - case get: KvGet => CommandAndReplyTo(get, snd) + override def commandToData(snd: ActorRef) = { case get: KvGet => + CommandAndReplyTo(get, snd) } } diff --git a/services/src/main/scala/cromwell/services/keyvalue/KeyValueServiceActor.scala b/services/src/main/scala/cromwell/services/keyvalue/KeyValueServiceActor.scala index 96d7071e4f7..575209ffcee 100644 --- a/services/src/main/scala/cromwell/services/keyvalue/KeyValueServiceActor.scala +++ b/services/src/main/scala/cromwell/services/keyvalue/KeyValueServiceActor.scala @@ -48,10 +48,10 @@ trait KeyValueServiceActor extends Actor with GracefulShutdownHelper with ActorL case _: ActorInitializationException => Escalate case t => super.supervisorStrategy.decider.applyOrElse(t, (_: Any) => Escalate) } - + private val kvReadActor = context.actorOf(kvReadActorProps, "KvReadActor") private val kvWriteActor = context.actorOf(kvWriteActorProps, "KvWriteActor") - + override def receive = { case get: KvGet => kvReadActor forward get case put: KvPut => kvWriteActor forward put diff --git a/services/src/main/scala/cromwell/services/keyvalue/KeyValueWriteActor.scala b/services/src/main/scala/cromwell/services/keyvalue/KeyValueWriteActor.scala index 372f7fea271..799ccf7d9a0 100644 
--- a/services/src/main/scala/cromwell/services/keyvalue/KeyValueWriteActor.scala +++ b/services/src/main/scala/cromwell/services/keyvalue/KeyValueWriteActor.scala @@ -11,19 +11,22 @@ import scala.concurrent.Future import scala.concurrent.duration.FiniteDuration import scala.util.{Failure, Success} -abstract class KeyValueWriteActor(override val serviceRegistryActor: ActorRef, flushRate: FiniteDuration, batchSize: Int) - extends EnhancedBatchActor[CommandAndReplyTo[KvPut]](flushRate, batchSize) { +abstract class KeyValueWriteActor(override val serviceRegistryActor: ActorRef, + flushRate: FiniteDuration, + batchSize: Int +) extends EnhancedBatchActor[CommandAndReplyTo[KvPut]](flushRate, batchSize) { - override protected def process(data: NonEmptyVector[CommandAndReplyTo[KvPut]]) = instrumentedProcess { val processed = processPut(data.map(_.command).toVector) processed onComplete { - case Success(_) => data.toVector.foreach({ - case CommandAndReplyTo(command, replyTo) => replyTo ! KvPutSuccess(command) - }) - case Failure(f) => data.toVector.foreach({ - case CommandAndReplyTo(command, replyTo) => replyTo ! KvFailure(command, f) - }) + case Success(_) => + data.toVector.foreach { case CommandAndReplyTo(command, replyTo) => + replyTo ! KvPutSuccess(command) + } + case Failure(f) => + data.toVector.foreach { case CommandAndReplyTo(command, replyTo) => + replyTo ! KvFailure(command, f) + } } // This method should return how many "operations" have been performed to enable instrumentation of throughput // Here we've processed all the KvPuts in "data" @@ -31,13 +34,14 @@ abstract class KeyValueWriteActor(override val serviceRegistryActor: ActorRef, f } def processPut(put: Vector[KvPut]): Future[Unit] - + // EnhancedBatchActor overrides override def receive = enhancedReceive.orElse(super.receive) override protected def weightFunction(command: CommandAndReplyTo[KvPut]) = 1 - override protected def instrumentationPath = KeyValueServiceActor.InstrumentationPath.concatNel(NonEmptyList.one("write")) + override protected def instrumentationPath = + KeyValueServiceActor.InstrumentationPath.concatNel(NonEmptyList.one("write")) override protected def instrumentationPrefix = InstrumentationPrefixes.ServicesPrefix - override def commandToData(snd: ActorRef) = { - case put: KvPut => CommandAndReplyTo(put, snd) + override def commandToData(snd: ActorRef) = { case put: KvPut => + CommandAndReplyTo(put, snd) } } diff --git a/services/src/main/scala/cromwell/services/keyvalue/KvClient.scala b/services/src/main/scala/cromwell/services/keyvalue/KvClient.scala index c8ddbcb880f..ed130b35773 100644 --- a/services/src/main/scala/cromwell/services/keyvalue/KvClient.scala +++ b/services/src/main/scala/cromwell/services/keyvalue/KvClient.scala @@ -10,18 +10,18 @@ trait KvClient { this: Actor with ActorLogging => def serviceRegistryActor: ActorRef private[keyvalue] var currentKvClientRequests: Map[ScopedKey, Promise[KvResponse]] = Map.empty - final def makeKvRequest(actions: Seq[KvAction])(implicit ec: ExecutionContext): Future[Seq[KvResponse]] = { + final def makeKvRequest(actions: Seq[KvAction])(implicit ec: ExecutionContext): Future[Seq[KvResponse]] = if (actions.exists(action => currentKvClientRequests.contains(action.key))) { - val msg = "Programmer Error! KvClient does not support multiple KvActions active for the same ScopedKey concurrently. Mi Scusi!" + val msg = + "Programmer Error! KvClient does not support multiple KvActions active for the same ScopedKey concurrently. Mi Scusi!" 
log.error(msg) Future.failed(new RuntimeException(msg)) } else { createResponseSet(actions) } - } - final def kvClientReceive: Actor.Receive = { - case response: KvResponse => fulfillOrLog(response) + final def kvClientReceive: Actor.Receive = { case response: KvResponse => + fulfillOrLog(response) } private def createResponseSet(newActions: Seq[KvAction])(implicit ec: ExecutionContext) = { @@ -35,7 +35,10 @@ trait KvClient { this: Actor with ActorLogging => case Some(fulfilledPromise) => fulfilledPromise.success(response) currentKvClientRequests -= response.key - case None => log.error(s"Programmer Error: Got a KV response for a request that was never sent: $response. Did you use the KV store without KvClient? Current key set: ${currentKvClientRequests.keys.mkString("")}") + case None => + log.error( + s"Programmer Error: Got a KV response for a request that was never sent: $response. Did you use the KV store without KvClient? Current key set: ${currentKvClientRequests.keys + .mkString("")}" + ) } } - diff --git a/services/src/main/scala/cromwell/services/keyvalue/impl/BackendKeyValueDatabaseAccess.scala b/services/src/main/scala/cromwell/services/keyvalue/impl/BackendKeyValueDatabaseAccess.scala index 47fcbbb3a03..0bcf1c70fe4 100644 --- a/services/src/main/scala/cromwell/services/keyvalue/impl/BackendKeyValueDatabaseAccess.scala +++ b/services/src/main/scala/cromwell/services/keyvalue/impl/BackendKeyValueDatabaseAccess.scala @@ -17,27 +17,43 @@ object BackendKeyValueDatabaseAccess { trait BackendKeyValueDatabaseAccess { - def getBackendValueByKey(workflowId: WorkflowId, jobKey: KvJobKey, key: String) - (implicit ec: ExecutionContext): Future[Option[String]] = { - EngineServicesStore.engineDatabaseInterface.queryStoreValue( - workflowId.toString, jobKey.callFqn, jobKey.callIndex.fromIndex, jobKey.callAttempt, key) - } + def getBackendValueByKey(workflowId: WorkflowId, jobKey: KvJobKey, key: String)(implicit + ec: ExecutionContext + ): Future[Option[String]] = + EngineServicesStore.engineDatabaseInterface.queryStoreValue(workflowId.toString, + jobKey.callFqn, + jobKey.callIndex.fromIndex, + jobKey.callAttempt, + key + ) def updateBackendKeyValuePair(workflowId: WorkflowId, jobKey: KvJobKey, backendStoreKey: String, - backendStoreValue: String)(implicit ec: ExecutionContext, actorSystem: ActorSystem): Future[Unit] = { - val jobKeyValueEntry = JobKeyValueEntry(workflowId.toString, jobKey.callFqn, jobKey.callIndex.fromIndex, - jobKey.callAttempt, backendStoreKey, backendStoreValue) + backendStoreValue: String + )(implicit ec: ExecutionContext, actorSystem: ActorSystem): Future[Unit] = { + val jobKeyValueEntry = JobKeyValueEntry(workflowId.toString, + jobKey.callFqn, + jobKey.callIndex.fromIndex, + jobKey.callAttempt, + backendStoreKey, + backendStoreValue + ) withRetry(() => EngineServicesStore.engineDatabaseInterface.addJobKeyValueEntry(jobKeyValueEntry)) } - def updateBackendKeyValuePairs(pairs: Iterable[BackendKeyValuePair])(implicit ec: ExecutionContext, actorSystem: ActorSystem): Future[Unit] = { - val entries = pairs.map({ - case (workflowId, jobKey, backendStoreKey, backendStoreValue) => - JobKeyValueEntry(workflowId.toString, jobKey.callFqn, jobKey.callIndex.fromIndex, - jobKey.callAttempt, backendStoreKey, backendStoreValue) - }) + def updateBackendKeyValuePairs( + pairs: Iterable[BackendKeyValuePair] + )(implicit ec: ExecutionContext, actorSystem: ActorSystem): Future[Unit] = { + val entries = pairs.map { case (workflowId, jobKey, backendStoreKey, backendStoreValue) => + 
JobKeyValueEntry(workflowId.toString, + jobKey.callFqn, + jobKey.callIndex.fromIndex, + jobKey.callAttempt, + backendStoreKey, + backendStoreValue + ) + } withRetry(() => EngineServicesStore.engineDatabaseInterface.addJobKeyValueEntries(entries)) } } diff --git a/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueReadActor.scala b/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueReadActor.scala index 21bc2fa41c5..751d5a82293 100644 --- a/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueReadActor.scala +++ b/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueReadActor.scala @@ -5,13 +5,13 @@ import cromwell.services.keyvalue.KeyValueServiceActor.{KvKeyLookupFailed, KvPai import cromwell.services.keyvalue.{KeyValueReadActor, KeyValueServiceActor} object SqlKeyValueReadActor { - def props(threshold: Int, serviceRegistryActor: ActorRef) = { + def props(threshold: Int, serviceRegistryActor: ActorRef) = Props(new SqlKeyValueReadActor(threshold, serviceRegistryActor)) - } } class SqlKeyValueReadActor(threshold: Int, serviceRegistryActor: ActorRef) - extends KeyValueReadActor(threshold, serviceRegistryActor) with BackendKeyValueDatabaseAccess { + extends KeyValueReadActor(threshold, serviceRegistryActor) + with BackendKeyValueDatabaseAccess { override def processGet(get: KeyValueServiceActor.KvGet) = { val backendValue = getBackendValueByKey( get.key.workflowId, diff --git a/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueServiceActor.scala b/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueServiceActor.scala index 43edc284f42..b8311113a72 100644 --- a/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueServiceActor.scala +++ b/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueServiceActor.scala @@ -9,28 +9,27 @@ import net.ceedubs.ficus.Ficus._ import scala.concurrent.duration._ object SqlKeyValueServiceActor { - def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = Props(SqlKeyValueServiceActor(serviceConfig, globalConfig, serviceRegistryActor)).withDispatcher(ServiceDispatcher) + def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = + Props(SqlKeyValueServiceActor(serviceConfig, globalConfig, serviceRegistryActor)).withDispatcher(ServiceDispatcher) } final case class SqlKeyValueServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends KeyValueServiceActor { + extends KeyValueServiceActor { private lazy val dbFlushRate = serviceConfig.as[Option[FiniteDuration]]("db-flush-rate").getOrElse(5.seconds) private lazy val dbBatchSize = serviceConfig.as[Option[Int]]("db-batch-size").getOrElse(200) - override protected def kvReadActorProps = { + override protected def kvReadActorProps = SqlKeyValueReadActor.props( LoadConfig.KeyValueReadThreshold, serviceRegistryActor ) - } - override protected def kvWriteActorProps = { + override protected def kvWriteActorProps = SqlKeyValueWriteActor.props( LoadConfig.KeyValueWriteThreshold, serviceRegistryActor, dbFlushRate, dbBatchSize ) - } } diff --git a/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueWriteActor.scala b/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueWriteActor.scala index 0d25af74ff5..ebe6dc30a56 100644 --- a/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueWriteActor.scala +++ 
b/services/src/main/scala/cromwell/services/keyvalue/impl/SqlKeyValueWriteActor.scala @@ -6,22 +6,22 @@ import cromwell.services.keyvalue.{KeyValueServiceActor, KeyValueWriteActor} import scala.concurrent.duration.FiniteDuration object SqlKeyValueWriteActor { - def props(threshold: Int, serviceRegistryActor: ActorRef, flushRate: FiniteDuration, batchSize: Int) = { + def props(threshold: Int, serviceRegistryActor: ActorRef, flushRate: FiniteDuration, batchSize: Int) = Props(new SqlKeyValueWriteActor(threshold, serviceRegistryActor, flushRate, batchSize)) - } } -class SqlKeyValueWriteActor(override val threshold: Int, serviceRegistryActor: ActorRef, flushRate: FiniteDuration, batchSize: Int) - extends KeyValueWriteActor(serviceRegistryActor, flushRate, batchSize) with BackendKeyValueDatabaseAccess { - private implicit val system = context.system +class SqlKeyValueWriteActor(override val threshold: Int, + serviceRegistryActor: ActorRef, + flushRate: FiniteDuration, + batchSize: Int +) extends KeyValueWriteActor(serviceRegistryActor, flushRate, batchSize) + with BackendKeyValueDatabaseAccess { + implicit private val system = context.system override def processPut(puts: Vector[KeyValueServiceActor.KvPut]) = { - val pairs = puts.map({put => - (put.pair.key.workflowId, - put.pair.key.jobKey, - put.pair.key.key, - put.pair.value) - }) + val pairs = puts.map { put => + (put.pair.key.workflowId, put.pair.key.jobKey, put.pair.key.key, put.pair.value) + } updateBackendKeyValuePairs(pairs) } } diff --git a/services/src/main/scala/cromwell/services/loadcontroller/LoadControlledBatchActor.scala b/services/src/main/scala/cromwell/services/loadcontroller/LoadControlledBatchActor.scala index bb25febf5c0..30fc5935090 100644 --- a/services/src/main/scala/cromwell/services/loadcontroller/LoadControlledBatchActor.scala +++ b/services/src/main/scala/cromwell/services/loadcontroller/LoadControlledBatchActor.scala @@ -18,9 +18,13 @@ object LoadControlledBatchActor { trait LoadControlledBatchActor[C] { this: BatchActor[C] => def threshold: Int def serviceRegistryActor: ActorRef - private val path = if (routed) NonEmptyList.of(context.parent.path.name, self.path.name) else NonEmptyList.one(self.path.name) + private val path = + if (routed) NonEmptyList.of(context.parent.path.name, self.path.name) else NonEmptyList.one(self.path.name) - timers.startSingleTimer(LoadControlledBatchActorTimerKey, LoadControlledBatchActorTimerAction, LoadConfig.MonitoringFrequency) + timers.startSingleTimer(LoadControlledBatchActorTimerKey, + LoadControlledBatchActorTimerAction, + LoadConfig.MonitoringFrequency + ) private def weightToLoad(weight: Int) = if (weight > threshold) HighLoad else NormalLoad @@ -28,9 +32,11 @@ trait LoadControlledBatchActor[C] { this: BatchActor[C] => * Don't forget to chain this into your receive method to monitor the queue size: * override def receive = loadControlReceive.orElse(super.receive) */ - protected def loadControlReceive: Receive = { - case LoadControlledBatchActorTimerAction => - serviceRegistryActor ! LoadMetric(path, weightToLoad(stateData.weight)) - timers.startSingleTimer(LoadControlledBatchActorTimerKey, LoadControlledBatchActorTimerAction, LoadConfig.MonitoringFrequency) + protected def loadControlReceive: Receive = { case LoadControlledBatchActorTimerAction => + serviceRegistryActor ! 
LoadMetric(path, weightToLoad(stateData.weight)) + timers.startSingleTimer(LoadControlledBatchActorTimerKey, + LoadControlledBatchActorTimerAction, + LoadConfig.MonitoringFrequency + ) } } diff --git a/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala b/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala index 8dba38e7e99..8c4c878c4d2 100644 --- a/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala +++ b/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala @@ -28,21 +28,27 @@ object LoadControllerServiceActor { class LoadControllerServiceActor(serviceConfig: Config, globalConfig: Config, override val serviceRegistryActor: ActorRef - ) extends Actor - with ActorLogging with Listeners with Timers with CromwellInstrumentation { +) extends Actor + with ActorLogging + with Listeners + with Timers + with CromwellInstrumentation { private val controlFrequency = serviceConfig .as[Option[Duration]]("control-frequency") .getOrElse(5.seconds) - private [impl] var loadLevel: LoadLevel = NormalLoad - private [impl] var monitoredActors: Set[ActorRef] = Set.empty - private [impl] var loadMetrics: Map[ActorAndMetric, LoadLevel] = Map.empty + private[impl] var loadLevel: LoadLevel = NormalLoad + private[impl] var monitoredActors: Set[ActorRef] = Set.empty + private[impl] var loadMetrics: Map[ActorAndMetric, LoadLevel] = Map.empty override def receive = listenerManagement.orElse(controlReceive) override def preStart() = { if (controlFrequency.isFinite) - timers.startPeriodicTimer(LoadControlTimerKey, LoadControlTimerAction, controlFrequency.asInstanceOf[FiniteDuration]) + timers.startPeriodicTimer(LoadControlTimerKey, + LoadControlTimerAction, + controlFrequency.asInstanceOf[FiniteDuration] + ) else log.info("Load control disabled") super.preStart() @@ -74,7 +80,8 @@ class LoadControllerServiceActor(serviceConfig: Config, val backToNormal = loadLevel != NormalLoad && newLoadLevel == NormalLoad // If there's something to say, let it out ! 
if (escalates || backToNormal) { - if (newLoadLevel == HighLoad) log.info(s"The following components have reported being overloaded: $highLoadMetricsForLogging") + if (newLoadLevel == HighLoad) + log.info(s"The following components have reported being overloaded: $highLoadMetricsForLogging") gossip(newLoadLevel) } loadLevel = newLoadLevel @@ -83,14 +90,16 @@ class LoadControllerServiceActor(serviceConfig: Config, private def handleTerminated(terminee: ActorRef) = { monitoredActors = monitoredActors - terminee - loadMetrics = loadMetrics.view.filterKeys({ - case ActorAndMetric(actor, _) => actor != terminee - }).toMap + loadMetrics = loadMetrics.view.filterKeys { case ActorAndMetric(actor, _) => + actor != terminee + }.toMap } - private def highLoadMetricsForLogging = { - loadMetrics.collect({ - case (ActorAndMetric(_, metricPath), HighLoad) => metricPath.head - }).toSet.mkString(", ") - } + private def highLoadMetricsForLogging = + loadMetrics + .collect { case (ActorAndMetric(_, metricPath), HighLoad) => + metricPath.head + } + .toSet + .mkString(", ") } diff --git a/services/src/main/scala/cromwell/services/metadata/MetadataArchiveStatus.scala b/services/src/main/scala/cromwell/services/metadata/MetadataArchiveStatus.scala index 0f32733f666..6c3e978d746 100644 --- a/services/src/main/scala/cromwell/services/metadata/MetadataArchiveStatus.scala +++ b/services/src/main/scala/cromwell/services/metadata/MetadataArchiveStatus.scala @@ -30,13 +30,15 @@ object MetadataArchiveStatus { case Some(other) => withName(other) } - def withName(str: String): ErrorOr[MetadataArchiveStatus] = MetadataArchiveStatusValues.find(_.toString.equalsIgnoreCase(str)) match { - case Some(value) => value.validNel - case None => s"No such MetadataArchiveStatus: $str".invalidNel - } + def withName(str: String): ErrorOr[MetadataArchiveStatus] = + MetadataArchiveStatusValues.find(_.toString.equalsIgnoreCase(str)) match { + case Some(value) => value.validNel + case None => s"No such MetadataArchiveStatus: $str".invalidNel + } case object Unarchived extends MetadataArchiveStatus case object Archived extends MetadataArchiveStatus - case object ArchivedAndDeleted extends MetadataArchiveStatus // this means that original data is deleted from METADATA_ENTRY table + case object ArchivedAndDeleted + extends MetadataArchiveStatus // this means that original data is deleted from METADATA_ENTRY table case object ArchiveFailed extends MetadataArchiveStatus } diff --git a/services/src/main/scala/cromwell/services/metadata/MetadataJsonSupport.scala b/services/src/main/scala/cromwell/services/metadata/MetadataJsonSupport.scala index 42930f463d5..efc36c01d28 100644 --- a/services/src/main/scala/cromwell/services/metadata/MetadataJsonSupport.scala +++ b/services/src/main/scala/cromwell/services/metadata/MetadataJsonSupport.scala @@ -9,17 +9,23 @@ import common.util.TimeUtil._ object MetadataJsonSupport extends DefaultJsonProtocol { implicit object WorkflowIdJsonFormatter extends RootJsonFormat[WorkflowId] { def write(id: WorkflowId) = JsString(id.id.toString) - def read(value: JsValue) = throw new UnsupportedOperationException("Reading WorkflowId from JSON is currently unsupported") + def read(value: JsValue) = throw new UnsupportedOperationException( + "Reading WorkflowId from JSON is currently unsupported" + ) } implicit object MetadataTypeFormatter extends RootJsonFormat[MetadataType] { def write(t: MetadataType) = JsString(t.typeName) - def read(value: JsValue) = throw new UnsupportedOperationException("Reading MetadataType from 
JSON is currently unsupported") + def read(value: JsValue) = throw new UnsupportedOperationException( + "Reading MetadataType from JSON is currently unsupported" + ) } implicit object OffsetDateTimeFormatter extends RootJsonFormat[OffsetDateTime] { def write(offsetDateTime: OffsetDateTime) = new JsString(offsetDateTime.toUtcMilliString) - def read(value: JsValue) = throw new UnsupportedOperationException("Reading OffsetDateTime from JSON is currently unsupported") + def read(value: JsValue) = throw new UnsupportedOperationException( + "Reading OffsetDateTime from JSON is currently unsupported" + ) } implicit val MetadataValueFormat = jsonFormat2(MetadataValue.apply) diff --git a/services/src/main/scala/cromwell/services/metadata/MetadataQuery.scala b/services/src/main/scala/cromwell/services/metadata/MetadataQuery.scala index 1b38bd2f479..c88a142605b 100644 --- a/services/src/main/scala/cromwell/services/metadata/MetadataQuery.scala +++ b/services/src/main/scala/cromwell/services/metadata/MetadataQuery.scala @@ -17,9 +17,8 @@ object MetadataKey { val KeySeparator = ':' - def apply(workflowId: WorkflowId, jobKey: Option[MetadataJobKey], keys: String*): MetadataKey = { - new MetadataKey(workflowId, jobKey, compositeKey(keys:_*)) - } + def apply(workflowId: WorkflowId, jobKey: Option[MetadataJobKey], keys: String*): MetadataKey = + new MetadataKey(workflowId, jobKey, compositeKey(keys: _*)) def compositeKey(keys: String*): String = keys.toList.mkString(KeySeparator.toString) } @@ -27,18 +26,18 @@ object MetadataKey { object MetadataEvent { def apply(key: MetadataKey, value: MetadataValue) = new MetadataEvent(key, Option(value), OffsetDateTime.now) - def apply(key: MetadataKey, optionalValue: Option[MetadataValue]) = new MetadataEvent(key, optionalValue, OffsetDateTime.now) + def apply(key: MetadataKey, optionalValue: Option[MetadataValue]) = + new MetadataEvent(key, optionalValue, OffsetDateTime.now) def empty(key: MetadataKey) = new MetadataEvent(key, None, OffsetDateTime.now) - def labelsToMetadataEvents(labels: Labels, workflowId: WorkflowId): Iterable[MetadataEvent] = { + def labelsToMetadataEvents(labels: Labels, workflowId: WorkflowId): Iterable[MetadataEvent] = labels.value map { label => MetadataEvent( MetadataKey(workflowId, None, s"${WorkflowMetadataKeys.Labels}:${label.key}"), MetadataValue(label.value) ) } - } } sealed trait MetadataType { def typeName: String } @@ -47,13 +46,13 @@ case object MetadataInt extends MetadataType { override val typeName = "int" } case object MetadataNumber extends MetadataType { override val typeName = "number" } case object MetadataBoolean extends MetadataType { override val typeName = "boolean" } /* TODO Might be better to have MetadataNull be a value instead of a type ? 
- * We'd need to reorganize MetadataValue, maybe like spray does and have explicit case classes types - * instead of one generic MetadataValue(value, type) -*/ + * We'd need to reorganize MetadataValue, maybe like spray does and have explicit case classes types + * instead of one generic MetadataValue(value, type) + */ case object MetadataNull extends MetadataType { override val typeName = "null" } object MetadataValue { - def apply(value: Any): MetadataValue = { + def apply(value: Any): MetadataValue = Option(value).getOrElse("") match { case WomInteger(i) => new MetadataValue(i.toString, MetadataInt) case WomFloat(f) => new MetadataValue(f.toString, MetadataNumber) @@ -63,12 +62,12 @@ object MetadataValue { case WomOptionalValue(_, None) => new MetadataValue("", MetadataNull) case value: WomValue => new MetadataValue(value.valueString, MetadataString) case _: Int | Long | _: java.lang.Long | _: java.lang.Integer => new MetadataValue(value.toString, MetadataInt) - case _: Double | Float | _: java.lang.Double | _: java.lang.Float => new MetadataValue(value.toString, MetadataNumber) + case _: Double | Float | _: java.lang.Double | _: java.lang.Float => + new MetadataValue(value.toString, MetadataNumber) case _: Boolean | _: java.lang.Boolean => new MetadataValue(value.toString, MetadataBoolean) case offsetDateTime: OffsetDateTime => new MetadataValue(offsetDateTime.toUtcMilliString, MetadataString) case other => new MetadataValue(other.toString, MetadataString) } - } } object MetadataType { @@ -93,7 +92,8 @@ final case class MetadataEvent(key: MetadataKey, value: Option[MetadataValue], o final case class MetadataQueryJobKey(callFqn: String, index: Option[Int], attempt: Option[Int]) object MetadataQueryJobKey { - def forMetadataJobKey(jobKey: MetadataJobKey) = MetadataQueryJobKey(jobKey.callFqn, jobKey.index, Option(jobKey.attempt)) + def forMetadataJobKey(jobKey: MetadataJobKey) = + MetadataQueryJobKey(jobKey.callFqn, jobKey.index, Option(jobKey.attempt)) } case class MetadataQuery(workflowId: WorkflowId, @@ -101,16 +101,28 @@ case class MetadataQuery(workflowId: WorkflowId, key: Option[String], includeKeysOption: Option[NonEmptyList[String]], excludeKeysOption: Option[NonEmptyList[String]], - expandSubWorkflows: Boolean) + expandSubWorkflows: Boolean +) object MetadataQuery { - def forWorkflow(workflowId: WorkflowId) = MetadataQuery(workflowId, None, None, None, None, expandSubWorkflows = false) - - def forJob(workflowId: WorkflowId, jobKey: MetadataJobKey): MetadataQuery = { - MetadataQuery(workflowId, Option(MetadataQueryJobKey.forMetadataJobKey(jobKey)), None, None, None, expandSubWorkflows = false) - } - - def forKey(key: MetadataKey): MetadataQuery = { - MetadataQuery(key.workflowId, key.jobKey map MetadataQueryJobKey.forMetadataJobKey, Option(key.key), None, None, expandSubWorkflows = false) - } + def forWorkflow(workflowId: WorkflowId) = + MetadataQuery(workflowId, None, None, None, None, expandSubWorkflows = false) + + def forJob(workflowId: WorkflowId, jobKey: MetadataJobKey): MetadataQuery = + MetadataQuery(workflowId, + Option(MetadataQueryJobKey.forMetadataJobKey(jobKey)), + None, + None, + None, + expandSubWorkflows = false + ) + + def forKey(key: MetadataKey): MetadataQuery = + MetadataQuery(key.workflowId, + key.jobKey map MetadataQueryJobKey.forMetadataJobKey, + Option(key.key), + None, + None, + expandSubWorkflows = false + ) } diff --git a/services/src/main/scala/cromwell/services/metadata/MetadataService.scala 
b/services/src/main/scala/cromwell/services/metadata/MetadataService.scala index dd45cf5535d..57557a842b1 100644 --- a/services/src/main/scala/cromwell/services/metadata/MetadataService.scala +++ b/services/src/main/scala/cromwell/services/metadata/MetadataService.scala @@ -27,13 +27,15 @@ object MetadataService { labels: Option[Map[String, String]], parentWorkflowId: Option[String], rootWorkflowId: Option[String], - metadataArchiveStatus: MetadataArchiveStatus) + metadataArchiveStatus: MetadataArchiveStatus + ) final case class WorkflowQueryResponse(results: Seq[WorkflowQueryResult], totalResultsCount: Int) final case class QueryMetadata(page: Option[Int], pageSize: Option[Int], totalRecords: Option[Int]) trait MetadataServiceMessage + /** * Command Actions */ @@ -66,8 +68,13 @@ object MetadataService { serviceRegistryActor ! PutMetadataAction(events) } - def putMetadataWithRawKey(workflowId: WorkflowId, jobKey: Option[(FullyQualifiedName, Option[Int], Int)], keyValue: Map[String, Any]) = { - val metadataJobKey = jobKey map { case (fullyQualifiedName, index, attempt) => MetadataJobKey(fullyQualifiedName, index, attempt) } + def putMetadataWithRawKey(workflowId: WorkflowId, + jobKey: Option[(FullyQualifiedName, Option[Int], Int)], + keyValue: Map[String, Any] + ) = { + val metadataJobKey = jobKey map { case (fullyQualifiedName, index, attempt) => + MetadataJobKey(fullyQualifiedName, index, attempt) + } val events = keyValue map { case (key, value) => val metadataKey = MetadataKey(workflowId, metadataJobKey, key) @@ -85,8 +92,12 @@ object MetadataService { } val MaximumMetadataActionAttempts = 10 - final case class PutMetadataAction(events: Iterable[MetadataEvent], maxAttempts: Int = MaximumMetadataActionAttempts) extends MetadataWriteAction - final case class PutMetadataActionAndRespond(events: Iterable[MetadataEvent], replyTo: ActorRef, maxAttempts: Int = MaximumMetadataActionAttempts) extends MetadataWriteAction + final case class PutMetadataAction(events: Iterable[MetadataEvent], maxAttempts: Int = MaximumMetadataActionAttempts) + extends MetadataWriteAction + final case class PutMetadataActionAndRespond(events: Iterable[MetadataEvent], + replyTo: ActorRef, + maxAttempts: Int = MaximumMetadataActionAttempts + ) extends MetadataWriteAction final case object ListenToMetadataWriteActor extends MetadataServiceAction with ListenToMessage @@ -95,14 +106,13 @@ object MetadataService { def apply(workflowId: WorkflowId, includeKeysOption: Option[NonEmptyList[String]], excludeKeysOption: Option[NonEmptyList[String]], - expandSubWorkflows: Boolean): BuildWorkflowMetadataJsonAction = { + expandSubWorkflows: Boolean + ): BuildWorkflowMetadataJsonAction = GetMetadataAction(MetadataQuery(workflowId, None, None, includeKeysOption, excludeKeysOption, expandSubWorkflows)) - } } - final case class GetMetadataAction(key: MetadataQuery, - checkTotalMetadataRowNumberBeforeQuerying: Boolean = true) - extends BuildWorkflowMetadataJsonWithOverridableSourceAction { + final case class GetMetadataAction(key: MetadataQuery, checkTotalMetadataRowNumberBeforeQuerying: Boolean = true) + extends BuildWorkflowMetadataJsonWithOverridableSourceAction { override def workflowId: WorkflowId = key.workflowId } @@ -112,7 +122,8 @@ object MetadataService { final case class GetStatus(workflowId: WorkflowId) extends BuildWorkflowMetadataJsonAction final case class GetLabels(workflowId: WorkflowId) extends BuildWorkflowMetadataJsonAction final case class GetRootAndSubworkflowLabels(workflowId: WorkflowId) extends 
BuildWorkflowMetadataJsonAction - final case class QueryForWorkflowsMatchingParameters(parameters: Seq[(String, String)]) extends BuildMetadataJsonAction + final case class QueryForWorkflowsMatchingParameters(parameters: Seq[(String, String)]) + extends BuildMetadataJsonAction final case class WorkflowOutputs(workflowId: WorkflowId) extends BuildWorkflowMetadataJsonWithOverridableSourceAction final case class GetLogs(workflowId: WorkflowId) extends BuildWorkflowMetadataJsonWithOverridableSourceAction case object RefreshSummary extends MetadataServiceAction @@ -137,24 +148,33 @@ object MetadataService { def reason: Throwable } - final case class MetadataLookupStreamSuccess(id: WorkflowId, result: DatabasePublisher[MetadataEntry]) extends MetadataServiceResponse + final case class MetadataLookupStreamSuccess(id: WorkflowId, result: DatabasePublisher[MetadataEntry]) + extends MetadataServiceResponse final case class MetadataLookupStreamFailed(id: WorkflowId, reason: Throwable) extends MetadataServiceResponse - final case class MetadataLookupFailedTooLargeResponse(query: MetadataQuery, metadataSizeRows: Int) extends MetadataServiceResponse + final case class MetadataLookupFailedTooLargeResponse(query: MetadataQuery, metadataSizeRows: Int) + extends MetadataServiceResponse final case class MetadataLookupFailedTimeoutResponse(query: MetadataQuery) extends MetadataServiceResponse final case class FetchFailedTasksTimeoutResponse(workflowId: WorkflowId) extends MetadataServiceResponse - final case class MetadataLookupResponse(query: MetadataQuery, eventList: Seq[MetadataEvent]) extends MetadataServiceResponse + final case class MetadataLookupResponse(query: MetadataQuery, eventList: Seq[MetadataEvent]) + extends MetadataServiceResponse final case class FetchFailedJobsMetadataLookupResponse(events: Seq[MetadataEvent]) extends MetadataServiceResponse - final case class FetchFailedJobsMetadataLookupFailed(workflowId: WorkflowId, reason: Throwable) extends MetadataServiceFailure - final case class MetadataServiceKeyLookupFailed(query: MetadataQuery, reason: Throwable) extends MetadataServiceFailure + final case class FetchFailedJobsMetadataLookupFailed(workflowId: WorkflowId, reason: Throwable) + extends MetadataServiceFailure + final case class MetadataServiceKeyLookupFailed(query: MetadataQuery, reason: Throwable) + extends MetadataServiceFailure final case class StatusLookupResponse(workflowId: WorkflowId, status: WorkflowState) extends MetadataServiceResponse final case class StatusLookupFailed(workflowId: WorkflowId, reason: Throwable) extends MetadataServiceFailure - final case class LabelLookupResponse(workflowId: WorkflowId, labels: Map[String, String]) extends MetadataServiceResponse + final case class LabelLookupResponse(workflowId: WorkflowId, labels: Map[String, String]) + extends MetadataServiceResponse final case class LabelLookupFailed(workflowId: WorkflowId, reason: Throwable) extends MetadataServiceFailure - final case class RootAndSubworkflowLabelsLookupResponse(rootWorkflowId: WorkflowId, labels: Map[WorkflowId, Map[String, String]]) extends MetadataServiceResponse - final case class RootAndSubworkflowLabelsLookupFailed(rootWorkflowId: WorkflowId, reason: Throwable) extends MetadataServiceFailure + final case class RootAndSubworkflowLabelsLookupResponse(rootWorkflowId: WorkflowId, + labels: Map[WorkflowId, Map[String, String]] + ) extends MetadataServiceResponse + final case class RootAndSubworkflowLabelsLookupFailed(rootWorkflowId: WorkflowId, reason: Throwable) + extends 
MetadataServiceFailure final case class WorkflowOutputsResponse(id: WorkflowId, outputs: Seq[MetadataEvent]) extends MetadataServiceResponse final case class WorkflowOutputsFailure(id: WorkflowId, reason: Throwable) extends MetadataServiceFailure @@ -163,7 +183,8 @@ object MetadataService { final case class LogsFailure(id: WorkflowId, reason: Throwable) extends MetadataServiceFailure final case class MetadataWriteSuccess(events: Iterable[MetadataEvent]) extends MetadataServiceResponse - final case class MetadataWriteFailure(reason: Throwable, events: Iterable[MetadataEvent]) extends MetadataServiceFailure + final case class MetadataWriteFailure(reason: Throwable, events: Iterable[MetadataEvent]) + extends MetadataServiceFailure sealed abstract class WorkflowValidationResponse extends MetadataServiceResponse case object RecognizedWorkflowId extends WorkflowValidationResponse @@ -171,26 +192,32 @@ object MetadataService { final case class FailedToCheckWorkflowId(cause: Throwable) extends WorkflowValidationResponse sealed abstract class FetchWorkflowArchiveStatusAndEndTimeResponse extends MetadataServiceResponse - final case class WorkflowMetadataArchivedStatusAndEndTime(archiveStatus: MetadataArchiveStatus, endTime: Option[OffsetDateTime]) extends FetchWorkflowArchiveStatusAndEndTimeResponse - final case class FailedToGetArchiveStatusAndEndTime(reason: Throwable) extends FetchWorkflowArchiveStatusAndEndTimeResponse + final case class WorkflowMetadataArchivedStatusAndEndTime(archiveStatus: MetadataArchiveStatus, + endTime: Option[OffsetDateTime] + ) extends FetchWorkflowArchiveStatusAndEndTimeResponse + final case class FailedToGetArchiveStatusAndEndTime(reason: Throwable) + extends FetchWorkflowArchiveStatusAndEndTimeResponse sealed abstract class MetadataQueryResponse extends MetadataServiceResponse - final case class WorkflowQuerySuccess(response: WorkflowQueryResponse, meta: Option[QueryMetadata]) extends MetadataQueryResponse + final case class WorkflowQuerySuccess(response: WorkflowQueryResponse, meta: Option[QueryMetadata]) + extends MetadataQueryResponse final case class WorkflowQueryFailure(reason: Throwable) extends MetadataQueryResponse - private implicit class EnhancedWomTraversable(val womValues: Iterable[WomValue]) extends AnyVal { + implicit private class EnhancedWomTraversable(val womValues: Iterable[WomValue]) extends AnyVal { def toEvents(metadataKey: MetadataKey): List[MetadataEvent] = if (womValues.isEmpty) { List(MetadataEvent.empty(metadataKey.copy(key = s"${metadataKey.key}[]"))) } else { - womValues.toList - .zipWithIndex - .flatMap { case (value, index) => womValueToMetadataEvents(metadataKey.copy(key = s"${metadataKey.key}[$index]"), value) } + womValues.toList.zipWithIndex + .flatMap { case (value, index) => + womValueToMetadataEvents(metadataKey.copy(key = s"${metadataKey.key}[$index]"), value) + } } } private def toPrimitiveEvent(metadataKey: MetadataKey, valueName: String)(value: Option[Any]) = value match { case Some(v) => MetadataEvent(metadataKey.copy(key = s"${metadataKey.key}:$valueName"), MetadataValue(v)) - case None => MetadataEvent(metadataKey.copy(key = s"${metadataKey.key}:$valueName"), MetadataValue("", MetadataNull)) + case None => + MetadataEvent(metadataKey.copy(key = s"${metadataKey.key}:$valueName"), MetadataValue("", MetadataNull)) } def womValueToMetadataEvents(metadataKey: MetadataKey, womValue: WomValue): Iterable[MetadataEvent] = womValue match { @@ -199,13 +226,17 @@ object MetadataService { if (valueMap.isEmpty) { 
List(MetadataEvent.empty(metadataKey)) } else { - valueMap.toList flatMap { case (key, value) => womValueToMetadataEvents(metadataKey.copy(key = metadataKey.key + s":${key.valueString}"), value) } + valueMap.toList flatMap { case (key, value) => + womValueToMetadataEvents(metadataKey.copy(key = metadataKey.key + s":${key.valueString}"), value) + } } case objectLike: WomObjectLike => if (objectLike.values.isEmpty) { List(MetadataEvent.empty(metadataKey)) } else { - objectLike.values.toList flatMap { case (key, value) => womValueToMetadataEvents(metadataKey.copy(key = metadataKey.key + s":$key"), value) } + objectLike.values.toList flatMap { case (key, value) => + womValueToMetadataEvents(metadataKey.copy(key = metadataKey.key + s":$key"), value) + } } case WomOptionalValue(_, Some(value)) => womValueToMetadataEvents(metadataKey, value) @@ -214,7 +245,8 @@ object MetadataService { womValueToMetadataEvents(metadataKey.copy(key = metadataKey.key + ":right"), right) case populated: WomMaybePopulatedFile => import mouse.all._ - val secondaryFiles = populated.secondaryFiles.toEvents(metadataKey.copy(key = s"${metadataKey.key}:secondaryFiles")) + val secondaryFiles = + populated.secondaryFiles.toEvents(metadataKey.copy(key = s"${metadataKey.key}:secondaryFiles")) List( MetadataEvent(metadataKey.copy(key = s"${metadataKey.key}:class"), MetadataValue("File")), @@ -226,7 +258,8 @@ object MetadataService { ) ++ secondaryFiles case listedDirectory: WomMaybeListedDirectory => import mouse.all._ - val listing = listedDirectory.listingOption.toList.flatten.toEvents(metadataKey.copy(key = s"${metadataKey.key}:listing")) + val listing = + listedDirectory.listingOption.toList.flatten.toEvents(metadataKey.copy(key = s"${metadataKey.key}:listing")) List( MetadataEvent(metadataKey.copy(key = s"${metadataKey.key}:class"), MetadataValue("Directory")), listedDirectory.valueOption |> toPrimitiveEvent(metadataKey, "location") @@ -235,13 +268,22 @@ object MetadataService { List(MetadataEvent(metadataKey, MetadataValue(value))) } - def throwableToMetadataEvents(metadataKey: MetadataKey, t: Throwable, failureIndex: Int = Random.nextInt(Int.MaxValue)): List[MetadataEvent] = { - val emptyCauseList = List(MetadataEvent.empty(metadataKey.copy(key = metadataKey.key + s"[$failureIndex]:causedBy[]"))) + def throwableToMetadataEvents(metadataKey: MetadataKey, + t: Throwable, + failureIndex: Int = Random.nextInt(Int.MaxValue) + ): List[MetadataEvent] = { + val emptyCauseList = List( + MetadataEvent.empty(metadataKey.copy(key = metadataKey.key + s"[$failureIndex]:causedBy[]")) + ) val metadataKeyAndFailureIndex = s"${metadataKey.key}[$failureIndex]" t match { case aggregation: ThrowableAggregation => - val message = List(MetadataEvent(metadataKey.copy(key = s"$metadataKeyAndFailureIndex:message"), MetadataValue(aggregation.exceptionContext))) + val message = List( + MetadataEvent(metadataKey.copy(key = s"$metadataKeyAndFailureIndex:message"), + MetadataValue(aggregation.exceptionContext) + ) + ) val indexedCauses = aggregation.throwables.toList.zipWithIndex val indexedCauseEvents = if (indexedCauses.nonEmpty) { indexedCauses flatMap { case (cause, index) => @@ -253,7 +295,11 @@ object MetadataService { } message ++ indexedCauseEvents case aggregation: MessageAggregation => - val message = List(MetadataEvent(metadataKey.copy(key = s"$metadataKeyAndFailureIndex:message"), MetadataValue(aggregation.exceptionContext))) + val message = List( + MetadataEvent(metadataKey.copy(key = s"$metadataKeyAndFailureIndex:message"), + 
MetadataValue(aggregation.exceptionContext) + ) + ) val indexedCauses = aggregation.errorMessages.toList.zipWithIndex val indexedCauseEvents = if (indexedCauses.nonEmpty) { indexedCauses flatMap { case (cause, index) => @@ -267,11 +313,14 @@ object MetadataService { message ++ indexedCauseEvents case _ => - val message = List(MetadataEvent(metadataKey.copy(key = s"$metadataKeyAndFailureIndex:message"), MetadataValue(t.getMessage))) + val message = List( + MetadataEvent(metadataKey.copy(key = s"$metadataKeyAndFailureIndex:message"), MetadataValue(t.getMessage)) + ) val causeKey = metadataKey.copy(key = s"$metadataKeyAndFailureIndex:causedBy") - val cause = Option(t.getCause) map { cause => throwableToMetadataEvents(causeKey, cause, 0) } getOrElse emptyCauseList + val cause = Option(t.getCause) map { cause => + throwableToMetadataEvents(causeKey, cause, 0) + } getOrElse emptyCauseList message ++ cause } } } - diff --git a/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala b/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala index 726361f2cfc..ce6444128fc 100644 --- a/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala +++ b/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala @@ -65,7 +65,7 @@ object WorkflowQueryKey { override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[List[String]] = { val values = valuesFromMap(grouped).toList - val nels:List[data.ValidatedNel[String,String]] = values map { + val nels: List[data.ValidatedNel[String, String]] = values map { case Patterns.WorkflowName(n) => n.validNel[String] case v => v.invalidNel[String] } @@ -77,18 +77,16 @@ object WorkflowQueryKey { override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[List[Label]] = { val values = valuesFromMap(grouped).toList - def validateLabelRegex(labelKeyValue: String): ErrorOr[Label] = { + def validateLabelRegex(labelKeyValue: String): ErrorOr[Label] = labelKeyValue.split("\\:", 2) match { case Array(k, v) => Label.validateLabel(k, v) case _ => labelKeyValue.invalidNel } - } val nels: List[ErrorOr[Label]] = values map validateLabelRegex sequenceListOfValidatedNels("Label values do not match allowed pattern label-key:label-value", nels) } } - case object LabelAndKeyValue extends LabelLikeKeyValue { override val name = "Label" } @@ -111,7 +109,8 @@ object WorkflowQueryKey { override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[List[String]] = { val values = valuesFromMap(grouped).toList val nels = values map { v => - if (Try(WorkflowId.fromString(v.toLowerCase.capitalize)).isSuccess) v.validNel[String] else s"invalid Id value: '$v'".invalidNel[String] + if (Try(WorkflowId.fromString(v.toLowerCase.capitalize)).isSuccess) v.validNel[String] + else s"invalid Id value: '$v'".invalidNel[String] } sequenceListOfValidatedNels("Id values do match allowed workflow id pattern", nels) } @@ -150,11 +149,11 @@ object WorkflowQueryKey { /* The inclusion of `WorkflowMetadataKeys.ParentWorkflowId` is for backwards compatibility. 
As of #4381 parentWorkflowId is always included, but we did not want to break old automated queries - */ + */ val allowedValues = Seq(WorkflowMetadataKeys.Labels, WorkflowMetadataKeys.ParentWorkflowId) - val nels: List[ErrorOr[String]] = values map { v => { + val nels: List[ErrorOr[String]] = values map { v => allowedValues.contains(v).fold(v.validNel[String], v.invalidNel[String]) - }} + } sequenceListOfValidatedNels(s"Keys should be from $allowedValues. Unrecognized values", nels) } } @@ -177,13 +176,12 @@ object WorkflowQueryKey { sealed trait WorkflowQueryKey[T] { def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[T] def name: String - def valuesFromMap(grouped: Map[String, Seq[(String, String)]]): Seq[String] = { + def valuesFromMap(grouped: Map[String, Seq[(String, String)]]): Seq[String] = grouped.getOrElse(name, Seq.empty) map { _._2 } - } } sealed trait DateTimeWorkflowQueryKey extends WorkflowQueryKey[Option[OffsetDateTime]] { - override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[Option[OffsetDateTime]] = { + override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[Option[OffsetDateTime]] = valuesFromMap(grouped).toList match { case vs if vs.lengthCompare(1) > 0 => s"Found ${vs.size} values for key '$name' but at most one is allowed.".invalidNel[Option[OffsetDateTime]] @@ -195,11 +193,11 @@ sealed trait DateTimeWorkflowQueryKey extends WorkflowQueryKey[Option[OffsetDate } case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } - } def displayName: String } sealed trait SeqWorkflowQueryKey[A] extends WorkflowQueryKey[Seq[A]] { + /** `sequence` the `List[ErrorOr[A]]` to a single `ErrorOr[List[A]]` */ protected def sequenceListOfValidatedNels(prefix: String, errorOrList: List[ErrorOr[A]]): ErrorOr[List[A]] = { val errorOr = errorOrList.sequence[ErrorOr, A] @@ -210,38 +208,36 @@ sealed trait SeqWorkflowQueryKey[A] extends WorkflowQueryKey[Seq[A]] { } sealed trait IntWorkflowQueryKey extends WorkflowQueryKey[Option[Int]] { - override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[Option[Int]] = { + override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[Option[Int]] = valuesFromMap(grouped).toList match { case vs if vs.lengthCompare(1) > 0 => s"Found ${vs.size} values for key '$name' but at most one is allowed.".invalidNel[Option[Int]] case Nil => None.validNel case v :: Nil => Try(v.toInt) match { - case Success(intVal) => if (intVal > 0) Option(intVal).validNel else s"Integer value not greater than 0".invalidNel[Option[Int]] + case Success(intVal) => + if (intVal > 0) Option(intVal).validNel else s"Integer value not greater than 0".invalidNel[Option[Int]] case _ => s"Value given for $displayName does not parse as a integer: $v".invalidNel[Option[Int]] } case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } - } def displayName: String } sealed trait BooleanWorkflowQueryKey extends WorkflowQueryKey[Boolean] { - override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[Boolean] = { + override def validate(grouped: Map[String, Seq[(String, String)]]): ErrorOr[Boolean] = valuesFromMap(grouped).toList match { - case vs if vs.lengthCompare(1) > 0 => s"Found ${vs.size} values for key '$name' but at most one is allowed.".invalidNel[Boolean] + case vs if vs.lengthCompare(1) > 0 => + s"Found ${vs.size} values for key '$name' but at most one is allowed.".invalidNel[Boolean] case Nil => defaultBooleanValue.validNel - case v :: Nil => { + case v :: Nil => Try(v.toBoolean) match { case Success(bool) => bool.validNel case _ => s"Value given for $displayName does not parse as a boolean: $v".invalidNel[Boolean] } - } case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } - } - def displayName: String + def displayName: String def defaultBooleanValue: Boolean } - diff --git a/services/src/main/scala/cromwell/services/metadata/WorkflowQueryParameters.scala b/services/src/main/scala/cromwell/services/metadata/WorkflowQueryParameters.scala index 55ebe102211..020bdfea5d9 100644 --- a/services/src/main/scala/cromwell/services/metadata/WorkflowQueryParameters.scala +++ b/services/src/main/scala/cromwell/services/metadata/WorkflowQueryParameters.scala @@ -11,26 +11,31 @@ import cromwell.core.labels.Label import cromwell.services.metadata.WorkflowQueryKey._ import common.validation.ErrorOr._ -case class WorkflowQueryParameters private(statuses: Set[String], - names: Set[String], - ids: Set[WorkflowId], - labelsAnd: Set[Label], - labelsOr: Set[Label], - excludeLabelsAnd: Set[Label], - excludeLabelsOr: Set[Label], - submissionTime: Option[OffsetDateTime], - startDate: Option[OffsetDateTime], - endDate: Option[OffsetDateTime], - metadataArchiveStatus: Set[MetadataArchiveStatus], - page: Option[Int], - pageSize: Option[Int], - additionalQueryResultFields: Set[String], - includeSubworkflows: Boolean, - newestFirst: Boolean) +case class WorkflowQueryParameters private (statuses: Set[String], + names: Set[String], + ids: Set[WorkflowId], + labelsAnd: Set[Label], + labelsOr: Set[Label], + excludeLabelsAnd: Set[Label], + excludeLabelsOr: Set[Label], + submissionTime: Option[OffsetDateTime], + startDate: Option[OffsetDateTime], + endDate: Option[OffsetDateTime], + metadataArchiveStatus: Set[MetadataArchiveStatus], + page: Option[Int], + pageSize: Option[Int], + additionalQueryResultFields: Set[String], + includeSubworkflows: Boolean, + newestFirst: Boolean +) object WorkflowQueryParameters { - private def validateDate1BeforeDate2(date1: Option[OffsetDateTime], date2: Option[OffsetDateTime], date1Type: String, date2Type: String): ErrorOr[Unit] = { + private def validateDate1BeforeDate2(date1: Option[OffsetDateTime], + date2: Option[OffsetDateTime], + date1Type: String, + date2Type: String + ): ErrorOr[Unit] = { // Invert the notion of success/failure here to only "successfully" generate an error message if // both date1 and date2 dates have been specified and date1 is after date2. val date1AfterDate2Error = for { @@ -62,7 +67,7 @@ object WorkflowQueryParameters { * Run the validation logic over the specified raw parameters, creating a `WorkflowQueryParameters` if all * validation succeeds, otherwise accumulate all validation messages within the `ValidationNel`. 
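   * (`ErrorOr` is a cats `ValidatedNel`, so the per-key validations below are combined
   * applicatively with `mapN`: all invalid parameters are reported together rather than
   * failing fast on the first error.)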
*/ - private [metadata] def runValidation(rawParameters: Seq[(String, String)]): ErrorOr[WorkflowQueryParameters] = { + private[metadata] def runValidation(rawParameters: Seq[(String, String)]): ErrorOr[WorkflowQueryParameters] = { val onlyRecognizedKeysValidation = validateOnlyRecognizedKeys(rawParameters) @@ -74,16 +79,23 @@ object WorkflowQueryParameters { val submissionTimeValidation = SubmissionTime.validate(valuesByCanonicalCapitalization) val statusesValidation: ErrorOr[Set[String]] = Status.validate(valuesByCanonicalCapitalization).map(_.toSet) val namesValidation: ErrorOr[Set[String]] = Name.validate(valuesByCanonicalCapitalization).map(_.toSet) - val workflowIdsValidation: ErrorOr[Set[WorkflowId]] = WorkflowQueryKey.Id.validate(valuesByCanonicalCapitalization).map(ids => (ids map WorkflowId.fromString).toSet) - val labelsAndValidation: ErrorOr[Set[Label]] = WorkflowQueryKey.LabelAndKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) - val labelsOrValidation: ErrorOr[Set[Label]] = WorkflowQueryKey.LabelOrKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) - val excludeLabelsAndValidation: ErrorOr[Set[Label]] = WorkflowQueryKey.ExcludeLabelAndKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) - val excludeLabelsOrValidation: ErrorOr[Set[Label]] = WorkflowQueryKey.ExcludeLabelOrKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) + val workflowIdsValidation: ErrorOr[Set[WorkflowId]] = + WorkflowQueryKey.Id.validate(valuesByCanonicalCapitalization).map(ids => (ids map WorkflowId.fromString).toSet) + val labelsAndValidation: ErrorOr[Set[Label]] = + WorkflowQueryKey.LabelAndKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) + val labelsOrValidation: ErrorOr[Set[Label]] = + WorkflowQueryKey.LabelOrKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) + val excludeLabelsAndValidation: ErrorOr[Set[Label]] = + WorkflowQueryKey.ExcludeLabelAndKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) + val excludeLabelsOrValidation: ErrorOr[Set[Label]] = + WorkflowQueryKey.ExcludeLabelOrKeyValue.validate(valuesByCanonicalCapitalization).map(_.toSet) val pageValidation = Page.validate(valuesByCanonicalCapitalization) val pageSizeValidation = PageSize.validate(valuesByCanonicalCapitalization) - val additionalQueryResultFieldsValidation: ErrorOr[Set[String]] = AdditionalQueryResultFields.validate(valuesByCanonicalCapitalization).map(_.toSet) + val additionalQueryResultFieldsValidation: ErrorOr[Set[String]] = + AdditionalQueryResultFields.validate(valuesByCanonicalCapitalization).map(_.toSet) val includeSubworkflowsValidation = IncludeSubworkflows.validate(valuesByCanonicalCapitalization) - val metadataArchiveStatusValidation: ErrorOr[Set[MetadataArchiveStatus]] = WorkflowQueryKey.MetadataArchiveStatus.validate(valuesByCanonicalCapitalization).map(_.toSet) + val metadataArchiveStatusValidation: ErrorOr[Set[MetadataArchiveStatus]] = + WorkflowQueryKey.MetadataArchiveStatus.validate(valuesByCanonicalCapitalization).map(_.toSet) val newestFirstValidation = NewestFirst.validate(valuesByCanonicalCapitalization) // Only validate start before end if both of the individual date parsing validations have already succeeded. 
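// Illustrative sketch, editorial and not part of this patch: the accumulation behaviour that
// runValidation relies on. The names below are hypothetical; assumes cats' Validated syntax
// (Cromwell's ErrorOr[A] is a ValidatedNel[String, A]).
import cats.data.ValidatedNel
import cats.syntax.all._

def validateName(raw: String): ValidatedNel[String, String] =
  if (raw.nonEmpty) raw.validNel else "Name must not be empty".invalidNel

def validatePageSize(raw: String): ValidatedNel[String, Int] =
  raw.toIntOption.filter(_ > 0).toValidNel(s"Integer value not greater than 0: $raw")

// mapN applies its function only when every input is Valid; otherwise the error messages of
// all failing inputs accumulate into a single NonEmptyList, so one bad query parameter does
// not mask another.
val combined: ValidatedNel[String, (String, Int)] =
  (validateName(""), validatePageSize("abc")).mapN((name, pageSize) => (name, pageSize))
// combined == Invalid(NonEmptyList.of(
//   "Name must not be empty", "Integer value not greater than 0: abc"))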
@@ -99,26 +111,45 @@ object WorkflowQueryParameters { } (onlyRecognizedKeysValidation, - startBeforeEndValidation, - submissionBeforeStartValidation, - statusesValidation, - namesValidation, - workflowIdsValidation, - labelsAndValidation, - labelsOrValidation, - excludeLabelsAndValidation, - excludeLabelsOrValidation, - submissionTimeValidation, - startDateValidation, - endDateValidation, - pageValidation, - pageSizeValidation, - additionalQueryResultFieldsValidation, - includeSubworkflowsValidation, - metadataArchiveStatusValidation, - newestFirstValidation + startBeforeEndValidation, + submissionBeforeStartValidation, + statusesValidation, + namesValidation, + workflowIdsValidation, + labelsAndValidation, + labelsOrValidation, + excludeLabelsAndValidation, + excludeLabelsOrValidation, + submissionTimeValidation, + startDateValidation, + endDateValidation, + pageValidation, + pageSizeValidation, + additionalQueryResultFieldsValidation, + includeSubworkflowsValidation, + metadataArchiveStatusValidation, + newestFirstValidation ) mapN { - (_, _, _, statuses, names, ids, labelsAnd, labelsOr, excludeLabelsAnd, excludeLabelsOr, submissionTime, startDate, endDate, page, pageSize, additionalQueryResultFields, includeSubworkflows, metadataArchiveStatus, newestFirst) => + (_, + _, + _, + statuses, + names, + ids, + labelsAnd, + labelsOr, + excludeLabelsAnd, + excludeLabelsOr, + submissionTime, + startDate, + endDate, + page, + pageSize, + additionalQueryResultFields, + includeSubworkflows, + metadataArchiveStatus, + newestFirst + ) => WorkflowQueryParameters( statuses, names, @@ -140,10 +171,9 @@ object WorkflowQueryParameters { } } - def apply(rawParameters: Seq[(String, String)]): WorkflowQueryParameters = { + def apply(rawParameters: Seq[(String, String)]): WorkflowQueryParameters = runValidation(rawParameters) match { case Valid(queryParameters) => queryParameters case Invalid(x) => throw new IllegalArgumentException(x.toList.mkString("\n")) } - } } diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala index 0069ffe8821..13bb31463da 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala @@ -28,7 +28,8 @@ object MetadataDatabaseAccess { private lazy val WorkflowMetadataSummarySemigroup = new Semigroup[WorkflowMetadataSummaryEntry] { override def combine(summary1: WorkflowMetadataSummaryEntry, - summary2: WorkflowMetadataSummaryEntry): WorkflowMetadataSummaryEntry = { + summary2: WorkflowMetadataSummaryEntry + ): WorkflowMetadataSummaryEntry = { // Resolve the status if both `this` and `that` have defined statuses. This will evaluate to `None` // if one or both of the statuses is not defined. 
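      // e.g. (hypothetical values): with summary1.workflowStatus = Some("Running") and
      // summary2.workflowStatus = None, the for-comprehension below yields None and the
      // combined entry keeps whichever status is defined instead.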
val resolvedStatus = for { @@ -45,7 +46,7 @@ object MetadataDatabaseAccess { submissionTimestamp = summary1.submissionTimestamp orElse summary2.submissionTimestamp, parentWorkflowExecutionUuid = summary1.parentWorkflowExecutionUuid orElse summary2.parentWorkflowExecutionUuid, rootWorkflowExecutionUuid = summary1.rootWorkflowExecutionUuid orElse summary2.rootWorkflowExecutionUuid, - metadataArchiveStatus = summary1.metadataArchiveStatus, + metadataArchiveStatus = summary1.metadataArchiveStatus ) } } @@ -75,17 +76,20 @@ object MetadataDatabaseAccess { } private def buildUpdatedSummary(existingSummary: Option[WorkflowMetadataSummaryEntry], - metadataForUuid: Seq[MetadataEntry]): WorkflowMetadataSummaryEntry = { + metadataForUuid: Seq[MetadataEntry] + ): WorkflowMetadataSummaryEntry = { implicit val wmss = WorkflowMetadataSummarySemigroup val base = existingSummary.getOrElse(baseSummary(metadataForUuid.head.workflowExecutionUuid)) - metadataForUuid.foldLeft(base) { - case (metadataSummary, metadatum) => metadataSummary |+| metadatum.toSummary + metadataForUuid.foldLeft(base) { case (metadataSummary, metadatum) => + metadataSummary |+| metadatum.toSummary } } final case class SummaryResult(rowsProcessedIncreasing: Long, rowsProcessedDecreasing: Long, decreasingGap: Long) - final case class WorkflowArchiveStatusAndEndTimestamp(archiveStatus: Option[String], endTimestamp: Option[OffsetDateTime]) + final case class WorkflowArchiveStatusAndEndTimestamp(archiveStatus: Option[String], + endTimestamp: Option[OffsetDateTime] + ) } trait MetadataDatabaseAccess { @@ -99,8 +103,15 @@ trait MetadataDatabaseAccess { val value = metadataEvent.value map { _.value } val valueType = metadataEvent.value map { _.valueType.typeName } val jobKey = key.jobKey map { jk => (jk.callFqn, jk.index, jk.attempt) } - MetadataEntry(workflowUuid, jobKey.map(_._1), jobKey.flatMap(_._2), jobKey.map(_._3), - key.key, value.toClobOption, valueType, timestamp) + MetadataEntry(workflowUuid, + jobKey.map(_._1), + jobKey.flatMap(_._2), + jobKey.map(_._3), + key.key, + value.toClobOption, + valueType, + timestamp + ) } metadataDatabaseInterface.addMetadataEntries( metadataEntries = metadata, @@ -111,10 +122,11 @@ trait MetadataDatabaseAccess { submissionMetadataKey = WorkflowMetadataKeys.SubmissionTime, parentWorkflowIdKey = WorkflowMetadataKeys.ParentWorkflowId, rootWorkflowIdKey = WorkflowMetadataKeys.RootWorkflowId, - labelMetadataKey = WorkflowMetadataKeys.Labels) + labelMetadataKey = WorkflowMetadataKeys.Labels + ) } - def metadataToMetadataEvents(workflowId: WorkflowId)(metadata: Seq[MetadataEntry]): Seq[MetadataEvent] = { + def metadataToMetadataEvents(workflowId: WorkflowId)(metadata: Seq[MetadataEntry]): Seq[MetadataEvent] = metadata map { m => // If callFullyQualifiedName is non-null then attempt will also be non-null and there is a MetadataJobKey. 
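      // e.g. (hypothetical values): a row with callFullyQualifiedName = Some("wf.myTask") and a
      // defined attempt yields Some(MetadataJobKey("wf.myTask", m.jobIndex, attempt)); a
      // workflow-level row has no call name, so this yields None and the event gets no job key.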
val metadataJobKey: Option[MetadataJobKey] = for { @@ -123,17 +135,16 @@ trait MetadataDatabaseAccess { } yield MetadataJobKey(callFqn, m.jobIndex, attempt) val key = MetadataKey(workflowId, metadataJobKey, m.metadataKey) - val value = m.metadataValueType.map(mType => - MetadataValue(m.metadataValue.toRawString, MetadataType.fromString(mType)) - ) + val value = + m.metadataValueType.map(mType => MetadataValue(m.metadataValue.toRawString, MetadataType.fromString(mType))) MetadataEvent(key, value, m.metadataTimestamp.toSystemOffsetDateTime) } - } def getMetadataReadRowCount(query: MetadataQuery, timeout: Duration)(implicit ec: ExecutionContext): Future[Int] = { - def listKeyRequirements(keyRequirementsInput: Option[NonEmptyList[String]]): List[String] = keyRequirementsInput.map(_.toList).toList.flatten.map(_ + "%") + def listKeyRequirements(keyRequirementsInput: Option[NonEmptyList[String]]): List[String] = + keyRequirementsInput.map(_.toList).toList.flatten.map(_ + "%") val uuid = query.workflowId.id.toString @@ -143,27 +154,60 @@ trait MetadataDatabaseAccess { case q @ MetadataQuery(_, None, Some(key), None, None, _) => metadataDatabaseInterface.countMetadataEntries(uuid, key, q.expandSubWorkflows, timeout) case q @ MetadataQuery(_, Some(jobKey), None, None, None, _) => - metadataDatabaseInterface.countMetadataEntries(uuid, jobKey.callFqn, jobKey.index, jobKey.attempt, q.expandSubWorkflows, timeout) + metadataDatabaseInterface.countMetadataEntries(uuid, + jobKey.callFqn, + jobKey.index, + jobKey.attempt, + q.expandSubWorkflows, + timeout + ) case q @ MetadataQuery(_, Some(jobKey), Some(key), None, None, _) => - metadataDatabaseInterface.countMetadataEntries(uuid, key, jobKey.callFqn, jobKey.index, jobKey.attempt, q.expandSubWorkflows, timeout) + metadataDatabaseInterface.countMetadataEntries(uuid, + key, + jobKey.callFqn, + jobKey.index, + jobKey.attempt, + q.expandSubWorkflows, + timeout + ) case q @ MetadataQuery(_, None, None, includeKeys, excludeKeys, _) => val excludeKeyRequirements = listKeyRequirements(excludeKeys) val queryType = if (excludeKeyRequirements.contains("calls%")) WorkflowQuery else CallOrWorkflowQuery - metadataDatabaseInterface.countMetadataEntryWithKeyConstraints(uuid, listKeyRequirements(includeKeys), excludeKeyRequirements, queryType, q.expandSubWorkflows, timeout) - case q @ MetadataQuery(_, Some(MetadataQueryJobKey(callFqn, index, attempt)), None, includeKeys, excludeKeys, _) => - metadataDatabaseInterface.countMetadataEntryWithKeyConstraints(uuid, listKeyRequirements(includeKeys), listKeyRequirements(excludeKeys), CallQuery(callFqn, index, attempt), q.expandSubWorkflows, timeout) + metadataDatabaseInterface.countMetadataEntryWithKeyConstraints(uuid, + listKeyRequirements(includeKeys), + excludeKeyRequirements, + queryType, + q.expandSubWorkflows, + timeout + ) + case q @ MetadataQuery(_, + Some(MetadataQueryJobKey(callFqn, index, attempt)), + None, + includeKeys, + excludeKeys, + _ + ) => + metadataDatabaseInterface.countMetadataEntryWithKeyConstraints(uuid, + listKeyRequirements(includeKeys), + listKeyRequirements(excludeKeys), + CallQuery(callFqn, index, attempt), + q.expandSubWorkflows, + timeout + ) case _ => Future.failed(new IllegalArgumentException(s"Invalid MetadataQuery: $query")) } } - def metadataEventsStream(workflowId: WorkflowId): Try[DatabasePublisher[MetadataEntry]] = { + def metadataEventsStream(workflowId: WorkflowId): Try[DatabasePublisher[MetadataEntry]] = Try(metadataDatabaseInterface.streamMetadataEntries(workflowId.id.toString)) - } - def 
queryMetadataEvents(query: MetadataQuery, timeout: Duration)(implicit ec: ExecutionContext): Future[Seq[MetadataEvent]] = { + def queryMetadataEvents(query: MetadataQuery, timeout: Duration)(implicit + ec: ExecutionContext + ): Future[Seq[MetadataEvent]] = { - def listKeyRequirements(keyRequirementsInput: Option[NonEmptyList[String]]): List[String] = keyRequirementsInput.map(_.toList).toList.flatten.map(_ + "%") + def listKeyRequirements(keyRequirementsInput: Option[NonEmptyList[String]]): List[String] = + keyRequirementsInput.map(_.toList).toList.flatten.map(_ + "%") val uuid = query.workflowId.id.toString @@ -180,79 +224,97 @@ trait MetadataDatabaseAccess { val excludeKeyRequirements = listKeyRequirements(excludeKeys) val queryType = if (excludeKeyRequirements.contains("calls%")) WorkflowQuery else CallOrWorkflowQuery - metadataDatabaseInterface.queryMetadataEntryWithKeyConstraints(uuid, listKeyRequirements(includeKeys), excludeKeyRequirements, queryType, timeout) + metadataDatabaseInterface.queryMetadataEntryWithKeyConstraints(uuid, + listKeyRequirements(includeKeys), + excludeKeyRequirements, + queryType, + timeout + ) case MetadataQuery(_, Some(MetadataQueryJobKey(callFqn, index, attempt)), None, includeKeys, excludeKeys, _) => - metadataDatabaseInterface.queryMetadataEntryWithKeyConstraints(uuid, listKeyRequirements(includeKeys), listKeyRequirements(excludeKeys), CallQuery(callFqn, index, attempt), timeout) + metadataDatabaseInterface.queryMetadataEntryWithKeyConstraints(uuid, + listKeyRequirements(includeKeys), + listKeyRequirements(excludeKeys), + CallQuery(callFqn, index, attempt), + timeout + ) case _ => Future.failed(new IllegalArgumentException(s"Invalid MetadataQuery: $query")) } futureMetadata map metadataToMetadataEvents(query.workflowId) } - def queryWorkflowOutputs(id: WorkflowId, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEvent]] = { + def queryWorkflowOutputs(id: WorkflowId, timeout: Duration)(implicit + ec: ExecutionContext + ): Future[Seq[MetadataEvent]] = { val uuid = id.id.toString - metadataDatabaseInterface.queryMetadataEntryWithKeyConstraints( - uuid, List(s"${WorkflowMetadataKeys.Outputs}:%"), List.empty, WorkflowQuery, timeout). 
- map(metadataToMetadataEvents(id)) + metadataDatabaseInterface + .queryMetadataEntryWithKeyConstraints(uuid, + List(s"${WorkflowMetadataKeys.Outputs}:%"), + List.empty, + WorkflowQuery, + timeout + ) + .map(metadataToMetadataEvents(id)) } - def queryLogs(id: WorkflowId, - timeout: Duration) - (implicit ec: ExecutionContext): Future[Seq[MetadataEvent]] = { + def queryLogs(id: WorkflowId, timeout: Duration)(implicit ec: ExecutionContext): Future[Seq[MetadataEvent]] = { import cromwell.services.metadata.CallMetadataKeys._ val keys = List(Stdout, Stderr, BackendLogsPrefix + ":%") - metadataDatabaseInterface.queryMetadataEntryWithKeyConstraints(id.id.toString, keys, List.empty, CallOrWorkflowQuery, timeout) map + metadataDatabaseInterface.queryMetadataEntryWithKeyConstraints(id.id.toString, + keys, + List.empty, + CallOrWorkflowQuery, + timeout + ) map metadataToMetadataEvents(id) } - def refreshWorkflowMetadataSummaries(limit: Int)(implicit ec: ExecutionContext): Future[SummaryResult] = { + def refreshWorkflowMetadataSummaries(limit: Int)(implicit ec: ExecutionContext): Future[SummaryResult] = for { increasingProcessed <- metadataDatabaseInterface.summarizeIncreasing( labelMetadataKey = WorkflowMetadataKeys.Labels, limit = limit, - buildUpdatedSummary = MetadataDatabaseAccess.buildUpdatedSummary) + buildUpdatedSummary = MetadataDatabaseAccess.buildUpdatedSummary + ) (decreasingProcessed, decreasingGap) <- metadataDatabaseInterface.summarizeDecreasing( summaryNameDecreasing = WorkflowMetadataKeys.SummaryNameDecreasing, summaryNameIncreasing = WorkflowMetadataKeys.SummaryNameIncreasing, labelMetadataKey = WorkflowMetadataKeys.Labels, limit = limit, - buildUpdatedSummary = MetadataDatabaseAccess.buildUpdatedSummary) + buildUpdatedSummary = MetadataDatabaseAccess.buildUpdatedSummary + ) } yield SummaryResult(increasingProcessed, decreasingProcessed, decreasingGap) - } def updateMetadataArchiveStatus(workflowId: WorkflowId, newStatus: MetadataArchiveStatus): Future[Int] = { val databaseStatusValue = MetadataArchiveStatus.toDatabaseValue(newStatus) metadataDatabaseInterface.updateMetadataArchiveStatus(workflowId.toString, databaseStatusValue) } - def getWorkflowStatus(id: WorkflowId) - (implicit ec: ExecutionContext): Future[Option[WorkflowState]] = { + def getWorkflowStatus(id: WorkflowId)(implicit ec: ExecutionContext): Future[Option[WorkflowState]] = metadataDatabaseInterface.getWorkflowStatus(id.toString) map { _ map WorkflowState.withName } - } - def getWorkflowLabels(id: WorkflowId)(implicit ec: ExecutionContext): Future[Map[String, String]] = { + def getWorkflowLabels(id: WorkflowId)(implicit ec: ExecutionContext): Future[Map[String, String]] = metadataDatabaseInterface.getWorkflowLabels(id.toString) - } - def getRootAndSubworkflowLabels(rootWorkflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Map[WorkflowId, Map[String, String]]] = { + def getRootAndSubworkflowLabels( + rootWorkflowId: WorkflowId + )(implicit ec: ExecutionContext): Future[Map[WorkflowId, Map[String, String]]] = metadataDatabaseInterface.getRootAndSubworkflowLabels(rootWorkflowId.toString) map { _ map { case (id, labelsForId) => WorkflowId.fromString(id) -> labelsForId } } - } - def workflowWithIdExistsInMetadata(possibleWorkflowId: String)(implicit ec: ExecutionContext): Future[Boolean] = { + def workflowWithIdExistsInMetadata(possibleWorkflowId: String)(implicit ec: ExecutionContext): Future[Boolean] = metadataDatabaseInterface.metadataEntryExists(possibleWorkflowId) - } - def 
workflowWithIdExistsInMetadataSummaries(possibleWorkflowId: String)(implicit ec: ExecutionContext): Future[Boolean] = { + def workflowWithIdExistsInMetadataSummaries(possibleWorkflowId: String)(implicit + ec: ExecutionContext + ): Future[Boolean] = metadataDatabaseInterface.metadataSummaryEntryExists(possibleWorkflowId) - } - def queryWorkflowSummaries(queryParameters: WorkflowQueryParameters) - (implicit ec: ExecutionContext): Future[(WorkflowQueryResponse, Option[QueryMetadata])] = { + def queryWorkflowSummaries( + queryParameters: WorkflowQueryParameters + )(implicit ec: ExecutionContext): Future[(WorkflowQueryResponse, Option[QueryMetadata])] = { val labelsAndToQuery = queryParameters.labelsAnd.map(label => (label.key, label.value)) val labelsOrToQuery = queryParameters.labelsOr.map(label => (label.key, label.value)) @@ -295,24 +357,27 @@ trait MetadataDatabaseAccess { queryParameters.includeSubworkflows ) - def queryMetadata(count: Int): Option[QueryMetadata] = { + def queryMetadata(count: Int): Option[QueryMetadata] = (queryParameters.page, queryParameters.pageSize) match { case (None, None) => None case (Some(_), None) => None // Page without pagesize returns everything case (None, Some(_)) => Option(QueryMetadata(Option(1), queryParameters.pageSize, Option(count))) case _ => Option(QueryMetadata(queryParameters.page, queryParameters.pageSize, Option(count))) } - } - def summariesToQueryResults(workflows: Iterable[WorkflowMetadataSummaryEntry]): Future[List[MetadataService.WorkflowQueryResult]] = { + def summariesToQueryResults( + workflows: Iterable[WorkflowMetadataSummaryEntry] + ): Future[List[MetadataService.WorkflowQueryResult]] = workflows.toList.traverse(summaryToQueryResult) - } def summaryToQueryResult(workflow: WorkflowMetadataSummaryEntry): Future[MetadataService.WorkflowQueryResult] = { val workflowLabels: Future[Map[String, String]] = - queryParameters.additionalQueryResultFields.contains(WorkflowMetadataKeys.Labels).fold( - metadataDatabaseInterface.getWorkflowLabels(workflow.workflowExecutionUuid), Future.successful(Map.empty)) + queryParameters.additionalQueryResultFields + .contains(WorkflowMetadataKeys.Labels) + .fold(metadataDatabaseInterface.getWorkflowLabels(workflow.workflowExecutionUuid), + Future.successful(Map.empty) + ) val archiveStatus = MetadataArchiveStatus.fromDatabaseValue(workflow.metadataArchiveStatus) @@ -320,17 +385,17 @@ trait MetadataDatabaseAccess { labels <- workflowLabels archived <- Future.fromTry(archiveStatus.toTry) } yield MetadataService.WorkflowQueryResult( - id = workflow.workflowExecutionUuid, - name = workflow.workflowName, - status = workflow.workflowStatus, - submission = workflow.submissionTimestamp map {_.toSystemOffsetDateTime}, - start = workflow.startTimestamp map { _.toSystemOffsetDateTime }, - end = workflow.endTimestamp map { _.toSystemOffsetDateTime }, - labels = labels.nonEmpty.option(labels), - parentWorkflowId = workflow.parentWorkflowExecutionUuid, - rootWorkflowId = workflow.rootWorkflowExecutionUuid, - metadataArchiveStatus = archived - ) + id = workflow.workflowExecutionUuid, + name = workflow.workflowName, + status = workflow.workflowStatus, + submission = workflow.submissionTimestamp map { _.toSystemOffsetDateTime }, + start = workflow.startTimestamp map { _.toSystemOffsetDateTime }, + end = workflow.endTimestamp map { _.toSystemOffsetDateTime }, + labels = labels.nonEmpty.option(labels), + parentWorkflowId = workflow.parentWorkflowExecutionUuid, + rootWorkflowId = workflow.rootWorkflowExecutionUuid, + 
metadataArchiveStatus = archived + ) } for { @@ -340,49 +405,87 @@ trait MetadataDatabaseAccess { } yield (WorkflowQueryResponse(queryResults, count), queryMetadata(count)) } - def deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus(workflowId: WorkflowId, newArchiveStatus: Option[String])(implicit ec: ExecutionContext): Future[Int] = { + def deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus(workflowId: WorkflowId, + newArchiveStatus: Option[String] + )(implicit ec: ExecutionContext): Future[Int] = { import cromwell.core.WorkflowState (metadataDatabaseInterface.getWorkflowStatus(workflowId.toString)) flatMap { case None => - Future.failed(new Exception(s"""Metadata deletion precondition failed: workflow ID "$workflowId" did not have a status in the summary table""")) + Future.failed( + new Exception( + s"""Metadata deletion precondition failed: workflow ID "$workflowId" did not have a status in the summary table""" + ) + ) case Some(status) => if (WorkflowState.withName(status).isTerminal) - metadataDatabaseInterface.deleteAllMetadataForWorkflowAndUpdateArchiveStatus(workflowId.toString, newArchiveStatus) + metadataDatabaseInterface.deleteAllMetadataForWorkflowAndUpdateArchiveStatus(workflowId.toString, + newArchiveStatus + ) else - Future.failed(new Exception(s"""Metadata deletion precondition failed: workflow ID "$workflowId" was in non-terminal status "$status"""")) + Future.failed( + new Exception( + s"""Metadata deletion precondition failed: workflow ID "$workflowId" was in non-terminal status "$status"""" + ) + ) } } - def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]] = metadataDatabaseInterface.getRootWorkflowId(workflowId) - - def queryWorkflowIdsByArchiveStatusAndOlderThanTimestamp(archiveStatus: Option[String], thresholdTimestamp: OffsetDateTime, batchSize: Long)(implicit ec: ExecutionContext): Future[Seq[String]] = - metadataDatabaseInterface.queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus, thresholdTimestamp.toSystemTimestamp, batchSize) + def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]] = + metadataDatabaseInterface.getRootWorkflowId(workflowId) + + def queryWorkflowIdsByArchiveStatusAndOlderThanTimestamp(archiveStatus: Option[String], + thresholdTimestamp: OffsetDateTime, + batchSize: Long + )(implicit ec: ExecutionContext): Future[Seq[String]] = + metadataDatabaseInterface.queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp( + archiveStatus, + thresholdTimestamp.toSystemTimestamp, + batchSize + ) def getSummaryQueueSize()(implicit ec: ExecutionContext): Future[Int] = metadataDatabaseInterface.getSummaryQueueSize() - def getMetadataArchiveStatusAndEndTime(id: WorkflowId)(implicit ec: ExecutionContext): Future[WorkflowArchiveStatusAndEndTimestamp] = { + def getMetadataArchiveStatusAndEndTime( + id: WorkflowId + )(implicit ec: ExecutionContext): Future[WorkflowArchiveStatusAndEndTimestamp] = metadataDatabaseInterface.getMetadataArchiveStatusAndEndTime(id.toString).map { - case (statusOption, timestampOption) => WorkflowArchiveStatusAndEndTimestamp(statusOption, timestampOption.map(_.toSystemOffsetDateTime)) + case (statusOption, timestampOption) => + WorkflowArchiveStatusAndEndTimestamp(statusOption, timestampOption.map(_.toSystemOffsetDateTime)) } - } def queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], workflowEndTimestampThreshold: OffsetDateTime, - batchSize: Long)(implicit 
ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = - metadataDatabaseInterface.queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses, workflowEndTimestampThreshold.toSystemTimestamp, batchSize) + batchSize: Long + )(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = + metadataDatabaseInterface.queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp( + workflowStatuses, + workflowEndTimestampThreshold.toSystemTimestamp, + batchSize + ) def countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], - workflowEndTimestampThreshold: OffsetDateTime)(implicit ec: ExecutionContext): Future[Int] = - metadataDatabaseInterface.countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses, workflowEndTimestampThreshold.toSystemTimestamp) + workflowEndTimestampThreshold: OffsetDateTime + )(implicit ec: ExecutionContext): Future[Int] = + metadataDatabaseInterface.countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp( + workflowStatuses, + workflowEndTimestampThreshold.toSystemTimestamp + ) - def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold: OffsetDateTime)(implicit ec: ExecutionContext): Future[Int] = - metadataDatabaseInterface.countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold.toSystemTimestamp) + def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp( + workflowEndTimestampThreshold: OffsetDateTime + )(implicit ec: ExecutionContext): Future[Int] = + metadataDatabaseInterface.countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp( + workflowEndTimestampThreshold.toSystemTimestamp + ) - def getMetadataTableSizeInformation()(implicit ec: ExecutionContext): Future[Option[InformationSchemaEntry]] = metadataDatabaseInterface.getMetadataTableSizeInformation() + def getMetadataTableSizeInformation()(implicit ec: ExecutionContext): Future[Option[InformationSchemaEntry]] = + metadataDatabaseInterface.getMetadataTableSizeInformation() - def getFailedJobsMetadataWithWorkflowId(rootWorkflowId: WorkflowId)(implicit ec: ExecutionContext): Future[Vector[MetadataEntry]] = + def getFailedJobsMetadataWithWorkflowId(rootWorkflowId: WorkflowId)(implicit + ec: ExecutionContext + ): Future[Vector[MetadataEntry]] = metadataDatabaseInterface.getFailedJobsMetadataWithWorkflowId(rootWorkflowId.toString) } diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala index d320adb42ae..5b45d7bb1e0 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala @@ -1,7 +1,16 @@ package cromwell.services.metadata.impl import akka.actor.SupervisorStrategy.{Decider, Directive, Escalate, Resume} -import akka.actor.{Actor, ActorContext, ActorInitializationException, ActorLogging, ActorRef, Cancellable, OneForOneStrategy, Props} +import akka.actor.{ + Actor, + ActorContext, + ActorInitializationException, + ActorLogging, + ActorRef, + Cancellable, + OneForOneStrategy, + Props +} import akka.routing.Listen import cats.data.NonEmptyList import com.typesafe.config.Config @@ -15,7 +24,11 @@ import cromwell.services.metadata.MetadataArchiveStatus import cromwell.services.metadata.MetadataService._ import 
cromwell.services.metadata.impl.MetadataDatabaseAccess.WorkflowArchiveStatusAndEndTimestamp import cromwell.services.metadata.impl.MetadataStatisticsRecorder.MetadataStatisticsRecorderSettings -import cromwell.services.metadata.impl.MetadataSummaryRefreshActor.{MetadataSummaryFailure, MetadataSummarySuccess, SummarizeMetadata} +import cromwell.services.metadata.impl.MetadataSummaryRefreshActor.{ + MetadataSummaryFailure, + MetadataSummarySuccess, + SummarizeMetadata +} import cromwell.services.metadata.impl.archiver.{ArchiveMetadataConfig, ArchiveMetadataSchedulerActor} import cromwell.services.metadata.impl.builder.MetadataBuilderActor import cromwell.services.metadata.impl.deleter.{DeleteMetadataActor, DeleteMetadataConfig} @@ -30,11 +43,12 @@ import scala.util.{Failure, Success} object MetadataServiceActor { val MetadataInstrumentationPrefix = NonEmptyList.of("metadata") - def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = Props(MetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor)).withDispatcher(ServiceDispatcher) + def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = + Props(MetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor)).withDispatcher(ServiceDispatcher) } case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends Actor + extends Actor with ActorLogging with MetadataDatabaseAccess with MetadataServicesStore @@ -49,7 +63,10 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser override val supervisorStrategy = new OneForOneStrategy()(decider) { override def logFailure(context: ActorContext, child: ActorRef, cause: Throwable, decision: Directive) = { val childName = if (child == readActor) "Read" else "Write" - log.error(cause, s"The $childName Metadata Actor died unexpectedly, metadata events might have been lost. Restarting it...") + log.error( + cause, + s"The $childName Metadata Actor died unexpectedly, metadata events might have been lost. Restarting it..." 
+ ) } } @@ -65,12 +82,14 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser private val metadataReadRowNumberSafetyThreshold: Int = serviceConfig.getOrElse[Int]("metadata-read-row-number-safety-threshold", 1000000) - private val metadataTableMetricsInterval: Option[FiniteDuration] = serviceConfig.getAs[FiniteDuration]("metadata-table-metrics-interval") + private val metadataTableMetricsInterval: Option[FiniteDuration] = + serviceConfig.getAs[FiniteDuration]("metadata-table-metrics-interval") - private val metadataTableMetricsPath: NonEmptyList[String] = MetadataServiceActor.MetadataInstrumentationPrefix :+ "table" + private val metadataTableMetricsPath: NonEmptyList[String] = + MetadataServiceActor.MetadataInstrumentationPrefix :+ "table" private val dataFreeMetricsPath: NonEmptyList[String] = metadataTableMetricsPath :+ "data_free" private val dataLengthMetricsPath: NonEmptyList[String] = metadataTableMetricsPath :+ "data_length" - private val indexLengthMetricsPath: NonEmptyList[String] = metadataTableMetricsPath :+ "index_length" + private val indexLengthMetricsPath: NonEmptyList[String] = metadataTableMetricsPath :+ "index_length" def readMetadataWorkerActorProps(): Props = ReadDatabaseMetadataWorkerActor @@ -81,15 +100,28 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser .props(readMetadataWorkerActorProps, metadataReadRowNumberSafetyThreshold) .withDispatcher(ServiceDispatcher) - val readActor = context.actorOf(ReadMetadataRegulatorActor.props(metadataBuilderActorProps, readMetadataWorkerActorProps), "ClassicMSA-ReadMetadataRegulatorActor") + val readActor = context.actorOf( + ReadMetadataRegulatorActor.props(metadataBuilderActorProps, readMetadataWorkerActorProps), + "ClassicMSA-ReadMetadataRegulatorActor" + ) val dbFlushRate = serviceConfig.getOrElse("db-flush-rate", 5.seconds) val dbBatchSize = serviceConfig.getOrElse("db-batch-size", 200) - val metadataWriteStatisticsConfig = MetadataStatisticsRecorderSettings(serviceConfig.as[Option[Config]]("metadata-write-statistics")) - val writeActor = context.actorOf(WriteMetadataActor.props(dbBatchSize, dbFlushRate, serviceRegistryActor, LoadConfig.MetadataWriteThreshold, metadataWriteStatisticsConfig), "WriteMetadataActor") + val metadataWriteStatisticsConfig = MetadataStatisticsRecorderSettings( + serviceConfig.as[Option[Config]]("metadata-write-statistics") + ) + val writeActor = context.actorOf( + WriteMetadataActor.props(dbBatchSize, + dbFlushRate, + serviceRegistryActor, + LoadConfig.MetadataWriteThreshold, + metadataWriteStatisticsConfig + ), + "WriteMetadataActor" + ) implicit val ec = context.dispatcher - //noinspection ActorMutableStateInspection + // noinspection ActorMutableStateInspection private var summaryRefreshCancellable: Option[Cancellable] = None private val summaryActor: Option[ActorRef] = buildSummaryActor @@ -101,13 +133,15 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser private val deleteMetadataActor: Option[ActorRef] = buildDeleteMetadataActor // if `metadata-table-size-metrics-interval` is specified, schedule sending size metrics at that interval - metadataTableMetricsInterval.map(context.system.scheduler.schedule(1.minute, _, self, SendMetadataTableSizeMetrics)(context.dispatcher, self)) + metadataTableMetricsInterval.map( + context.system.scheduler.schedule(1.minute, _, self, SendMetadataTableSizeMetrics)(context.dispatcher, self) + ) - private def scheduleSummary(): Unit = { + private def scheduleSummary(): 
Unit = metadataSummaryRefreshInterval foreach { interval => - summaryRefreshCancellable = Option(context.system.scheduler.scheduleOnce(interval, self, RefreshSummary)(context.dispatcher, self)) + summaryRefreshCancellable = + Option(context.system.scheduler.scheduleOnce(interval, self, RefreshSummary)(context.dispatcher, self)) } - } override def postStop(): Unit = { summaryRefreshCancellable foreach { _.cancel() } @@ -115,8 +149,8 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser } private def buildSummaryActor: Option[ActorRef] = { - val actor = metadataSummaryRefreshInterval map { - _ => context.actorOf(MetadataSummaryRefreshActor.props(serviceRegistryActor), "metadata-summary-actor") + val actor = metadataSummaryRefreshInterval map { _ => + context.actorOf(MetadataSummaryRefreshActor.props(serviceRegistryActor), "metadata-summary-actor") } val message = metadataSummaryRefreshInterval match { case Some(interval) => s"Metadata summary refreshing every $interval." @@ -126,60 +160,75 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser actor } - private def buildArchiveMetadataActor: Option[ActorRef] = { + private def buildArchiveMetadataActor: Option[ActorRef] = if (serviceConfig.hasPath("archive-metadata")) { log.info("Building metadata archiver from config") ArchiveMetadataConfig.parseConfig(serviceConfig.getConfig("archive-metadata"))(context.system) match { - case Right(config) => Option(context.actorOf(ArchiveMetadataSchedulerActor.props(config, serviceRegistryActor), "archive-metadata-scheduler")) - case Left(errorList) => throw AggregatedMessageException("Failed to parse the archive-metadata config", errorList.toList) + case Right(config) => + Option( + context.actorOf(ArchiveMetadataSchedulerActor.props(config, serviceRegistryActor), + "archive-metadata-scheduler" + ) + ) + case Left(errorList) => + throw AggregatedMessageException("Failed to parse the archive-metadata config", errorList.toList) } } else { log.info("No metadata archiver defined in config") None } - } - private def buildDeleteMetadataActor: Option[ActorRef] = { + private def buildDeleteMetadataActor: Option[ActorRef] = if (serviceConfig.hasPath("delete-metadata")) { log.info("Building metadata deleter from config") DeleteMetadataConfig.parseConfig(serviceConfig.getConfig("delete-metadata")) match { - case Right(config) => Option(context.actorOf(DeleteMetadataActor.props(config, serviceRegistryActor), "delete-metadata-actor")) - case Left(errorList) => throw AggregatedMessageException("Failed to parse the archive-metadata config", errorList.toList) + case Right(config) => + Option(context.actorOf(DeleteMetadataActor.props(config, serviceRegistryActor), "delete-metadata-actor")) + case Left(errorList) => + throw AggregatedMessageException("Failed to parse the archive-metadata config", errorList.toList) } } else { log.info("No metadata deleter defined in config") None } - } - private def validateWorkflowIdInMetadata(possibleWorkflowId: WorkflowId, sender: ActorRef): Unit = { + private def validateWorkflowIdInMetadata(possibleWorkflowId: WorkflowId, sender: ActorRef): Unit = workflowWithIdExistsInMetadata(possibleWorkflowId.toString) onComplete { case Success(true) => sender ! RecognizedWorkflowId case Success(false) => sender ! UnrecognizedWorkflowId - case Failure(e) => sender ! FailedToCheckWorkflowId(new RuntimeException(s"Failed lookup attempt for workflow ID $possibleWorkflowId", e)) + case Failure(e) => + sender ! 
FailedToCheckWorkflowId( + new RuntimeException(s"Failed lookup attempt for workflow ID $possibleWorkflowId", e) + ) } - } - private def validateWorkflowIdInMetadataSummaries(possibleWorkflowId: WorkflowId, sender: ActorRef): Unit = { + private def validateWorkflowIdInMetadataSummaries(possibleWorkflowId: WorkflowId, sender: ActorRef): Unit = workflowWithIdExistsInMetadataSummaries(possibleWorkflowId.toString) onComplete { case Success(true) => sender ! RecognizedWorkflowId case Success(false) => sender ! UnrecognizedWorkflowId - case Failure(e) => sender ! FailedToCheckWorkflowId(new RuntimeException(s"Failed lookup attempt for workflow ID $possibleWorkflowId", e)) + case Failure(e) => + sender ! FailedToCheckWorkflowId( + new RuntimeException(s"Failed lookup attempt for workflow ID $possibleWorkflowId", e) + ) } - } - private def fetchWorkflowMetadataArchiveStatusAndEndTime(workflowId: WorkflowId, sender: ActorRef): Unit = { + private def fetchWorkflowMetadataArchiveStatusAndEndTime(workflowId: WorkflowId, sender: ActorRef): Unit = getMetadataArchiveStatusAndEndTime(workflowId) onComplete { case Success(WorkflowArchiveStatusAndEndTimestamp(status, endTime)) => MetadataArchiveStatus.fromDatabaseValue(status).toTry match { case Success(archiveStatus) => sender ! WorkflowMetadataArchivedStatusAndEndTime(archiveStatus, endTime) - case Failure(e) => sender ! FailedToGetArchiveStatusAndEndTime(new RuntimeException(s"Failed to get metadata archive status for workflow ID $workflowId", e)) + case Failure(e) => + sender ! FailedToGetArchiveStatusAndEndTime( + new RuntimeException(s"Failed to get metadata archive status for workflow ID $workflowId", e) + ) } - case Failure(e) => sender ! FailedToGetArchiveStatusAndEndTime(new RuntimeException(s"Failed to get metadata archive status for workflow ID $workflowId", e)) + case Failure(e) => + sender ! FailedToGetArchiveStatusAndEndTime( + new RuntimeException(s"Failed to get metadata archive status for workflow ID $workflowId", e) + ) } - } - private def sendMetadataTableSizeMetrics(): Unit = { + private def sendMetadataTableSizeMetrics(): Unit = getMetadataTableSizeInformation() onComplete { case Success(v) => v foreach { d => @@ -187,9 +236,9 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser sendGauge(indexLengthMetricsPath, d.indexLength) sendGauge(dataFreeMetricsPath, d.dataFree) } - case Failure(e) => log.error(e, s"Error fetching metadata table size metrics. Will try again in $metadataTableMetricsInterval...") + case Failure(e) => + log.error(e, s"Error fetching metadata table size metrics. Will try again in $metadataTableMetricsInterval...") } - } def summarizerReceive: Receive = { case RefreshSummary => summaryActor foreach { _ ! 
SummarizeMetadata(metadataSummaryRefreshLimit, sender()) } @@ -200,15 +249,18 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser } def receive = summarizerReceive orElse { - case ShutdownCommand => waitForActorsAndShutdown(NonEmptyList.of(writeActor) ++ archiveMetadataActor.toList ++ deleteMetadataActor.toList) + case ShutdownCommand => + waitForActorsAndShutdown(NonEmptyList.of(writeActor) ++ archiveMetadataActor.toList ++ deleteMetadataActor.toList) case SendMetadataTableSizeMetrics => sendMetadataTableSizeMetrics() case action: PutMetadataAction => writeActor forward action case action: PutMetadataActionAndRespond => writeActor forward action // Assume that listen messages are directed to the write metadata actor case listen: Listen => writeActor forward listen case v: ValidateWorkflowIdInMetadata => validateWorkflowIdInMetadata(v.possibleWorkflowId, sender()) - case v: ValidateWorkflowIdInMetadataSummaries => validateWorkflowIdInMetadataSummaries(v.possibleWorkflowId, sender()) - case g: FetchWorkflowMetadataArchiveStatusAndEndTime => fetchWorkflowMetadataArchiveStatusAndEndTime(g.workflowId, sender()) + case v: ValidateWorkflowIdInMetadataSummaries => + validateWorkflowIdInMetadataSummaries(v.possibleWorkflowId, sender()) + case g: FetchWorkflowMetadataArchiveStatusAndEndTime => + fetchWorkflowMetadataArchiveStatusAndEndTime(g.workflowId, sender()) case action: BuildMetadataJsonAction => readActor forward action case streamAction: GetMetadataStreamAction => readActor forward streamAction } diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataStatisticsRecorder.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataStatisticsRecorder.scala index 5e9316ac89f..b3b802adfc7 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataStatisticsRecorder.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataStatisticsRecorder.scala @@ -16,35 +16,42 @@ import scala.util.Try object MetadataStatisticsRecorder { final case class HeavyMetadataAlert(workflowId: WorkflowId, count: Long) - final case class WorkflowMetadataWriteStatistics(workflowId: WorkflowId, totalWrites: Long, lastLogged: Long, knownParent: Option[WorkflowId]) + final case class WorkflowMetadataWriteStatistics(workflowId: WorkflowId, + totalWrites: Long, + lastLogged: Long, + knownParent: Option[WorkflowId] + ) sealed trait MetadataStatisticsRecorderSettings case object MetadataStatisticsDisabled extends MetadataStatisticsRecorderSettings final case class MetadataStatisticsEnabled(workflowCacheSize: Long, - metadataAlertInterval: Long, - bundleSubworkflowsIntoParents: Boolean) extends MetadataStatisticsRecorderSettings - - def apply(statisticsRecorderSettings: MetadataStatisticsRecorderSettings): MetadataStatisticsRecorder = statisticsRecorderSettings match { - case MetadataStatisticsEnabled(cacheSize, interval, subworkflowBundling) => new ActiveMetadataStatisticsRecorder(cacheSize, interval, subworkflowBundling) - case MetadataStatisticsDisabled => new NoopMetadataStatisticsRecorder() - } - + metadataAlertInterval: Long, + bundleSubworkflowsIntoParents: Boolean + ) extends MetadataStatisticsRecorderSettings + + def apply(statisticsRecorderSettings: MetadataStatisticsRecorderSettings): MetadataStatisticsRecorder = + statisticsRecorderSettings match { + case MetadataStatisticsEnabled(cacheSize, interval, subworkflowBundling) => + new ActiveMetadataStatisticsRecorder(cacheSize, interval, subworkflowBundling) + case 
MetadataStatisticsDisabled => new NoopMetadataStatisticsRecorder() + } object MetadataStatisticsRecorderSettings { val defaultCacheSize = 20000L val defaultAlertInterval = 100000L val defaultSubworkflowBundling = true - def apply(configSection: Option[Config]): MetadataStatisticsRecorderSettings = (configSection flatMap { conf: Config => - if (conf.as[Option[Boolean]]("enabled").forall(identity)) { - val cacheSize: Long = conf.getOrElse("cache-size", defaultCacheSize) - val metadataAlertInterval: Long = conf.getOrElse("metadata-row-alert-interval", defaultAlertInterval) - val subworkflowBundling: Boolean = conf.getOrElse("sub-workflow-bundling", defaultSubworkflowBundling) - Option(MetadataStatisticsEnabled(cacheSize, metadataAlertInterval, subworkflowBundling)) - } else None + def apply(configSection: Option[Config]): MetadataStatisticsRecorderSettings = + (configSection flatMap { conf: Config => + if (conf.as[Option[Boolean]]("enabled").forall(identity)) { + val cacheSize: Long = conf.getOrElse("cache-size", defaultCacheSize) + val metadataAlertInterval: Long = conf.getOrElse("metadata-row-alert-interval", defaultAlertInterval) + val subworkflowBundling: Boolean = conf.getOrElse("sub-workflow-bundling", defaultSubworkflowBundling) + Option(MetadataStatisticsEnabled(cacheSize, metadataAlertInterval, subworkflowBundling)) + } else None - }).getOrElse(MetadataStatisticsDisabled) + }).getOrElse(MetadataStatisticsDisabled) } } @@ -57,49 +64,58 @@ final class NoopMetadataStatisticsRecorder extends MetadataStatisticsRecorder { } final class ActiveMetadataStatisticsRecorder(workflowCacheSize: Long = 100000L, // 100,000 - metadataAlertInterval: Long = 100000L, // 100,000 - bundleSubworkflowsIntoParents: Boolean = false - ) extends MetadataStatisticsRecorder { + metadataAlertInterval: Long = 100000L, // 100,000 + bundleSubworkflowsIntoParents: Boolean = false +) extends MetadataStatisticsRecorder { // Statistics for each workflow - private val metadataWriteStatisticsCache = CacheBuilder.newBuilder() + private val metadataWriteStatisticsCache = CacheBuilder + .newBuilder() .expireAfterAccess(JDuration.ofSeconds(4.hours.toSeconds)) .maximumSize(workflowCacheSize) .build[WorkflowId, WorkflowMetadataWriteStatistics]() - def writeStatisticsLoader(workflowId: WorkflowId): Callable[WorkflowMetadataWriteStatistics] = () => WorkflowMetadataWriteStatistics(workflowId, 0L, 0L, None) + def writeStatisticsLoader(workflowId: WorkflowId): Callable[WorkflowMetadataWriteStatistics] = () => + WorkflowMetadataWriteStatistics(workflowId, 0L, 0L, None) - def processEventsAndGenerateAlerts(putEvents: Iterable[MetadataEvent]): Vector[HeavyMetadataAlert] = { - putEvents.groupBy(_.key.workflowId).toVector.flatMap { case (id, list) => processEventsForWorkflow(id, list)} - } + def processEventsAndGenerateAlerts(putEvents: Iterable[MetadataEvent]): Vector[HeavyMetadataAlert] = + putEvents.groupBy(_.key.workflowId).toVector.flatMap { case (id, list) => processEventsForWorkflow(id, list) } - private def processEventsForWorkflow(workflowId: WorkflowId, events: Iterable[MetadataEvent]): Vector[HeavyMetadataAlert] = { + private def processEventsForWorkflow(workflowId: WorkflowId, + events: Iterable[MetadataEvent] + ): Vector[HeavyMetadataAlert] = { val workflowWriteStats = metadataWriteStatisticsCache.get(workflowId, writeStatisticsLoader(workflowId)) // Find a new parent record if one exists and update the statistics to record it: - val parentallyUpdatedStatistics = { + val parentallyUpdatedStatistics = if 
(!bundleSubworkflowsIntoParents) workflowWriteStats else if (workflowWriteStats.knownParent.isDefined) workflowWriteStats else { val newParentId = events.collectFirst { - case MetadataEvent(MetadataKey(_, None, "parentWorkflowId"), Some(MetadataValue(value, MetadataString)), _) => Try(WorkflowId(UUID.fromString(value))).toOption + case MetadataEvent(MetadataKey(_, None, "parentWorkflowId"), Some(MetadataValue(value, MetadataString)), _) => + Try(WorkflowId(UUID.fromString(value))).toOption }.flatten workflowWriteStats.copy(knownParent = newParentId) } - } updateStatisticsCacheAndGenerateAlerts(parentallyUpdatedStatistics, events.size.longValue) } - private def updateStatisticsCacheAndGenerateAlerts(workflowWriteStats: WorkflowMetadataWriteStatistics, count: Long): Vector[HeavyMetadataAlert] = { + private def updateStatisticsCacheAndGenerateAlerts(workflowWriteStats: WorkflowMetadataWriteStatistics, + count: Long + ): Vector[HeavyMetadataAlert] = { val writesForWorkflow = workflowWriteStats.totalWrites + count val myAlerts = if (writesForWorkflow >= workflowWriteStats.lastLogged + metadataAlertInterval) { - metadataWriteStatisticsCache.put(workflowWriteStats.workflowId, workflowWriteStats.copy(totalWrites = writesForWorkflow, lastLogged = writesForWorkflow)) + metadataWriteStatisticsCache.put( + workflowWriteStats.workflowId, + workflowWriteStats.copy(totalWrites = writesForWorkflow, lastLogged = writesForWorkflow) + ) Vector(HeavyMetadataAlert(workflowWriteStats.workflowId, writesForWorkflow)) - } - else { - metadataWriteStatisticsCache.put(workflowWriteStats.workflowId, workflowWriteStats.copy(totalWrites = writesForWorkflow)) + } else { + metadataWriteStatisticsCache.put(workflowWriteStats.workflowId, + workflowWriteStats.copy(totalWrites = writesForWorkflow) + ) Vector.empty } diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala index 5a80b08e4cc..64fa240388d 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala @@ -1,6 +1,5 @@ package cromwell.services.metadata.impl - import akka.actor.{ActorRef, LoggingFSM, Props} import cats.data.NonEmptyList import cromwell.core.Dispatcher.ServiceDispatcher @@ -25,7 +24,8 @@ object MetadataSummaryRefreshActor { case object MetadataSummarySuccess extends MetadataSummaryActorMessage final case class MetadataSummaryFailure(t: Throwable) extends MetadataSummaryActorMessage - def props(serviceRegistryActor: ActorRef) = Props(new MetadataSummaryRefreshActor(serviceRegistryActor)).withDispatcher(ServiceDispatcher) + def props(serviceRegistryActor: ActorRef) = + Props(new MetadataSummaryRefreshActor(serviceRegistryActor)).withDispatcher(ServiceDispatcher) sealed trait SummaryRefreshState case object WaitingForRequest extends SummaryRefreshState @@ -36,16 +36,17 @@ object MetadataSummaryRefreshActor { } class MetadataSummaryRefreshActor(override val serviceRegistryActor: ActorRef) - extends LoggingFSM[SummaryRefreshState, SummaryRefreshData.type] + extends LoggingFSM[SummaryRefreshState, SummaryRefreshData.type] with MetadataDatabaseAccess with MetadataServicesStore with CromwellInstrumentation { implicit val ec = context.dispatcher - private val summaryMetricsGapsPath: NonEmptyList[String] = MetadataServiceActor.MetadataInstrumentationPrefix :+ "summarizer" :+ "gap" - private 
diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala
index 5a80b08e4cc..64fa240388d 100644
--- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala
+++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataSummaryRefreshActor.scala
@@ -1,6 +1,5 @@
 package cromwell.services.metadata.impl
 
-
 import akka.actor.{ActorRef, LoggingFSM, Props}
 import cats.data.NonEmptyList
 import cromwell.core.Dispatcher.ServiceDispatcher
@@ -25,7 +24,8 @@
   case object MetadataSummarySuccess extends MetadataSummaryActorMessage
   final case class MetadataSummaryFailure(t: Throwable) extends MetadataSummaryActorMessage
 
-  def props(serviceRegistryActor: ActorRef) = Props(new MetadataSummaryRefreshActor(serviceRegistryActor)).withDispatcher(ServiceDispatcher)
+  def props(serviceRegistryActor: ActorRef) =
+    Props(new MetadataSummaryRefreshActor(serviceRegistryActor)).withDispatcher(ServiceDispatcher)
 
   sealed trait SummaryRefreshState
   case object WaitingForRequest extends SummaryRefreshState
@@ -36,16 +36,17 @@ object MetadataSummaryRefreshActor {
 }
 
 class MetadataSummaryRefreshActor(override val serviceRegistryActor: ActorRef)
-  extends LoggingFSM[SummaryRefreshState, SummaryRefreshData.type]
+    extends LoggingFSM[SummaryRefreshState, SummaryRefreshData.type]
     with MetadataDatabaseAccess
     with MetadataServicesStore
     with CromwellInstrumentation {
 
   implicit val ec = context.dispatcher
 
-  private val summaryMetricsGapsPath: NonEmptyList[String] = MetadataServiceActor.MetadataInstrumentationPrefix :+ "summarizer" :+ "gap"
-  private val summaryMetricsProcessedPath: NonEmptyList[String] = MetadataServiceActor.MetadataInstrumentationPrefix :+ "summarizer" :+ "processed"
-
+  private val summaryMetricsGapsPath: NonEmptyList[String] =
+    MetadataServiceActor.MetadataInstrumentationPrefix :+ "summarizer" :+ "gap"
+  private val summaryMetricsProcessedPath: NonEmptyList[String] =
+    MetadataServiceActor.MetadataInstrumentationPrefix :+ "summarizer" :+ "processed"
 
   val increasingGapPath = summaryMetricsGapsPath :+ "increasing"
   val decreasingGapPath = summaryMetricsGapsPath :+ "decreasing"
@@ -56,38 +57,37 @@ class MetadataSummaryRefreshActor(override val serviceRegistryActor: ActorRef)
   private val instrumentationPrefix: Option[String] = InstrumentationPrefixes.ServicesPrefix
 
   private val summarizerQueueIncreasingGapMetricActor =
-    context.actorOf(AsynchronousThrottlingGaugeMetricActor.props(increasingGapPath, instrumentationPrefix, serviceRegistryActor))
+    context.actorOf(
+      AsynchronousThrottlingGaugeMetricActor.props(increasingGapPath, instrumentationPrefix, serviceRegistryActor)
+    )
 
   startWith(WaitingForRequest, SummaryRefreshData)
 
-  when (WaitingForRequest) {
-    case Event(SummarizeMetadata(limit, respondTo), _) =>
-      refreshWorkflowMetadataSummaries(limit) onComplete {
-        case Success(summaryResult) =>
-          summarizerQueueIncreasingGapMetricActor ! CalculateMetricValue { ec => getSummaryQueueSize()(ec) }
-          sendGauge(decreasingGapPath, summaryResult.decreasingGap, instrumentationPrefix)
-
-          count(increasingProcessedPath, summaryResult.rowsProcessedIncreasing, instrumentationPrefix)
-          count(decreasingProcessedPath, summaryResult.rowsProcessedDecreasing, instrumentationPrefix)
-
-          respondTo ! MetadataSummarySuccess
-          self ! MetadataSummaryComplete
-        case Failure(t) =>
-          log.error(t, "Failed to summarize metadata")
-          respondTo ! MetadataSummaryFailure(t)
-          self ! MetadataSummaryComplete
-      }
-      goto(SummarizingMetadata)
+  when(WaitingForRequest) { case Event(SummarizeMetadata(limit, respondTo), _) =>
+    refreshWorkflowMetadataSummaries(limit) onComplete {
+      case Success(summaryResult) =>
+        summarizerQueueIncreasingGapMetricActor ! CalculateMetricValue(ec => getSummaryQueueSize()(ec))
+        sendGauge(decreasingGapPath, summaryResult.decreasingGap, instrumentationPrefix)
+
+        count(increasingProcessedPath, summaryResult.rowsProcessedIncreasing, instrumentationPrefix)
+        count(decreasingProcessedPath, summaryResult.rowsProcessedDecreasing, instrumentationPrefix)
+
+        respondTo ! MetadataSummarySuccess
+        self ! MetadataSummaryComplete
+      case Failure(t) =>
+        log.error(t, "Failed to summarize metadata")
+        respondTo ! MetadataSummaryFailure(t)
+        self ! MetadataSummaryComplete
+    }
+    goto(SummarizingMetadata)
   }
 
-  when (SummarizingMetadata) {
-    case Event(MetadataSummaryComplete, _) =>
-      goto(WaitingForRequest) using SummaryRefreshData
+  when(SummarizingMetadata) { case Event(MetadataSummaryComplete, _) =>
+    goto(WaitingForRequest) using SummaryRefreshData
   }
 
-  whenUnhandled {
-    case Event(wut, _) =>
-      log.warning("Unrecognized or unexpected message while in state '{}': {}", stateName, wut)
-      stay()
+  whenUnhandled { case Event(wut, _) =>
+    log.warning("Unrecognized or unexpected message while in state '{}': {}", stateName, wut)
+    stay()
   }
 }
diff --git a/services/src/main/scala/cromwell/services/metadata/impl/ReadDatabaseMetadataWorkerActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/ReadDatabaseMetadataWorkerActor.scala
index 165c1bc2f14..25240577e37 100644
--- a/services/src/main/scala/cromwell/services/metadata/impl/ReadDatabaseMetadataWorkerActor.scala
+++ b/services/src/main/scala/cromwell/services/metadata/impl/ReadDatabaseMetadataWorkerActor.scala
@@ -14,11 +14,12 @@ import scala.util.Try
 
 object ReadDatabaseMetadataWorkerActor {
   def props(metadataReadTimeout: Duration, metadataReadRowNumberSafetyThreshold: Int) =
-    Props(new ReadDatabaseMetadataWorkerActor(metadataReadTimeout, metadataReadRowNumberSafetyThreshold)).withDispatcher(ServiceDispatcher)
+    Props(new ReadDatabaseMetadataWorkerActor(metadataReadTimeout, metadataReadRowNumberSafetyThreshold))
+      .withDispatcher(ServiceDispatcher)
 }
 
 class ReadDatabaseMetadataWorkerActor(metadataReadTimeout: Duration, metadataReadRowNumberSafetyThreshold: Int)
-  extends Actor
+    extends Actor
     with ActorLogging
     with MetadataDatabaseAccess
     with MetadataServicesStore {
@@ -26,34 +27,41 @@ class ReadDatabaseMetadataWorkerActor(metadataReadTimeout: Duration, metadataRea
   implicit val ec = context.dispatcher
 
   def receive = {
-    case FetchFailedJobsMetadataWithWorkflowId(w: WorkflowId) => {
+    case FetchFailedJobsMetadataWithWorkflowId(w: WorkflowId) =>
       evaluateRespondAndStop(sender(), getFailedJobs(w))
-    }
     case GetMetadataAction(query: MetadataQuery, checkTotalMetadataRowNumberBeforeQuerying: Boolean) =>
       evaluateRespondAndStop(sender(), getMetadata(query, checkTotalMetadataRowNumberBeforeQuerying))
     case GetMetadataStreamAction(workflowId) =>
       evaluateRespondAndStop(sender(), Future.fromTry(getMetadataStream(workflowId)))
     case GetStatus(workflowId) => evaluateRespondAndStop(sender(), getStatus(workflowId))
     case GetLabels(workflowId) => evaluateRespondAndStop(sender(), queryLabelsAndRespond(workflowId))
-    case GetRootAndSubworkflowLabels(rootWorkflowId: WorkflowId) => evaluateRespondAndStop(sender(), queryRootAndSubworkflowLabelsAndRespond(rootWorkflowId))
+    case GetRootAndSubworkflowLabels(rootWorkflowId: WorkflowId) =>
+      evaluateRespondAndStop(sender(), queryRootAndSubworkflowLabelsAndRespond(rootWorkflowId))
     case GetLogs(workflowId) => evaluateRespondAndStop(sender(), queryLogsAndRespond(workflowId))
-    case QueryForWorkflowsMatchingParameters(parameters) => evaluateRespondAndStop(sender(), queryWorkflowsAndRespond(parameters))
+    case QueryForWorkflowsMatchingParameters(parameters) =>
+      evaluateRespondAndStop(sender(), queryWorkflowsAndRespond(parameters))
     case WorkflowOutputs(id) => evaluateRespondAndStop(sender(), queryWorkflowOutputsAndRespond(id))
-    case unexpected => log.warning(s"Programmer Error! Unexpected message received by ${getClass.getSimpleName}: $unexpected")
+    case unexpected =>
+      log.warning(s"Programmer Error! Unexpected message received by ${getClass.getSimpleName}: $unexpected")
   }
 
   private def evaluateRespondAndStop(sndr: ActorRef, f: Future[Any]) = {
     f map { result =>
       sndr ! result
-    } andThen {
-      case _ => self ! PoisonPill
-    } recover {
-      case t => log.error(t, s"Programmer Error! Unexpected error fall-through to 'evaluateRespondAndStop in ${getClass.getSimpleName}'")
+    } andThen { case _ =>
+      self ! PoisonPill
+    } recover { case t =>
+      log.error(
+        t,
+        s"Programmer Error! Unexpected error fall-through to 'evaluateRespondAndStop in ${getClass.getSimpleName}'"
+      )
     }
     ()
   }
 
-  private def getMetadata(query: MetadataQuery, checkResultSizeBeforeQuerying: Boolean): Future[MetadataServiceResponse] = {
+  private def getMetadata(query: MetadataQuery,
+                          checkResultSizeBeforeQuerying: Boolean
+  ): Future[MetadataServiceResponse] =
     if (checkResultSizeBeforeQuerying) {
       getMetadataReadRowCount(query, metadataReadTimeout) flatMap { count =>
         if (count > metadataReadRowNumberSafetyThreshold) {
@@ -68,94 +76,84 @@ class ReadDatabaseMetadataWorkerActor(metadataReadTimeout: Duration, metadataRea
     } else {
       queryMetadata(query)
     }
-  }
 
-  private def getMetadataStream(workflowId: WorkflowId): Try[MetadataServiceResponse] = {
-    metadataEventsStream(workflowId) map {
-      s => MetadataLookupStreamSuccess(workflowId, s)
-    } recover {
-      case t => MetadataLookupStreamFailed(workflowId, t)
+  private def getMetadataStream(workflowId: WorkflowId): Try[MetadataServiceResponse] =
+    metadataEventsStream(workflowId) map { s =>
+      MetadataLookupStreamSuccess(workflowId, s)
+    } recover { case t =>
+      MetadataLookupStreamFailed(workflowId, t)
     }
-  }
 
   private def queryMetadata(query: MetadataQuery): Future[MetadataServiceResponse] =
-    queryMetadataEvents(query, metadataReadTimeout) map {
-      m => MetadataLookupResponse(query, m)
+    queryMetadataEvents(query, metadataReadTimeout) map { m =>
+      MetadataLookupResponse(query, m)
     } recover {
       case _: SQLTimeoutException => MetadataLookupFailedTimeoutResponse(query)
       case t => MetadataServiceKeyLookupFailed(query, t)
     }
 
   private def getFailedJobs(workflowId: WorkflowId): Future[MetadataServiceResponse] = {
-    val futureEvents: Future[Seq[MetadataEvent]] = getFailedJobsMetadataWithWorkflowId(workflowId) map metadataToMetadataEvents(workflowId)
-    futureEvents map {
-      m => FetchFailedJobsMetadataLookupResponse(m)
+    val futureEvents: Future[Seq[MetadataEvent]] =
+      getFailedJobsMetadataWithWorkflowId(workflowId) map metadataToMetadataEvents(workflowId)
+    futureEvents map { m =>
+      FetchFailedJobsMetadataLookupResponse(m)
     } recover {
       case _: SQLTimeoutException => FetchFailedTasksTimeoutResponse(workflowId)
       case t => FetchFailedJobsMetadataLookupFailed(workflowId, t)
     }
   }
 
-  private def getStatus(id: WorkflowId): Future[MetadataServiceResponse] = {
-
+  private def getStatus(id: WorkflowId): Future[MetadataServiceResponse] =
     getWorkflowStatus(id) map {
       case Some(s) => StatusLookupResponse(id, s)
       // There's a workflow existence check at the API layer. If the request has made it this far in the system
       // then the workflow exists but it must not have generated a status yet.
       case None => StatusLookupResponse(id, WorkflowSubmitted)
-    } recover {
-      case t => StatusLookupFailed(id, t)
+    } recover { case t =>
+      StatusLookupFailed(id, t)
     }
-  }
 
-  private def queryLabelsAndRespond(id: WorkflowId): Future[MetadataServiceResponse] = {
-
-    getWorkflowLabels(id) map {
-      ls => LabelLookupResponse(id, ls)
-    } recover {
-      case t => LabelLookupFailed(id, t)
+  private def queryLabelsAndRespond(id: WorkflowId): Future[MetadataServiceResponse] =
+    getWorkflowLabels(id) map { ls =>
+      LabelLookupResponse(id, ls)
+    } recover { case t =>
+      LabelLookupFailed(id, t)
     }
-  }
-
-  private def queryRootAndSubworkflowLabelsAndRespond(rootWorkflowId: WorkflowId): Future[MetadataServiceResponse] = {
+  private def queryRootAndSubworkflowLabelsAndRespond(rootWorkflowId: WorkflowId): Future[MetadataServiceResponse] =
     getRootAndSubworkflowLabels(rootWorkflowId) map { labels =>
       RootAndSubworkflowLabelsLookupResponse(rootWorkflowId, labels)
-    } recover {
-      case t => RootAndSubworkflowLabelsLookupFailed(rootWorkflowId, t)
+    } recover { case t =>
+      RootAndSubworkflowLabelsLookupFailed(rootWorkflowId, t)
    }
-  }
 
   private def queryWorkflowsAndRespond(rawParameters: Seq[(String, String)]): Future[MetadataServiceResponse] = {
-    def queryWorkflows: Future[(WorkflowQueryResponse, Option[QueryMetadata])] = {
+    def queryWorkflows: Future[(WorkflowQueryResponse, Option[QueryMetadata])] =
       for {
-      // Future/Try to wrap the exception that might be thrown from WorkflowQueryParameters.apply.
+        // Future/Try to wrap the exception that might be thrown from WorkflowQueryParameters.apply.
        parameters <- Future.fromTry(Try(WorkflowQueryParameters(rawParameters)))
         response <- queryWorkflowSummaries(parameters)
       } yield response
-    }
 
-    queryWorkflows map {
-      case (response, metadata) => WorkflowQuerySuccess(response, metadata)
-    } recover {
-      case t => WorkflowQueryFailure(t)
+    queryWorkflows map { case (response, metadata) =>
+      WorkflowQuerySuccess(response, metadata)
+    } recover { case t =>
+      WorkflowQueryFailure(t)
     }
   }
 
-  private def queryWorkflowOutputsAndRespond(id: WorkflowId): Future[MetadataServiceResponse] = {
-    queryWorkflowOutputs(id, metadataReadTimeout) map {
-      o => WorkflowOutputsResponse(id, o)
-    } recover {
-      case t => WorkflowOutputsFailure(id, t)
+  private def queryWorkflowOutputsAndRespond(id: WorkflowId): Future[MetadataServiceResponse] =
+    queryWorkflowOutputs(id, metadataReadTimeout) map { o =>
+      WorkflowOutputsResponse(id, o)
+    } recover { case t =>
+      WorkflowOutputsFailure(id, t)
     }
-  }
 
-  private def queryLogsAndRespond(id: WorkflowId): Future[MetadataServiceResponse] = {
-    queryLogs(id, metadataReadTimeout) map {
-      s => LogsResponse(id, s)
-    } recover {
-      case t => LogsFailure(id, t)
+  private def queryLogsAndRespond(id: WorkflowId): Future[MetadataServiceResponse] =
+    queryLogs(id, metadataReadTimeout) map { s =>
+      LogsResponse(id, s)
+    } recover { case t =>
+      LogsFailure(id, t)
     }
-  }
 }
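Every branch of the receive block above delegates to the same one-shot pattern: run a Future, send its result back to the original requester, then poison-pill the worker so each query gets a fresh actor. Stripped of Cromwell's response types, the shape is roughly this (a sketch; the error logging present in the real helper is elided):

    import akka.actor.{Actor, ActorRef, PoisonPill}
    import scala.concurrent.Future

    trait OneShotResponder { this: Actor =>
      import context.dispatcher

      // Reply with the Future's result, then stop self whether it succeeded or failed.
      def respondAndStop(to: ActorRef, work: Future[Any]): Unit = {
        work
          .map(result => to ! result)
          .andThen { case _ => self ! PoisonPill }
        ()
      }
    }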
diff --git a/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala
index 869ae79e10f..45ddb1bf272 100644
--- a/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala
+++ b/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala
@@ -5,13 +5,25 @@ import akka.actor.{Actor, ActorLogging, ActorRef, Props}
 import cromwell.core.Dispatcher.ApiDispatcher
 import cromwell.services.MetadataJsonResponse
 import cromwell.services.metadata.MetadataService
-import cromwell.services.metadata.MetadataService.{BuildMetadataJsonAction, BuildWorkflowMetadataJsonAction, FetchFailedJobsMetadataWithWorkflowId, GetMetadataStreamAction, MetadataLookupStreamSuccess, MetadataQueryResponse, MetadataServiceAction, MetadataServiceResponse, RootAndSubworkflowLabelsLookupResponse}
+import cromwell.services.metadata.MetadataService.{
+  BuildMetadataJsonAction,
+  BuildWorkflowMetadataJsonAction,
+  FetchFailedJobsMetadataWithWorkflowId,
+  GetMetadataStreamAction,
+  MetadataLookupStreamSuccess,
+  MetadataQueryResponse,
+  MetadataServiceAction,
+  MetadataServiceResponse,
+  RootAndSubworkflowLabelsLookupResponse
+}
 import cromwell.services.metadata.impl.ReadMetadataRegulatorActor.PropsMaker
 import cromwell.services.metadata.impl.builder.MetadataBuilderActor
 
 import scala.collection.mutable
 
-class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMetadataWorkerProps: PropsMaker) extends Actor with ActorLogging {
+class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMetadataWorkerProps: PropsMaker)
+    extends Actor
+    with ActorLogging {
   // This actor tracks all requests coming in from the API service and spins up new builders as needed to service them.
   // If the processing of an identical request is already in flight the requester will be added to a set of requesters
   // to notify when the response from the first request becomes available.
@@ -31,7 +43,10 @@ class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMeta
       val currentRequesters = apiRequests.getOrElse(fetchFailedJobsMetadataAction, Set.empty)
       apiRequests.put(fetchFailedJobsMetadataAction, currentRequesters + sender())
       if (currentRequesters.isEmpty) {
-        val builderActor = context.actorOf(metadataBuilderActorProps().withDispatcher(ApiDispatcher), MetadataBuilderActor.uniqueActorName(fetchFailedJobsMetadataAction.workflowId.toString))
+        val builderActor = context.actorOf(
+          metadataBuilderActorProps().withDispatcher(ApiDispatcher),
+          MetadataBuilderActor.uniqueActorName(fetchFailedJobsMetadataAction.workflowId.toString)
+        )
         builderRequests.put(builderActor, fetchFailedJobsMetadataAction)
         builderActor ! fetchFailedJobsMetadataAction
       }
@@ -39,7 +54,10 @@ class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMeta
       val currentRequesters = apiRequests.getOrElse(singleWorkflowAction, Set.empty)
       apiRequests.put(singleWorkflowAction, currentRequesters + sender())
       if (currentRequesters.isEmpty) {
-        val builderActor = context.actorOf(metadataBuilderActorProps().withDispatcher(ApiDispatcher), MetadataBuilderActor.uniqueActorName(singleWorkflowAction.workflowId.toString))
+        val builderActor = context.actorOf(
+          metadataBuilderActorProps().withDispatcher(ApiDispatcher),
+          MetadataBuilderActor.uniqueActorName(singleWorkflowAction.workflowId.toString)
+        )
         builderRequests.put(builderActor, singleWorkflowAction)
         builderActor ! singleWorkflowAction
       }
@@ -47,7 +65,9 @@ class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMeta
       val currentRequesters = apiRequests.getOrElse(crossWorkflowAction, Set.empty)
       apiRequests.put(crossWorkflowAction, currentRequesters + sender())
       if (currentRequesters.isEmpty) {
-        val readMetadataActor = context.actorOf(readMetadataWorkerProps.apply().withDispatcher(ApiDispatcher), s"MetadataQueryWorker-${UUID.randomUUID()}")
+        val readMetadataActor = context.actorOf(readMetadataWorkerProps.apply().withDispatcher(ApiDispatcher),
+                                                s"MetadataQueryWorker-${UUID.randomUUID()}"
+        )
         builderRequests.put(readMetadataActor, crossWorkflowAction)
         readMetadataActor ! crossWorkflowAction
       }
@@ -55,14 +75,17 @@ class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMeta
     case streamRequest: GetMetadataStreamAction =>
       val currentRequesters = apiRequests.getOrElse(streamRequest, Set.empty)
       apiRequests.put(streamRequest, currentRequesters + sender())
-      if(currentRequesters.isEmpty) {
-        val readMetadataActor = context.actorOf(readMetadataWorkerProps.apply().withDispatcher(ApiDispatcher), s"MetadataQueryWorker-${UUID.randomUUID()}")
+      if (currentRequesters.isEmpty) {
+        val readMetadataActor = context.actorOf(readMetadataWorkerProps.apply().withDispatcher(ApiDispatcher),
+                                                s"MetadataQueryWorker-${UUID.randomUUID()}"
+        )
         builderRequests.put(readMetadataActor, streamRequest)
         readMetadataActor ! streamRequest
       }
 
     case serviceResponse: MetadataServiceResponse =>
       serviceResponse match {
-        case response @ (_: MetadataJsonResponse | _: MetadataQueryResponse | _: RootAndSubworkflowLabelsLookupResponse | _: MetadataLookupStreamSuccess) =>
+        case response @ (_: MetadataJsonResponse | _: MetadataQueryResponse |
+            _: RootAndSubworkflowLabelsLookupResponse | _: MetadataLookupStreamSuccess) =>
          handleResponseFromMetadataWorker(response)
       }
     case other => log.error(s"Programmer Error: Unexpected message $other received from ${sender()}")
@@ -75,16 +98,20 @@ class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMeta
         apiRequests.get(action) match {
           case Some(requesters) =>
             apiRequests.remove(action)
-            requesters foreach { _ ! response}
+            requesters foreach { _ ! response }
           case None =>
             // unpossible: there had to have been a request that corresponded to this response
-            log.error(s"Programmer Error: MetadataBuilderRegulatorActor has no registered requesters found for action: $action")
+            log.error(
+              s"Programmer Error: MetadataBuilderRegulatorActor has no registered requesters found for action: $action"
+            )
         }
         builderRequests.remove(sndr)
         ()
       case None =>
         // unpossible: this actor should know about all the child MetadataBuilderActors it has begotten
-        log.error(s"Programmer Error: MetadataBuilderRegulatorActor received a metadata response from an unrecognized sender $sndr")
+        log.error(
+          s"Programmer Error: MetadataBuilderRegulatorActor received a metadata response from an unrecognized sender $sndr"
+        )
     }
   }
 }
@@ -92,7 +119,6 @@ class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMeta
 object ReadMetadataRegulatorActor {
   type PropsMaker = () => Props
 
-  def props(singleWorkflowMetadataBuilderProps: PropsMaker, summarySearcherProps: PropsMaker): Props = {
+  def props(singleWorkflowMetadataBuilderProps: PropsMaker, summarySearcherProps: PropsMaker): Props =
     Props(new ReadMetadataRegulatorActor(singleWorkflowMetadataBuilderProps, summarySearcherProps))
-  }
 }
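The in-flight bookkeeping this actor does is a generic request-coalescing pattern: the first request for a given key spawns a worker, identical concurrent requests only register themselves for notification, and everyone is answered from the single response. A hedged, non-actor sketch with hypothetical names:

    import scala.collection.mutable

    // Coalesces identical in-flight requests: work is spawned once per key and
    // every registered requester is notified when the single result arrives.
    final class RequestCoalescer[K, V](spawnWorker: K => Unit) {
      private val inFlight = mutable.Map.empty[K, Vector[V => Unit]]

      def request(key: K, notify: V => Unit): Unit = {
        val current = inFlight.getOrElse(key, Vector.empty)
        inFlight.update(key, current :+ notify)
        if (current.isEmpty) spawnWorker(key) // only the first requester triggers work
      }

      def complete(key: K, value: V): Unit =
        inFlight.remove(key).getOrElse(Vector.empty).foreach(_.apply(value))
    }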
diff --git a/services/src/main/scala/cromwell/services/metadata/impl/WriteMetadataActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/WriteMetadataActor.scala
index 46af4a28ee7..16bf344ac6f 100644
--- a/services/src/main/scala/cromwell/services/metadata/impl/WriteMetadataActor.scala
+++ b/services/src/main/scala/cromwell/services/metadata/impl/WriteMetadataActor.scala
@@ -14,13 +14,12 @@ import cromwell.services.{EnhancedBatchActor, MetadataServicesStore}
 
 import scala.concurrent.duration._
 import scala.util.{Failure, Success}
 
-
 class WriteMetadataActor(override val batchSize: Int,
                          override val flushRate: FiniteDuration,
                          override val serviceRegistryActor: ActorRef,
                          override val threshold: Int,
-                         metadataStatisticsRecorderSettings: MetadataStatisticsRecorderSettings)
-  extends EnhancedBatchActor[MetadataWriteAction](flushRate, batchSize)
+                         metadataStatisticsRecorderSettings: MetadataStatisticsRecorderSettings
+) extends EnhancedBatchActor[MetadataWriteAction](flushRate, batchSize)
     with ActorLogging
     with MetadataDatabaseAccess
     with MetadataServicesStore {
@@ -30,22 +29,23 @@ class WriteMetadataActor(override val batchSize: Int,
 
   override def process(e: NonEmptyVector[MetadataWriteAction]) = instrumentedProcess {
     val empty = (Vector.empty[MetadataEvent], List.empty[(Iterable[MetadataEvent], ActorRef)])
 
-    val (putWithoutResponse, putWithResponse) = e.foldLeft(empty)({
+    val (putWithoutResponse, putWithResponse) = e.foldLeft(empty) {
       case ((putEvents, putAndRespondEvents), action: PutMetadataAction) =>
         (putEvents ++ action.events, putAndRespondEvents)
       case ((putEvents, putAndRespondEvents), action: PutMetadataActionAndRespond) =>
         (putEvents, putAndRespondEvents :+ (action.events -> action.replyTo))
-    })
+    }
     val allPutEvents: Iterable[MetadataEvent] = putWithoutResponse ++ putWithResponse.flatMap(_._1)
     val dbAction = addMetadataEvents(allPutEvents)
 
-    statsRecorder.processEventsAndGenerateAlerts(allPutEvents) foreach(a => log.warning(s"${a.workflowId} has logged a heavy amount of metadata (${a.count} rows)"))
+    statsRecorder.processEventsAndGenerateAlerts(allPutEvents) foreach (a =>
+      log.warning(s"${a.workflowId} has logged a heavy amount of metadata (${a.count} rows)")
+    )
 
     dbAction onComplete {
       case Success(_) =>
         putWithResponse foreach { case (ev, replyTo) => replyTo ! MetadataWriteSuccess(ev) }
       case Failure(cause) =>
-
         val (outOfTries, stillGood) = e.toVector.partition(_.maxAttempts <= 1)
 
         handleOutOfTries(outOfTries, cause)
@@ -61,8 +61,15 @@ class WriteMetadataActor(override val batchSize: Int,
   private def enumerateWorkflowWriteFailures(writeActions: Vector[MetadataWriteAction]): String =
     countActionsByWorkflow(writeActions).map { case (wfid, size) => s"$wfid: $size" }.mkString(", ")
 
-  private def handleOutOfTries(writeActions: Vector[MetadataWriteAction], reason: Throwable): Unit = if (writeActions.nonEmpty) {
-    log.error(reason, "Metadata event writes have failed irretrievably for the following workflows. They will be lost: " + enumerateWorkflowWriteFailures(writeActions))
+  private def handleOutOfTries(writeActions: Vector[MetadataWriteAction], reason: Throwable): Unit = if (
+    writeActions.nonEmpty
+  ) {
+    log.error(
+      reason,
+      "Metadata event writes have failed irretrievably for the following workflows. They will be lost: " + enumerateWorkflowWriteFailures(
+        writeActions
+      )
+    )
 
     writeActions foreach {
       case PutMetadataActionAndRespond(ev, replyTo, _) =>
         replyTo ! MetadataWriteFailure(reason, ev)
@@ -71,7 +78,11 @@ class WriteMetadataActor(override val batchSize: Int,
   }
 
   private def handleEventsToReconsider(writeActions: Vector[MetadataWriteAction]): Unit = if (writeActions.nonEmpty) {
-    log.warning("Metadata event writes have failed for the following workflows. They will be retried: " + enumerateWorkflowWriteFailures(writeActions))
+    log.warning(
+      "Metadata event writes have failed for the following workflows. They will be retried: " + enumerateWorkflowWriteFailures(
+        writeActions
+      )
+    )
 
     writeActions foreach {
       case action: PutMetadataAction =>
         self ! action.copy(maxAttempts = action.maxAttempts - 1)
@@ -84,8 +95,8 @@ class WriteMetadataActor(override val batchSize: Int,
   override protected def weightFunction(command: MetadataWriteAction) = command.size
   override protected def instrumentationPath = MetadataServiceActor.MetadataInstrumentationPrefix
   override protected def instrumentationPrefix = InstrumentationPrefixes.ServicesPrefix
-  def commandToData(snd: ActorRef): PartialFunction[Any, MetadataWriteAction] = {
-    case command: MetadataWriteAction => command
+  def commandToData(snd: ActorRef): PartialFunction[Any, MetadataWriteAction] = { case command: MetadataWriteAction =>
+    command
   }
 }
 
@@ -95,7 +106,8 @@ object WriteMetadataActor {
             flushRate: FiniteDuration,
             serviceRegistryActor: ActorRef,
             threshold: Int,
-            statisticsRecorderSettings: MetadataStatisticsRecorderSettings): Props =
+            statisticsRecorderSettings: MetadataStatisticsRecorderSettings
+  ): Props =
     Props(new WriteMetadataActor(dbBatchSize, flushRate, serviceRegistryActor, threshold, statisticsRecorderSettings))
       .withDispatcher(ServiceDispatcher)
       .withMailbox(PriorityMailbox)
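On a failed flush the actor splits the batch by remaining retry budget: actions whose budget is exhausted are failed permanently (and their senders told so), while the rest go back on the queue with one fewer attempt. Reduced to the partition itself, with a hypothetical WriteAction type standing in for Cromwell's:

    final case class WriteAction(payload: String, maxAttempts: Int)

    // Left: give up and report. Right: re-enqueue with a decremented retry budget.
    def splitFailedBatch(batch: Vector[WriteAction]): (Vector[WriteAction], Vector[WriteAction]) = {
      val (outOfTries, stillGood) = batch.partition(_.maxAttempts <= 1)
      (outOfTries, stillGood.map(a => a.copy(maxAttempts = a.maxAttempts - 1)))
    }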
diff --git a/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataConfig.scala b/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataConfig.scala
index 25390d94c39..f719116e60e 100644
--- a/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataConfig.scala
+++ b/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataConfig.scala
@@ -21,8 +21,8 @@ final case class ArchiveMetadataConfig(pathBuilders: PathBuilders,
                                        archiveDelay: FiniteDuration,
                                        instrumentationInterval: FiniteDuration,
                                        debugLogging: Boolean,
-                                       batchSize: Long) {
-}
+                                       batchSize: Long
+) {}
 
 object ArchiveMetadataConfig {
   def parseConfig(archiveMetadataConfig: Config)(implicit system: ActorSystem): Checked[ArchiveMetadataConfig] = {
@@ -33,16 +33,34 @@ object ArchiveMetadataConfig {
     val defaultBatchSize: Long = 1
 
     for {
-      _ <- Try(archiveMetadataConfig.getConfig("filesystems.gcs")).toCheckedWithContext("parse archiver 'filesystems.gcs' field from config")
+      _ <- Try(archiveMetadataConfig.getConfig("filesystems.gcs"))
+        .toCheckedWithContext("parse archiver 'filesystems.gcs' field from config")
       pathBuilderFactories <- CromwellFileSystems.instance.factoriesFromConfig(archiveMetadataConfig)
-      pathBuilders <- Try(Await.result(PathBuilderFactory.instantiatePathBuilders(pathBuilderFactories.values.toList, WorkflowOptions.empty), 60.seconds))
+      pathBuilders <- Try(
+        Await.result(
+          PathBuilderFactory.instantiatePathBuilders(pathBuilderFactories.values.toList, WorkflowOptions.empty),
+          60.seconds
+        )
+      )
         .toCheckedWithContext("construct archiver path builders from factories")
-      bucket <- Try(archiveMetadataConfig.getString("bucket")).toCheckedWithContext("parse Carboniter 'bucket' field from config")
-      backoffInterval <- Try(archiveMetadataConfig.getOrElse[FiniteDuration]("backoff-interval", defaultMaxInterval)).toChecked
+      bucket <- Try(archiveMetadataConfig.getString("bucket"))
+        .toCheckedWithContext("parse Carboniter 'bucket' field from config")
+      backoffInterval <- Try(
+        archiveMetadataConfig.getOrElse[FiniteDuration]("backoff-interval", defaultMaxInterval)
+      ).toChecked
       archiveDelay <- Try(archiveMetadataConfig.getOrElse("archive-delay", defaultArchiveDelay)).toChecked
-      instrumentationInterval <- Try(archiveMetadataConfig.getOrElse("instrumentation-interval", defaultInstrumentationInterval)).toChecked
+      instrumentationInterval <- Try(
+        archiveMetadataConfig.getOrElse("instrumentation-interval", defaultInstrumentationInterval)
+      ).toChecked
       debugLogging <- Try(archiveMetadataConfig.getOrElse("debug-logging", defaultDebugLogging)).toChecked
       batchSize <- Try(archiveMetadataConfig.getOrElse("batch-size", defaultBatchSize)).toChecked
-    } yield ArchiveMetadataConfig(pathBuilders, bucket, backoffInterval, archiveDelay, instrumentationInterval, debugLogging, batchSize)
+    } yield ArchiveMetadataConfig(pathBuilders,
+                                  bucket,
+                                  backoffInterval,
+                                  archiveDelay,
+                                  instrumentationInterval,
+                                  debugLogging,
+                                  batchSize
+    )
   }
 }
diff --git a/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala
index 1f5ac1f4460..1908a46438c 100644
--- a/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala
+++ b/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala
@@ -21,7 +21,11 @@ import cromwell.database.sql.SqlConverters.{ClobOptionToRawString, TimestampToSy
 import cromwell.database.sql.tables.{MetadataEntry, WorkflowMetadataSummaryEntry}
 import cromwell.services.instrumentation.CromwellInstrumentation
 import cromwell.services.metadata.MetadataArchiveStatus.Archived
-import cromwell.services.metadata.MetadataService.{GetMetadataStreamAction, MetadataLookupStreamFailed, MetadataLookupStreamSuccess}
+import cromwell.services.metadata.MetadataService.{
+  GetMetadataStreamAction,
+  MetadataLookupStreamFailed,
+  MetadataLookupStreamSuccess
+}
 import cromwell.services.metadata.impl.archiver.ArchiveMetadataSchedulerActor._
 import cromwell.services.metadata.impl.{MetadataDatabaseAccess, MetadataServiceActor}
 import cromwell.services.{IoActorRequester, MetadataServicesStore}
@@ -35,10 +39,9 @@ import scala.concurrent.duration._
 import scala.concurrent.{ExecutionContext, Future}
 import scala.util.{Failure, Success, Try}
 
-
 class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig,
-                                    override val serviceRegistryActor: ActorRef)
-  extends Actor
+                                    override val serviceRegistryActor: ActorRef
+) extends Actor
     with ActorLogging
     with GracefulShutdownHelper
     with MetadataDatabaseAccess
@@ -48,25 +51,31 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
   implicit val ec: ExecutionContext = context.dispatcher
   implicit val askTimeout: Timeout = new Timeout(60.seconds)
 
-  lazy val futureAsyncIo: Future[AsyncIo] = requestIoActor() map { ioActor => {
+  lazy val futureAsyncIo: Future[AsyncIo] = requestIoActor() map { ioActor =>
     log.info(s"IoActor reference received by ${self.path.name}")
     new AsyncIo(ioActor, DefaultIoCommandBuilder)
-  } }
+  }
 
-  private val archiverMetricsBasePath: NonEmptyList[String] = MetadataServiceActor.MetadataInstrumentationPrefix :+ "archiver"
+  private val archiverMetricsBasePath: NonEmptyList[String] =
+    MetadataServiceActor.MetadataInstrumentationPrefix :+ "archiver"
   private val rowsProcessedMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "rows_processed"
   private val rowsPerWorkflowMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "rows_per_workflow"
   private val bytesProcessedMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "bytes_processed"
   private val bytesPerWorkflowMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "bytes_per_workflow"
-  private val workflowsProcessedSuccessMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "workflows_processed" :+ "success"
-  private val workflowsProcessedFailureMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "workflows_processed" :+ "failure"
-  private val timeBehindExpectedDelayMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "time_behind_expected_delay"
-  private val workflowArchiveTotalTimeMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "workflow_archive_total_time"
+  private val workflowsProcessedSuccessMetricPath: NonEmptyList[String] =
+    archiverMetricsBasePath :+ "workflows_processed" :+ "success"
+  private val workflowsProcessedFailureMetricPath: NonEmptyList[String] =
+    archiverMetricsBasePath :+ "workflows_processed" :+ "failure"
+  private val timeBehindExpectedDelayMetricPath: NonEmptyList[String] =
+    archiverMetricsBasePath :+ "time_behind_expected_delay"
+  private val workflowArchiveTotalTimeMetricPath: NonEmptyList[String] =
+    archiverMetricsBasePath :+ "workflow_archive_total_time"
   private val workflowsToArchiveMetricPath: NonEmptyList[String] = archiverMetricsBasePath :+ "workflows_to_archive"
   private val archiverTimingMetricsBasePath: NonEmptyList[String] = archiverMetricsBasePath :+ "timings"
   private val archiverStreamTimingMetricsBasePath: NonEmptyList[String] = archiverTimingMetricsBasePath :+ "streaming"
-  private val TerminalWorkflowStatuses: List[String] = List(WorkflowSucceeded, WorkflowAborted, WorkflowFailed).map(_.toString)
+  private val TerminalWorkflowStatuses: List[String] =
+    List(WorkflowSucceeded, WorkflowAborted, WorkflowFailed).map(_.toString)
 
   // kick off archiving immediately
   self ! ArchiveNextWorkflowMessage
@@ -80,7 +89,7 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
 
       // These handlers send metrics for most paths even when they're not incremented, so that the metrics
       // paths are actively receiving data points throughout:
-      archiveNextWorkflows.onComplete({
+      archiveNextWorkflows.onComplete {
         case Success(cnt) =>
           count(workflowsProcessedSuccessMetricPath, cnt, ServicesPrefix)
           count(workflowsProcessedFailureMetricPath, 0L, ServicesPrefix)
@@ -92,7 +101,10 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
             count(rowsProcessedMetricPath, 0L, ServicesPrefix)
             sendGauge(workflowsToArchiveMetricPath, 0L, ServicesPrefix)
             sendTiming(workflowArchiveTotalTimeMetricPath, calculateTimeSince(startTime), ServicesPrefix)
-            if (archiveMetadataConfig.debugLogging) log.info(s"No complete workflows which finished over ${archiveMetadataConfig.archiveDelay} ago remain to be archived. Scheduling next poll in ${archiveMetadataConfig.backoffInterval}.")
+            if (archiveMetadataConfig.debugLogging)
+              log.info(
+                s"No complete workflows which finished over ${archiveMetadataConfig.archiveDelay} ago remain to be archived. Scheduling next poll in ${archiveMetadataConfig.backoffInterval}."
+              )
             scheduleNextWorkflowToArchive()
           }
         case Failure(error) =>
@@ -102,9 +114,12 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
           sendTiming(workflowArchiveTotalTimeMetricPath, calculateTimeSince(startTime), ServicesPrefix)
           log.error(error, s"Error while archiving, will retry.")
           scheduleNextWorkflowToArchive()
-      })
-    case ShutdownCommand => context.stop(self) // TODO: cancel any streaming that might be happening?
-    case other => log.info(s"Programmer Error! The ArchiveMetadataSchedulerActor received unexpected message! (${sender()} sent ${other.toPrettyElidedString(1000)}})")
+      }
+    case ShutdownCommand => context.stop(self) // TODO: cancel any streaming that might be happening?
+    case other =>
+      log.info(
+        s"Programmer Error! The ArchiveMetadataSchedulerActor received unexpected message! (${sender()} sent ${other.toPrettyElidedString(1000)}})"
+      )
   }
 
   def workflowsLeftToArchiveMetric(): Unit = {
@@ -112,17 +127,24 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
     val currentTimestampMinusDelay = OffsetDateTime.now().minusSeconds(archiveMetadataConfig.archiveDelay.toSeconds)
     countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(
       TerminalWorkflowStatuses,
       currentTimestampMinusDelay
-    ).onComplete({
+    ).onComplete {
       case Success(workflowsToArchive) =>
         sendGauge(workflowsToArchiveMetricPath, workflowsToArchive.longValue(), ServicesPrefix)
         // schedule next workflows left to archive query after interval
-        context.system.scheduler.scheduleOnce(archiveMetadataConfig.instrumentationInterval)(workflowsLeftToArchiveMetric())
+        context.system.scheduler.scheduleOnce(archiveMetadataConfig.instrumentationInterval)(
+          workflowsLeftToArchiveMetric()
+        )
      case Failure(exception) =>
-        log.error(exception, s"Something went wrong while fetching number of workflows left to archive. " +
-          s"Scheduling next poll in ${archiveMetadataConfig.instrumentationInterval}.")
+        log.error(
+          exception,
+          s"Something went wrong while fetching number of workflows left to archive. " +
+            s"Scheduling next poll in ${archiveMetadataConfig.instrumentationInterval}."
+        )
         // schedule next workflows left to archive query after interval
-        context.system.scheduler.scheduleOnce(archiveMetadataConfig.instrumentationInterval)(workflowsLeftToArchiveMetric())
-    })
+        context.system.scheduler.scheduleOnce(archiveMetadataConfig.instrumentationInterval)(
+          workflowsLeftToArchiveMetric()
+        )
+    }
   }
 
   def archiveNextWorkflows: Future[Long] = {
     val batchLookupStartTime = OffsetDateTime.now()
 
     for {
       workflowSummaryEntries <- lookupNextWorkflowsToArchive(archiveMetadataConfig.batchSize)
       batchLookupEndTime = OffsetDateTime.now()
-      _ = sendTiming(archiverTimingMetricsBasePath :+ "lookup_next_workflows", calculateTimeDifference(batchLookupStartTime, batchLookupEndTime), ServicesPrefix)
-      _ = if (archiveMetadataConfig.debugLogging) log.info(s"About to archive batch of ${workflowSummaryEntries.size} workflows.")
+      _ = sendTiming(archiverTimingMetricsBasePath :+ "lookup_next_workflows",
+                     calculateTimeDifference(batchLookupStartTime, batchLookupEndTime),
+                     ServicesPrefix
+      )
+      _ = if (archiveMetadataConfig.debugLogging)
+        log.info(s"About to archive batch of ${workflowSummaryEntries.size} workflows.")
       result <- archiveSummaryEntries(workflowSummaryEntries)
-      _ = sendTiming(archiverTimingMetricsBasePath :+ "batch_lookup_and_archive_time", calculateTimeDifference(batchLookupStartTime, OffsetDateTime.now()), ServicesPrefix)
+      _ = sendTiming(archiverTimingMetricsBasePath :+ "batch_lookup_and_archive_time",
+                     calculateTimeDifference(batchLookupStartTime, OffsetDateTime.now()),
+                     ServicesPrefix
+      )
     } yield result
   }
 
-  private def archiveSummaryEntries(entries: Seq[WorkflowMetadataSummaryEntry]): Future[Long] = {
+  private def archiveSummaryEntries(entries: Seq[WorkflowMetadataSummaryEntry]): Future[Long] =
     if (entries.isEmpty) {
       sendGauge(timeBehindExpectedDelayMetricPath, 0L, ServicesPrefix)
       Future.successful(0L)
@@ -148,11 +177,16 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
       result.map(_.sum)
     }
-  }
 
   private def archiveSummaryEntry(entry: WorkflowMetadataSummaryEntry): Future[Long] = {
     entry.endTimestamp.foreach { workflowEndTime =>
-      sendGauge(timeBehindExpectedDelayMetricPath, calculateTimeSince(workflowEndTime.toSystemOffsetDateTime).toMillis - archiveMetadataConfig.archiveDelay.toMillis, ServicesPrefix)
+      sendGauge(
+        timeBehindExpectedDelayMetricPath,
+        calculateTimeSince(
+          workflowEndTime.toSystemOffsetDateTime
+        ).toMillis - archiveMetadataConfig.archiveDelay.toMillis,
+        ServicesPrefix
+      )
     }
 
     val archiveStartTime = OffsetDateTime.now()
@@ -163,14 +197,26 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
       dbStream <- fetchStreamFromDatabase(WorkflowId(UUID.fromString(workflowId)))
       _ = log.info(s"Archiving metadata for $workflowId to ${path.pathAsString}")
       readyToStreamTime = OffsetDateTime.now()
-      _ = sendTiming(archiverTimingMetricsBasePath :+ "prepare_to_stream", calculateTimeDifference(archiveStartTime, readyToStreamTime), ServicesPrefix)
+      _ = sendTiming(archiverTimingMetricsBasePath :+ "prepare_to_stream",
+                     calculateTimeDifference(archiveStartTime, readyToStreamTime),
+                     ServicesPrefix
+      )
       _ <- streamMetadataToGcs(path, dbStream)
       streamCompleteTime = OffsetDateTime.now()
-      _ = sendTiming(archiverTimingMetricsBasePath :+ "stream_to_gcs", calculateTimeDifference(readyToStreamTime, streamCompleteTime), ServicesPrefix)
+      _ = sendTiming(archiverTimingMetricsBasePath :+ "stream_to_gcs",
+                     calculateTimeDifference(readyToStreamTime, streamCompleteTime),
+                     ServicesPrefix
+      )
       _ <- updateMetadataArchiveStatus(WorkflowId(UUID.fromString(workflowId)), Archived)
       statusUpdatedTime = OffsetDateTime.now()
-      _ = sendTiming(archiverTimingMetricsBasePath :+ "archive_status_update", calculateTimeDifference(streamCompleteTime, statusUpdatedTime), ServicesPrefix)
-      _ = sendTiming(workflowArchiveTotalTimeMetricPath, calculateTimeDifference(archiveStartTime, statusUpdatedTime), ServicesPrefix)
+      _ = sendTiming(archiverTimingMetricsBasePath :+ "archive_status_update",
+                     calculateTimeDifference(streamCompleteTime, statusUpdatedTime),
+                     ServicesPrefix
+      )
+      _ = sendTiming(workflowArchiveTotalTimeMetricPath,
+                     calculateTimeDifference(archiveStartTime, statusUpdatedTime),
+                     ServicesPrefix
+      )
       _ = log.info(s"Archiving succeeded for $workflowId")
     } yield 1L
   }
@@ -184,14 +230,14 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
     )
   }
 
-  private def getGcsPathForMetadata(summaryEntry: WorkflowMetadataSummaryEntry): Try[Path] = {
+  private def getGcsPathForMetadata(summaryEntry: WorkflowMetadataSummaryEntry): Try[Path] = {
     /*
       Note: The naming convention for archived workflows is:
        - if a workflow has no root workflow, its archived metadata is put in GCS at <workflow id>/<workflow id>.csv
       - if a workflow is a subworkflow, its archived metadata is put in GCS under its root workflow's directory, i.e.
         <root workflow id>/<subworkflow id>.csv
      Changing this convention would break the expectations of where to find the archived metadata files.
-     */
+    */
     val bucket = archiveMetadataConfig.bucket
     val workflowId = summaryEntry.workflowExecutionUuid
     val rootWorkflowId = summaryEntry.rootWorkflowExecutionUuid.getOrElse(workflowId)
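The convention spelled out in the comment above keeps every archive object under its root workflow's directory, so a subworkflow's CSV is always discoverable from the root id alone. As a plain-string illustration of the resulting object names (the real code builds a cloud Path through the configured path builders; this helper is hypothetical):

    def archiveObjectName(bucket: String, workflowId: String, rootWorkflowId: Option[String]): String = {
      val root = rootWorkflowId.getOrElse(workflowId) // top-level workflows are their own root
      s"gs://$bucket/$root/$workflowId.csv"
    }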
@@ -200,39 +246,53 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
     }
   }
 
-  def fetchStreamFromDatabase(workflowId: WorkflowId): Future[DatabasePublisher[MetadataEntry]] = {
+  def fetchStreamFromDatabase(workflowId: WorkflowId): Future[DatabasePublisher[MetadataEntry]] =
     (serviceRegistryActor ? GetMetadataStreamAction(workflowId)) flatMap {
       case MetadataLookupStreamSuccess(_, responseStream) => Future.successful(responseStream)
-      case MetadataLookupStreamFailed(_, reason) => Future.failed(new Exception(s"Failed to get metadata stream", reason))
+      case MetadataLookupStreamFailed(_, reason) =>
+        Future.failed(new Exception(s"Failed to get metadata stream", reason))
       case other => Future.failed(new Exception(s"Failed to get metadata stream: ${other.toPrettyElidedString(1000)}"))
     }
-  }
 
   def streamMetadataToGcs(path: Path, stream: DatabasePublisher[MetadataEntry]): Future[Unit] = {
     val streamStartTime = OffsetDateTime.now()
-    val rowsCounter = new CounterAndProgressiveLogger( logFunction = (newRows, totalRows) => {
-      if (archiveMetadataConfig.debugLogging) logger.info(s"Uploaded $newRows new rows to ${path.pathAsString}. Total uploaded is now ${totalRows}") else ()
-      count(rowsProcessedMetricPath, newRows, ServicesPrefix)
-    }, 100000)
+    val rowsCounter = new CounterAndProgressiveLogger(
+      logFunction = (newRows, totalRows) => {
+        if (archiveMetadataConfig.debugLogging)
+          logger.info(s"Uploaded $newRows new rows to ${path.pathAsString}. Total uploaded is now ${totalRows}")
+        else ()
+        count(rowsProcessedMetricPath, newRows, ServicesPrefix)
+      },
+      100000
+    )
 
     for {
       asyncIo <- futureAsyncIo
       gotAsyncIoTime = OffsetDateTime.now()
-      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "get_async_io", calculateTimeDifference(streamStartTime, gotAsyncIoTime), ServicesPrefix)
+      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "get_async_io",
+                     calculateTimeDifference(streamStartTime, gotAsyncIoTime),
+                     ServicesPrefix
+      )
       gcsStream = Files.newOutputStream(path.nioPath, StandardOpenOption.CREATE)
       gcsStreamCreatedTime = OffsetDateTime.now()
-      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "create_gcs_stream", calculateTimeDifference(gotAsyncIoTime, gcsStreamCreatedTime), ServicesPrefix)
+      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "create_gcs_stream",
+                     calculateTimeDifference(gotAsyncIoTime, gcsStreamCreatedTime),
+                     ServicesPrefix
+      )
       crc32cStream = new Crc32cStream()
       teeStream = new TeeingOutputStream(gcsStream, crc32cStream, new ByteCountingOutputStream())
       csvPrinter = new CSVPrinter(
         new OutputStreamWriter(teeStream),
-        CSVFormat.DEFAULT.builder().setHeader(CsvFileHeaders : _*).build(),
+        CSVFormat.DEFAULT.builder().setHeader(CsvFileHeaders: _*).build()
       )
       csvPrinterCreatedTime = OffsetDateTime.now()
-      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "create_csv_printer", calculateTimeDifference(gcsStreamCreatedTime, csvPrinterCreatedTime), ServicesPrefix)
-      _ <- stream.foreach(me => {
+      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "create_csv_printer",
+                     calculateTimeDifference(gcsStreamCreatedTime, csvPrinterCreatedTime),
+                     ServicesPrefix
+      )
+      _ <- stream.foreach { me =>
         csvPrinter.printRecord(
           me.metadataEntryId.map(_.toString).getOrElse(""),
           me.workflowExecutionUuid,
@@ -245,17 +305,30 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
           me.metadataValueType.getOrElse("")
         )
         rowsCounter.increment()
-      })
+      }
       _ = rowsCounter.manualLog()
       _ = sendGauge(rowsPerWorkflowMetricPath, rowsCounter.getTotalCount, ServicesPrefix)
       _ = csvPrinter.close()
       streamingCompleteTime = OffsetDateTime.now()
-      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "stream_data_to_gcs", calculateTimeDifference(csvPrinterCreatedTime, streamingCompleteTime), ServicesPrefix)
+      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "stream_data_to_gcs",
+                     calculateTimeDifference(csvPrinterCreatedTime, streamingCompleteTime),
+                     ServicesPrefix
+      )
       expectedChecksum = crc32cStream.checksumString
       uploadedChecksum <- asyncIo.hashAsync(path)
       checksumValidatedTime = OffsetDateTime.now()
-      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "checksum_validation", calculateTimeDifference(streamingCompleteTime, checksumValidatedTime), ServicesPrefix)
-      _ <- if (uploadedChecksum == expectedChecksum) Future.successful(()) else Future.failed(new Exception(s"Uploaded checksum '$uploadedChecksum' did not match local calculation ('$expectedChecksum')"))
+      _ = sendTiming(archiverStreamTimingMetricsBasePath :+ "checksum_validation",
+                     calculateTimeDifference(streamingCompleteTime, checksumValidatedTime),
+                     ServicesPrefix
+      )
+      _ <-
+        if (uploadedChecksum == expectedChecksum) Future.successful(())
+        else
+          Future.failed(
+            new Exception(
+              s"Uploaded checksum '$uploadedChecksum' did not match local calculation ('$expectedChecksum')"
+            )
+          )
     } yield ()
   }
@@ -265,10 +338,15 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig
   }
 
   final class ByteCountingOutputStream() extends OutputStream {
-    val byteCounter = new CounterAndProgressiveLogger( logFunction = (newBytes, totalBytes) => {
-      if (archiveMetadataConfig.debugLogging) logger.info(s"Uploaded $newBytes new bytes. Total uploaded is now $totalBytes") else ()
-      count(bytesProcessedMetricPath, newBytes, ServicesPrefix)
-    }, 100000)
+    val byteCounter = new CounterAndProgressiveLogger(
+      logFunction = (newBytes, totalBytes) => {
+        if (archiveMetadataConfig.debugLogging)
+          logger.info(s"Uploaded $newBytes new bytes. Total uploaded is now $totalBytes")
+        else ()
+        count(bytesProcessedMetricPath, newBytes, ServicesPrefix)
+      },
+      100000
+    )
 
     override def write(b: Int): Unit = byteCounter.increment()
     override def close(): Unit = {
@@ -298,9 +376,9 @@ object ArchiveMetadataSchedulerActor {
     Props(new ArchiveMetadataSchedulerActor(archiveMetadataConfig, serviceRegistryActor))
 
   final class TeeingOutputStream(streams: OutputStream*) extends OutputStream {
-    override def write(b: Int): Unit = { streams.foreach(_.write(b)) }
-    override def close(): Unit = { streams.foreach(_.close())}
-    override def flush(): Unit = { streams.foreach(_.flush())}
+    override def write(b: Int): Unit = streams.foreach(_.write(b))
+    override def close(): Unit = streams.foreach(_.close())
+    override def flush(): Unit = streams.foreach(_.flush())
   }
 
   final class Crc32cStream() extends OutputStream {
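TeeingOutputStream plus Crc32cStream implement upload verification: every byte on its way to GCS also feeds a local checksum, which is compared afterwards with the checksum GCS reports for the uploaded object. The same idea in a compact, self-contained form using the JDK's CRC32C (the production code encodes the digest in a GCS-compatible way, which this sketch does not attempt):

    import java.io.OutputStream
    import java.util.zip.CRC32C // available since JDK 9

    final class ChecksummingOutputStream(inner: OutputStream) extends OutputStream {
      private val crc = new CRC32C()

      override def write(b: Int): Unit = {
        crc.update(b) // feed the checksum with every byte written downstream
        inner.write(b)
      }
      override def flush(): Unit = inner.flush()
      override def close(): Unit = inner.close()

      def checksum: Long = crc.getValue
    }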
diff --git a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala
index 05e494680a3..1f48bc8d12c 100644
--- a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala
+++ b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala
@@ -18,7 +18,6 @@ import spray.json._
 
 import scala.language.postfixOps
 
-
 object MetadataBuilderActor {
   sealed trait MetadataBuilderActorState
   case object Idle extends MetadataBuilderActorState
@@ -28,24 +27,26 @@ object MetadataBuilderActor {
 
   sealed trait MetadataBuilderActorData
   case object IdleData extends MetadataBuilderActorData
-  final case class HasWorkData(target: ActorRef,
-                               originalRequest: BuildMetadataJsonAction) extends MetadataBuilderActorData
+  final case class HasWorkData(target: ActorRef, originalRequest: BuildMetadataJsonAction)
+      extends MetadataBuilderActorData
   final case class HasReceivedEventsData(target: ActorRef,
                                          originalRequest: BuildMetadataJsonAction,
                                          originalQuery: MetadataQuery,
                                          originalEvents: Seq[MetadataEvent],
                                          subWorkflowsMetadata: Map[String, JsValue],
-                                         waitFor: Int) extends MetadataBuilderActorData {
-    def withSubWorkflow(id: String, metadata: JsValue) = {
+                                         waitFor: Int
+  ) extends MetadataBuilderActorData {
+    def withSubWorkflow(id: String, metadata: JsValue) =
      this.copy(subWorkflowsMetadata = subWorkflowsMetadata + ((id, metadata)))
-    }
 
     def isComplete = subWorkflowsMetadata.size == waitFor
   }
 
-  def props(readMetadataWorkerMaker: () => Props, metadataReadRowNumberSafetyThreshold: Int, isForSubworkflows: Boolean = false) = {
+  def props(readMetadataWorkerMaker: () => Props,
+            metadataReadRowNumberSafetyThreshold: Int,
+            isForSubworkflows: Boolean = false
+  ) =
     Props(new MetadataBuilderActor(readMetadataWorkerMaker, metadataReadRowNumberSafetyThreshold, isForSubworkflows))
-  }
 
   val log = LoggerFactory.getLogger("MetadataBuilder")
 
@@ -62,18 +63,27 @@ object MetadataBuilderActor {
    */
   private case class MetadataForIndex(index: Int, metadata: List[JsObject])
 
-  private def eventsToAttemptMetadata(subWorkflowMetadata: Map[String, JsValue])(attempt: Int, events: Seq[MetadataEvent]) = {
-    val withAttemptField = JsObject(MetadataComponent(events, subWorkflowMetadata).toJson.asJsObject.fields + (AttemptKey -> JsNumber(attempt)))
+  private def eventsToAttemptMetadata(
+    subWorkflowMetadata: Map[String, JsValue]
+  )(attempt: Int, events: Seq[MetadataEvent]) = {
+    val withAttemptField = JsObject(
+      MetadataComponent(events, subWorkflowMetadata).toJson.asJsObject.fields + (AttemptKey -> JsNumber(attempt))
+    )
     MetadataForAttempt(attempt, withAttemptField)
   }
 
   private def attemptMetadataToIndexMetadata(index: ExecutionIndex, attemptMetadata: Iterable[MetadataForAttempt]) = {
     def addIndexProperty(value: JsObject) = JsObject(value.fields + (ShardKey -> JsNumber(index.getOrElse(-1))))
-    val metadata = attemptMetadata.toList.sortBy(_.attempt) map { mdForAttempt => addIndexProperty(mdForAttempt.metadata) }
+    val metadata = attemptMetadata.toList.sortBy(_.attempt) map { mdForAttempt =>
+      addIndexProperty(mdForAttempt.metadata)
+    }
     MetadataForIndex(index.getOrElse(-1), metadata)
   }
 
-  private def buildMetadataJson(events: Seq[MetadataEvent], includeCallsIfEmpty: Boolean, expandedValues: Map[String, JsValue]): JsObject = {
+  private def buildMetadataJson(events: Seq[MetadataEvent],
+                                includeCallsIfEmpty: Boolean,
+                                expandedValues: Map[String, JsValue]
+  ): JsObject = {
     // Partition events into workflow level and call level events
     val (workflowLevel, callLevel) = events partition { _.key.jobKey.isEmpty }
     val workflowLevelJson = MetadataComponent(workflowLevel, Map.empty).toJson.asJsObject
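The call-level half of buildMetadataJson is a three-level grouping: fully-qualified call name, then shard index, then attempt, mirroring the Map(FQN -> Map(index -> Map(attempt -> events))) shape sketched in the comment above it. Isolated from the metadata machinery, the shape is just nested groupBys (JobKey here is a hypothetical stand-in for Cromwell's job key type):

    final case class JobKey(fqn: String, index: Option[Int], attempt: Int)

    def groupCalls[E](events: Seq[(JobKey, E)]): Map[String, Map[Option[Int], Map[Int, Seq[E]]]] =
      events.groupBy(_._1.fqn).map { case (fqn, byFqn) =>
        fqn -> byFqn.groupBy(_._1.index).map { case (idx, byIdx) =>
          idx -> byIdx.groupBy(_._1.attempt).map { case (att, byAtt) => att -> byAtt.map(_._2) }
        }
      }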
@@ -112,12 +122,16 @@
      ...
      * ...
      * )
      */
-    val callsGroupedByFQNAndIndexAndAttempt = callsGroupedByFQNAndIndex safeMapValues { _ safeMapValues { _ groupBy { _.key.jobKey.get.attempt } } }
+    val callsGroupedByFQNAndIndexAndAttempt = callsGroupedByFQNAndIndex safeMapValues {
+      _ safeMapValues { _ groupBy { _.key.jobKey.get.attempt } }
+    }
 
     val eventsToAttemptFunction = Function.tupled(eventsToAttemptMetadata(expandedValues) _)
     val attemptToIndexFunction = (attemptMetadataToIndexMetadata _).tupled
 
-    val callsMap = callsGroupedByFQNAndIndexAndAttempt safeMapValues { _ safeMapValues { _ map eventsToAttemptFunction } map attemptToIndexFunction } safeMapValues { md =>
+    val callsMap = callsGroupedByFQNAndIndexAndAttempt safeMapValues {
+      _ safeMapValues { _ map eventsToAttemptFunction } map attemptToIndexFunction
+    } safeMapValues { md =>
       JsArray(md.toVector.sortBy(_.index) flatMap { _.metadata })
     }
 
@@ -126,24 +140,31 @@ object MetadataBuilderActor {
     JsObject(workflowLevelJson.fields ++ callData)
   }
 
-  private def parseWorkflowEvents(includeCallsIfEmpty: Boolean, expandedValues: Map[String, JsValue])(events: Seq[MetadataEvent]): JsObject = {
+  private def parseWorkflowEvents(includeCallsIfEmpty: Boolean, expandedValues: Map[String, JsValue])(
+    events: Seq[MetadataEvent]
+  ): JsObject =
     buildMetadataJson(events, includeCallsIfEmpty, expandedValues)
-  }
 
   /**
    * Parse a Seq of MetadataEvent into a full Json metadata response.
    */
-  private def parse(events: Seq[MetadataEvent], expandedValues: Map[String, JsValue]): JsObject = {
-    JsObject(events.groupBy(_.key.workflowId.toString) safeMapValues parseWorkflowEvents(includeCallsIfEmpty = true, expandedValues))
-  }
+  private def parse(events: Seq[MetadataEvent], expandedValues: Map[String, JsValue]): JsObject =
+    JsObject(
+      events.groupBy(_.key.workflowId.toString) safeMapValues parseWorkflowEvents(includeCallsIfEmpty = true,
+                                                                                  expandedValues
+      )
+    )
 
   val actorIdIterator = new AtomicLong(0)
 
-  def uniqueActorName(workflowId: String): String = s"${getClass.getSimpleName}.${actorIdIterator.getAndIncrement()}-for-$workflowId"
+  def uniqueActorName(workflowId: String): String =
+    s"${getClass.getSimpleName}.${actorIdIterator.getAndIncrement()}-for-$workflowId"
 
   case class JobKeyAndGrouping(jobKey: MetadataJobKey, grouping: String)
 
-  def makeSyntheticGroupedExecutionEvents(jobKeyAndGrouping: JobKeyAndGrouping, events: List[MetadataEvent]): List[MetadataEvent] = {
+  def makeSyntheticGroupedExecutionEvents(jobKeyAndGrouping: JobKeyAndGrouping,
+                                          events: List[MetadataEvent]
+  ): List[MetadataEvent] = {
     // The input list of `events` might be incoherent since some events that were logically generated may not (yet) have been
     // recorded in the database. This code is written defensively to check for a start date in the event list. If there isn't one,
     // just return the original list of events because we can't sanely construct a synthetic event.
@@ -152,8 +173,12 @@
     if (startTimeEvents.isEmpty) {
       events
     } else {
-      val oldestStartTimeKey = startTimeEvents.minBy(_.value collect { case MetadataValue(s, _) => OffsetDateTime.parse(s).toEpochSecond } get)
-      val newestStartTimeKey = startTimeEvents.maxBy(_.value collect { case MetadataValue(s, _) => OffsetDateTime.parse(s).toEpochSecond } get)
+      val oldestStartTimeKey = startTimeEvents.minBy(_.value collect { case MetadataValue(s, _) =>
+        OffsetDateTime.parse(s).toEpochSecond
+      } get)
+      val newestStartTimeKey = startTimeEvents.maxBy(_.value collect { case MetadataValue(s, _) =>
+        OffsetDateTime.parse(s).toEpochSecond
+      } get)
       // Search for an end date event corresponding to the newest start date event. Use the same prefix as on the
       // newestStartDateEvent to search for it.
       val executionEventPrefix = newestStartTimeKey.key.key.takeWhile(_ != ':')
@@ -167,7 +192,9 @@
       // generated from these events uses a consistent subscript.
       List(
         oldestStartTimeKey.copy(key = syntheticStartTimeKey),
-        oldestStartTimeKey.copy(key = syntheticDescriptionKey, value = Option(MetadataValue(jobKeyAndGrouping.grouping, MetadataString)))
+        oldestStartTimeKey.copy(key = syntheticDescriptionKey,
+                                value = Option(MetadataValue(jobKeyAndGrouping.grouping, MetadataString))
+        )
       ) ++ endTimeEvent.toList
     }
   }
 
@@ -181,54 +208,69 @@
     val executionEventKeyPatternRe = "[^]]+".r
     // Group execution events by their execution event keys.
     // The `get` inside the `groupBy` is safe because there must be a closing brace on the opening brace.
-    val executionEventsByKeys = executionEvents groupBy { e => (e.key.key.substring("executionEvents[".length) |> executionEventKeyPatternRe.findFirstIn).get }
+    val executionEventsByKeys = executionEvents groupBy { e =>
+      (e.key.key.substring("executionEvents[".length) |> executionEventKeyPatternRe.findFirstIn).get
+    }
     // "grouped" and "ungrouped" refer to the ":grouping" attribute that may be present in execution event metadata.
-    val (groupedExecutionEvents, ungroupedExecutionEvents) = executionEventsByKeys partition { case (k, es) => es.exists(_.key.key == s"executionEvents[$k]:grouping") }
+    val (groupedExecutionEvents, ungroupedExecutionEvents) = executionEventsByKeys partition { case (k, es) =>
+      es.exists(_.key.key == s"executionEvents[$k]:grouping")
+    }
     // (jobKey + grouping) => eeKey => executionEvents
     val groupedExecutionEventsByGrouping = groupedExecutionEvents groupBy {
       // The `get` on the `jobKey` is safe because execution events are all job-based. The outer `get` is safe because we already verified
       // the presence of this "grouping" attribute in this data set above.
-      case (k, es) => es.collectFirst { case e if e.key.key == s"executionEvents[$k]:grouping" => JobKeyAndGrouping(e.key.jobKey.get, e.value.get.value) } get } map {
-      case (jkg, m) => jkg -> m.values.toList.flatten }
+      case (k, es) =>
+        es.collectFirst {
+          case e if e.key.key == s"executionEvents[$k]:grouping" =>
+            JobKeyAndGrouping(e.key.jobKey.get, e.value.get.value)
+        } get
+    } map { case (jkg, m) =>
+      jkg -> m.values.toList.flatten
+    }
 
     // Tuplize the grouping function so it can operate on the List elements directly.
     val tupledGrouper = (makeSyntheticGroupedExecutionEvents _).tupled
     nonExecutionEvents ++ ungroupedExecutionEvents.values.toList.flatten ++ (groupedExecutionEventsByGrouping.toList flatMap tupledGrouper)
   }
 
-
-
-  def processMetadataEvents(query: MetadataQuery, eventsList: Seq[MetadataEvent], expandedValues: Map[String, JsValue]): JsObject = {
+  def processMetadataEvents(query: MetadataQuery,
+                            eventsList: Seq[MetadataEvent],
+                            expandedValues: Map[String, JsValue]
+  ): JsObject =
     // Should we send back some message ? Or even fail the request instead ?
     if (eventsList.isEmpty) JsObject(Map.empty[String, JsValue])
     else {
       query match {
-        case mq: MetadataQuery => workflowMetadataResponse(mq.workflowId, eventsList, includeCallsIfEmpty = true, expandedValues)
+        case mq: MetadataQuery =>
+          workflowMetadataResponse(mq.workflowId, eventsList, includeCallsIfEmpty = true, expandedValues)
         case _ => MetadataBuilderActor.parse(eventsList, expandedValues)
       }
     }
-  }
 
-  def processStatusResponse(workflowId: WorkflowId, status: WorkflowState): JsObject = {
-    JsObject(Map(
-      WorkflowMetadataKeys.Status -> JsString(status.toString),
-      WorkflowMetadataKeys.Id -> JsString(workflowId.toString)
-    ))
-  }
+  def processStatusResponse(workflowId: WorkflowId, status: WorkflowState): JsObject =
+    JsObject(
+      Map(
+        WorkflowMetadataKeys.Status -> JsString(status.toString),
+        WorkflowMetadataKeys.Id -> JsString(workflowId.toString)
+      )
+    )
 
   def processLabelsResponse(workflowId: WorkflowId, labels: Map[String, String]): JsObject = {
     val jsLabels = labels map { case (k, v) => k -> JsString(v) }
-    JsObject(Map(
-      WorkflowMetadataKeys.Id -> JsString(workflowId.toString),
-      WorkflowMetadataKeys.Labels -> JsObject(jsLabels)
-    ))
+    JsObject(
+      Map(
+        WorkflowMetadataKeys.Id -> JsString(workflowId.toString),
+        WorkflowMetadataKeys.Labels -> JsObject(jsLabels)
+      )
+    )
   }
 
   def processOutputsResponse(id: WorkflowId, events: Seq[MetadataEvent]): JsObject = {
     // Add in an empty output event if there aren't already any output events.
     val hasOutputs = events exists { _.key.key.startsWith(WorkflowMetadataKeys.Outputs + ":") }
-    val updatedEvents = if (hasOutputs) events else MetadataEvent.empty(MetadataKey(id, None, WorkflowMetadataKeys.Outputs)) +: events
+    val updatedEvents =
+      if (hasOutputs) events else MetadataEvent.empty(MetadataKey(id, None, WorkflowMetadataKeys.Outputs)) +: events
 
     workflowMetadataResponse(id, updatedEvents, includeCallsIfEmpty = false, Map.empty)
   }
@@ -236,26 +278,31 @@
   def workflowMetadataResponse(workflowId: WorkflowId,
                                eventsList: Seq[MetadataEvent],
                                includeCallsIfEmpty: Boolean,
-                               expandedValues: Map[String, JsValue]): JsObject = {
-    JsObject(MetadataBuilderActor.parseWorkflowEvents(includeCallsIfEmpty, expandedValues)(eventsList).fields + ("id" -> JsString(workflowId.toString)))
-  }
+                               expandedValues: Map[String, JsValue]
+  ): JsObject =
+    JsObject(
+      MetadataBuilderActor
+        .parseWorkflowEvents(includeCallsIfEmpty, expandedValues)(eventsList)
+        .fields + ("id" -> JsString(workflowId.toString))
+    )
 }
 
-class MetadataBuilderActor(readMetadataWorkerMaker: () => Props, metadataReadRowNumberSafetyThreshold: Int, isForSubworkflows: Boolean)
-  extends LoggingFSM[MetadataBuilderActorState, MetadataBuilderActorData] with DefaultJsonProtocol {
+class MetadataBuilderActor(readMetadataWorkerMaker: () => Props,
+                           metadataReadRowNumberSafetyThreshold: Int,
+                           isForSubworkflows: Boolean
+) extends LoggingFSM[MetadataBuilderActorState, MetadataBuilderActorData]
+    with DefaultJsonProtocol {
 
   import MetadataBuilderActor._
 
   startWith(Idle, IdleData)
   val tag = self.path.name
 
-  when(Idle) {
-    case Event(action: BuildMetadataJsonAction, IdleData) =>
+  when(Idle) { case Event(action: BuildMetadataJsonAction, IdleData) =>
+    val readActor = context.actorOf(readMetadataWorkerMaker.apply())
 
-      val readActor = context.actorOf(readMetadataWorkerMaker.apply())
-
-      readActor ! action
-      goto(WaitingForMetadataService) using HasWorkData(sender(), action)
+    readActor ! action
+    goto(WaitingForMetadataService) using HasWorkData(sender(), action)
   }
 
   private def allDone() = {
@@ -274,14 +321,20 @@ class MetadataBuilderActor(readMetadataWorkerMaker: () => Props, metadataReadRow
       target ! SuccessfulMetadataJsonResponse(originalRequest, processOutputsResponse(id, events))
       allDone()
     case Event(LogsResponse(w, l), HasWorkData(target, originalRequest)) =>
-      target ! SuccessfulMetadataJsonResponse(originalRequest, workflowMetadataResponse(w, l, includeCallsIfEmpty = false, Map.empty))
+      target ! SuccessfulMetadataJsonResponse(originalRequest,
+                                              workflowMetadataResponse(w, l, includeCallsIfEmpty = false, Map.empty)
+      )
       allDone()
     case Event(MetadataLookupResponse(query, metadata), HasWorkData(target, originalRequest)) =>
      processMetadataResponse(query, metadata, target, originalRequest)
     case Event(FetchFailedJobsMetadataLookupResponse(metadata), HasWorkData(target, originalRequest)) =>
       processFailedJobsMetadataResponse(metadata, target, originalRequest)
     case Event(MetadataLookupFailedTooLargeResponse(query, metadataSizeRows), HasWorkData(target, originalRequest)) =>
-      val metadataTooLargeNumberOfRowsException = new MetadataTooLargeNumberOfRowsException(query.workflowId, metadataSizeRows, metadataReadRowNumberSafetyThreshold)
+      val metadataTooLargeNumberOfRowsException =
+        new MetadataTooLargeNumberOfRowsException(query.workflowId,
+                                                  metadataSizeRows,
+                                                  metadataReadRowNumberSafetyThreshold
+        )
       target ! FailedMetadataJsonResponse(originalRequest, metadataTooLargeNumberOfRowsException)
       allDone()
     case Event(MetadataLookupFailedTimeoutResponse(query), HasWorkData(target, originalRequest)) =>
@@ -322,26 +375,34 @@ class MetadataBuilderActor(readMetadataWorkerMaker: () => Props, metadataReadRow
       stay()
   }
 
-  def processSubWorkflowMetadata(metadataResponse: MetadataJsonResponse, data: HasReceivedEventsData) = {
+  def processSubWorkflowMetadata(metadataResponse: MetadataJsonResponse, data: HasReceivedEventsData) =
    metadataResponse match {
      case SuccessfulMetadataJsonResponse(GetMetadataAction(queryKey, _), js) =>
         val subId: WorkflowId = queryKey.workflowId
         val newData = data.withSubWorkflow(subId.toString, js)
 
         if (newData.isComplete) {
-          buildAndStop(data.originalQuery, data.originalEvents, newData.subWorkflowsMetadata, data.target, data.originalRequest)
+          buildAndStop(data.originalQuery,
+                       data.originalEvents,
+                       newData.subWorkflowsMetadata,
+                       data.target,
+                       data.originalRequest
+          )
         } else {
           stay() using newData
         }
       case FailedMetadataJsonResponse(originalRequest, e) =>
-        failAndDie(new RuntimeException(s"Failed to retrieve metadata for a sub workflow ($originalRequest)", e), data.target, data.originalRequest)
+        failAndDie(new RuntimeException(s"Failed to retrieve metadata for a sub workflow ($originalRequest)", e),
+                   data.target,
+                   data.originalRequest
+        )
       case other =>
-        val message = s"Programmer Error: MetadataBuilderActor expected subworkflow metadata response type but got ${other.getClass.getSimpleName}"
+        val message =
+          s"Programmer Error: MetadataBuilderActor expected subworkflow metadata response type but got ${other.getClass.getSimpleName}"
         log.error(message)
         failAndDie(new Exception(message), data.target, data.originalRequest)
     }
-  }
 
   def failAndDie(reason: Throwable, target: ActorRef, originalRequest: BuildMetadataJsonAction) = {
     target ! FailedMetadataJsonResponse(originalRequest, reason)
@@ -349,36 +410,63 @@ class MetadataBuilderActor(readMetadataWorkerMaker: () => Props, metadataReadRow
     stay()
   }
 
-  def buildAndStop(query: MetadataQuery, eventsList: Seq[MetadataEvent], expandedValues: Map[String, JsValue], target: ActorRef, originalRequest: BuildMetadataJsonAction) = {
+  def buildAndStop(query: MetadataQuery,
+                   eventsList: Seq[MetadataEvent],
+                   expandedValues: Map[String, JsValue],
+                   target: ActorRef,
+                   originalRequest: BuildMetadataJsonAction
+  ) = {
     val groupedEvents = groupEvents(eventsList)
     val res = processMetadataEvents(query, groupedEvents, expandedValues).fields
     target !
SuccessfulMetadataJsonResponse(originalRequest, JsObject(res)) allDone() } - def processMetadataResponse(query: MetadataQuery, eventsList: Seq[MetadataEvent], target: ActorRef, originalRequest: BuildMetadataJsonAction) = { + def processMetadataResponse(query: MetadataQuery, + eventsList: Seq[MetadataEvent], + target: ActorRef, + originalRequest: BuildMetadataJsonAction + ) = if (query.expandSubWorkflows) { // Scan events for sub workflow ids - val subWorkflowIds = eventsList.collect({ - case MetadataEvent(key, value, _) if key.key.endsWith(CallMetadataKeys.SubWorkflowId) => value map { _.value } - }).flatten.distinct + val subWorkflowIds = eventsList + .collect { + case MetadataEvent(key, value, _) if key.key.endsWith(CallMetadataKeys.SubWorkflowId) => value map { _.value } + } + .flatten + .distinct // If none is found just proceed to build metadata if (subWorkflowIds.isEmpty) buildAndStop(query, eventsList, Map.empty, target, originalRequest) else { // Otherwise spin up a metadata builder actor for each sub workflow subWorkflowIds foreach { subId => - val subMetadataBuilder = context.actorOf(MetadataBuilderActor.props(readMetadataWorkerMaker, metadataReadRowNumberSafetyThreshold, isForSubworkflows = true), uniqueActorName(subId)) - subMetadataBuilder ! GetMetadataAction(query.copy(workflowId = WorkflowId.fromString(subId)), checkTotalMetadataRowNumberBeforeQuerying = false) + val subMetadataBuilder = context.actorOf(MetadataBuilderActor.props(readMetadataWorkerMaker, + metadataReadRowNumberSafetyThreshold, + isForSubworkflows = true + ), + uniqueActorName(subId) + ) + subMetadataBuilder ! GetMetadataAction(query.copy(workflowId = WorkflowId.fromString(subId)), + checkTotalMetadataRowNumberBeforeQuerying = false + ) } - goto(WaitingForSubWorkflows) using HasReceivedEventsData(target, originalRequest, query, eventsList, Map.empty, subWorkflowIds.size) + goto(WaitingForSubWorkflows) using HasReceivedEventsData(target, + originalRequest, + query, + eventsList, + Map.empty, + subWorkflowIds.size + ) } } else { buildAndStop(query, eventsList, Map.empty, target, originalRequest) } - } - def processFailedJobsMetadataResponse(eventsList: Seq[MetadataEvent], target: ActorRef, originalRequest: BuildMetadataJsonAction) = { + def processFailedJobsMetadataResponse(eventsList: Seq[MetadataEvent], + target: ActorRef, + originalRequest: BuildMetadataJsonAction + ) = { val groupedEvents = groupEvents(eventsList) val res = MetadataBuilderActor.parse(groupedEvents, Map.empty) target ! 
SuccessfulMetadataJsonResponse(originalRequest, res) diff --git a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala index 428feafe1a1..756744806b2 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala @@ -18,7 +18,7 @@ object MetadataComponent { private lazy val stringKeyMapSg = implicitly[Semigroup[Map[String, MetadataComponent]]] private lazy val intKeyMapSg = implicitly[Semigroup[Map[Int, MetadataComponent]]] - def combine(f1: MetadataComponent, f2: MetadataComponent): MetadataComponent = { + def combine(f1: MetadataComponent, f2: MetadataComponent): MetadataComponent = (f1, f2) match { case (MetadataObject(v1), MetadataObject(v2)) => MetadataObject(stringKeyMapSg.combine(v1, v2)) case (MetadataList(v1), MetadataList(v2)) => MetadataList(intKeyMapSg.combine(v1, v2)) @@ -27,15 +27,17 @@ object MetadataComponent { // Otherwise assume it's ordered by default and take the new one case (_, o2) => o2 } - } override def empty: MetadataComponent = MetadataObject.empty } val metadataPrimitiveJsonWriter: JsonWriter[MetadataPrimitive] = JsonWriter.func2Writer[MetadataPrimitive] { - case MetadataPrimitive(MetadataValue(value, MetadataInt), _) => Try(value.toInt) map JsNumber.apply getOrElse JsString(value) - case MetadataPrimitive(MetadataValue(value, MetadataNumber), _) => Try(value.toDouble) map JsNumber.apply getOrElse JsString(value) - case MetadataPrimitive(MetadataValue(value, MetadataBoolean), _) => Try(value.toBoolean) map JsBoolean.apply getOrElse JsString(value) + case MetadataPrimitive(MetadataValue(value, MetadataInt), _) => + Try(value.toInt) map JsNumber.apply getOrElse JsString(value) + case MetadataPrimitive(MetadataValue(value, MetadataNumber), _) => + Try(value.toDouble) map JsNumber.apply getOrElse JsString(value) + case MetadataPrimitive(MetadataValue(value, MetadataBoolean), _) => + Try(value.toBoolean) map JsBoolean.apply getOrElse JsString(value) case MetadataPrimitive(MetadataValue(value, MetadataString), _) => JsString(value) case MetadataPrimitive(MetadataValue(_, MetadataNull), _) => JsNull } @@ -58,7 +60,7 @@ object MetadataComponent { val KeySplitter = s"(? 
MetadataObject(Map(chunk -> innerValue)) @@ -77,7 +79,7 @@ object MetadataComponent { } yield asInt // Fold into a MetadataList: MetadataList(0 -> MetadataList(1 -> innerValue)) val init = if (innerValue == MetadataEmptyComponent) { - // Empty value means empty list + // Empty value means empty list listIndices.toList.init.foldRight(MetadataList.empty)((index, acc) => MetadataList(TreeMap(index -> acc))) } else { listIndices.toList.foldRight(innerValue)((index, acc) => MetadataList(TreeMap(index -> acc))) @@ -86,27 +88,29 @@ object MetadataComponent { MetadataObject(Map(objectName -> metadataList)) } - } private def customOrdering(event: MetadataEvent): Option[Ordering[MetadataPrimitive]] = event match { - case MetadataEvent(MetadataKey(_, Some(_), key), _, _) - if key == CallMetadataKeys.ExecutionStatus => Option(MetadataPrimitive.ExecutionStatusOrdering) - case MetadataEvent(MetadataKey(_, _, key), _, _) - if key == CallMetadataKeys.Start || key == CallMetadataKeys.End => Option(MetadataPrimitive.TimestampOrdering) - case MetadataEvent(MetadataKey(_, None, key), _, _) - if key == WorkflowMetadataKeys.Status => Option(MetadataPrimitive.WorkflowStateOrdering) + case MetadataEvent(MetadataKey(_, Some(_), key), _, _) if key == CallMetadataKeys.ExecutionStatus => + Option(MetadataPrimitive.ExecutionStatusOrdering) + case MetadataEvent(MetadataKey(_, _, key), _, _) if key == CallMetadataKeys.Start || key == CallMetadataKeys.End => + Option(MetadataPrimitive.TimestampOrdering) + case MetadataEvent(MetadataKey(_, None, key), _, _) if key == WorkflowMetadataKeys.Status => + Option(MetadataPrimitive.WorkflowStateOrdering) case _ => None } private def toMetadataComponent(subWorkflowMetadata: Map[String, JsValue])(event: MetadataEvent) = { - lazy val primitive = event.value map { MetadataPrimitive(_, customOrdering(event)) } getOrElse MetadataEmptyComponent + lazy val primitive = event.value map { + MetadataPrimitive(_, customOrdering(event)) + } getOrElse MetadataEmptyComponent lazy val originalKeyAndPrimitive = (event.key.key, primitive) val keyAndPrimitive: (String, MetadataComponent) = if (event.key.key.endsWith(CallMetadataKeys.SubWorkflowId)) { (for { metadataValue <- event.value subWorkflowMetadata <- subWorkflowMetadata.get(metadataValue.value) - keyWithSubWorkflowMetadata = event.key.key.replace(CallMetadataKeys.SubWorkflowId, CallMetadataKeys.SubWorkflowMetadata) + keyWithSubWorkflowMetadata = event.key.key + .replace(CallMetadataKeys.SubWorkflowId, CallMetadataKeys.SubWorkflowMetadata) subWorkflowComponent = MetadataJsonComponent(subWorkflowMetadata) } yield (keyWithSubWorkflowMetadata, subWorkflowComponent)) getOrElse originalKeyAndPrimitive } else originalKeyAndPrimitive @@ -115,15 +119,13 @@ object MetadataComponent { } /** Sort events by timestamp, transform them into MetadataComponent, and merge them together. */ - def apply(events: Seq[MetadataEvent], subWorkflowMetadata: Map[String, JsValue] = Map.empty): MetadataComponent = { + def apply(events: Seq[MetadataEvent], subWorkflowMetadata: Map[String, JsValue] = Map.empty): MetadataComponent = // The `List` has a `Foldable` instance defined in scope, and because the `List`'s elements have a `Monoid` instance // defined in scope, `combineAll` can derive a sane `TimestampedJsValue` value even if the `List` of events is empty. 
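    // Standalone illustration of that Foldable/Monoid machinery with plain cats
    // (hypothetical value names, unrelated to Cromwell's types): `combineAll`
    // folds via Monoid.combine and yields Monoid.empty for an empty list.
    import cats.implicits._
    val combined: Int = List(1, 2, 3).combineAll // 6
    val onEmpty: Int = List.empty[Int].combineAll // 0, i.e. Monoid[Int].empty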
events.toList map toMetadataComponent(subWorkflowMetadata) combineAll - } - def fromMetadataKeyAndPrimitive(metadataKey: String, innerComponent: MetadataComponent) = { + def fromMetadataKeyAndPrimitive(metadataKey: String, innerComponent: MetadataComponent) = metadataKey.split(KeySplitter).map(_.unescapeMeta).foldRight(innerComponent)(parseKeyChunk) - } } sealed trait MetadataComponent @@ -133,9 +135,8 @@ case object MetadataNullComponent extends MetadataComponent // Metadata Object object MetadataObject { def empty = new MetadataObject(Map.empty) - def apply(kvPair: (String, MetadataComponent)*) = { + def apply(kvPair: (String, MetadataComponent)*) = new MetadataObject(kvPair.toMap) - } } case class MetadataObject(v: Map[String, MetadataComponent]) extends MetadataComponent @@ -143,7 +144,9 @@ case class MetadataObject(v: Map[String, MetadataComponent]) extends MetadataCom // Metadata List object MetadataList { def empty = new MetadataList(Map.empty) - def apply(components: List[MetadataComponent]) = new MetadataList(components.zipWithIndex.map({case (c, i) => i -> c}).toMap) + def apply(components: List[MetadataComponent]) = new MetadataList(components.zipWithIndex.map { case (c, i) => + i -> c + }.toMap) } case class MetadataList(v: Map[Int, MetadataComponent]) extends MetadataComponent @@ -161,7 +164,8 @@ object MetadataPrimitive { Instant.parse(primitive.v.value) }.reverse } -case class MetadataPrimitive(v: MetadataValue, customOrdering: Option[Ordering[MetadataPrimitive]] = None) extends MetadataComponent +case class MetadataPrimitive(v: MetadataValue, customOrdering: Option[Ordering[MetadataPrimitive]] = None) + extends MetadataComponent // Metadata Component that owns an already computed JsValue case class MetadataJsonComponent(jsValue: JsValue) extends MetadataComponent diff --git a/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala index 92b1fff49ee..f1afcbde19f 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala @@ -1,6 +1,6 @@ package cromwell.services.metadata.impl.deleter -import java.time.{OffsetDateTime, Duration => JDuration} +import java.time.{Duration => JDuration, OffsetDateTime} import java.util.concurrent.TimeUnit import akka.actor.{Actor, ActorLogging, ActorRef, Props} @@ -21,22 +21,25 @@ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} -class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, - override val serviceRegistryActor: ActorRef) - extends Actor - with ActorLogging - with MetadataDatabaseAccess - with MetadataServicesStore - with CromwellInstrumentation { +class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, override val serviceRegistryActor: ActorRef) + extends Actor + with ActorLogging + with MetadataDatabaseAccess + with MetadataServicesStore + with CromwellInstrumentation { implicit val ec: ExecutionContext = context.dispatcher implicit val askTimeout: Timeout = new Timeout(60.seconds) - private val deleterMetricsBasePath: NonEmptyList[String] = MetadataServiceActor.MetadataInstrumentationPrefix :+ "deleter" + private val deleterMetricsBasePath: NonEmptyList[String] = + MetadataServiceActor.MetadataInstrumentationPrefix :+ "deleter" private val rowsDeletedMetricPath: 
NonEmptyList[String] = deleterMetricsBasePath :+ "rows_deleted" - private val workflowsDeletedSuccessMetricPath: NonEmptyList[String] = deleterMetricsBasePath :+ "workflows_deleted" :+ "success" - private val workflowsDeletedFailureMetricPath: NonEmptyList[String] = deleterMetricsBasePath :+ "workflows_deleted" :+ "failure" - private val workflowDeleteTotalTimeMetricPath: NonEmptyList[String] = deleterMetricsBasePath :+ "workflow_delete_total_time" + private val workflowsDeletedSuccessMetricPath: NonEmptyList[String] = + deleterMetricsBasePath :+ "workflows_deleted" :+ "success" + private val workflowsDeletedFailureMetricPath: NonEmptyList[String] = + deleterMetricsBasePath :+ "workflows_deleted" :+ "failure" + private val workflowDeleteTotalTimeMetricPath: NonEmptyList[String] = + deleterMetricsBasePath :+ "workflow_delete_total_time" private val workflowsToDeleteMetricPath: NonEmptyList[String] = deleterMetricsBasePath :+ "workflows_to_delete" // Send an initial delete message to get things started: @@ -46,15 +49,14 @@ class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, context.system.scheduler.scheduleOnce(deleteMetadataConfig.instrumentationInterval)(workflowsLeftToDeleteMetric()) override def receive: Receive = { - case DeleteNextWorkflowMessage => { + case DeleteNextWorkflowMessage => val startTime = OffsetDateTime.now() - def calculateTimeSinceStart() = { + def calculateTimeSinceStart() = FiniteDuration(JDuration.between(startTime, OffsetDateTime.now()).toMillis, TimeUnit.MILLISECONDS) - } // These handlers send metrics for most paths even when they're not incremented, so that the metrics // paths are actively receiving data points throughout: - deleteNextWorkflow().onComplete({ + deleteNextWorkflow().onComplete { case Success(true) => increment(workflowsDeletedSuccessMetricPath, ServicesPrefix) count(workflowsDeletedFailureMetricPath, 0L, ServicesPrefix) @@ -67,7 +69,10 @@ class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, sendGauge(workflowsToDeleteMetricPath, 0L, ServicesPrefix) sendTiming(workflowDeleteTotalTimeMetricPath, calculateTimeSinceStart(), ServicesPrefix) scheduleNextDeleteAttemptAfterInterval() - if (deleteMetadataConfig.debugLogging) log.info(s"No archived workflows which finished over ${deleteMetadataConfig.delayAfterWorkflowCompletion} ago remain to be deleted. Scheduling next poll in ${deleteMetadataConfig.backoffInterval}.") + if (deleteMetadataConfig.debugLogging) + log.info( + s"No archived workflows which finished over ${deleteMetadataConfig.delayAfterWorkflowCompletion} ago remain to be deleted. Scheduling next poll in ${deleteMetadataConfig.backoffInterval}." + ) case Failure(error) => count(rowsDeletedMetricPath, 0L, ServicesPrefix) count(workflowsDeletedSuccessMetricPath, 0L, ServicesPrefix) @@ -75,26 +80,36 @@ class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, sendTiming(workflowDeleteTotalTimeMetricPath, calculateTimeSinceStart(), ServicesPrefix) log.error(error, s"Error while deleting, will wait ${deleteMetadataConfig.backoffInterval} then try again.") scheduleNextDeleteAttemptAfterInterval() - }) + } - } - case ShutdownCommand => context.stop(self) // TODO: cancel any deletion action that might be happening? - case other => log.info(s"Programmer Error! The DeleteMetadataSchedulerActor received unexpected message! (${sender()} sent ${other.toPrettyElidedString(1000)}})") + case ShutdownCommand => context.stop(self) // TODO: cancel any deletion action that might be happening? 
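The deleter above re-arms itself through the scheduler instead of looping eagerly. A minimal sketch of that self-rescheduling pattern, assuming a hypothetical actor and trigger message (not the classes in this diff):

    import akka.actor.Actor
    import scala.concurrent.duration.FiniteDuration

    class BackoffLoop(backoffInterval: FiniteDuration) extends Actor {
      import context.dispatcher // ExecutionContext required by the scheduler
      private case object Tick

      override def preStart(): Unit = self ! Tick // kick off the first attempt

      override def receive: Receive = { case Tick =>
        // ... attempt one unit of work here ...
        context.system.scheduler.scheduleOnce(backoffInterval, self, Tick)
      }
    }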
+ case other => + log.info( + s"Programmer Error! The DeleteMetadataSchedulerActor received unexpected message! (${sender()} sent ${other.toPrettyElidedString(1000)}})" + ) } def workflowsLeftToDeleteMetric(): Unit = { - val currentTimestampMinusDelay = OffsetDateTime.now().minusSeconds(deleteMetadataConfig.delayAfterWorkflowCompletion.toSeconds) - countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(currentTimestampMinusDelay).onComplete({ + val currentTimestampMinusDelay = + OffsetDateTime.now().minusSeconds(deleteMetadataConfig.delayAfterWorkflowCompletion.toSeconds) + countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(currentTimestampMinusDelay).onComplete { case Success(workflowsToDelete) => sendGauge(workflowsToDeleteMetricPath, workflowsToDelete.longValue(), ServicesPrefix) // schedule next workflows left to delete query after interval - context.system.scheduler.scheduleOnce(deleteMetadataConfig.instrumentationInterval)(workflowsLeftToDeleteMetric()) + context.system.scheduler.scheduleOnce(deleteMetadataConfig.instrumentationInterval)( + workflowsLeftToDeleteMetric() + ) case Failure(exception) => - log.error(exception, s"Something went wrong while fetching number of workflows left to delete. " + - s"Scheduling next poll in ${deleteMetadataConfig.instrumentationInterval}.") + log.error( + exception, + s"Something went wrong while fetching number of workflows left to delete. " + + s"Scheduling next poll in ${deleteMetadataConfig.instrumentationInterval}." + ) // schedule next workflows left to delete query after interval - context.system.scheduler.scheduleOnce(deleteMetadataConfig.instrumentationInterval)(workflowsLeftToDeleteMetric()) - }) + context.system.scheduler.scheduleOnce(deleteMetadataConfig.instrumentationInterval)( + workflowsLeftToDeleteMetric() + ) + } } def deleteNextWorkflow(): Future[Boolean] = for { @@ -103,7 +118,10 @@ class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, case Some(id) => log.info(s"Workflow $id identified for metadata deletion") for { - rowsDeleted <- deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus(id, MetadataArchiveStatus.toDatabaseValue(ArchivedAndDeleted)) + rowsDeleted <- deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus( + id, + MetadataArchiveStatus.toDatabaseValue(ArchivedAndDeleted) + ) _ = count(rowsDeletedMetricPath, rowsDeleted.longValue(), ServicesPrefix) _ = log.info(s"Deleted $rowsDeleted metadata rows for $id") } yield true @@ -112,7 +130,8 @@ class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, } yield result def lookupNextWorkflowToDelete(): Future[Option[WorkflowId]] = { - val currentTimestampMinusDelay = OffsetDateTime.now().minusSeconds(deleteMetadataConfig.delayAfterWorkflowCompletion.toSeconds) + val currentTimestampMinusDelay = + OffsetDateTime.now().minusSeconds(deleteMetadataConfig.delayAfterWorkflowCompletion.toSeconds) queryWorkflowIdsByArchiveStatusAndOlderThanTimestamp( MetadataArchiveStatus.toDatabaseValue(Archived), currentTimestampMinusDelay, @@ -130,5 +149,7 @@ class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, object DeleteMetadataActor { case object DeleteNextWorkflowMessage - def props(config: DeleteMetadataConfig, serviceRegistryActor: ActorRef): Props = Props(new DeleteMetadataActor(config, serviceRegistryActor)) + def props(config: DeleteMetadataConfig, serviceRegistryActor: ActorRef): Props = Props( + new DeleteMetadataActor(config, serviceRegistryActor) + ) } diff --git 
a/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataConfig.scala b/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataConfig.scala index 0be2f77655a..6fa278eba0d 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataConfig.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataConfig.scala @@ -12,7 +12,8 @@ import scala.util.Try final case class DeleteMetadataConfig(backoffInterval: FiniteDuration, delayAfterWorkflowCompletion: FiniteDuration, instrumentationInterval: FiniteDuration, - debugLogging: Boolean) + debugLogging: Boolean +) object DeleteMetadataConfig { @@ -21,11 +22,14 @@ object DeleteMetadataConfig { val defaultInstrumentationInterval = 1 minute val defaultDebugLogging = true - for { - backoffInterval <- Try(deleteMetadataConfig.getOrElse[FiniteDuration]("backoff-interval", defaultBackoffInterval)).toChecked + backoffInterval <- Try( + deleteMetadataConfig.getOrElse[FiniteDuration]("backoff-interval", defaultBackoffInterval) + ).toChecked delayAfterWorkflowCompletion <- Try(deleteMetadataConfig.as[FiniteDuration]("deletion-delay")).toChecked - instrumentationInterval <- Try(deleteMetadataConfig.getOrElse("instrumentation-interval", defaultInstrumentationInterval)).toChecked + instrumentationInterval <- Try( + deleteMetadataConfig.getOrElse("instrumentation-interval", defaultInstrumentationInterval) + ).toChecked debugLogging <- Try(deleteMetadataConfig.getOrElse("debug-logging", defaultDebugLogging)).toChecked } yield DeleteMetadataConfig(backoffInterval, delayAfterWorkflowCompletion, instrumentationInterval, debugLogging) } diff --git a/services/src/main/scala/cromwell/services/metadata/impl/hybridpubsub/HybridPubSubMetadataServiceActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/hybridpubsub/HybridPubSubMetadataServiceActor.scala index 20e62067cf8..89e5697cd60 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/hybridpubsub/HybridPubSubMetadataServiceActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/hybridpubsub/HybridPubSubMetadataServiceActor.scala @@ -6,7 +6,6 @@ import cromwell.services.metadata.MetadataService.{PutMetadataAction, PutMetadat import cromwell.services.metadata.impl.MetadataServiceActor import cromwell.services.metadata.impl.pubsub.PubSubMetadataServiceActor - /** * A metadata service implementation which will function as a standard metadata service but also push all metadata * events to google pubsub. @@ -19,9 +18,13 @@ import cromwell.services.metadata.impl.pubsub.PubSubMetadataServiceActor * It is expected that the user will supply any desired config fields for both MetadataServiceActor and * PubSubMetadataServiceActor as the serviceConfig block will be passed along to both of them. 
*/ -class HybridPubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends Actor with ActorLogging { - val standardMetadataActor: ActorRef = context.actorOf(MetadataServiceActor.props(serviceConfig, globalConfig, serviceRegistryActor)) - val pubSubMetadataActor: ActorRef = context.actorOf(PubSubMetadataServiceActor.props(serviceConfig, globalConfig, serviceRegistryActor)) +class HybridPubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) + extends Actor + with ActorLogging { + val standardMetadataActor: ActorRef = + context.actorOf(MetadataServiceActor.props(serviceConfig, globalConfig, serviceRegistryActor)) + val pubSubMetadataActor: ActorRef = + context.actorOf(PubSubMetadataServiceActor.props(serviceConfig, globalConfig, serviceRegistryActor)) override def receive = { case action: PutMetadataAction => diff --git a/services/src/main/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActor.scala index f139be5c27a..85fa1847301 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActor.scala @@ -11,7 +11,12 @@ import cromwell.cloudsupport.gcp.GoogleConfiguration import cromwell.cloudsupport.gcp.auth.ServiceAccountMode import cromwell.cloudsupport.gcp.auth.ServiceAccountMode.JsonFileFormat import cromwell.core.Dispatcher._ -import cromwell.services.metadata.MetadataService.{MetadataWriteFailure, MetadataWriteSuccess, PutMetadataAction, PutMetadataActionAndRespond} +import cromwell.services.metadata.MetadataService.{ + MetadataWriteFailure, + MetadataWriteSuccess, + PutMetadataAction, + PutMetadataActionAndRespond +} import cromwell.services.metadata._ import net.ceedubs.ficus.Ficus._ import org.broadinstitute.dsde.workbench.google.{GoogleCredentialModes, GooglePubSubDAO, HttpGooglePubSubDAO} @@ -40,7 +45,9 @@ import scala.util.{Failure, Success} * - Revisit the Alpakka connector instead of the workbench-lib version. There was a bug they fixed but hadn't released * it yet. Also I'm pretty sure that same bug was repeated in a few other places. */ -class PubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends Actor with ActorLogging { +class PubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) + extends Actor + with ActorLogging { implicit val ec = context.dispatcher // The auth *must* be a service account auth but it might not be named service-account. 
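Taken together, the hybrid actor shown just above is essentially a fan-out. A simplified sketch of that shape with hypothetical names (`MirrorWrites` and its fields are not from this patch, and the real actor distinguishes between message types rather than mirroring everything):

    import akka.actor.{Actor, ActorRef}

    class MirrorWrites(primary: ActorRef, mirror: ActorRef) extends Actor {
      override def receive: Receive = { case msg =>
        primary forward msg // primary keeps the original sender
        mirror ! msg        // mirror gets a copy; replies come back here
      }
    }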
@@ -74,14 +81,21 @@ class PubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, se // This class requires a service account auth due to the library used val googleAuth = googleConfig.auth(googleAuthName) match { case Valid(a: ServiceAccountMode) => a - case Valid(doh) => throw new IllegalArgumentException(s"Unable to configure PubSubMetadataServiceActor: ${doh.name} was not a service account auth") - case Invalid(e) => throw new IllegalArgumentException("Unable to configure PubSubMetadataServiceActor: " + e.toList.mkString(", ")) + case Valid(doh) => + throw new IllegalArgumentException( + s"Unable to configure PubSubMetadataServiceActor: ${doh.name} was not a service account auth" + ) + case Invalid(e) => + throw new IllegalArgumentException("Unable to configure PubSubMetadataServiceActor: " + e.toList.mkString(", ")) } val jsonAuth = googleAuth.fileFormat match { case j: JsonFileFormat => val jsonString = new String(Files.readAllBytes(Paths.get(j.file))) GoogleCredentialModes.Json(jsonString) - case _ => throw new IllegalArgumentException("Unable to configure PubSubMetadataServiceActor: the service account must supply a JSON file") + case _ => + throw new IllegalArgumentException( + "Unable to configure PubSubMetadataServiceActor: the service account must supply a JSON file" + ) } // The metric name is pretty useless. It's an artifact of the workbench libraries which we're not using @@ -103,7 +117,7 @@ class PubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, se * return type of HttpGooglePubSubDAO.createSubscription and only caring about the success of the Future. It is not * considered an error if the subscription already exists (in fact, this is a likely circumstance). */ - private def createSubscription(): Future[Unit] = { + private def createSubscription(): Future[Unit] = pubSubSubscriptionName match { case Some(name) => log.info("Creating subscription " + name) @@ -112,7 +126,6 @@ class PubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, se log.info("Not creating a subscription") Future.successful(()) } - } private def publishMessages(events: Iterable[MetadataEvent]): Future[Unit] = { import PubSubMetadataServiceActor.EnhancedMetadataEvents @@ -124,9 +137,9 @@ class PubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, se } object PubSubMetadataServiceActor { - def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = { - Props(new PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor)).withDispatcher(ServiceDispatcher) - } + def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = + Props(new PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor)) + .withDispatcher(ServiceDispatcher) implicit class EnhancedMetadataEvents(val e: Iterable[MetadataEvent]) extends AnyVal { import MetadataJsonSupport._ @@ -134,4 +147,3 @@ object PubSubMetadataServiceActor { def toJson: Seq[String] = e.map(_.toJson.toString()).toSeq } } - diff --git a/services/src/main/scala/cromwell/services/metadata/metadata.scala b/services/src/main/scala/cromwell/services/metadata/metadata.scala index 9e621cfad07..6860bb7c69b 100644 --- a/services/src/main/scala/cromwell/services/metadata/metadata.scala +++ b/services/src/main/scala/cromwell/services/metadata/metadata.scala @@ -35,5 +35,5 @@ object Patterns { \. # Literal dot. (\d+) # Captured shard digits. )? # End outer optional noncapturing group for shard. 
- """.trim.r // The trim is necessary as (?x) must be at the beginning of the regex. + """.trim.r // The trim is necessary as (?x) must be at the beginning of the regex. } diff --git a/services/src/main/scala/cromwell/services/metadata/package.scala b/services/src/main/scala/cromwell/services/metadata/package.scala index 6ccbbcefa7f..279a82e858d 100644 --- a/services/src/main/scala/cromwell/services/metadata/package.scala +++ b/services/src/main/scala/cromwell/services/metadata/package.scala @@ -11,17 +11,19 @@ package object metadata { } sealed trait MetadataJsonResponse extends MetadataServiceResponse { def originalRequest: BuildMetadataJsonAction } -final case class SuccessfulMetadataJsonResponse(originalRequest: BuildMetadataJsonAction, responseJson: JsObject) extends MetadataJsonResponse -final case class FailedMetadataJsonResponse(originalRequest: BuildMetadataJsonAction, reason: Throwable) extends MetadataJsonResponse +final case class SuccessfulMetadataJsonResponse(originalRequest: BuildMetadataJsonAction, responseJson: JsObject) + extends MetadataJsonResponse +final case class FailedMetadataJsonResponse(originalRequest: BuildMetadataJsonAction, reason: Throwable) + extends MetadataJsonResponse class MetadataTooLargeException(message: String) extends RuntimeException(message) with NoStackTrace final class MetadataTooLargeNumberOfRowsException(workflowId: WorkflowId, metadataSizeRows: Int, metadataLimitRows: Int) - extends MetadataTooLargeException( - s"Metadata for workflow $workflowId exists in database but cannot be served because row count of $metadataSizeRows exceeds configured limit of $metadataLimitRows." - ) + extends MetadataTooLargeException( + s"Metadata for workflow $workflowId exists in database but cannot be served because row count of $metadataSizeRows exceeds configured limit of $metadataLimitRows." + ) final class MetadataTooLargeTimeoutException(workflowId: WorkflowId) - extends MetadataTooLargeException( - s"Metadata for workflow $workflowId exists in database but retrieval timed out, possibly due to large row count." - ) + extends MetadataTooLargeException( + s"Metadata for workflow $workflowId exists in database but retrieval timed out, possibly due to large row count." 
+ ) diff --git a/services/src/main/scala/cromwell/services/womtool/Describer.scala b/services/src/main/scala/cromwell/services/womtool/Describer.scala index fa0a2331c62..7b8116001da 100644 --- a/services/src/main/scala/cromwell/services/womtool/Describer.scala +++ b/services/src/main/scala/cromwell/services/womtool/Describer.scala @@ -46,7 +46,8 @@ object Describer { private def describeWorkflowInner(factory: LanguageFactory, workflowSource: WorkflowSource, importResolvers: List[ImportResolver.ImportResolver], - workflowSourceFilesCollection: WorkflowSourceFilesCollection): WorkflowDescription = { + workflowSourceFilesCollection: WorkflowSourceFilesCollection + ): WorkflowDescription = { val submittedDescriptorType = Map( "descriptorType" -> factory.languageName, @@ -56,7 +57,12 @@ object Describer { // Mirror of the inputs/no inputs fork in womtool.validate.Validate if (workflowSourceFilesCollection.inputsJson.isEmpty) { // No inputs: just load up the WomBundle - factory.getWomBundle(workflowSource, workflowSourceOrigin = None, workflowOptionsJson = "{}", importResolvers, List(factory)) match { + factory.getWomBundle(workflowSource, + workflowSourceOrigin = None, + workflowOptionsJson = "{}", + importResolvers, + List(factory) + ) match { case Right(bundle: WomBundle) => WorkflowDescription.fromBundle(bundle, submittedDescriptorType, List.empty) case Left(workflowErrors) => @@ -64,7 +70,12 @@ object Describer { } } else { // Inputs: load up the WomBundle and then try creating an executable with WomBundle + inputs - factory.getWomBundle(workflowSource, workflowSourceOrigin = None, workflowOptionsJson = "{}", importResolvers, List(factory)) match { + factory.getWomBundle(workflowSource, + workflowSourceOrigin = None, + workflowOptionsJson = "{}", + importResolvers, + List(factory) + ) match { case Right(bundle) => factory.createExecutable(bundle, workflowSourceFilesCollection.inputsJson, NoIoFunctionSet) match { // Throw away the executable, all we care about is whether it created successfully (i.e. 
the inputs are valid) diff --git a/services/src/main/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActor.scala b/services/src/main/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActor.scala index 35a950a6343..ac1c748281d 100644 --- a/services/src/main/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActor.scala +++ b/services/src/main/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActor.scala @@ -11,13 +11,14 @@ import cromwell.util.GracefulShutdownHelper.ShutdownCommand import scala.concurrent.{ExecutionContext, Future} -class WomtoolServiceInCromwellActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends Actor with LazyLogging { +class WomtoolServiceInCromwellActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) + extends Actor + with LazyLogging { implicit val ec: ExecutionContext = context.dispatcher override def receive: Receive = { case DescribeRequest(filesCollection) => - // We are consciously wrapping a Future around the Await.result way down in the HTTP import resolver until we can update the whole call hierarchy to async // https://doc.akka.io/docs/akka/2.5.16/actors.html?language=scala#ask-send-and-receive-future Future { @@ -33,5 +34,6 @@ class WomtoolServiceInCromwellActor(serviceConfig: Config, globalConfig: Config, object WomtoolServiceInCromwellActor { def props(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) = - Props(new WomtoolServiceInCromwellActor(serviceConfig, globalConfig, serviceRegistryActor)).withDispatcher(ServiceDispatcher) + Props(new WomtoolServiceInCromwellActor(serviceConfig, globalConfig, serviceRegistryActor)) + .withDispatcher(ServiceDispatcher) } diff --git a/services/src/main/scala/cromwell/services/womtool/models/InputDescription.scala b/services/src/main/scala/cromwell/services/womtool/models/InputDescription.scala index cb01ab5c920..142bda234a3 100644 --- a/services/src/main/scala/cromwell/services/womtool/models/InputDescription.scala +++ b/services/src/main/scala/cromwell/services/womtool/models/InputDescription.scala @@ -6,5 +6,10 @@ import io.circe.generic.JsonCodec import wom.expression.WomExpression import wom.types.WomType -@JsonCodec(encodeOnly=true) -case class InputDescription(name: String, valueType: WomType, typeDisplayName: String, optional: Boolean, default: Option[WomExpression]) +@JsonCodec(encodeOnly = true) +case class InputDescription(name: String, + valueType: WomType, + typeDisplayName: String, + optional: Boolean, + default: Option[WomExpression] +) diff --git a/services/src/main/scala/cromwell/services/womtool/models/MetaValueElementJsonSupport.scala b/services/src/main/scala/cromwell/services/womtool/models/MetaValueElementJsonSupport.scala index 0855c9e131c..d29edc6b6b7 100644 --- a/services/src/main/scala/cromwell/services/womtool/models/MetaValueElementJsonSupport.scala +++ b/services/src/main/scala/cromwell/services/womtool/models/MetaValueElementJsonSupport.scala @@ -9,7 +9,7 @@ object MetaValueElementJsonSupport { // We only implement the encoder, because currently, the decoder is not required. 
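Both this file and the WomType support below use the same encoder-only pattern: a hand-rolled, recursive circe `Encoder`. A self-contained sketch over a small hypothetical ADT (none of these names are from the patch):

    import io.circe.{Encoder, Json}

    object TreeJson {
      sealed trait Tree
      final case class Leaf(value: String) extends Tree
      final case class Node(children: List[Tree]) extends Tree

      implicit val treeEncoder: Encoder[Tree] = new Encoder[Tree] {
        final def apply(t: Tree): Json = t match {
          case Leaf(v) => Json.fromString(v)
          case Node(cs) => Json.fromValues(cs.map(apply))
        }
      }
    }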
implicit val metaValueElementEncoder: Encoder[MetaValueElement] = new Encoder[MetaValueElement] { - final def apply(a : MetaValueElement) : Json = { + final def apply(a: MetaValueElement): Json = a match { case MetaValueElementNull => Json.Null @@ -30,10 +30,9 @@ object MetaValueElementJsonSupport { Json.fromValues(vec.map(apply)) case MetaValueElementObject(m) => - Json.fromFields(m.map{ case (key, value) => - key -> apply(value) - }) + Json.fromFields(m.map { case (key, value) => + key -> apply(value) + }) } - } } } diff --git a/services/src/main/scala/cromwell/services/womtool/models/OutputDescription.scala b/services/src/main/scala/cromwell/services/womtool/models/OutputDescription.scala index 255efee4df8..fcf520c5a6d 100644 --- a/services/src/main/scala/cromwell/services/womtool/models/OutputDescription.scala +++ b/services/src/main/scala/cromwell/services/womtool/models/OutputDescription.scala @@ -4,5 +4,5 @@ import cromwell.services.womtool.models.WomTypeJsonSupport.womTypeEncoder // Int import io.circe.generic.JsonCodec import wom.types.WomType -@JsonCodec(encodeOnly=true) +@JsonCodec(encodeOnly = true) case class OutputDescription(name: String, valueType: WomType, typeDisplayName: String) diff --git a/services/src/main/scala/cromwell/services/womtool/models/WomTypeJsonSupport.scala b/services/src/main/scala/cromwell/services/womtool/models/WomTypeJsonSupport.scala index 2bf4211a3b9..b8816383188 100644 --- a/services/src/main/scala/cromwell/services/womtool/models/WomTypeJsonSupport.scala +++ b/services/src/main/scala/cromwell/services/womtool/models/WomTypeJsonSupport.scala @@ -8,26 +8,26 @@ object WomTypeJsonSupport { // We use `wom.types.WomType.callCachingName` instead of `wom.types.WomType.displayName` here because // the type hierarchy is designed for machine readability and should similarly be stable. 
implicit val womTypeEncoder: Encoder[WomType] = new Encoder[WomType] { - final def apply(a: WomType): Json = { + final def apply(a: WomType): Json = a match { case a: WomMapType => Json.obj( ("typeName", Json.fromString("Map")), ("mapType", - Json.obj( - ("keyType", womTypeEncoder.apply(a.keyType)), - ("valueType", womTypeEncoder.apply(a.valueType)) - ) + Json.obj( + ("keyType", womTypeEncoder.apply(a.keyType)), + ("valueType", womTypeEncoder.apply(a.valueType)) + ) ) ) case a: WomPairType => Json.obj( ("typeName", Json.fromString("Pair")), ("pairType", - Json.obj( - ("leftType", womTypeEncoder.apply(a.leftType)), - ("rightType", womTypeEncoder.apply(a.rightType)) - ) + Json.obj( + ("leftType", womTypeEncoder.apply(a.leftType)), + ("rightType", womTypeEncoder.apply(a.rightType)) + ) ) ) case a: WomArrayType => @@ -39,14 +39,16 @@ object WomTypeJsonSupport { case a: WomCompositeType => Json.obj( ("typeName", Json.fromString("Object")), - ("objectFieldTypes", Json.fromValues( - a.typeMap map { entry => - Json.obj( - ("fieldName", Json.fromString(entry._1)), - ("fieldType", womTypeEncoder.apply(entry._2)) - ) - } - )) + ("objectFieldTypes", + Json.fromValues( + a.typeMap map { entry => + Json.obj( + ("fieldName", Json.fromString(entry._1)), + ("fieldType", womTypeEncoder.apply(entry._2)) + ) + } + ) + ) ) case a: WomOptionalType => Json.obj( @@ -58,7 +60,6 @@ object WomTypeJsonSupport { ("typeName", Json.fromString(a.stableName)) ) } - } } } diff --git a/services/src/main/scala/cromwell/services/womtool/models/WorkflowDescription.scala b/services/src/main/scala/cromwell/services/womtool/models/WorkflowDescription.scala index bea99387388..44e19dd758b 100644 --- a/services/src/main/scala/cromwell/services/womtool/models/WorkflowDescription.scala +++ b/services/src/main/scala/cromwell/services/womtool/models/WorkflowDescription.scala @@ -9,31 +9,34 @@ import wom.executable.WomBundle import MetaValueElementJsonSupport._ -case class WorkflowDescription( valid: Boolean, - errors: List[String], - validWorkflow: Boolean, - name: String, - inputs: List[InputDescription], - outputs: List[OutputDescription], - images: List[String], - submittedDescriptorType: Map[String, String], - importedDescriptorTypes: List[Map[String, String]], - meta: Map[String, MetaValueElement], - parameterMeta: Map[String, MetaValueElement], - isRunnableWorkflow: Boolean) +case class WorkflowDescription(valid: Boolean, + errors: List[String], + validWorkflow: Boolean, + name: String, + inputs: List[InputDescription], + outputs: List[OutputDescription], + images: List[String], + submittedDescriptorType: Map[String, String], + importedDescriptorTypes: List[Map[String, String]], + meta: Map[String, MetaValueElement], + parameterMeta: Map[String, MetaValueElement], + isRunnableWorkflow: Boolean +) case object WorkflowDescription { - def withErrors(errors: List[String], submittedDescriptorType: Map[String, String]): WorkflowDescription = { + def withErrors(errors: List[String], submittedDescriptorType: Map[String, String]): WorkflowDescription = WorkflowDescription( valid = false, errors = errors, validWorkflow = false, submittedDescriptorType = submittedDescriptorType ) - } - def fromBundle(bundle: WomBundle, submittedDescriptorType: Map[String, String], inputErrors: List[String] = List.empty): WorkflowDescription = { + def fromBundle(bundle: WomBundle, + submittedDescriptorType: Map[String, String], + inputErrors: List[String] = List.empty + ): WorkflowDescription = { val images: List[String] = bundle.allCallables.values.toList 
flatMap { callable: Callable => callable match { @@ -51,31 +54,72 @@ case object WorkflowDescription { // There is a primary callable in the form of a workflow case (_, Some(primaryCallable: WorkflowDefinition)) => - fromBundleInner(inputErrors, primaryCallable.name, submittedDescriptorType, primaryCallable.inputs, primaryCallable.outputs, primaryCallable.meta, primaryCallable.parameterMeta, images, isRunnableWorkflow = true) + fromBundleInner( + inputErrors, + primaryCallable.name, + submittedDescriptorType, + primaryCallable.inputs, + primaryCallable.outputs, + primaryCallable.meta, + primaryCallable.parameterMeta, + images, + isRunnableWorkflow = true + ) // There is a primary callable in the form of a task case (_, Some(primaryCallable: CallableTaskDefinition)) => - fromBundleInner(inputErrors, primaryCallable.name, submittedDescriptorType, primaryCallable.inputs, primaryCallable.outputs, primaryCallable.meta, primaryCallable.parameterMeta, images, isRunnableWorkflow = false) + fromBundleInner( + inputErrors, + primaryCallable.name, + submittedDescriptorType, + primaryCallable.inputs, + primaryCallable.outputs, + primaryCallable.meta, + primaryCallable.parameterMeta, + images, + isRunnableWorkflow = false + ) // WDL draft-2: a solo task is not primary, but we should still use its name and IO case ((soloNonPrimaryTask: CallableTaskDefinition) :: Nil, None) => - fromBundleInner(inputErrors, soloNonPrimaryTask.name, submittedDescriptorType, soloNonPrimaryTask.inputs, soloNonPrimaryTask.outputs, soloNonPrimaryTask.meta, soloNonPrimaryTask.parameterMeta, images, isRunnableWorkflow = false) + fromBundleInner( + inputErrors, + soloNonPrimaryTask.name, + submittedDescriptorType, + soloNonPrimaryTask.inputs, + soloNonPrimaryTask.outputs, + soloNonPrimaryTask.meta, + soloNonPrimaryTask.parameterMeta, + images, + isRunnableWorkflow = false + ) // Multiple tasks case _ => - fromBundleInner(inputErrors, "", submittedDescriptorType, List.empty, List.empty, Map.empty, Map.empty, images, isRunnableWorkflow = false) + fromBundleInner(inputErrors, + "", + submittedDescriptorType, + List.empty, + List.empty, + Map.empty, + Map.empty, + images, + isRunnableWorkflow = false + ) } } private def fromBundleInner( - inputErrors: List[String], - name: String, submittedDescriptorType: Map[String, String], - inputs: List[InputDefinition], outputs: List[OutputDefinition], - meta: Map[String, MetaValueElement], - parameterMeta: Map[String, MetaValueElement], - images: List[String], - isRunnableWorkflow: Boolean - ): WorkflowDescription = { + inputErrors: List[String], + name: String, + submittedDescriptorType: Map[String, String], + inputs: List[InputDefinition], + outputs: List[OutputDefinition], + meta: Map[String, MetaValueElement], + parameterMeta: Map[String, MetaValueElement], + images: List[String], + isRunnableWorkflow: Boolean + ): WorkflowDescription = { val inputDescriptions = inputs.sortBy(_.name) map { input: InputDefinition => input match { case i: InputDefinitionWithDefault => @@ -133,21 +177,30 @@ case object WorkflowDescription { importedDescriptorTypes: List[Map[String, String]] = List.empty, meta: Map[String, MetaValueElement] = Map.empty, parameterMeta: Map[String, MetaValueElement] = Map.empty, - isRunnableWorkflow: Boolean = false): WorkflowDescription = { - new WorkflowDescription(valid, errors, validWorkflow, name, inputs, outputs, images, submittedDescriptorType, importedDescriptorTypes, meta, parameterMeta, isRunnableWorkflow) - } + isRunnableWorkflow: Boolean = false + ): 
WorkflowDescription = + new WorkflowDescription(valid, + errors, + validWorkflow, + name, + inputs, + outputs, + images, + submittedDescriptorType, + importedDescriptorTypes, + meta, + parameterMeta, + isRunnableWorkflow + ) implicit val workflowDescriptionEncoder: Encoder[WorkflowDescription] = deriveEncoder[WorkflowDescription] // We need this decoder to exist for `responseAs[WorkflowDescription]` to work in `cromwell.webservice.routes.WomtoolRouteSupportSpec` // That test only inspects some fields in the JSON, so this works adequately for now. - implicit val workflowDescriptionDecoder: Decoder[WorkflowDescription] = (c: HCursor) => { + implicit val workflowDescriptionDecoder: Decoder[WorkflowDescription] = (c: HCursor) => for { valid <- c.downField("valid").as[Boolean] errors <- c.downField("errors").as[List[String]] validWorkflow <- c.downField("validWorkflow").as[Boolean] - } yield { - WorkflowDescription(valid = valid, errors = errors, validWorkflow = validWorkflow) - } - } + } yield WorkflowDescription(valid = valid, errors = errors, validWorkflow = validWorkflow) } diff --git a/services/src/test/scala/cromwell/services/IoActorRequesterSpec.scala b/services/src/test/scala/cromwell/services/IoActorRequesterSpec.scala index d76e38c1fa0..93211d44666 100644 --- a/services/src/test/scala/cromwell/services/IoActorRequesterSpec.scala +++ b/services/src/test/scala/cromwell/services/IoActorRequesterSpec.scala @@ -21,7 +21,9 @@ class IoActorRequesterSpec extends TestKitSuite with ImplicitSender with AnyFlat it should "Allow actors to request and receive a reference to the IO Actor" in { val serviceRegistry = TestProbe() val ioActor = TestProbe() - val requester = TestActorRef[SimpleIoActorRequester](simpleIoActorRequesterProps(serviceRegistry.ref), "simple-io-requester-request-receive") + val requester = TestActorRef[SimpleIoActorRequester](simpleIoActorRequesterProps(serviceRegistry.ref), + "simple-io-requester-request-receive" + ) serviceRegistry.expectMsg(RequestIoActorRef) serviceRegistry.lastSender ! IoActorRef(ioActor.ref) @@ -36,7 +38,9 @@ class IoActorRequesterSpec extends TestKitSuite with ImplicitSender with AnyFlat it should "keep asking if the IoActor is not available immediately" in { val serviceRegistry = TestProbe() val ioActor = TestProbe() - val requester = TestActorRef[SimpleIoActorRequester](simpleIoActorRequesterProps(serviceRegistry.ref), "simple-io-requester-not-ready-resilience") + val requester = TestActorRef[SimpleIoActorRequester](simpleIoActorRequesterProps(serviceRegistry.ref), + "simple-io-requester-not-ready-resilience" + ) 10.times { serviceRegistry.expectMsg(RequestIoActorRef) @@ -55,14 +59,17 @@ class IoActorRequesterSpec extends TestKitSuite with ImplicitSender with AnyFlat it should "fail if the IoActorRequest receives an unexpected response" in { val serviceRegistry = TestProbe() - val requester = TestActorRef[SimpleIoActorRequester](simpleIoActorRequesterProps(serviceRegistry.ref), "simple-io-requester-not-ready-resilience") + val requester = TestActorRef[SimpleIoActorRequester](simpleIoActorRequesterProps(serviceRegistry.ref), + "simple-io-requester-not-ready-resilience" + ) serviceRegistry.expectMsg(RequestIoActorRef) serviceRegistry.lastSender ! "!!!Bad Response!!!" 
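This test leans on the TestProbe request/reply pattern: the probe records what it receives, and `lastSender` lets the test answer as if it were the real service. A runnable sketch with made-up names (nothing here is from the spec above):

    import akka.actor.ActorSystem
    import akka.testkit.TestProbe

    object ProbeSketch extends App {
      implicit val system: ActorSystem = ActorSystem("probe-sketch")
      val probe = TestProbe()

      probe.ref ! "request"      // stand-in for the actor under test
      probe.expectMsg("request") // the probe saw the message
      probe.lastSender           // ActorRef to reply through, as the tests above do
      system.terminate()
    }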
eventually { requester.underlyingActor.ioActor.value match { - case Some(Failure(exception)) => exception.getMessage should startWith("Programmer Error: Unexpected response to a RequestIoActor message") + case Some(Failure(exception)) => + exception.getMessage should startWith("Programmer Error: Unexpected response to a RequestIoActor message") case Some(Success(other)) => fail(s"Expected the IoActor ref to be empty, but got: $other") case None => fail("No IoActor received") } @@ -77,10 +84,12 @@ object IoActorRequesterSpec { val ioActor: Future[ActorRef] = requestIoActor(backoffInterval = 10.millis) - override def receive: Receive = { - case other => throw new RuntimeException(s"Didn't expect a message but got: $other") + override def receive: Receive = { case other => + throw new RuntimeException(s"Didn't expect a message but got: $other") } } - def simpleIoActorRequesterProps(serviceRegistryActor: ActorRef): Props = Props(new SimpleIoActorRequester(serviceRegistryActor)) + def simpleIoActorRequesterProps(serviceRegistryActor: ActorRef): Props = Props( + new SimpleIoActorRequester(serviceRegistryActor) + ) } diff --git a/services/src/test/scala/cromwell/services/NooPServiceActor.scala b/services/src/test/scala/cromwell/services/NooPServiceActor.scala index 649ea0a3ebe..dedfeeead73 100644 --- a/services/src/test/scala/cromwell/services/NooPServiceActor.scala +++ b/services/src/test/scala/cromwell/services/NooPServiceActor.scala @@ -5,7 +5,7 @@ import com.typesafe.config.Config import cromwell.util.GracefulShutdownHelper.ShutdownCommand class NooPServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends Actor { - override def receive = { - case ShutdownCommand => context stop self + override def receive = { case ShutdownCommand => + context stop self } } diff --git a/services/src/test/scala/cromwell/services/ServiceRegistryActorSpec.scala b/services/src/test/scala/cromwell/services/ServiceRegistryActorSpec.scala index ce639f89387..05d86ee680c 100644 --- a/services/src/test/scala/cromwell/services/ServiceRegistryActorSpec.scala +++ b/services/src/test/scala/cromwell/services/ServiceRegistryActorSpec.scala @@ -1,6 +1,5 @@ package cromwell.services - import java.util.UUID import akka.actor.SupervisorStrategy.Stop @@ -19,7 +18,6 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ import scala.language.postfixOps - abstract class EmptyActor extends Actor { override def receive: Receive = Actor.emptyBehavior } @@ -67,8 +65,10 @@ object ServiceRegistryActorSpec { | $ServiceNameKey { | class = "$ServiceClassKey" | } - """.stripMargin.replace(ServiceNameKey, serviceClass.serviceName) - .stripMargin.replace(ServiceClassKey, serviceClass.getCanonicalName) + """.stripMargin + .replace(ServiceNameKey, serviceClass.serviceName) + .stripMargin + .replace(ServiceClassKey, serviceClass.getCanonicalName) } val AwaitTimeout: FiniteDuration = 5 seconds @@ -97,18 +97,17 @@ class ServiceRegistryActorSpec extends TestKitSuite with AnyFlatSpecLike with Ma system.actorOf( props = Props(new EmptyActor { context.actorOf(ServiceRegistryActor.props(config), s"ServiceRegistryActor-$uuid") - override val supervisorStrategy: SupervisorStrategy = OneForOneStrategy() { - case f => parentProbe.ref ! f; Stop + override val supervisorStrategy: SupervisorStrategy = OneForOneStrategy() { case f => + parentProbe.ref ! 
f; Stop } }), - name = s"childActor-$uuid", + name = s"childActor-$uuid" ) parentProbe } - private def buildServiceRegistry(config: Config): ActorRef = { + private def buildServiceRegistry(config: Config): ActorRef = system.actorOf(ServiceRegistryActor.props(config), s"ServiceRegistryActor-${UUID.randomUUID()}") - } behavior of "ServiceRegistryActorSpec" @@ -121,10 +120,9 @@ class ServiceRegistryActorSpec extends TestKitSuite with AnyFlatSpecLike with Ma val configString = buildConfig(classOf[FooServiceActor]) val missingServices = configString.replace(" services ", " shmervices") val probe = buildProbeForInitializationException(ConfigFactory.parseString(missingServices)) - probe.expectMsgPF(AwaitTimeout) { - case e: ActorInitializationException => - e.getCause shouldBe a [ConfigException.Missing] - e.getCause.getMessage shouldBe "String: 1: No configuration setting found for key 'services'" + probe.expectMsgPF(AwaitTimeout) { case e: ActorInitializationException => + e.getCause shouldBe a[ConfigException.Missing] + e.getCause.getMessage shouldBe "String: 1: No configuration setting found for key 'services'" } } @@ -132,24 +130,24 @@ class ServiceRegistryActorSpec extends TestKitSuite with AnyFlatSpecLike with Ma val configString = buildConfig(classOf[FooServiceActor]) val missingService = configString.replace("FooServiceActor", "FooWhoServiceActor") val probe = buildProbeForInitializationException(ConfigFactory.parseString(missingService)) - probe.expectMsgPF(AwaitTimeout) { - case e: ActorInitializationException => - // The class not found exception is wrapped in a Runtime Exception giving the name of the faulty service - val cause = e.getCause - cause shouldBe a [RuntimeException] - val classNotFound = cause.getCause - classNotFound shouldBe a [ClassNotFoundException] - classNotFound.getMessage shouldBe "cromwell.services.FooWhoServiceActor" + probe.expectMsgPF(AwaitTimeout) { case e: ActorInitializationException => + // The class not found exception is wrapped in a Runtime Exception giving the name of the faulty service + val cause = e.getCause + cause shouldBe a[RuntimeException] + val classNotFound = cause.getCause + classNotFound shouldBe a[ClassNotFoundException] + classNotFound.getMessage shouldBe "cromwell.services.FooWhoServiceActor" } } it should "die during construction if a service class lacks a proper constructor" in { val configString = buildConfig(classOf[NoAppropriateConstructorServiceActor]) val probe = buildProbeForInitializationException(ConfigFactory.parseString(configString)) - probe.expectMsgPF(AwaitTimeout) { - case e: ActorInitializationException => - e.getCause shouldBe an [IllegalArgumentException] - e.getCause.getMessage should include("no matching constructor found on class cromwell.services.NoAppropriateConstructorServiceActor") + probe.expectMsgPF(AwaitTimeout) { case e: ActorInitializationException => + e.getCause shouldBe an[IllegalArgumentException] + e.getCause.getMessage should include( + "no matching constructor found on class cromwell.services.NoAppropriateConstructorServiceActor" + ) } } @@ -157,10 +155,9 @@ class ServiceRegistryActorSpec extends TestKitSuite with AnyFlatSpecLike with Ma val configString = buildConfig(classOf[FooServiceActor]) val missingService = configString.replace("class = \"cromwell.services.FooServiceActor\"", "") val probe = buildProbeForInitializationException(ConfigFactory.parseString(missingService)) - probe.expectMsgPF(AwaitTimeout) { - case e: ActorInitializationException => - e.getCause shouldBe an 
[IllegalArgumentException] - e.getCause.getMessage shouldBe "Invalid configuration for service Foo: missing 'class' definition" + probe.expectMsgPF(AwaitTimeout) { case e: ActorInitializationException => + e.getCause shouldBe an[IllegalArgumentException] + e.getCause.getMessage shouldBe "Invalid configuration for service Foo: missing 'class' definition" } } @@ -171,9 +168,8 @@ class ServiceRegistryActorSpec extends TestKitSuite with AnyFlatSpecLike with Ma service.tell("This is a String, not an appropriate ServiceRegistryActor message", probe.ref) - probe.expectMsgPF(AwaitTimeout) { - case e: ServiceRegistryFailure => - e.serviceName should include("Message is not a ServiceRegistryMessage:") + probe.expectMsgPF(AwaitTimeout) { case e: ServiceRegistryFailure => + e.serviceName should include("Message is not a ServiceRegistryMessage:") } } @@ -185,9 +181,8 @@ class ServiceRegistryActorSpec extends TestKitSuite with AnyFlatSpecLike with Ma service.tell(ArbitraryBarMessage, probe.ref) - probe.expectMsgPF(AwaitTimeout) { - case e: ServiceRegistryFailure => - e.serviceName shouldBe "Bar" + probe.expectMsgPF(AwaitTimeout) { case e: ServiceRegistryFailure => + e.serviceName shouldBe "Bar" } } @@ -204,7 +199,7 @@ class ServiceRegistryActorSpec extends TestKitSuite with AnyFlatSpecLike with Ma case x => fail("unexpected message: " + x) } } - + it should "unpack ListenTo messages" in { val snd = TestProbe("snd").ref val configString = buildConfig(classOf[BarServiceActor]) diff --git a/services/src/test/scala/cromwell/services/ServicesSpec.scala b/services/src/test/scala/cromwell/services/ServicesSpec.scala index 364d2e26bf6..ef7423ab34d 100644 --- a/services/src/test/scala/cromwell/services/ServicesSpec.scala +++ b/services/src/test/scala/cromwell/services/ServicesSpec.scala @@ -70,8 +70,7 @@ object ServicesSpec { val config: Config = ConfigFactory.parseString(ServicesSpec.configString) } -abstract class ServicesSpec extends TestKitSuite - with Matchers with AnyWordSpecLike with ScalaFutures { +abstract class ServicesSpec extends TestKitSuite with Matchers with AnyWordSpecLike with ScalaFutures { override protected lazy val actorSystemConfig: Config = ServicesSpec.config implicit val timeout: Timeout = Timeout(60.seconds.dilated) diff --git a/services/src/test/scala/cromwell/services/database/ConnectionMetadata.scala b/services/src/test/scala/cromwell/services/database/ConnectionMetadata.scala index 0ba8478ad4a..5f788ba2f9b 100644 --- a/services/src/test/scala/cromwell/services/database/ConnectionMetadata.scala +++ b/services/src/test/scala/cromwell/services/database/ConnectionMetadata.scala @@ -3,8 +3,7 @@ package cromwell.services.database /** * Metadata from the JDBC connection and driver. 
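 * (Typically populated from `java.sql.Connection#getMetaData`; see `DatabaseTestKit.connectionMetadata`,
 * which maps each field from the corresponding `java.sql.DatabaseMetaData` getter.)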
*/
-case class ConnectionMetadata
-(
+case class ConnectionMetadata(
   databaseProductName: String,
   databaseProductVersion: String,
   databaseMajorVersion: Int,
@@ -14,5 +13,5 @@ case class ConnectionMetadata
   driverMajorVersion: Int,
   driverMinorVersion: Int,
   jdbcMajorVersion: Int,
-  jdbcMinorVersion: Int,
+  jdbcMinorVersion: Int
 )
diff --git a/services/src/test/scala/cromwell/services/database/CromwellDatabaseType.scala b/services/src/test/scala/cromwell/services/database/CromwellDatabaseType.scala
index 8a8ede69e19..97d1851c79a 100644
--- a/services/src/test/scala/cromwell/services/database/CromwellDatabaseType.scala
+++ b/services/src/test/scala/cromwell/services/database/CromwellDatabaseType.scala
@@ -20,7 +20,7 @@ sealed trait CromwellDatabaseType[T <: SlickDatabase] {
 object CromwellDatabaseType {
   val All: Seq[CromwellDatabaseType[_ <: SlickDatabase]] = List(
     EngineDatabaseType,
-    MetadataDatabaseType,
+    MetadataDatabaseType
   )
 }
@@ -28,16 +28,14 @@ object EngineDatabaseType extends CromwellDatabaseType[EngineSlickDatabase] {
   override val name: String = "Engine"
   override val liquibaseSettings: LiquibaseSettings = EngineServicesStore.EngineLiquibaseSettings
 
-  override def newDatabase(config: Config): EngineSlickDatabase with TestSlickDatabase = {
+  override def newDatabase(config: Config): EngineSlickDatabase with TestSlickDatabase =
     new EngineSlickDatabase(config) with TestSlickDatabase
-  }
 }
 
 object MetadataDatabaseType extends CromwellDatabaseType[MetadataSlickDatabase] {
   override val name: String = "Metadata"
   override val liquibaseSettings: LiquibaseSettings = MetadataServicesStore.MetadataLiquibaseSettings
 
-  override def newDatabase(config: Config): MetadataSlickDatabase with TestSlickDatabase = {
+  override def newDatabase(config: Config): MetadataSlickDatabase with TestSlickDatabase =
     new MetadataSlickDatabase(config) with TestSlickDatabase
-  }
 }
diff --git a/services/src/test/scala/cromwell/services/database/DatabaseSystem.scala b/services/src/test/scala/cromwell/services/database/DatabaseSystem.scala
index 8add5869502..490c7823c20 100644
--- a/services/src/test/scala/cromwell/services/database/DatabaseSystem.scala
+++ b/services/src/test/scala/cromwell/services/database/DatabaseSystem.scala
@@ -19,7 +19,7 @@ object DatabaseSystem {
     MysqlEarliestDatabaseSystem,
     MysqlLatestDatabaseSystem,
     PostgresqlEarliestDatabaseSystem,
-    PostgresqlLatestDatabaseSystem,
+    PostgresqlLatestDatabaseSystem
   )
 }
diff --git a/services/src/test/scala/cromwell/services/database/DatabaseTestKit.scala b/services/src/test/scala/cromwell/services/database/DatabaseTestKit.scala
index 651248b70cd..5ca41bc15bc 100644
--- a/services/src/test/scala/cromwell/services/database/DatabaseTestKit.scala
+++ b/services/src/test/scala/cromwell/services/database/DatabaseTestKit.scala
@@ -33,15 +33,15 @@ object DatabaseTestKit extends StrictLogging {
    * @tparam A The return type of the block.
    * @return The return value of the block.
    */
-  def withConnection[Profile <: JdbcProfile, A](profile: Profile, database: Profile#Backend#Database)
-                                               (block: Connection => A): A = {
+  def withConnection[Profile <: JdbcProfile, A](profile: Profile, database: Profile#Backend#Database)(
+    block: Connection => A
+  ): A =
    /*
     TODO: Should this withConnection() method have an (implicit?) timeout parameter that it passes on to
     Await.result? If we run completely asynchronously, nest calls to withConnection, and then call flatMap, the
     outer connection may already be closed before an inner block finishes running.
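
     For illustration, a purely hypothetical sketch of that nesting (safe today only because each
     block runs to completion inside Await.result before its connection is released):

       withConnection(profile, database) { outer =>
         withConnection(profile, database) { inner =>
           // both `outer` and `inner` are open JDBC connections here; nothing escapes as a Future
         }
       }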
*/ Await.result(database.run(profile.api.SimpleDBIO(context => block(context.connection))), Duration.Inf) - } /** * Lends two connections to a block of code. @@ -56,15 +56,16 @@ object DatabaseTestKit extends StrictLogging { * @tparam A The return type of the block. * @return The return value of the block. */ - def withConnections[Profile1 <: JdbcProfile, Profile2 <: JdbcProfile, A] - (profile1: Profile1, database1: Profile1#Backend#Database, profile2: Profile2, database2: Profile2#Backend#Database) - (block: (Connection, Connection) => A): A = { + def withConnections[Profile1 <: JdbcProfile, Profile2 <: JdbcProfile, A](profile1: Profile1, + database1: Profile1#Backend#Database, + profile2: Profile2, + database2: Profile2#Backend#Database + )(block: (Connection, Connection) => A): A = withConnection(profile1, database1) { connection1 => withConnection(profile2, database2) { connection2 => block(connection1, connection2) } } - } /** * Creates a new in memory HSQLDB that should be closed after use. @@ -83,7 +84,8 @@ object DatabaseTestKit extends StrictLogging { * Opens an initialized database. */ def initializedDatabaseFromConfig[A <: SlickDatabase](databaseType: CromwellDatabaseType[A], - databaseConfig: Config): A with TestSlickDatabase = { + databaseConfig: Config + ): A with TestSlickDatabase = { val database = databaseType.newDatabase(databaseConfig) database.initialized(databaseType.liquibaseSettings) } @@ -92,21 +94,23 @@ object DatabaseTestKit extends StrictLogging { * Opens an initialized database. */ def initializedDatabaseFromSystem[A <: SlickDatabase](databaseType: CromwellDatabaseType[A], - databaseSystem: DatabaseSystem): A with TestSlickDatabase = { + databaseSystem: DatabaseSystem + ): A with TestSlickDatabase = initializeDatabaseByContainerOptTypeAndSystem(None, databaseType, databaseSystem) - } /** * Opens a database connection without any liquibase being performed. 
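    * A minimal usage sketch (hypothetical value name; HSQLDB is in-memory, so no test container is needed):
    * {{{
    * val database = schemalessDatabaseFromContainerOptAndSystem(None, HsqldbDatabaseSystem)
    * }}}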
*/ - def schemalessDatabaseFromContainerOptAndSystem(containerOpt: Option[Container], databaseSystem: DatabaseSystem): SchemalessSlickDatabase with TestSlickDatabase = { + def schemalessDatabaseFromContainerOptAndSystem(containerOpt: Option[Container], + databaseSystem: DatabaseSystem + ): SchemalessSlickDatabase with TestSlickDatabase = containerOpt match { case None => new SchemalessSlickDatabase(getConfig(databaseSystem, dbContainerOpt = None)) with TestSlickDatabase case Some(cont) if cont.isInstanceOf[JdbcDatabaseContainer] => - new SchemalessSlickDatabase(getConfig(databaseSystem, Option(cont.asInstanceOf[JdbcDatabaseContainer]))) with TestSlickDatabase + new SchemalessSlickDatabase(getConfig(databaseSystem, Option(cont.asInstanceOf[JdbcDatabaseContainer]))) + with TestSlickDatabase case Some(_) => throw new RuntimeException("ERROR: container is not a JdbcDatabaseContainer.") } - } def getDatabaseTestContainer(databaseSystem: DatabaseSystem): Option[Container] = { val containerOption: Option[SingleContainer[_ <: JavaJdbcDatabaseContainer[_]]] = databaseSystem match { @@ -114,24 +118,32 @@ object DatabaseTestKit extends StrictLogging { case networkDbSystem: NetworkDatabaseSystem => networkDbSystem.platform match { case MariadbDatabasePlatform => - Option(MariaDBContainer( - dockerImageName = DockerImageName.parse(s"mariadb:${networkDbSystem.dockerImageVersion}"), - dbName = "cromwell_test", - dbUsername = "cromwell", - dbPassword = "test" - )) + Option( + MariaDBContainer( + dockerImageName = DockerImageName.parse(s"mariadb:${networkDbSystem.dockerImageVersion}"), + dbName = "cromwell_test", + dbUsername = "cromwell", + dbPassword = "test" + ) + ) case MysqlDatabasePlatform => - Option(MySQLContainer( - mysqlImageVersion = DockerImageName.parse(s"mysql:${networkDbSystem.dockerImageVersion}"), - databaseName = "cromwell_test", - username = "cromwell", - password = "test")) + Option( + MySQLContainer( + mysqlImageVersion = DockerImageName.parse(s"mysql:${networkDbSystem.dockerImageVersion}"), + databaseName = "cromwell_test", + username = "cromwell", + password = "test" + ) + ) case PostgresqlDatabasePlatform => - Option(PostgreSQLContainer( - dockerImageNameOverride = DockerImageName.parse(s"postgres:${networkDbSystem.dockerImageVersion}"), - databaseName = "cromwell_test", - username = "cromwell", - password = "test")) + Option( + PostgreSQLContainer( + dockerImageNameOverride = DockerImageName.parse(s"postgres:${networkDbSystem.dockerImageVersion}"), + databaseName = "cromwell_test", + username = "cromwell", + password = "test" + ) + ) case _ => None } } @@ -145,22 +157,28 @@ object DatabaseTestKit extends StrictLogging { def initializeDatabaseByContainerOptTypeAndSystem[A <: SlickDatabase](containerOpt: Option[Container], databaseType: CromwellDatabaseType[A], - databaseSystem: DatabaseSystem): A with TestSlickDatabase = { + databaseSystem: DatabaseSystem + ): A with TestSlickDatabase = containerOpt match { case None => initializedDatabaseFromConfig(databaseType, getConfig(databaseSystem, None)) case Some(cont) if cont.isInstanceOf[JdbcDatabaseContainer] => - initializedDatabaseFromConfig(databaseType, getConfig(databaseSystem, Option(cont.asInstanceOf[JdbcDatabaseContainer]))) + initializedDatabaseFromConfig(databaseType, + getConfig(databaseSystem, Option(cont.asInstanceOf[JdbcDatabaseContainer])) + ) case Some(_) => throw new RuntimeException("ERROR: container is not a JdbcDatabaseContainer.") } - } - def getConfig(databaseSystem: DatabaseSystem, dbContainerOpt: 
Option[JdbcDatabaseContainer] = None): Config = { + def getConfig(databaseSystem: DatabaseSystem, dbContainerOpt: Option[JdbcDatabaseContainer] = None): Config = dbContainerOpt match { case None if databaseSystem == HsqldbDatabaseSystem => hsqldbDatabaseConfig - case None => throw new RuntimeException("ERROR: dbContainer option must be passed into `getConfig` method for all databases except HSQLDB.") + case None => + throw new RuntimeException( + "ERROR: dbContainer option must be passed into `getConfig` method for all databases except HSQLDB." + ) case Some(dbContainer) => val (slickProfile, jdbcDriver) = databaseSystem.platform match { - case HsqldbDatabasePlatform => throw new RuntimeException("ERROR: dbContainer option cannot be defined for HSQLDB database.") + case HsqldbDatabasePlatform => + throw new RuntimeException("ERROR: dbContainer option cannot be defined for HSQLDB database.") case MariadbDatabasePlatform => ("slick.jdbc.MySQLProfile$", "org.mariadb.jdbc.Driver") case MysqlDatabasePlatform => ("slick.jdbc.MySQLProfile$", "com.mysql.cj.jdbc.Driver") case PostgresqlDatabasePlatform => ("slick.jdbc.PostgresProfile$", "org.postgresql.Driver") @@ -177,7 +195,6 @@ object DatabaseTestKit extends StrictLogging { |""".stripMargin ) } - } /** * Run liquibase on a open database. @@ -193,68 +210,61 @@ object DatabaseTestKit extends StrictLogging { /** * Returns a Liquibase snapshot of an open Slick database. */ - def liquibaseSnapshot(database: SlickDatabase): DatabaseSnapshot = { + def liquibaseSnapshot(database: SlickDatabase): DatabaseSnapshot = withConnection(database.dataAccess.driver, database.database)(LiquibaseUtils.getSnapshot) - } /** * Returns the database connection metadata for an open Slick database. */ - def connectionMetadata(database: SlickDatabase): ConnectionMetadata = { - withConnection(database.dataAccess.driver, database.database) { - connection => - val metadata = connection.getMetaData - ConnectionMetadata( - databaseProductName = metadata.getDatabaseProductName, - databaseProductVersion = metadata.getDatabaseProductVersion, - databaseMajorVersion = metadata.getDatabaseMajorVersion, - databaseMinorVersion = metadata.getDatabaseMinorVersion, - driverName = metadata.getDriverName, - driverVersion = metadata.getDriverVersion, - driverMajorVersion = metadata.getDriverMajorVersion, - driverMinorVersion = metadata.getDriverMinorVersion, - jdbcMajorVersion = metadata.getJDBCMajorVersion, - jdbcMinorVersion = metadata.getJDBCMinorVersion, - ) + def connectionMetadata(database: SlickDatabase): ConnectionMetadata = + withConnection(database.dataAccess.driver, database.database) { connection => + val metadata = connection.getMetaData + ConnectionMetadata( + databaseProductName = metadata.getDatabaseProductName, + databaseProductVersion = metadata.getDatabaseProductVersion, + databaseMajorVersion = metadata.getDatabaseMajorVersion, + databaseMinorVersion = metadata.getDatabaseMinorVersion, + driverName = metadata.getDriverName, + driverVersion = metadata.getDriverVersion, + driverMajorVersion = metadata.getDriverMajorVersion, + driverMinorVersion = metadata.getDriverMinorVersion, + jdbcMajorVersion = metadata.getJDBCMajorVersion, + jdbcMinorVersion = metadata.getJDBCMinorVersion + ) } - } /** * Returns a Liquibase snapshot of an in memory HSQLDB. 
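    * A minimal usage sketch, mirroring how LiquibaseComparisonSpec builds its expected snapshot:
    * {{{
    * val expectedSnapshot = inMemorySnapshot(EngineDatabaseType, SlickSchemaManager)
    * }}}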
*/ def inMemorySnapshot[A <: SlickDatabase](databaseType: CromwellDatabaseType[A], - schemaManager: SchemaManager): DatabaseSnapshot = { + schemaManager: SchemaManager + ): DatabaseSnapshot = { var snapshot: DatabaseSnapshot = null for { database <- inMemoryDatabase(databaseType, schemaManager).autoClosed - } { - snapshot = liquibaseSnapshot(database) } + snapshot = liquibaseSnapshot(database) snapshot } /** * Returns true if the primary key was auto generated by the database. */ - def isGenerated(primaryKey: MPrimaryKey): Boolean = { + def isGenerated(primaryKey: MPrimaryKey): Boolean = isGenerated(primaryKey.pkName.get) - } /** * Returns true if the index was auto generated by the database. */ - def isGenerated(index: MIndexInfo): Boolean = { + def isGenerated(index: MIndexInfo): Boolean = isGenerated(index.indexName.get) - } /** * Returns true if the index was auto generated by the database. */ - def isGenerated(index: Index): Boolean = { + def isGenerated(index: Index): Boolean = isGenerated(index.getName) - } - private def isGenerated(name: String): Boolean = { + private def isGenerated(name: String): Boolean = name.startsWith("SYS_") - } } diff --git a/services/src/test/scala/cromwell/services/database/HsqldbTransactionIsolationSpec.scala b/services/src/test/scala/cromwell/services/database/HsqldbTransactionIsolationSpec.scala index affca45d5b4..1bd8b0312f3 100644 --- a/services/src/test/scala/cromwell/services/database/HsqldbTransactionIsolationSpec.scala +++ b/services/src/test/scala/cromwell/services/database/HsqldbTransactionIsolationSpec.scala @@ -14,7 +14,12 @@ import org.scalatest.matchers.should.Matchers * http://www.hsqldb.org/doc/guide/sessions-chapt.html#snc_tx_mvcc * https://en.wikipedia.org/wiki/Multiversion_concurrency_control */ -class HsqldbTransactionIsolationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures with StringNormalizations { +class HsqldbTransactionIsolationSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalaFutures + with StringNormalizations { CromwellDatabaseType.All foreach { databaseType => behavior of s"HSQLDB transaction isolation for ${databaseType.name}" @@ -24,7 +29,7 @@ class HsqldbTransactionIsolationSpec extends AnyFlatSpec with CromwellTimeoutSpe slickDatabase <- DatabaseTestKit.initializedDatabaseFromSystem(databaseType, HsqldbDatabaseSystem).autoClosed } { import slickDatabase.dataAccess.driver.api._ - //noinspection SqlDialectInspection + // noinspection SqlDialectInspection val getHsqldbTx = sql"""SELECT PROPERTY_VALUE FROM INFORMATION_SCHEMA.SYSTEM_PROPERTIES @@ -32,7 +37,7 @@ class HsqldbTransactionIsolationSpec extends AnyFlatSpec with CromwellTimeoutSpe """.as[String].head val future = slickDatabase.database.run(getHsqldbTx) - (future.futureValue shouldEqual "mvcc") (after being lowerCased) + (future.futureValue shouldEqual "mvcc")(after being lowerCased) } } } diff --git a/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala b/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala index 3943fb65423..0db3e34bff9 100644 --- a/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala +++ b/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala @@ -32,7 +32,7 @@ class LiquibaseChangeSetSpec extends AnyFlatSpec with CromwellTimeoutSpec with M withClue("the dbms attribute must explicitly list supported databases:") { changeSet.getDbmsSet shouldNot be(null) changeSet.getDbmsSet 
shouldNot be(empty) - changeSet.getDbmsSet shouldNot contain atLeastOneOf("none", "all") + changeSet.getDbmsSet shouldNot contain atLeastOneOf ("none", "all") } changeSet.getDbmsSet.asScala foreach { dbms => withClue("do not use dbms excludes:") { @@ -44,7 +44,7 @@ class LiquibaseChangeSetSpec extends AnyFlatSpec with CromwellTimeoutSpec with M if (dbmsSet.contains("postgresql")) { it should s"check object quoting in $description" in { withClue("""databaseChangeLog objectQuotingStrategy="QUOTE_ALL_OBJECTS" must be set for PostgreSQL:""") { - changeSet.getObjectQuotingStrategy should be (ObjectQuotingStrategy.QUOTE_ALL_OBJECTS) + changeSet.getObjectQuotingStrategy should be(ObjectQuotingStrategy.QUOTE_ALL_OBJECTS) } } } diff --git a/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala b/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala index 255237b396f..c0cbd5383a0 100644 --- a/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala +++ b/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala @@ -31,7 +31,6 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with PatienceConfig(timeout = scaled(5.seconds), interval = scaled(100.millis)) CromwellDatabaseType.All foreach { databaseType => - lazy val expectedSnapshot = DatabaseTestKit.inMemorySnapshot(databaseType, SlickSchemaManager) lazy val expectedColumns = get[Column](expectedSnapshot).sorted lazy val expectedPrimaryKeys = get[PrimaryKey](expectedSnapshot).sorted @@ -40,12 +39,12 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with lazy val expectedIndexes = get[Index](expectedSnapshot) filterNot DatabaseTestKit.isGenerated DatabaseSystem.All foreach { databaseSystem => - behavior of s"Liquibase Comparison for ${databaseType.name} ${databaseSystem.name}" val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - lazy val liquibasedDatabase = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, databaseType, databaseSystem) + lazy val liquibasedDatabase = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, databaseType, databaseSystem) lazy val connectionMetadata = DatabaseTestKit.connectionMetadata(liquibasedDatabase) @@ -59,7 +58,7 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with lazy val columnMapping = getColumnMapping(databaseSystem) it should "start container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } expectedColumns foreach { expectedColumn => @@ -71,9 +70,11 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with } val actualColumn = actualColumnOption getOrElse fail(s"Did not find $description") - withClue(s"for type " + - s"${actualColumn.getType.getTypeName}(default = ${actualColumn.getDefaultValue}) vs. " + - s"${expectedColumn.getType.getTypeName}(default = ${expectedColumn.getDefaultValue}):") { + withClue( + s"for type " + + s"${actualColumn.getType.getTypeName}(default = ${actualColumn.getDefaultValue}) vs. 
" + + s"${expectedColumn.getType.getTypeName}(default = ${expectedColumn.getDefaultValue}):" + ) { val actualColumnType = ColumnType.from(actualColumn) @@ -98,7 +99,7 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with None, Option(DefaultNullBoolean), Option(DefaultNullString), - Option(DefaultNullFunction), + Option(DefaultNullFunction) ) List(Option(actualColumn.getDefaultValue)) should contain atLeastOneElementOf expectedOptions } else { @@ -129,7 +130,7 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with val expectedSequenceTypeOption = sequenceTypeValidationOption( expectedColumn, databaseSystem, - connectionMetadata, + connectionMetadata ) expectedSequenceTypeOption foreach { expectedSequenceType => val dbio = sequenceTypeDbio(expectedColumn, databaseSystem, liquibasedDatabase) @@ -225,14 +226,14 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } } } object LiquibaseComparisonSpec { - private def get[T <: DatabaseObject : ClassTag : Ordering](databaseSnapshot: DatabaseSnapshot): Seq[T] = { + private def get[T <: DatabaseObject: ClassTag: Ordering](databaseSnapshot: DatabaseSnapshot): Seq[T] = { val databaseObjectClass = classTag[T].runtimeClass.asInstanceOf[Class[T]] databaseSnapshot.get(databaseObjectClass).asScala.toSeq } @@ -241,49 +242,42 @@ object LiquibaseComparisonSpec { private val DefaultNullString = "NULL" private val DefaultNullFunction = new DatabaseFunction(DefaultNullString) - private def isSlickDefaultNull(column: Column): Boolean = { + private def isSlickDefaultNull(column: Column): Boolean = Option(column.getDefaultValue).isEmpty || column.getDefaultValue == DefaultNullFunction - } case class ColumnDescription(tableName: String, columnName: String) object ColumnDescription { - def from(column: Column): ColumnDescription = { + def from(column: Column): ColumnDescription = ColumnDescription(column.getRelation.getName, column.getName) - } } - case class ColumnType - ( + case class ColumnType( typeName: String, - sizeOption: Option[Int] = None, + sizeOption: Option[Int] = None ) object ColumnType { - def from(column: Column): ColumnType = { + def from(column: Column): ColumnType = ColumnType( column.getType.getTypeName.toUpperCase, - Option(column.getType.getColumnSize).map(_.toInt), + Option(column.getType.getColumnSize).map(_.toInt) ) - } } - case class ColumnDefault - ( + case class ColumnDefault( columnType: ColumnType, - defaultValue: AnyRef, + defaultValue: AnyRef ) object ColumnDefault { - def from(column: Column): ColumnDefault = { + def from(column: Column): ColumnDefault = ColumnDefault(ColumnType.from(column), column.getDefaultValue) - } } - case class ColumnMapping - ( + case class ColumnMapping( typeMapping: PartialFunction[ColumnType, ColumnType] = PartialFunction.empty, - defaultMapping: Map[ColumnDefault, AnyRef] = Map.empty, + defaultMapping: Map[ColumnDefault, AnyRef] = Map.empty ) /** Generate the expected PostgreSQL sequence name for a column. */ @@ -294,7 +288,7 @@ object LiquibaseComparisonSpec { // Postgres cuts of the length of names around this length val Count = 30 - def shorten(name: String, isColumn: Boolean): String = { + def shorten(name: String, isColumn: Boolean): String = pad { // NOTE: Table and column name truncation seems slightly different. // This logic was empirically derived. 
Feel free to modify/simplify! @@ -306,7 +300,6 @@ object LiquibaseComparisonSpec { name.take(Count - 1) } } - } val tableName = shorten(column.getRelation.getName, isColumn = false) val columnName = shorten(column.getName, isColumn = true) @@ -330,19 +323,19 @@ object LiquibaseComparisonSpec { // Note: BIT vs. TINYINT may be yet another tabs vs. spaces // https://stackoverflow.com/questions/11167793/boolean-or-tinyint-confusion/17298805 private val MysqldbColumnMapping = - ColumnMapping( - typeMapping = Map( - HsqldbTypeBigInt -> ColumnType("BIGINT", Option(19)), - HsqldbTypeBlob -> ColumnType("LONGBLOB", Option(2147483647)), - HsqldbTypeBoolean -> ColumnType("TINYINT", Option(3)), - HsqldbTypeClob -> ColumnType("LONGTEXT", Option(2147483647)), - HsqldbTypeInteger -> ColumnType("INT", Option(10)), - HsqldbTypeTimestamp -> ColumnType("DATETIME"), - ), - defaultMapping = Map( - HsqldbDefaultBooleanTrue -> Int.box(1) - ), - ) + ColumnMapping( + typeMapping = Map( + HsqldbTypeBigInt -> ColumnType("BIGINT", Option(19)), + HsqldbTypeBlob -> ColumnType("LONGBLOB", Option(2147483647)), + HsqldbTypeBoolean -> ColumnType("TINYINT", Option(3)), + HsqldbTypeClob -> ColumnType("LONGTEXT", Option(2147483647)), + HsqldbTypeInteger -> ColumnType("INT", Option(10)), + HsqldbTypeTimestamp -> ColumnType("DATETIME") + ), + defaultMapping = Map( + HsqldbDefaultBooleanTrue -> Int.box(1) + ) + ) // MariaDB should behave similar to MySQL except that only LOBs have sizes private val MariadbColumnMapping = @@ -353,11 +346,11 @@ object LiquibaseComparisonSpec { HsqldbTypeBoolean -> ColumnType("TINYINT"), HsqldbTypeClob -> ColumnType("LONGTEXT", Option(2147483647)), HsqldbTypeInteger -> ColumnType("INT"), - HsqldbTypeTimestamp -> ColumnType("DATETIME"), + HsqldbTypeTimestamp -> ColumnType("DATETIME") ), defaultMapping = Map( - HsqldbDefaultBooleanTrue -> Int.box(1), - ), + HsqldbDefaultBooleanTrue -> Int.box(1) + ) ) private val PostgresqlColumnMapping = @@ -367,21 +360,20 @@ object LiquibaseComparisonSpec { HsqldbTypeBlob -> ColumnType("OID", None), HsqldbTypeBoolean -> ColumnType("BOOL", None), HsqldbTypeClob -> ColumnType("TEXT", None), - HsqldbTypeInteger -> ColumnType("INT4", None), - ), + HsqldbTypeInteger -> ColumnType("INT4", None) + ) ) /** * Returns the column mapping for the DBMS. */ - private def getColumnMapping(databaseSystem: DatabaseSystem): ColumnMapping = { + private def getColumnMapping(databaseSystem: DatabaseSystem): ColumnMapping = databaseSystem.platform match { case HsqldbDatabasePlatform => HsqldbColumnMapping case MariadbDatabasePlatform => MariadbColumnMapping case MysqlDatabasePlatform => MysqldbColumnMapping case PostgresqlDatabasePlatform => PostgresqlColumnMapping } - } /** * Returns the column type, possibly mapped via the ColumnMapping. @@ -394,9 +386,8 @@ object LiquibaseComparisonSpec { /** * Returns the default for the column, either from ColumnMapping or the column itself. */ - private def getColumnDefault(column: Column, columnMapping: ColumnMapping): AnyRef = { + private def getColumnDefault(column: Column, columnMapping: ColumnMapping): AnyRef = columnMapping.defaultMapping.getOrElse(ColumnDefault.from(column), column.getDefaultValue) - } /** * Return the default for the auto increment column. 
@@ -404,8 +395,8 @@ object LiquibaseComparisonSpec { private def getAutoIncrementDefault(column: Column, columnMapping: ColumnMapping, databaseSystem: DatabaseSystem, - connectionMetadata: ConnectionMetadata, - ): ColumnDefault = { + connectionMetadata: ConnectionMetadata + ): ColumnDefault = databaseSystem.platform match { case PostgresqlDatabasePlatform if connectionMetadata.databaseMajorVersion <= 9 => val columnType = column.getType.getTypeName match { @@ -416,7 +407,6 @@ object LiquibaseComparisonSpec { ColumnDefault(columnType, columnDefault) case _ => ColumnDefault(getColumnType(column, columnMapping), column.getDefaultValue) } - } /** * Returns an optional extra check to ensure that datetimes can store microseconds. @@ -436,23 +426,23 @@ object LiquibaseComparisonSpec { * * This check also has to be done here, as Liquibase does not return the precision for Mysql datetime fields. */ - private def columnTypeValidationOption(column: Column, databaseSystem: DatabaseSystem): Option[String] = { + private def columnTypeValidationOption(column: Column, databaseSystem: DatabaseSystem): Option[String] = databaseSystem.platform match { case MysqlDatabasePlatform | MariadbDatabasePlatform if column.getType.getTypeName == "TIMESTAMP" => Option("datetime(6)") case _ => None } - } private def columnTypeDbio(column: Column, databaseSystem: DatabaseSystem, - database: SlickDatabase): database.dataAccess.driver.api.DBIO[String] = { + database: SlickDatabase + ): database.dataAccess.driver.api.DBIO[String] = { import database.dataAccess.driver.api._ databaseSystem.platform match { case MysqlDatabasePlatform | MariadbDatabasePlatform if column.getType.getTypeName == "TIMESTAMP" => val getType = GetResult(_.rs.getString("Type")) - //noinspection SqlDialectInspection + // noinspection SqlDialectInspection sql"""SHOW COLUMNS FROM #${column.getRelation.getName} WHERE FIELD = '#${column.getName}' @@ -472,23 +462,23 @@ object LiquibaseComparisonSpec { */ private def sequenceTypeValidationOption(column: Column, databaseSystem: DatabaseSystem, - connectionMetadata: ConnectionMetadata, - ): Option[String] = { + connectionMetadata: ConnectionMetadata + ): Option[String] = databaseSystem.platform match { case PostgresqlDatabasePlatform if column.isAutoIncrement && connectionMetadata.databaseMajorVersion <= 9 => // "this is currently always bigint" --> https://www.postgresql.org/docs/9.6/infoschema-sequences.html Option("bigint") case _ => None } - } private def sequenceTypeDbio(column: Column, databaseSystem: DatabaseSystem, - database: SlickDatabase): database.dataAccess.driver.api.DBIO[String] = { + database: SlickDatabase + ): database.dataAccess.driver.api.DBIO[String] = { import database.dataAccess.driver.api._ databaseSystem.platform match { case PostgresqlDatabasePlatform if column.isAutoIncrement => - //noinspection SqlDialectInspection + // noinspection SqlDialectInspection sql"""select data_type from INFORMATION_SCHEMA.sequences where sequence_name = '#${postgresqlSeqName(column)}' @@ -498,11 +488,11 @@ object LiquibaseComparisonSpec { } private def unsupportedColumnTypeException(column: Column, - databaseSystem: DatabaseSystem): UnsupportedOperationException = { + databaseSystem: DatabaseSystem + ): UnsupportedOperationException = new UnsupportedOperationException( s"${databaseSystem.name} ${column.getRelation.getName}.${column.getName}: ${column.getType.getTypeName}" ) - } /** * Returns columns that are nullable, but shouldn't be. 
@@ -512,7 +502,8 @@ object LiquibaseComparisonSpec { * TODO: make a changelog to fix, and then remove list of mistakes. */ private def getNullTodos(databaseSystem: DatabaseSystem, - databaseType: CromwellDatabaseType[_ <: SlickDatabase]): Seq[ColumnDescription] = { + databaseType: CromwellDatabaseType[_ <: SlickDatabase] + ): Seq[ColumnDescription] = (databaseSystem.platform, databaseType) match { case (MysqlDatabasePlatform, EngineDatabaseType) => List( @@ -538,16 +529,15 @@ object LiquibaseComparisonSpec { ColumnDescription("JOB_STORE_SIMPLETON_ENTRY", "JOB_STORE_ENTRY_ID"), ColumnDescription("WORKFLOW_STORE_ENTRY", "IMPORTS_ZIP"), ColumnDescription("WORKFLOW_STORE_ENTRY", "WORKFLOW_EXECUTION_UUID"), - ColumnDescription("WORKFLOW_STORE_ENTRY", "WORKFLOW_STATE"), + ColumnDescription("WORKFLOW_STORE_ENTRY", "WORKFLOW_STATE") ) case (MysqlDatabasePlatform, MetadataDatabaseType) => List( ColumnDescription("CUSTOM_LABEL_ENTRY", "CUSTOM_LABEL_KEY"), ColumnDescription("CUSTOM_LABEL_ENTRY", "CUSTOM_LABEL_VALUE"), ColumnDescription("SUMMARY_STATUS_ENTRY", "SUMMARY_NAME"), - ColumnDescription("SUMMARY_STATUS_ENTRY", "SUMMARY_POSITION"), + ColumnDescription("SUMMARY_STATUS_ENTRY", "SUMMARY_POSITION") ) case _ => Nil } - } } diff --git a/services/src/test/scala/cromwell/services/database/LiquibaseOrdering.scala b/services/src/test/scala/cromwell/services/database/LiquibaseOrdering.scala index 43ca64ee796..26d3885e003 100644 --- a/services/src/test/scala/cromwell/services/database/LiquibaseOrdering.scala +++ b/services/src/test/scala/cromwell/services/database/LiquibaseOrdering.scala @@ -15,9 +15,8 @@ object LiquibaseOrdering { implicit val liquibaseOrderingForeignKey: Ordering[ForeignKey] = Ordering.by[ForeignKey, String](_.getName) - implicit val liquibaseOrderingUniqueConstraint: Ordering[UniqueConstraint] = { + implicit val liquibaseOrderingUniqueConstraint: Ordering[UniqueConstraint] = Ordering.by[UniqueConstraint, String](_.getName) - } implicit val liquibaseOrderingIndex: Ordering[Index] = Ordering.by[Index, String](_.getName) } diff --git a/services/src/test/scala/cromwell/services/database/LobSpec.scala b/services/src/test/scala/cromwell/services/database/LobSpec.scala index eee252d31eb..2d140d24c81 100644 --- a/services/src/test/scala/cromwell/services/database/LobSpec.scala +++ b/services/src/test/scala/cromwell/services/database/LobSpec.scala @@ -24,15 +24,15 @@ class LobSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Sc PatienceConfig(timeout = scaled(5.seconds), interval = scaled(100.millis)) DatabaseSystem.All foreach { databaseSystem => - behavior of s"CLOBs and BLOBs on ${databaseSystem.name}" val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - lazy val database = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) + lazy val database = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) it should "start container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } it should "store empty blobs" taggedAs DbmsTest in { @@ -58,7 +58,7 @@ class LobSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Sc submissionTime = OffsetDateTime.now.toSystemTimestamp, importsZip = Option(emptyBlob), customLabels = clob, - hogGroup = None, + hogGroup = None ) val workflowStoreEntries = Seq(workflowStoreEntry) @@ -122,7 +122,7 @@ class LobSpec 
extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Sc submissionTime = OffsetDateTime.now.toSystemTimestamp, importsZip = Option(Array.empty[Byte]).toBlobOption, customLabels = clob, - hogGroup = Option("abc-1"), + hogGroup = Option("abc-1") ) val noneWorkflowUuid = WorkflowId.randomId().toString @@ -141,7 +141,7 @@ class LobSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Sc submissionTime = OffsetDateTime.now.toSystemTimestamp, importsZip = None, customLabels = clob, - hogGroup = Option("abc-1"), + hogGroup = Option("abc-1") ) val aByte = 'a'.toByte @@ -161,7 +161,7 @@ class LobSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Sc submissionTime = OffsetDateTime.now.toSystemTimestamp, importsZip = Option(Array(aByte)).toBlobOption, customLabels = clob, - hogGroup = Option("abc-1"), + hogGroup = Option("abc-1") ) val workflowStoreEntries = Seq(emptyWorkflowStoreEntry, noneWorkflowStoreEntry, aByteWorkflowStoreEntry) @@ -201,7 +201,7 @@ class LobSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Sc } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } } diff --git a/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala b/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala index 33dd2a73b66..7a914ba4a1b 100644 --- a/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala +++ b/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala @@ -24,20 +24,20 @@ import scala.language.postfixOps class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures { DatabaseSystem.All foreach { databaseSystem => - implicit val ec: ExecutionContextExecutor = ExecutionContext.global behavior of s"MetadataSlickDatabase on ${databaseSystem.name}" val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - lazy val database = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem) + lazy val database = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem) import cromwell.database.migration.metadata.table.symbol.MetadataStatement.OffsetDateTimeToSystemTimestamp import database.dataAccess.driver.api._ val now = OffsetDateTime.now().toSystemTimestamp it should "start container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } val rootCountableId = "root workflow id: countable stuff" @@ -59,87 +59,494 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val stdErrValue = Option(new SerialClob("test value".toCharArray())) it should "set up the test data" taggedAs DbmsTest in { - database.runTestTransaction( - database.dataAccess.metadataEntries ++= Seq( - MetadataEntry("workflow id: 4 to delete, including 1 label", None, None, None, "someKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("workflow id: 4 to delete, including 1 label", None, None, None, "someKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("workflow id: 4 to delete, including 1 label", None, None, None, "someKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("workflow id: 4 to delete, including 1 label", None, None, 
None, "labels:do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - - MetadataEntry("workflow id: I am a root workflow with a subworkflow", None, None, None, "labels:do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("workflow id: I am a root workflow with a subworkflow", None, None, None, "please do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("workflow id: I am the subworkflow", None, None, None, "labels:do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("workflow id: I am the subworkflow", None, None, None, "please do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - - MetadataEntry("nested subworkflows: root", None, None, None, "please do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("nested subworkflows: first nesting", None, None, None, "please do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("nested subworkflows: second nesting", None, None, None, "please do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry("nested subworkflows: third nesting", None, None, None, "please do delete me", None, None, OffsetDateTime.now().toSystemTimestamp, None), - - // Workflow level - MetadataEntry(rootCountableId, None, None, None, "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(rootCountableId, None, None, None, "excludableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Call level - MetadataEntry(rootCountableId, Option("includableCall"), Option(0), Option(1), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(rootCountableId, Option("includableCall"), Option(0), Option(1), "excludableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Excludable call index - MetadataEntry(rootCountableId, Option("includableCall"), Option(1), Option(1), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Excludable call attempt - MetadataEntry(rootCountableId, Option("includableCall"), Option(0), Option(2), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(rootCountableId, Option("excludableCall"), Option(0), Option(1), "whateverKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - - // Subworkflow level - MetadataEntry(subWorkflowCountableId, None, None, None, "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(subWorkflowCountableId, None, None, None, "excludableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Call level - MetadataEntry(subWorkflowCountableId, Option("includableCall"), Option(0), Option(1), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(subWorkflowCountableId, Option("includableCall"), Option(0), Option(1), "excludableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Excludable call index - MetadataEntry(subWorkflowCountableId, Option("includableCall"), Option(1), Option(1), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Excludable call attempt - MetadataEntry(subWorkflowCountableId, Option("includableCall"), Option(0), Option(2), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(subWorkflowCountableId, 
Option("excludableCall"), Option(0), Option(1), "whateverKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - - // Subsubworkflow level - MetadataEntry(subSubWorkflowCountableId, None, None, None, "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(subSubWorkflowCountableId, None, None, None, "excludableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Call level - MetadataEntry(subSubWorkflowCountableId, Option("includableCall"), Option(0), Option(1), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(subSubWorkflowCountableId, Option("includableCall"), Option(0), Option(1), "excludableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Excludable call index - MetadataEntry(subSubWorkflowCountableId, Option("includableCall"), Option(1), Option(1), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - // Excludable call attempt - MetadataEntry(subSubWorkflowCountableId, Option("includableCall"), Option(0), Option(2), "includableKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(subSubWorkflowCountableId, Option("excludableCall"), Option(0), Option(1), "whateverKey", None, None, OffsetDateTime.now().toSystemTimestamp, None), + database + .runTestTransaction( + database.dataAccess.metadataEntries ++= Seq( + MetadataEntry("workflow id: 4 to delete, including 1 label", + None, + None, + None, + "someKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("workflow id: 4 to delete, including 1 label", + None, + None, + None, + "someKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("workflow id: 4 to delete, including 1 label", + None, + None, + None, + "someKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("workflow id: 4 to delete, including 1 label", + None, + None, + None, + "labels:do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("workflow id: I am a root workflow with a subworkflow", + None, + None, + None, + "labels:do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("workflow id: I am a root workflow with a subworkflow", + None, + None, + None, + "please do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("workflow id: I am the subworkflow", + None, + None, + None, + "labels:do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("workflow id: I am the subworkflow", + None, + None, + None, + "please do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("nested subworkflows: root", + None, + None, + None, + "please do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("nested subworkflows: first nesting", + None, + None, + None, + "please do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("nested subworkflows: second nesting", + None, + None, + None, + "please do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry("nested subworkflows: third nesting", + None, + None, + None, + "please do delete me", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // Workflow level + 
MetadataEntry(rootCountableId, + None, + None, + None, + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(rootCountableId, + None, + None, + None, + "excludableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Call level + MetadataEntry(rootCountableId, + Option("includableCall"), + Option(0), + Option(1), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(rootCountableId, + Option("includableCall"), + Option(0), + Option(1), + "excludableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Excludable call index + MetadataEntry(rootCountableId, + Option("includableCall"), + Option(1), + Option(1), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Excludable call attempt + MetadataEntry(rootCountableId, + Option("includableCall"), + Option(0), + Option(2), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(rootCountableId, + Option("excludableCall"), + Option(0), + Option(1), + "whateverKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // Subworkflow level + MetadataEntry(subWorkflowCountableId, + None, + None, + None, + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(subWorkflowCountableId, + None, + None, + None, + "excludableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Call level + MetadataEntry(subWorkflowCountableId, + Option("includableCall"), + Option(0), + Option(1), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(subWorkflowCountableId, + Option("includableCall"), + Option(0), + Option(1), + "excludableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Excludable call index + MetadataEntry(subWorkflowCountableId, + Option("includableCall"), + Option(1), + Option(1), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Excludable call attempt + MetadataEntry(subWorkflowCountableId, + Option("includableCall"), + Option(0), + Option(2), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(subWorkflowCountableId, + Option("excludableCall"), + Option(0), + Option(1), + "whateverKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // Subsubworkflow level + MetadataEntry(subSubWorkflowCountableId, + None, + None, + None, + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(subSubWorkflowCountableId, + None, + None, + None, + "excludableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Call level + MetadataEntry(subSubWorkflowCountableId, + Option("includableCall"), + Option(0), + Option(1), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(subSubWorkflowCountableId, + Option("includableCall"), + Option(0), + Option(1), + "excludableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Excludable call index + MetadataEntry(subSubWorkflowCountableId, + Option("includableCall"), + Option(1), + Option(1), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + // Excludable call attempt + 
MetadataEntry(subSubWorkflowCountableId, + Option("includableCall"), + Option(0), + Option(2), + "includableKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry(subSubWorkflowCountableId, + Option("excludableCall"), + Option(0), + Option(1), + "whateverKey", + None, + None, + OffsetDateTime.now().toSystemTimestamp, + None + ) + ) ) - ).futureValue(Timeout(10.seconds)) - - database.runTestTransaction( - database.dataAccess.workflowMetadataSummaryEntries ++= Seq( - WorkflowMetadataSummaryEntry("workflow id: 3 to delete, 1 label", Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), None, None, None, None), - - WorkflowMetadataSummaryEntry("workflow id: I am a root workflow with a subworkflow", Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), None, None, None, None), - WorkflowMetadataSummaryEntry("workflow id: I am the subworkflow", Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), Option("workflow id: I am a root workflow with a subworkflow"), Option("workflow id: I am a root workflow with a subworkflow"), None, None), - - WorkflowMetadataSummaryEntry("nested subworkflows: root", Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), None, None, None, None), - WorkflowMetadataSummaryEntry("nested subworkflows: first nesting", Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), Option("nested subworkflows: root"), Option("nested subworkflows: root"), None, None), - WorkflowMetadataSummaryEntry("nested subworkflows: second nesting", Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), Option("nested subworkflows: first nesting"), Option("nested subworkflows: root"), None, None), - WorkflowMetadataSummaryEntry("nested subworkflows: third nesting", Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), Option("nested subworkflows: second nesting"), Option("nested subworkflows: root"), None, None), - - WorkflowMetadataSummaryEntry(rootCountableId, Option("workflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), None, None, None, None), - WorkflowMetadataSummaryEntry(subWorkflowCountableId, Option("subworkflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), Option(rootCountableId), Option(rootCountableId), None, None), - WorkflowMetadataSummaryEntry(subSubWorkflowCountableId, Option("subsubworkflow name"), Option("Succeeded"), Option(now), Option(now), Option(now), Option(subWorkflowCountableId), Option(rootCountableId), None, None), + .futureValue(Timeout(10.seconds)) + + database + .runTestTransaction( + database.dataAccess.workflowMetadataSummaryEntries ++= Seq( + WorkflowMetadataSummaryEntry("workflow id: 3 to delete, 1 label", + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + None, + None, + None, + None + ), + WorkflowMetadataSummaryEntry( + "workflow id: I am a root workflow with a subworkflow", + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + None, + None, + None, + None + ), + WorkflowMetadataSummaryEntry( + "workflow id: I am the subworkflow", + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + Option("workflow id: I am a root workflow with a subworkflow"), + Option("workflow id: I am a root workflow with a subworkflow"), + None, + None + ), + 
WorkflowMetadataSummaryEntry("nested subworkflows: root", + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + None, + None, + None, + None + ), + WorkflowMetadataSummaryEntry( + "nested subworkflows: first nesting", + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + Option("nested subworkflows: root"), + Option("nested subworkflows: root"), + None, + None + ), + WorkflowMetadataSummaryEntry( + "nested subworkflows: second nesting", + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + Option("nested subworkflows: first nesting"), + Option("nested subworkflows: root"), + None, + None + ), + WorkflowMetadataSummaryEntry( + "nested subworkflows: third nesting", + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + Option("nested subworkflows: second nesting"), + Option("nested subworkflows: root"), + None, + None + ), + WorkflowMetadataSummaryEntry(rootCountableId, + Option("workflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + None, + None, + None, + None + ), + WorkflowMetadataSummaryEntry( + subWorkflowCountableId, + Option("subworkflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + Option(rootCountableId), + Option(rootCountableId), + None, + None + ), + WorkflowMetadataSummaryEntry( + subSubWorkflowCountableId, + Option("subsubworkflow name"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + Option(subWorkflowCountableId), + Option(rootCountableId), + None, + None + ) + ) ) - ).futureValue(Timeout(10.seconds)) + .futureValue(Timeout(10.seconds)) } it should "delete the right number of rows for a root workflow without subworkflows" taggedAs DbmsTest in { - val delete = database.deleteAllMetadataForWorkflowAndUpdateArchiveStatus("workflow id: 4 to delete, including 1 label", None) + val delete = + database.deleteAllMetadataForWorkflowAndUpdateArchiveStatus("workflow id: 4 to delete, including 1 label", None) delete.futureValue(Timeout(10.seconds)) should be(4) } it should "delete the right number of rows for a root workflow with subworkflows" taggedAs DbmsTest in { - val delete = database.deleteAllMetadataForWorkflowAndUpdateArchiveStatus("workflow id: I am a root workflow with a subworkflow", None) + val delete = database.deleteAllMetadataForWorkflowAndUpdateArchiveStatus( + "workflow id: I am a root workflow with a subworkflow", + None + ) delete.futureValue(Timeout(10.seconds)) should be(2) } @@ -153,23 +560,41 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val expansionFactor = if (expandSubWorkflows) 3 else 1; // Everything { - val count = database.countMetadataEntries(rootCountableId, expandSubWorkflows = expandSubWorkflows, 10 seconds) + val count = + database.countMetadataEntries(rootCountableId, expandSubWorkflows = expandSubWorkflows, 10 seconds) count.futureValue(Timeout(10.seconds)) should be(7 * expansionFactor) } // Only includable keys - this looks for workflow level data only { - val count = database.countMetadataEntries(rootCountableId, "includableKey", expandSubWorkflows = expandSubWorkflows, 10 seconds) + val count = database.countMetadataEntries(rootCountableId, + "includableKey", + expandSubWorkflows = expandSubWorkflows, + 10 seconds + ) count.futureValue(Timeout(10.seconds)) should be(1 * expansionFactor) } { - val count = database.countMetadataEntries(rootCountableId, 
"includableCall", Option(0), Option(1), expandSubWorkflows = expandSubWorkflows, 10 seconds) + val count = database.countMetadataEntries(rootCountableId, + "includableCall", + Option(0), + Option(1), + expandSubWorkflows = expandSubWorkflows, + 10 seconds + ) count.futureValue(Timeout(10 seconds)) should be(2 * expansionFactor) } { - val count = database.countMetadataEntries(rootCountableId, "includableKey", "includableCall", Option(0), Option(1), expandSubWorkflows = expandSubWorkflows, 10 seconds) + val count = database.countMetadataEntries(rootCountableId, + "includableKey", + "includableCall", + Option(0), + Option(1), + expandSubWorkflows = expandSubWorkflows, + 10 seconds + ) count.futureValue(Timeout(10 seconds)) should be(1 * expansionFactor) } @@ -180,7 +605,8 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit metadataKeysToFilterOut = List("excludable%"), CallQuery("includableCall", Option(0), Option(1)), expandSubWorkflows = expandSubWorkflows, - 10 seconds) + 10 seconds + ) count.futureValue(Timeout(10 seconds)) should be(1 * expansionFactor) } @@ -211,76 +637,465 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit } it should "fetch failed tasks from a failed workflow" taggedAs DbmsTest in { - database.runTestTransaction( - database.dataAccess.metadataEntries ++= Seq( - //Failed parent workflow calls (successful calls and older attempts and runs mixed in for negative checks) - MetadataEntry(failedParentWorkflowId, Option("failedWorkflowCall"), Option(0), Option(0), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedParentWorkflowId, Option("failedWorkflowCall"), Option(0), Option(1), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedParentWorkflowId, Option("failedWorkflowCall"), Option(1), Option(0), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedParentWorkflowId, Option("failedWorkflowCall"), Option(1), Option(1), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedParentWorkflowId, Option("failedWorkflowCall"), Option(1), Option(1), "stderr", stdErrValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedParentWorkflowId, Option("successfulWorkflowCall"), Option(0), Option(0), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedParentWorkflowId, Option("successfulSubWorkflowCall"), Option(0), Option(0), "subWorkflowId", Option(new SerialClob(failedChildWorkflowId.toCharArray)), Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - - //ignored failed workflow calls. 
These should not be fetched since it's not part of the target workflow tree - MetadataEntry(ignoredFailedParentWorkflowId, Option("failedWorkflowCall"), Option(0), Option(0), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(ignoredFailedChildWorkflowId, Option("failedSubWorkflowCall"), None, Option(1), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - - //child workflow calls (successful calls and previous failed attempts/shards are mixed in for negative checks) - //notFailedSubWorkflowCall should not be returned since it succeeded on the last attempt and has no scatters - MetadataEntry(failedChildWorkflowId, Option("notActuallyFailedSubWorkflowCall"), None, Option(1), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("notActuallyFailedSubWorkflowCall"), None, Option(2), "backendStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("notActuallyFailedSubWorkflowCall"), None, Option(2), "stderr", stdErrValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("successfulSubWorkflowCall"), Option(0), Option(0), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - - //Failed child workflow calls (successful calls and previous failed attempts/shards are mixed in for negative checks) - //failedSubWorkflowCall should be returned since it never succeeded - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall"), None, Option(1), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall"), None, Option(2), "backendStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall"), None, Option(2), "stderr", stdErrValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("successfulSubWorkflowCall2"), Option(0), Option(0), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - - //Third set of child workflow calls, similar to above however this set consists of retries and scatters - //It's safe to assume that if one scatter fails then they all fail, so pull the last scatter on the last attempt - //failedSubWorkflowCall2 should return since the scatters failed on the last attempt - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall2"), Option(1), Option(1), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall2"), Option(2), Option(1), "backendStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall2"), Option(1), Option(2), "backendStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall2"), Option(2), Option(2), "executionStatus", 
failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("failedSubWorkflowCall2"), Option(2), Option(2), "stderr", stdErrValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("successfulSubWorkflowCall3"), Option(0), Option(0), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - - //Fourth set of child workflow calls - //This set should not return anything since the scatters succeeded on the last attempt - MetadataEntry(failedChildWorkflowId, Option("notActuallySubWorkflowCall2"), Option(1), Option(1), "executionStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("notActuallySubWorkflowCall2"), Option(2), Option(1), "backendStatus", failedStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("notActuallySubWorkflowCall2"), Option(1), Option(2), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("notActuallySubWorkflowCall2"), Option(2), Option(2), "backendStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("notActuallySubWorkflowCall2"), Option(2), Option(2), "stderr", stdErrValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(failedChildWorkflowId, Option("successfulSubWorkflowCall4"), Option(0), Option(0), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - - //Successful parent workflow call (negative check) - MetadataEntry(successfulParentWorkflowId, Option("successfulWorkflowCall"), Option(0), Option(0), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - - //Successful child workflow calls (negative check) - MetadataEntry(successfulChildWorkflowId, Option("successfulSubWorkflowCall"), Option(0), Option(0), "executionStatus", doneStatusMetadataValue, Option("String"), OffsetDateTime.now().toSystemTimestamp, None), - MetadataEntry(successfulChildWorkflowId, Option("successfulSubWorkflowCall"), Option(0), Option(0), "subWorkflowId", Option(new SerialClob(successfulParentWorkflowId.toCharArray)), Option("String"), OffsetDateTime.now().toSystemTimestamp, None) + database + .runTestTransaction( + database.dataAccess.metadataEntries ++= Seq( + // Failed parent workflow calls (successful calls and older attempts and runs mixed in for negative checks) + MetadataEntry( + failedParentWorkflowId, + Option("failedWorkflowCall"), + Option(0), + Option(0), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedParentWorkflowId, + Option("failedWorkflowCall"), + Option(0), + Option(1), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedParentWorkflowId, + Option("failedWorkflowCall"), + Option(1), + Option(0), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedParentWorkflowId, + 
Option("failedWorkflowCall"), + Option(1), + Option(1), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedParentWorkflowId, + Option("failedWorkflowCall"), + Option(1), + Option(1), + "stderr", + stdErrValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedParentWorkflowId, + Option("successfulWorkflowCall"), + Option(0), + Option(0), + "executionStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedParentWorkflowId, + Option("successfulSubWorkflowCall"), + Option(0), + Option(0), + "subWorkflowId", + Option(new SerialClob(failedChildWorkflowId.toCharArray)), + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // ignored failed workflow calls. These should not be fetched since it's not part of the target workflow tree + MetadataEntry( + ignoredFailedParentWorkflowId, + Option("failedWorkflowCall"), + Option(0), + Option(0), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + ignoredFailedChildWorkflowId, + Option("failedSubWorkflowCall"), + None, + Option(1), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // child workflow calls (successful calls and previous failed attempts/shards are mixed in for negative checks) + // notFailedSubWorkflowCall should not be returned since it succeeded on the last attempt and has no scatters + MetadataEntry( + failedChildWorkflowId, + Option("notActuallyFailedSubWorkflowCall"), + None, + Option(1), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("notActuallyFailedSubWorkflowCall"), + None, + Option(2), + "backendStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("notActuallyFailedSubWorkflowCall"), + None, + Option(2), + "stderr", + stdErrValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("successfulSubWorkflowCall"), + Option(0), + Option(0), + "executionStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // Failed child workflow calls (successful calls and previous failed attempts/shards are mixed in for negative checks) + // failedSubWorkflowCall should be returned since it never succeeded + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall"), + None, + Option(1), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall"), + None, + Option(2), + "backendStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall"), + None, + Option(2), + "stderr", + stdErrValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("successfulSubWorkflowCall2"), + Option(0), + Option(0), + "executionStatus", + 
doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // Third set of child workflow calls, similar to above however this set consists of retries and scatters + // It's safe to assume that if one scatter fails then they all fail, so pull the last scatter on the last attempt + // failedSubWorkflowCall2 should return since the scatters failed on the last attempt + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall2"), + Option(1), + Option(1), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall2"), + Option(2), + Option(1), + "backendStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall2"), + Option(1), + Option(2), + "backendStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall2"), + Option(2), + Option(2), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("failedSubWorkflowCall2"), + Option(2), + Option(2), + "stderr", + stdErrValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("successfulSubWorkflowCall3"), + Option(0), + Option(0), + "executionStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // Fourth set of child workflow calls + // This set should not return anything since the scatters succeeded on the last attempt + MetadataEntry( + failedChildWorkflowId, + Option("notActuallySubWorkflowCall2"), + Option(1), + Option(1), + "executionStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("notActuallySubWorkflowCall2"), + Option(2), + Option(1), + "backendStatus", + failedStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("notActuallySubWorkflowCall2"), + Option(1), + Option(2), + "executionStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("notActuallySubWorkflowCall2"), + Option(2), + Option(2), + "backendStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("notActuallySubWorkflowCall2"), + Option(2), + Option(2), + "stderr", + stdErrValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + failedChildWorkflowId, + Option("successfulSubWorkflowCall4"), + Option(0), + Option(0), + "executionStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // Successful parent workflow call (negative check) + MetadataEntry( + successfulParentWorkflowId, + Option("successfulWorkflowCall"), + Option(0), + Option(0), + "executionStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + + // 
Successful child workflow calls (negative check) + MetadataEntry( + successfulChildWorkflowId, + Option("successfulSubWorkflowCall"), + Option(0), + Option(0), + "executionStatus", + doneStatusMetadataValue, + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ), + MetadataEntry( + successfulChildWorkflowId, + Option("successfulSubWorkflowCall"), + Option(0), + Option(0), + "subWorkflowId", + Option(new SerialClob(successfulParentWorkflowId.toCharArray)), + Option("String"), + OffsetDateTime.now().toSystemTimestamp, + None + ) + ) ) - ).futureValue(Timeout(10.seconds)) - - database.runTestTransaction( - database.dataAccess.workflowMetadataSummaryEntries ++= Seq( - //Failed WorkflowMetadataSummaryEntry setup - WorkflowMetadataSummaryEntry(failedParentWorkflowId, Option("failedParentWorkflow"), Option("Failed"), Option(now), Option(now), Option(now), None, None, None, None), - WorkflowMetadataSummaryEntry(failedChildWorkflowId, Option("failedChildWorkflow"), Option("Failed"), Option(now), Option(now), Option(now), Option(failedParentWorkflowId), Option(failedParentWorkflowId), None, None), - WorkflowMetadataSummaryEntry(successfulParentWorkflowId, Option("successfulParentWorkflow"), Option("Succeeded"), Option(now), Option(now), Option(now), None, None, None, None), - WorkflowMetadataSummaryEntry(successfulChildWorkflowId, Option("successfulChildWorkflow"), Option("Succeeded"), Option(now), Option(now), Option(now), Option(successfulParentWorkflowId), Option(successfulParentWorkflowId), None, None), - WorkflowMetadataSummaryEntry(ignoredFailedParentWorkflowId, Option("ignoredFailedParentWorkflow"), Option("Failed"), Option(now), Option(now), Option(now), Option(ignoredFailedParentWorkflowId), Option(ignoredFailedParentWorkflowId), None, None), - WorkflowMetadataSummaryEntry(ignoredFailedChildWorkflowId, Option("ignoredFailedChildWorkflow"), Option("Failed"), Option(now), Option(now), Option(now), Option(ignoredFailedChildWorkflowId), Option(ignoredFailedChildWorkflowId), None, None) + .futureValue(Timeout(10.seconds)) + + database + .runTestTransaction( + database.dataAccess.workflowMetadataSummaryEntries ++= Seq( + // Failed WorkflowMetadataSummaryEntry setup + WorkflowMetadataSummaryEntry(failedParentWorkflowId, + Option("failedParentWorkflow"), + Option("Failed"), + Option(now), + Option(now), + Option(now), + None, + None, + None, + None + ), + WorkflowMetadataSummaryEntry( + failedChildWorkflowId, + Option("failedChildWorkflow"), + Option("Failed"), + Option(now), + Option(now), + Option(now), + Option(failedParentWorkflowId), + Option(failedParentWorkflowId), + None, + None + ), + WorkflowMetadataSummaryEntry(successfulParentWorkflowId, + Option("successfulParentWorkflow"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + None, + None, + None, + None + ), + WorkflowMetadataSummaryEntry( + successfulChildWorkflowId, + Option("successfulChildWorkflow"), + Option("Succeeded"), + Option(now), + Option(now), + Option(now), + Option(successfulParentWorkflowId), + Option(successfulParentWorkflowId), + None, + None + ), + WorkflowMetadataSummaryEntry( + ignoredFailedParentWorkflowId, + Option("ignoredFailedParentWorkflow"), + Option("Failed"), + Option(now), + Option(now), + Option(now), + Option(ignoredFailedParentWorkflowId), + Option(ignoredFailedParentWorkflowId), + None, + None + ), + WorkflowMetadataSummaryEntry( + ignoredFailedChildWorkflowId, + Option("ignoredFailedChildWorkflow"), + Option("Failed"), + Option(now), + Option(now), + Option(now), + 
Option(ignoredFailedChildWorkflowId), + Option(ignoredFailedChildWorkflowId), + None, + None + ) + ) ) - ).futureValue(Timeout(10.seconds)) + .futureValue(Timeout(10.seconds)) - val futureEntries: Future[Seq[MetadataEntry]] = database.getFailedJobsMetadataWithWorkflowId(failedParentWorkflowId) + val futureEntries: Future[Seq[MetadataEntry]] = + database.getFailedJobsMetadataWithWorkflowId(failedParentWorkflowId) var entriesFound = false val recordCount = Map( @@ -294,69 +1109,65 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit ) ) - whenReady(futureEntries, timeout(scaled(5 seconds))) { - entries => - entries.foreach(entry => { - entriesFound = true - val workflowId = entry.workflowExecutionUuid - recordCount.getOrElse(workflowId, None) should not be(None) - recordCount(workflowId)("actual") += 1 - val metadataValueClob = entry.metadataValue.get - val metadataValueString = metadataValueClob.getSubString(1, metadataValueClob.length().toInt) - - entry.metadataKey match { - case "stderr" => { - metadataValueString should be("test value") - } - case "backendStatus" => { - metadataValueString should be("Failed") - } - case "executionStatus" => { - metadataValueString should be("Failed") - } - case _ => fail(s"Unexpected key ${entry.metadataKey} was included in result set") - } - - entry.metadataKey should not be("subWorkflowId") - entry.callFullyQualifiedName.getOrElse("") match { - case "failedWorkflowCall" => { - entry.jobIndex.get should be(1) - entry.jobAttempt.get should be(1) - val isValidKey = List("executionStatus", "stderr").contains(entry.metadataKey) - isValidKey should be(true) - } - case "failedSubWorkflowCall" => { - entry.jobIndex should be(None) - entry.jobAttempt.get should be(2) - val isValidKey = List("backendStatus", "stderr").contains(entry.metadataKey) - isValidKey should be(true) - } - case "failedSubWorkflowCall2" => { - entry.jobIndex.get should be(2) - entry.jobAttempt.get should be(2) - val isValidKey = List("executionStatus", "stderr").contains(entry.metadataKey) - isValidKey should be(true) - } - case _ => fail(s"Entry ${entry.callFullyQualifiedName.getOrElse("N/A")} | Index: ${entry.jobIndex.get} | Attempt: ${entry.jobAttempt.get} should not be in result set") - } - }) - entriesFound should be(true) - recordCount.foreach(record => { - record._2("actual") should be(record._2("expected")) - }) + whenReady(futureEntries, timeout(scaled(5 seconds))) { entries => + entries.foreach { entry => + entriesFound = true + val workflowId = entry.workflowExecutionUuid + recordCount.getOrElse(workflowId, None) should not be None + recordCount(workflowId)("actual") += 1 + val metadataValueClob = entry.metadataValue.get + val metadataValueString = metadataValueClob.getSubString(1, metadataValueClob.length().toInt) + + entry.metadataKey match { + case "stderr" => + metadataValueString should be("test value") + case "backendStatus" => + metadataValueString should be("Failed") + case "executionStatus" => + metadataValueString should be("Failed") + case _ => fail(s"Unexpected key ${entry.metadataKey} was included in result set") + } + + entry.metadataKey should not be "subWorkflowId" + entry.callFullyQualifiedName.getOrElse("") match { + case "failedWorkflowCall" => + entry.jobIndex.get should be(1) + entry.jobAttempt.get should be(1) + val isValidKey = List("executionStatus", "stderr").contains(entry.metadataKey) + isValidKey should be(true) + case "failedSubWorkflowCall" => + entry.jobIndex should be(None) + entry.jobAttempt.get should be(2) + val 
isValidKey = List("backendStatus", "stderr").contains(entry.metadataKey) + isValidKey should be(true) + case "failedSubWorkflowCall2" => + entry.jobIndex.get should be(2) + entry.jobAttempt.get should be(2) + val isValidKey = List("executionStatus", "stderr").contains(entry.metadataKey) + isValidKey should be(true) + case _ => + fail( + s"Entry ${entry.callFullyQualifiedName.getOrElse("N/A")} | Index: ${entry.jobIndex.get} | Attempt: ${entry.jobAttempt.get} should not be in result set" + ) + } + } + entriesFound should be(true) + recordCount.foreach(record => record._2("actual") should be(record._2("expected"))) } } it should "clean up & close the database" taggedAs DbmsTest in { // Not relevant in Travis where all state gets nuked but useful for testing locally database.runTestTransaction(database.dataAccess.metadataEntries.delete).futureValue(Timeout(10.seconds)) - database.runTestTransaction(database.dataAccess.workflowMetadataSummaryEntries.delete).futureValue(Timeout(10.seconds)) + database + .runTestTransaction(database.dataAccess.workflowMetadataSummaryEntries.delete) + .futureValue(Timeout(10.seconds)) database.close() } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } @@ -364,7 +1175,7 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit behavior of "MetadataSlickDatabase" it should "partition metadata for summarization correctly" in { - def partition(metadata: Seq[MetadataEntry]): SummarizationPartitionedMetadata = { + def partition(metadata: Seq[MetadataEntry]): SummarizationPartitionedMetadata = MetadataSlickDatabase.partitionSummarizationMetadata( rawMetadataEntries = metadata, startMetadataKey = WorkflowMetadataKeys.StartTime, @@ -374,8 +1185,8 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit submissionMetadataKey = WorkflowMetadataKeys.SubmissionTime, parentWorkflowIdKey = WorkflowMetadataKeys.ParentWorkflowId, rootWorkflowIdKey = WorkflowMetadataKeys.RootWorkflowId, - labelMetadataKey = WorkflowMetadataKeys.Labels) - } + labelMetadataKey = WorkflowMetadataKeys.Labels + ) { // Edge condition: empty input diff --git a/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala b/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala index ad3277f4ae0..6beb304380a 100644 --- a/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala +++ b/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala @@ -22,32 +22,34 @@ import scala.util.matching.Regex class QueryTimeoutSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures { DatabaseSystem.All foreach { databaseSystem => - testOption(databaseSystem) foreach { - case (sleepCommand, errorMessageGenerator) => - behavior of s"Query timeouts on ${databaseSystem.name}" + testOption(databaseSystem) foreach { case (sleepCommand, errorMessageGenerator) => + behavior of s"Query timeouts on ${databaseSystem.name}" - val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) + val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - it should "start container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } - } + it should "start container if required" taggedAs DbmsTest in { + containerOpt.foreach(_.start) + } - it should "fail with a timeout" taggedAs DbmsTest in { - checkDatabaseSystem(containerOpt, 
databaseSystem, sleepCommand, errorMessageGenerator) - } + it should "fail with a timeout" taggedAs DbmsTest in { + checkDatabaseSystem(containerOpt, databaseSystem, sleepCommand, errorMessageGenerator) + } - it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } - } + it should "stop container if required" taggedAs DbmsTest in { + containerOpt.foreach(_.stop()) + } } } private def checkDatabaseSystem(containerOpt: Option[Container], databaseSystem: DatabaseSystem, sleepCommand: String, - errorMessageGenerator: ErrorMessageGenerator): Unit = { + errorMessageGenerator: ErrorMessageGenerator + ): Unit = for { - testDatabase <- DatabaseTestKit.schemalessDatabaseFromContainerOptAndSystem(containerOpt, databaseSystem).autoClosed + testDatabase <- DatabaseTestKit + .schemalessDatabaseFromContainerOptAndSystem(containerOpt, databaseSystem) + .autoClosed } { import testDatabase.dataAccess.driver.api._ @@ -55,7 +57,7 @@ class QueryTimeoutSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val errorMessage = errorMessageGenerator(metadataGenerator) - //noinspection SqlDialectInspection + // noinspection SqlDialectInspection val future = testDatabase.runTestTransaction(sql"""#$sleepCommand""".as[Int].headOption, timeout = 5.seconds) errorMessage match { case IntErrorMessage(result) => future.futureValue(Timeout(10.seconds)) should be(Option(result)) @@ -64,21 +66,21 @@ class QueryTimeoutSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher future.failed.futureValue(Timeout(10.seconds)).getMessage should fullyMatch regex pattern } } - } - private def testOption(databaseSystem: DatabaseSystem): Option[(String, ErrorMessageGenerator)] = { + private def testOption(databaseSystem: DatabaseSystem): Option[(String, ErrorMessageGenerator)] = databaseSystem.platform match { case HsqldbDatabasePlatform => // HSQL does not document a SLEEP() function, which is essential for this test // The functionality being tested is not relevant to an HSQL user, so the omission is probably acceptable None case MariadbDatabasePlatform => - Option(( - "select sleep(10);", - (metadataGenerator: MetadataGenerator) => { - val metadata = metadataGenerator() - (metadata.databaseMajorVersion, metadata.databaseMinorVersion) match { - /* + Option( + ( + "select sleep(10);", + (metadataGenerator: MetadataGenerator) => { + val metadata = metadataGenerator() + (metadata.databaseMajorVersion, metadata.databaseMinorVersion) match { + /* The docs say "If SLEEP() is interrupted, it returns 1." - https://mariadb.com/kb/en/sleep/ @@ -101,27 +103,31 @@ class QueryTimeoutSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher Either way, we just need to make sure the sleep was interrupted and don't care if it was via an exception or returning `1`. 
- */ - case (major, minor) if (major >= 10 && minor >= 3) || major >= 11 => - RegexErrorMessage( + */ + case (major, minor) if (major >= 10 && minor >= 3) || major >= 11 => + RegexErrorMessage( """(\(conn=\d+\) )?Query execution was interrupted \(max_statement_time exceeded\)""".r - ) - case _ => IntErrorMessage(1) + ) + case _ => IntErrorMessage(1) + } } - } - )) + ) + ) case MysqlDatabasePlatform => - Option(( - "select sleep(10);", - _ => StringErrorMessage("Statement cancelled due to timeout or client request"), - )) + Option( + ( + "select sleep(10);", + _ => StringErrorMessage("Statement cancelled due to timeout or client request") + ) + ) case PostgresqlDatabasePlatform => - Option(( - "select pg_sleep(10);", - _ => StringErrorMessage("ERROR: canceling statement due to user request"), - )) + Option( + ( + "select pg_sleep(10);", + _ => StringErrorMessage("ERROR: canceling statement due to user request") + ) + ) } - } } object QueryTimeoutSpec { diff --git a/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala b/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala index 88fceef7063..7f7b7b7f27f 100644 --- a/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala +++ b/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala @@ -32,28 +32,33 @@ class RootAndSubworkflowLabelsSpec extends AnyFlatSpec with CromwellTimeoutSpec println(now) it should "start container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } it should "set up the test data" taggedAs DbmsTest in { - database.runTestTransaction( - { - val root = summary(name = "root") - val branch = summary(name = "branch", parent = Option("root"), root = Option("root")) - val leaf = summary(name = "leaf", parent = Option("branch"), root = Option("root")) - val random = summary(name = "random") - database.dataAccess.workflowMetadataSummaryEntries ++= Seq(root, branch, leaf, random) - } andThen { - val root = label("root") - // intentionally not labeling the branch as a negative test - val leaf = label("leaf") - val random = label("random") - database.dataAccess.customLabelEntries ++= Seq(root, /* branch, */ leaf, random) - } - ).futureValue(Timeout(10.seconds)) + database + .runTestTransaction( + { + val root = summary(name = "root") + val branch = summary(name = "branch", parent = Option("root"), root = Option("root")) + val leaf = summary(name = "leaf", parent = Option("branch"), root = Option("root")) + val random = summary(name = "random") + database.dataAccess.workflowMetadataSummaryEntries ++= Seq(root, branch, leaf, random) + } andThen { + val root = label("root") + // intentionally not labeling the branch as a negative test + val leaf = label("leaf") + val random = label("random") + database.dataAccess.customLabelEntries ++= Seq(root, /* branch, */ leaf, random) + } + ) + .futureValue(Timeout(10.seconds)) } - def summary(name: String, parent: Option[String] = None, root: Option[String] = None): WorkflowMetadataSummaryEntry = { + def summary(name: String, + parent: Option[String] = None, + root: Option[String] = None + ): WorkflowMetadataSummaryEntry = WorkflowMetadataSummaryEntry( workflowExecutionUuid = name, workflowStatus = Option("Succeeded"), @@ -65,22 +70,19 @@ class RootAndSubworkflowLabelsSpec extends AnyFlatSpec with CromwellTimeoutSpec rootWorkflowExecutionUuid = root, metadataArchiveStatus = None ) - } - def label(uuid: String): 
CustomLabelEntry = { + def label(uuid: String): CustomLabelEntry = CustomLabelEntry(customLabelKey = "key", customLabelValue = uuid, workflowExecutionUuid = uuid) - } it should "query root and subworkflow labels correctly" taggedAs DbmsTest in { - database.getRootAndSubworkflowLabels("root"). - futureValue(Timeout(10.seconds)) shouldBe Map( + database.getRootAndSubworkflowLabels("root").futureValue(Timeout(10.seconds)) shouldBe Map( "root" -> Map("key" -> "root"), "leaf" -> Map("key" -> "leaf") ) } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } } diff --git a/services/src/test/scala/cromwell/services/database/SchemaManager.scala b/services/src/test/scala/cromwell/services/database/SchemaManager.scala index 02ba7297883..f6e3cff0d88 100644 --- a/services/src/test/scala/cromwell/services/database/SchemaManager.scala +++ b/services/src/test/scala/cromwell/services/database/SchemaManager.scala @@ -13,7 +13,7 @@ sealed trait SchemaManager { object SchemaManager { val All: Seq[SchemaManager] = List( LiquibaseSchemaManager, - SlickSchemaManager, + SlickSchemaManager ) } diff --git a/services/src/test/scala/cromwell/services/database/SchemaManagerSpec.scala b/services/src/test/scala/cromwell/services/database/SchemaManagerSpec.scala index a22188b34b6..17dc425182c 100644 --- a/services/src/test/scala/cromwell/services/database/SchemaManagerSpec.scala +++ b/services/src/test/scala/cromwell/services/database/SchemaManagerSpec.scala @@ -67,12 +67,13 @@ class SchemaManagerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche for { actualDatabase <- inMemoryDatabase(databaseType, schemaManager).autoClosed expectedDatabase <- inMemoryDatabase(databaseType, otherSchemaManager).autoClosed - } { + } compare( - expectedDatabase.dataAccess.driver, expectedDatabase.database, - actualDatabase.dataAccess.driver, actualDatabase.database, + expectedDatabase.dataAccess.driver, + expectedDatabase.database, + actualDatabase.dataAccess.driver, + actualDatabase.database ) { diffResult => - import cromwell.database.migration.liquibase.DiffResultFilter._ /* @@ -80,9 +81,7 @@ class SchemaManagerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche See notes at the bottom of changelog.xml */ val diffFilters = StandardTypeFilters - val filteredDiffResult = diffResult - .filterChangeLogs - .filterLiquibaseObjects + val filteredDiffResult = diffResult.filterChangeLogs.filterLiquibaseObjects .filterChangedObjects(diffFilters) val totalChanged = @@ -98,14 +97,16 @@ class SchemaManagerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche diffToChangeLog.print(printStream) val changeSetsScoped = XML.loadString(outputStream.toString) \ "changeSet" \ "_" val changeSets = changeSetsScoped map stripNodeScope - fail(changeSets.mkString( - s"The following changes are in $schemaManager but not in $otherSchemaManager:\n ", - "\n ", - "\nEnsure that the columns/fields exist, with the same lengths in " + - s"$schemaManager and $otherSchemaManager and synchronize the two.")) + fail( + changeSets.mkString( + s"The following changes are in $schemaManager but not in $otherSchemaManager:\n ", + "\n ", + "\nEnsure that the columns/fields exist, with the same lengths in " + + s"$schemaManager and $otherSchemaManager and synchronize the two." 
+ ) + ) } } - } } it should "match expected generated names" taggedAs DbmsTest in { @@ -113,9 +114,8 @@ class SchemaManagerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche for { slickDatabase <- inMemoryDatabase(databaseType, schemaManager).autoClosed - } { - schemaMetadata = getSchemaMetadata(slickDatabase) } + schemaMetadata = getSchemaMetadata(slickDatabase) var misnamed = Seq.empty[String] @@ -184,7 +184,9 @@ class SchemaManagerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche if (missing.nonEmpty) { failMessage += missing.mkString( s"Based on the schema in $schemaManager, please ensure that the following tables/columns exist:\n", - "\n", "\n") + "\n", + "\n" + ) } fail(failMessage) @@ -196,33 +198,29 @@ class SchemaManagerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche object SchemaManagerSpec { // strip the namespace from elems and their children - private def stripNodeScope(node: Node): Node = { + private def stripNodeScope(node: Node): Node = node match { case elem: Elem => elem.copy(scope = TopScope, child = elem.child map stripNodeScope) case other => other } - } - private def compare[ReferenceProfile <: JdbcProfile, ComparisonProfile <: JdbcProfile, T] - (referenceProfile: ReferenceProfile, - referenceDatabase: ReferenceProfile#Backend#Database, - comparisonProfile: ComparisonProfile, - comparisonDatabase: ComparisonProfile#Backend#Database)(block: DiffResult => T): T = { + private def compare[ReferenceProfile <: JdbcProfile, ComparisonProfile <: JdbcProfile, T]( + referenceProfile: ReferenceProfile, + referenceDatabase: ReferenceProfile#Backend#Database, + comparisonProfile: ComparisonProfile, + comparisonDatabase: ComparisonProfile#Backend#Database + )(block: DiffResult => T): T = DatabaseTestKit.withConnections(referenceProfile, referenceDatabase, comparisonProfile, comparisonDatabase) { LiquibaseUtils.compare(_, _)(block) } - } - private val SnakeRegex = "_([a-z])".r - private def snakeToCamel(value: String): String = { + private def snakeToCamel(value: String): String = SnakeRegex.replaceAllIn(value.toLowerCase, _.group(1).toUpperCase) - } - private def snakeAbbreviate(value: String): String = { + private def snakeAbbreviate(value: String): String = SnakeRegex.findAllMatchIn("_" + value.toLowerCase).map(_.group(1)).mkString("").toUpperCase - } private def tableClassName(tableName: String) = s"cromwell.database.sql.tables.$tableName" @@ -246,40 +244,42 @@ object SchemaManagerSpec { case class DatabaseItem(tableName: String, itemName: String) - case class SchemaMetadata(tableMetadata: Seq[MTable], columnMetadata: Seq[MColumn], indexMetadata: Seq[MIndexInfo], - primaryKeyMetadata: Seq[MPrimaryKey], foreignKeyMetadata: Seq[MForeignKey]) { - lazy val tables: Seq[TableClass] = tableMetadata.map({ table => + case class SchemaMetadata(tableMetadata: Seq[MTable], + columnMetadata: Seq[MColumn], + indexMetadata: Seq[MIndexInfo], + primaryKeyMetadata: Seq[MPrimaryKey], + foreignKeyMetadata: Seq[MForeignKey] + ) { + lazy val tables: Seq[TableClass] = tableMetadata.map { table => val tableName = snakeToCamel(table.name.name).capitalize TableClass(tableName) - }).distinct + }.distinct - lazy val columns: Seq[DatabaseItem] = columnMetadata.map({ column => + lazy val columns: Seq[DatabaseItem] = columnMetadata.map { column => val tableName = snakeToCamel(column.table.name).capitalize val columnName = snakeToCamel(column.name) DatabaseItem(tableName, columnName) - }).distinct + }.distinct - lazy val indexes: Seq[DatabaseItem] = indexMetadata.map({ 
index => + lazy val indexes: Seq[DatabaseItem] = indexMetadata.map { index => val tableName = snakeToCamel(index.table.name).capitalize val indexName = snakeToCamel(getIndexName(index)) DatabaseItem(tableName, indexName) - }).distinct + }.distinct - lazy val foreignKeys: Seq[DatabaseItem] = foreignKeyMetadata.map({ foreignKey => + lazy val foreignKeys: Seq[DatabaseItem] = foreignKeyMetadata.map { foreignKey => val tableName = snakeToCamel(foreignKey.fkTable.name).capitalize val indexName = snakeToCamel(foreignKey.fkName.get) DatabaseItem(tableName, indexName) - }).distinct + }.distinct lazy val slickItems: Seq[DatabaseItem] = columns ++ indexes ++ foreignKeys - def existsTableItem(tableItem: DatabaseItem): Boolean = { + def existsTableItem(tableItem: DatabaseItem): Boolean = tables.find(_.tableName == tableItem.tableName).exists(_.existsTableField(tableItem.itemName)) - } - def existsSlickMapping(tableItem: DatabaseItem): Boolean = { + def existsSlickMapping(tableItem: DatabaseItem): Boolean = tables.find(_.tableName == tableItem.tableName).exists(_.existsSlickMapping(tableItem.itemName)) - } } } diff --git a/services/src/test/scala/cromwell/services/database/SlickDeadlocksSpec.scala b/services/src/test/scala/cromwell/services/database/SlickDeadlocksSpec.scala index f0823be7337..f5e95cb4dd2 100644 --- a/services/src/test/scala/cromwell/services/database/SlickDeadlocksSpec.scala +++ b/services/src/test/scala/cromwell/services/database/SlickDeadlocksSpec.scala @@ -31,13 +31,13 @@ class SlickDeadlocksSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match it should "not happen" in { val logger = Logger(LoggerFactory.getLogger(getClass.getName)) // Test based on https://github.com/kwark/slick-deadlock/blob/82525fc/src/main/scala/SlickDeadlock.scala - val databaseConfig = ConfigFactory.parseString( - s"""|db.url = "jdbc:hsqldb:mem:$${uniqueSchema};shutdown=false;hsqldb.tx=mvcc" - |db.driver = "org.hsqldb.jdbcDriver" - |db.connectionTimeout = 3000 - |db.numThreads = 2 - |profile = "slick.jdbc.HsqldbProfile$$" - |""".stripMargin) + val databaseConfig = + ConfigFactory.parseString(s"""|db.url = "jdbc:hsqldb:mem:$${uniqueSchema};shutdown=false;hsqldb.tx=mvcc" + |db.driver = "org.hsqldb.jdbcDriver" + |db.connectionTimeout = 3000 + |db.numThreads = 2 + |profile = "slick.jdbc.HsqldbProfile$$" + |""".stripMargin) logger.info("Initializing deadlock-test database") for { database <- DatabaseTestKit.initializedDatabaseFromConfig(EngineDatabaseType, databaseConfig).autoClosed diff --git a/services/src/test/scala/cromwell/services/database/TestSlickDatabase.scala b/services/src/test/scala/cromwell/services/database/TestSlickDatabase.scala index c88ad81df79..8e1157a7797 100644 --- a/services/src/test/scala/cromwell/services/database/TestSlickDatabase.scala +++ b/services/src/test/scala/cromwell/services/database/TestSlickDatabase.scala @@ -16,8 +16,7 @@ trait TestSlickDatabase { def runTestTransaction[R](action: DBIO[R], isolationLevel: TransactionIsolation = TransactionIsolation.RepeatableRead, - timeout: Duration = Duration.Inf, - ): Future[R] = { + timeout: Duration = Duration.Inf + ): Future[R] = slickDatabase.runTransaction(action, isolationLevel, timeout) - } } diff --git a/services/src/test/scala/cromwell/services/healthmonitor/HealthMonitorServiceActorSpec.scala b/services/src/test/scala/cromwell/services/healthmonitor/HealthMonitorServiceActorSpec.scala index f4e5ff344e6..81c783656fd 100644 --- a/services/src/test/scala/cromwell/services/healthmonitor/HealthMonitorServiceActorSpec.scala +++ 
b/services/src/test/scala/cromwell/services/healthmonitor/HealthMonitorServiceActorSpec.scala @@ -14,9 +14,15 @@ import org.scalatest.time.{Millis, Seconds, Span} import scala.concurrent.duration._ -class HealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually with AskSupport { +class HealthMonitorServiceActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with Eventually + with AskSupport { - override implicit def patienceConfig = PatienceConfig(timeout = scaled(Span(15, Seconds)), interval = Span(500, Millis)) + implicit override def patienceConfig = + PatienceConfig(timeout = scaled(Span(15, Seconds)), interval = Span(500, Millis)) behavior of "HealthMonitorServiceActor" @@ -29,14 +35,20 @@ class HealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLike wi |check-papi-backends: [] |""".stripMargin - val globalConfigString = s"""services.HealthMonitor.config: { |$serviceConfigString |} |""".stripMargin - val actor = system.actorOf(Props(new HealthMonitorServiceActor(ConfigFactory.parseString(serviceConfigString), ConfigFactory.parseString(globalConfigString), null))) + val actor = system.actorOf( + Props( + new HealthMonitorServiceActor(ConfigFactory.parseString(serviceConfigString), + ConfigFactory.parseString(globalConfigString), + null + ) + ) + ) val testProbe = TestProbe() diff --git a/services/src/test/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActorSpec.scala b/services/src/test/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActorSpec.scala index 1a9ce43bd31..12243a47ceb 100644 --- a/services/src/test/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActorSpec.scala @@ -20,14 +20,17 @@ import scala.language.postfixOps class ProtoHealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLike with Eventually { implicit val timeout = Timeout(scaled(5.seconds)) - final implicit val blockingEc: ExecutionContextExecutor = ExecutionContext.fromExecutor( + implicit final val blockingEc: ExecutionContextExecutor = ExecutionContext.fromExecutor( Executors.newCachedThreadPool() ) override val patienceConfig = PatienceConfig(timeout = scaled(20 seconds), interval = scaled(1 second)) implicit val patience = patienceConfig - private def eventualStatus(actorRef: ActorRef, ok: Boolean, systems: (MonitoredSubsystem, SubsystemStatus)*): Assertion = { + private def eventualStatus(actorRef: ActorRef, + ok: Boolean, + systems: (MonitoredSubsystem, SubsystemStatus)* + ): Assertion = { case class NameAndStatus(name: String, status: SubsystemStatus) eventually { @@ -35,7 +38,7 @@ class ProtoHealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLi assert(resp.ok == ok) val actual = resp.systems.toList.map(NameAndStatus.tupled).sortBy(_.name) - val expected = systems.map { case (m, s) => m.name -> s } map { NameAndStatus.tupled } sortBy(_.name) + val expected = systems.map { case (m, s) => m.name -> s } map NameAndStatus.tupled sortBy (_.name) assert(actual.length == expected.length) actual.zip(expected) map { case (a, e) => @@ -46,7 +49,10 @@ class ProtoHealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLi am <- a.status.messages.toList.flatten.headOption em <- e.status.messages.toList.flatten.headOption } yield am.startsWith(em) - assert(isEmpty || actualPrefixedByExpected.contains(true), s"Instead, a.status.messages = 
${a.status.messages.toList.flatten} and e.status.messages = ${e.status.messages.toList.flatten}") + assert( + isEmpty || actualPrefixedByExpected.contains(true), + s"Instead, a.status.messages = ${a.status.messages.toList.flatten} and e.status.messages = ${e.status.messages.toList.flatten}" + ) } head } } @@ -106,7 +112,7 @@ class ProtoHealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLi it should "handle timed out futures" in { var first = true - def timeOutCheck(): Future[SubsystemStatus] = { + def timeOutCheck(): Future[SubsystemStatus] = if (first) { first = false Future.successful(OkStatus) @@ -116,7 +122,6 @@ class ProtoHealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLi OkStatus } } - } val timeoutSubsystem = MonitoredSubsystem("Timeout", () => timeOutCheck()) @@ -174,8 +179,8 @@ class ProtoHealthMonitorServiceActorSpec extends TestKitSuite with AnyFlatSpecLi val statusStoreMessages: List[Option[String]] = statusStores.map(_.messages.flatMap(_.headOption)) val expectedMessagePrefixes = List(None, Option("womp womp"), Option("Timed out")) - statusStoreMessages.zip(expectedMessagePrefixes) foreach { - case (a, e) => assert(a.isEmpty && e.isEmpty || a.exists(_.startsWith(e.get))) + statusStoreMessages.zip(expectedMessagePrefixes) foreach { case (a, e) => + assert(a.isEmpty && e.isEmpty || a.exists(_.startsWith(e.get))) } } } @@ -189,16 +194,16 @@ object ProtoHealthMonitorServiceActorSpec { val TimedOutStatus = SubsystemStatus(ok = false, Option(List("Timed out"))) abstract class TestHealthMonitorActor(override val serviceConfig: Config = ConfigFactory.empty()) - extends ProtoHealthMonitorServiceActor with ScaledTimeSpans { + extends ProtoHealthMonitorServiceActor + with ScaledTimeSpans { override lazy val staleThreshold = scaled(3.seconds) override lazy val failureRetryInterval = scaled(100.milliseconds) override lazy val sweepInterval = scaled(200.milliseconds) override lazy val futureTimeout = scaled(1.second) } - private def mockCheckSuccess(): Future[SubsystemStatus] = { + private def mockCheckSuccess(): Future[SubsystemStatus] = Future.successful(OkStatus) - } private def mockCheckFailure(): Future[SubsystemStatus] = { // Pay no mind, just needed to make sure the checks for both subsystems in the "binning" test get to run. 
diff --git a/services/src/test/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActorSpec.scala b/services/src/test/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActorSpec.scala index 8b7c6979e8c..002e7f1296d 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActorSpec.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActorSpec.scala @@ -4,7 +4,12 @@ import akka.testkit.{ImplicitSender, TestFSMRef, TestProbe} import cats.data.NonEmptyList import cromwell.core.TestKitSuite import cromwell.core.instrumentation.InstrumentationPrefixes -import cromwell.services.instrumentation.AsynchronousThrottlingGaugeMetricActor.{CalculateMetricValue, MetricCalculationInProgress, MetricValue, WaitingForMetricCalculationRequestOrMetricValue} +import cromwell.services.instrumentation.AsynchronousThrottlingGaugeMetricActor.{ + CalculateMetricValue, + MetricCalculationInProgress, + MetricValue, + WaitingForMetricCalculationRequestOrMetricValue +} import cromwell.services.instrumentation.InstrumentationService.InstrumentationServiceMessage import org.scalatest.concurrent.Eventually import org.scalatest.flatspec.AnyFlatSpecLike @@ -14,7 +19,12 @@ import scala.concurrent.Promise import scala.concurrent.duration._ import scala.language.postfixOps -class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFlatSpecLike with ImplicitSender with Matchers with Eventually { +class AsynchronousThrottlingGaugeMetricActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with ImplicitSender + with Matchers + with Eventually { private val defaultTimeout = 5 seconds @@ -23,7 +33,10 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl val serviceRegistryProbe = TestProbe() val calculatedValPromise = Promise[Int]() val metricActor = TestFSMRef { - new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), InstrumentationPrefixes.ServicesPrefix, serviceRegistryProbe.ref) + new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), + InstrumentationPrefixes.ServicesPrefix, + serviceRegistryProbe.ref + ) } metricActor ! CalculateMetricValue(_ => calculatedValPromise.future) eventually { @@ -33,7 +46,7 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl eventually { metricActor.stateName shouldBe WaitingForMetricCalculationRequestOrMetricValue } - serviceRegistryProbe.expectMsgPF(defaultTimeout){ + serviceRegistryProbe.expectMsgPF(defaultTimeout) { case InstrumentationServiceMessage(CromwellGauge(_, actualValue)) => actualValue shouldBe calculatedVal } @@ -42,7 +55,10 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl it should "return into WaitingForMetricCalculationRequestOrMetricValue state if metric calculation resulted in error" in { val dbFailurePromise = Promise[Int]() val metricActor = TestFSMRef { - new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), InstrumentationPrefixes.ServicesPrefix, TestProbe().ref) + new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), + InstrumentationPrefixes.ServicesPrefix, + TestProbe().ref + ) } metricActor ! 
CalculateMetricValue(_ => dbFailurePromise.future) eventually { @@ -58,10 +74,13 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl val calculatedVal = -1L val serviceRegistryProbe = TestProbe() val metricActor = TestFSMRef { - new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), InstrumentationPrefixes.ServicesPrefix, serviceRegistryProbe.ref) + new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), + InstrumentationPrefixes.ServicesPrefix, + serviceRegistryProbe.ref + ) } metricActor ! MetricValue(calculatedVal) - serviceRegistryProbe.expectMsgPF(defaultTimeout){ + serviceRegistryProbe.expectMsgPF(defaultTimeout) { case InstrumentationServiceMessage(CromwellGauge(_, actualValue)) => actualValue shouldBe calculatedVal } @@ -73,7 +92,10 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl val serviceRegistryProbe = TestProbe() val calculatedValPromise = Promise[Int]() val metricActor = TestFSMRef { - new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), InstrumentationPrefixes.ServicesPrefix, serviceRegistryProbe.ref) + new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), + InstrumentationPrefixes.ServicesPrefix, + serviceRegistryProbe.ref + ) } metricActor ! CalculateMetricValue(_ => calculatedValPromise.future) eventually { @@ -82,7 +104,7 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl // interrupted by precalculated MetricValue metricActor ! MetricValue(precalculatedVal) - serviceRegistryProbe.expectMsgPF(defaultTimeout){ + serviceRegistryProbe.expectMsgPF(defaultTimeout) { case InstrumentationServiceMessage(CromwellGauge(_, actualValue)) => actualValue shouldBe precalculatedVal } @@ -93,7 +115,7 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl eventually { metricActor.stateName shouldBe WaitingForMetricCalculationRequestOrMetricValue } - serviceRegistryProbe.expectMsgPF(defaultTimeout){ + serviceRegistryProbe.expectMsgPF(defaultTimeout) { case InstrumentationServiceMessage(CromwellGauge(_, actualValue)) => actualValue shouldBe calculatedVal } @@ -102,7 +124,10 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl it should "successfully complete ongoing metric value calculation and ignore another calculation requests while in MetricCalculationInProgress state" in { val serviceRegistryProbe = TestProbe() val metricActor = TestFSMRef { - new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), InstrumentationPrefixes.ServicesPrefix, serviceRegistryProbe.ref) + new AsynchronousThrottlingGaugeMetricActor(NonEmptyList.of("metric"), + InstrumentationPrefixes.ServicesPrefix, + serviceRegistryProbe.ref + ) } val calculatedVal = -1 @@ -124,7 +149,7 @@ class AsynchronousThrottlingGaugeMetricActorSpec extends TestKitSuite with AnyFl eventually { metricActor.stateName shouldBe WaitingForMetricCalculationRequestOrMetricValue } - serviceRegistryProbe.expectMsgPF(defaultTimeout){ + serviceRegistryProbe.expectMsgPF(defaultTimeout) { case InstrumentationServiceMessage(CromwellGauge(_, actualValue)) => // should be calculatedVal, not calculatedValInterruptor actualValue shouldBe calculatedVal diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfigSpec.scala b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfigSpec.scala index 3ba42161222..8b64a8b4b0e 100644 --- 
a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfigSpec.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverConfigSpec.scala @@ -46,7 +46,6 @@ class StackdriverConfigSpec extends TestKitSuite with AnyFlatSpecLike with Befor stackdriverConfig.cromwellPerfTestCase shouldBe Option("perf-test-1") } - it should "correctly parse config with optional values" in { val config = ConfigFactory.parseString( """ @@ -66,7 +65,6 @@ class StackdriverConfigSpec extends TestKitSuite with AnyFlatSpecLike with Befor stackdriverConfig.cromwellPerfTestCase shouldBe None } - it should "correctly add cromwell instance identifier to config" in { val globalConfig = ConfigFactory.parseString( s""" @@ -102,7 +100,6 @@ class StackdriverConfigSpec extends TestKitSuite with AnyFlatSpecLike with Befor stackdriverConfig.cromwellPerfTestCase shouldBe None } - it should "throw error for invalid auth" in { val config = ConfigFactory.parseString( """ @@ -118,7 +115,6 @@ class StackdriverConfigSpec extends TestKitSuite with AnyFlatSpecLike with Befor |`auth` scheme is invalid. Errors: NonEmptyList(`google` configuration stanza does not contain an auth named 'my-auth'. Known auth names: application-default)""".stripMargin } - it should "throw error for invalid flush rate" in { val config = ConfigFactory.parseString( """ diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActorSpec.scala b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActorSpec.scala index d97f63ab48d..5059dc7eb78 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActorSpec.scala @@ -12,7 +12,11 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ -class StackdriverInstrumentationServiceActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually { +class StackdriverInstrumentationServiceActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with Eventually { behavior of "StackdriverInstrumentationServiceActor" private val MaxWaitTime = 2.minutes @@ -39,7 +43,6 @@ class StackdriverInstrumentationServiceActorSpec extends TestKitSuite with AnyFl private val testGaugeBucket = CromwellBucket(List("test_prefix"), NonEmptyList.of("test", "gauge", "metric", "bucket")) - it should "correctly receive the metrics with resource labels" in { val stackdriverConfig = ConfigFactory.parseString( """ @@ -64,22 +67,28 @@ class StackdriverInstrumentationServiceActorSpec extends TestKitSuite with AnyFl val expectedGaugeMetrics = ("custom.googleapis.com/cromwell/test_prefix/test/gauge/metric/bucket", 40.0) val expectedTimingMetrics = ("custom.googleapis.com/cromwell/test_prefix/test/metric/bucket/timing", 7500.0) - val stackdriverActor = TestActorRef(new TestStackdriverInstrumentationServiceActor(stackdriverConfig, globalConfig, registryProbe)) + val stackdriverActor = + TestActorRef(new TestStackdriverInstrumentationServiceActor(stackdriverConfig, globalConfig, registryProbe)) rawMetricList foreach (metric => stackdriverActor ! 
InstrumentationServiceMessage(metric)) eventually { stackdriverActor.underlyingActor.metricsReceived should have length 3 - stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain (expectedCumulativeMetrics) - stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain (expectedGaugeMetrics) - stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain (expectedTimingMetrics) - - stackdriverActor.underlyingActor.metricsReceived.map(_.resourceLabels) should contain (resourceLabels) + stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain( + expectedCumulativeMetrics + ) + stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain( + expectedGaugeMetrics + ) + stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain( + expectedTimingMetrics + ) + + stackdriverActor.underlyingActor.metricsReceived.map(_.resourceLabels) should contain(resourceLabels) } } - it should "correctly receive metrics with metric labels" in { val stackdriverConfig = ConfigFactory.parseString( """ @@ -99,15 +108,18 @@ class StackdriverInstrumentationServiceActorSpec extends TestKitSuite with AnyFl val expectedCumulativeMetrics = ("custom.googleapis.com/cromwell/test_prefix/test/metric/bucket", 50.0) val metricLabels = Map("cromwell_instance_role" -> "backend", "cromwell_perf_test_case" -> "perf-test-1") - val stackdriverActor = TestActorRef(new TestStackdriverInstrumentationServiceActor(stackdriverConfig, globalConfig, registryProbe)) + val stackdriverActor = + TestActorRef(new TestStackdriverInstrumentationServiceActor(stackdriverConfig, globalConfig, registryProbe)) rawMetricList foreach (metric => stackdriverActor ! 
InstrumentationServiceMessage(metric)) eventually { stackdriverActor.underlyingActor.metricsReceived should have length 1 - stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain (expectedCumulativeMetrics) - stackdriverActor.underlyingActor.metricsReceived.map(_.resourceLabels) should contain (resourceLabels) - stackdriverActor.underlyingActor.metricsReceived.map(_.metricLabels) should contain (metricLabels) + stackdriverActor.underlyingActor.metricsReceived.map(m => (m.metricPath, m.metricValue)) should contain( + expectedCumulativeMetrics + ) + stackdriverActor.underlyingActor.metricsReceived.map(_.resourceLabels) should contain(resourceLabels) + stackdriverActor.underlyingActor.metricsReceived.map(_.metricLabels) should contain(metricLabels) } } } diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala index e1b708b6a72..d186864969d 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala @@ -5,8 +5,10 @@ import com.google.monitoring.v3.CreateTimeSeriesRequest import com.typesafe.config.Config import scala.jdk.CollectionConverters._ -class TestStackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends StackdriverInstrumentationServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { +class TestStackdriverInstrumentationServiceActor(serviceConfig: Config, + globalConfig: Config, + serviceRegistryActor: ActorRef +) extends StackdriverInstrumentationServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { var metricsReceived = List[TimeSeriesRequest]() @@ -15,14 +17,15 @@ class TestStackdriverInstrumentationServiceActor(serviceConfig: Config, globalCo val metric = timeSeries.getMetric metricsReceived = metricsReceived :+ TimeSeriesRequest(metric.getType, - timeSeries.getPoints(0).getValue.getDoubleValue, - timeSeries.getResource.getLabelsMap.asScala.toMap, - metric.getLabelsMap.asScala.toMap) + timeSeries.getPoints(0).getValue.getDoubleValue, + timeSeries.getResource.getLabelsMap.asScala.toMap, + metric.getLabelsMap.asScala.toMap + ) } } - case class TimeSeriesRequest(metricPath: String, metricValue: Double, resourceLabels: Map[String, String], - metricLabels: Map[String, String]) + metricLabels: Map[String, String] +) diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfigSpec.scala b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfigSpec.scala index b109b785c42..b22ce0b2ef1 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfigSpec.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDConfigSpec.scala @@ -9,7 +9,7 @@ import scala.concurrent.duration._ class StatsDConfigSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "StatsDConfig" - + it should "parse correct service configuration" in { val config = ConfigFactory.parseString( """ diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala 
b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala index 82b3183faef..1275a64e231 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala @@ -15,7 +15,12 @@ import org.scalatest.matchers.should.Matchers import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ -class StatsDInstrumentationServiceActorBenchmarkSpec extends TestKitSuite with AnyFlatSpecLike with BeforeAndAfterAll with Matchers with Eventually { +class StatsDInstrumentationServiceActorBenchmarkSpec + extends TestKitSuite + with AnyFlatSpecLike + with BeforeAndAfterAll + with Matchers + with Eventually { behavior of "StatsDInstrumentationServiceActor" val config = ConfigFactory.parseString( @@ -28,18 +33,20 @@ class StatsDInstrumentationServiceActorBenchmarkSpec extends TestKitSuite with A ) val registryProbe = TestProbe().ref - override implicit val patienceConfig: PatienceConfig = PatienceConfig(scaled(3.seconds)) + implicit override val patienceConfig: PatienceConfig = PatienceConfig(scaled(3.seconds)) val testBucket = CromwellBucket(List("test_prefix"), NonEmptyList.of("test", "metric", "benchmark", "bucket")) - it should "have good throughput for gauges" in { - val instrumentationActor = TestActorRef(new StatsDInstrumentationServiceActor(config, ConfigFactory.load(), registryProbe)) + val instrumentationActor = + TestActorRef(new StatsDInstrumentationServiceActor(config, ConfigFactory.load(), registryProbe)) val gaugeName = instrumentationActor.underlyingActor.metricBaseName.append(testBucket.toStatsDString()).name - LazyList.range(0, 1 * 1000 * 1000, 1).foreach({ i => + LazyList.range(0, 1 * 1000 * 1000, 1).foreach { i => instrumentationActor ! 
InstrumentationServiceMessage(CromwellGauge(testBucket, i.toLong)) - }) + } eventually { - instrumentationActor.underlyingActor.metricRegistry.getGauges.asScala.get(gaugeName).map(_.getValue) shouldBe Some(999999) + instrumentationActor.underlyingActor.metricRegistry.getGauges.asScala + .get(gaugeName) + .map(_.getValue) shouldBe Some(999999) } } } diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorSpec.scala b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorSpec.scala index ed913606191..93237709c2a 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorSpec.scala @@ -16,7 +16,11 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ -class StatsDInstrumentationServiceActorSpec extends TestKitSuite with AnyFlatSpecLike with BeforeAndAfterAll with Matchers { +class StatsDInstrumentationServiceActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with BeforeAndAfterAll + with Matchers { behavior of "StatsDInstrumentationServiceActor" val config = ConfigFactory.parseString( @@ -35,12 +39,13 @@ class StatsDInstrumentationServiceActorSpec extends TestKitSuite with AnyFlatSpe val patience = 1.second val testBucket = CromwellBucket(List("test_prefix"), NonEmptyList.of("test", "metric", "bucket")) val testGaugeBucket = CromwellBucket(List("test_prefix"), NonEmptyList.of("test", "gauge", "metric", "bucket")) - + var udpListenerActor: ActorRef = _ - + override def beforeAll(): Unit = { // Start an actor listening to the UDP port and forwarding messages to the udpProbe - udpListenerActor = system.actorOf(Props(new UDPListenerActor(new InetSocketAddress("localhost", 8125), udpProbe.ref))) + udpListenerActor = + system.actorOf(Props(new UDPListenerActor(new InetSocketAddress("localhost", 8125), udpProbe.ref))) // Give a sec to the actor to open an UDP socket Thread.sleep(1.second.toMillis) super.beforeAll() @@ -56,45 +61,63 @@ class StatsDInstrumentationServiceActorSpec extends TestKitSuite with AnyFlatSpe metric: CromwellMetric, expectedExactPackets: Set[String], expectedFuzzyPackets: Set[String] = Set.empty, - prependCromwellInstancePrefix: Boolean = false) - + prependCromwellInstancePrefix: Boolean = false + ) + // Note: The current StatsD implementation sends everything as StatsD gauges so we expect all packets to be "...|g" List( - StatsDTestBit("increment counters", CromwellIncrement(testBucket), + StatsDTestBit( + "increment counters", + CromwellIncrement(testBucket), Set("prefix_value.cromwell.test_prefix.test.metric.bucket.samples:1|g"), Set("prefix_value.cromwell.test_prefix.test.metric.bucket.m1_rate") ), - StatsDTestBit("add count", CromwellCount(testBucket, 80), + StatsDTestBit( + "add count", + CromwellCount(testBucket, 80), Set("prefix_value.cromwell.test_prefix.test.metric.bucket.samples:81|g"), Set("prefix_value.cromwell.test_prefix.test.metric.bucket.m1_rate") ), - StatsDTestBit("set gauges", CromwellGauge(testGaugeBucket, 89), - Set("prefix_value.cromwell.test_prefix.test.gauge.metric.bucket:89|g") + StatsDTestBit("set gauges", + CromwellGauge(testGaugeBucket, 89), + Set("prefix_value.cromwell.test_prefix.test.gauge.metric.bucket:89|g") ), - StatsDTestBit("set timings", CromwellTiming(testBucket.expand("timing"), 5.seconds), - 
Set("prefix_value.cromwell.test_prefix.test.metric.bucket.timing.stddev:0.00|g", + StatsDTestBit( + "set timings", + CromwellTiming(testBucket.expand("timing"), 5.seconds), + Set( + "prefix_value.cromwell.test_prefix.test.metric.bucket.timing.stddev:0.00|g", "prefix_value.cromwell.test_prefix.test.metric.bucket.timing.samples:1|g", "prefix_value.cromwell.test_prefix.test.metric.bucket.timing.p95:5000.00|g", "prefix_value.cromwell.test_prefix.test.metric.bucket.timing.mean:5000.00|g" ), Set("prefix_value.cromwell.test_prefix.test.metric.bucket.timing.m1_rate") ), - StatsDTestBit("increment counters with cromwell id prefix", CromwellIncrement(testBucket), + StatsDTestBit( + "increment counters with cromwell id prefix", + CromwellIncrement(testBucket), Set("prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.samples:1|g"), Set("prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.m1_rate"), prependCromwellInstancePrefix = true ), - StatsDTestBit("add count with cromwell id prefix", CromwellCount(testBucket, 80), + StatsDTestBit( + "add count with cromwell id prefix", + CromwellCount(testBucket, 80), Set("prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.samples:81|g"), Set("prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.m1_rate"), prependCromwellInstancePrefix = true ), - StatsDTestBit("set gauges with cromwell id prefix", CromwellGauge(testGaugeBucket, 89), + StatsDTestBit( + "set gauges with cromwell id prefix", + CromwellGauge(testGaugeBucket, 89), Set("prefix_value.cromwell.cromwell-101.test_prefix.test.gauge.metric.bucket:89|g"), prependCromwellInstancePrefix = true ), - StatsDTestBit("set timings with cromwell id prefix", CromwellTiming(testBucket.expand("timing"), 5.seconds), - Set("prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.timing.stddev:0.00|g", + StatsDTestBit( + "set timings with cromwell id prefix", + CromwellTiming(testBucket.expand("timing"), 5.seconds), + Set( + "prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.timing.stddev:0.00|g", "prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.timing.samples:1|g", "prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.timing.p95:5000.00|g", "prefix_value.cromwell.cromwell-101.test_prefix.test.metric.bucket.timing.mean:5000.00|g" @@ -103,38 +126,49 @@ class StatsDInstrumentationServiceActorSpec extends TestKitSuite with AnyFlatSpe prependCromwellInstancePrefix = true ) ) foreach { - case StatsDTestBit(description, metric, expectedExactPackets, expectedFuzzyPackets, prependCromwellInstancePrefix) => + case StatsDTestBit(description, + metric, + expectedExactPackets, + expectedFuzzyPackets, + prependCromwellInstancePrefix + ) => it should description in { val globalConfig = if (prependCromwellInstancePrefix) cromwellInstanceConfig else ConfigFactory.empty() - val instrumentationActor = TestActorRef(new StatsDInstrumentationServiceActor(config, globalConfig, registryProbe)) + val instrumentationActor = + TestActorRef(new StatsDInstrumentationServiceActor(config, globalConfig, registryProbe)) instrumentationActor ! 
InstrumentationServiceMessage(metric) - val received = udpProbe.receiveWhile(patience) { - case Udp.Received(data, _) => data.utf8String + val received = udpProbe.receiveWhile(patience) { case Udp.Received(data, _) => + data.utf8String } - expectedExactPackets foreach { packet => if (!received.contains(packet)) { - val prefix = packet.split(":").head - received.find(_.startsWith(prefix)) match { - case Some(sharedPrefix) => fail(s"Missing packet: $packet, but found: $sharedPrefix. Should this be a fuzzy packet?") - case None => fail(s"Missing packet: $packet, and no packets received with prefix $prefix") + expectedExactPackets foreach { packet => + if (!received.contains(packet)) { + val prefix = packet.split(":").head + received.find(_.startsWith(prefix)) match { + case Some(sharedPrefix) => + fail(s"Missing packet: $packet, but found: $sharedPrefix. Should this be a fuzzy packet?") + case None => fail(s"Missing packet: $packet, and no packets received with prefix $prefix") + } } - }} - expectedFuzzyPackets foreach { packet => if (!received.exists(_.contains(packet))) fail(s"Missing fuzzy packet: $packet") } + } + expectedFuzzyPackets foreach { packet => + if (!received.exists(_.contains(packet))) fail(s"Missing fuzzy packet: $packet") + } } } - + private class UDPListenerActor(remote: InetSocketAddress, sendTo: ActorRef) extends Actor with ActorLogging { - implicit val system = context.system + implicit val system = context.system IO(Udp) ! Udp.Bind(sendTo, remote) - def receive = { - case Udp.Bound(_) => context.become(ready(sender())) + def receive = { case Udp.Bound(_) => + context.become(ready(sender())) } def ready(socket: ActorRef): Receive = { - case Udp.Unbind => socket ! Udp.Unbind + case Udp.Unbind => socket ! Udp.Unbind case Udp.Unbound => context.stop(self) case other => log.error(s"received unexpected message: $other") } diff --git a/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala b/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala index c4f0c6177c0..b57ef9a931b 100644 --- a/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala +++ b/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala @@ -30,10 +30,9 @@ final class InMemoryKvServiceActor extends KeyValueServiceActor { override protected def kvWriteActorProps = Props.empty - private def respond(replyTo: ActorRef, action: KvAction, response: Future[KvResponse]): Unit = { + private def respond(replyTo: ActorRef, action: KvAction, response: Future[KvResponse]): Unit = response.onComplete { case Success(x) => replyTo ! x case Failure(ex) => replyTo ! 
KvFailure(action, ex) } - } } diff --git a/services/src/test/scala/cromwell/services/keyvalue/KvClientSpec.scala b/services/src/test/scala/cromwell/services/keyvalue/KvClientSpec.scala index f8af9a21132..00d8dd02079 100644 --- a/services/src/test/scala/cromwell/services/keyvalue/KvClientSpec.scala +++ b/services/src/test/scala/cromwell/services/keyvalue/KvClientSpec.scala @@ -58,5 +58,3 @@ class KvClientSpec extends TestKitSuite with AnyFlatSpecLike with Matchers { class KvTestClientActor(val serviceRegistryActor: ActorRef) extends Actor with ActorLogging with KvClient { override def receive: Receive = kvClientReceive orElse Actor.ignoringBehavior } - - diff --git a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala index e673b6f012f..4485f23f347 100644 --- a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala +++ b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala @@ -18,7 +18,12 @@ import org.scalatest.time.{Millis, Seconds, Span} import scala.concurrent.{ExecutionContext, Future} -class KeyValueDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures with RecoverMethods { +class KeyValueDatabaseSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalaFutures + with RecoverMethods { implicit val ec: ExecutionContext = ExecutionContext.global implicit val defaultPatience: PatienceConfig = PatienceConfig(scaled(Span(5, Seconds)), scaled(Span(100, Millis))) @@ -28,7 +33,8 @@ class KeyValueDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - lazy val dataAccess = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) + lazy val dataAccess = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, EngineDatabaseType, databaseSystem) val workflowId = WorkflowId.randomId().toString val callFqn = "AwesomeWorkflow.GoodJob" @@ -70,7 +76,7 @@ class KeyValueDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ) it should "start database container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } it should "upsert and retrieve kv pairs correctly" taggedAs DbmsTest in { @@ -116,13 +122,13 @@ class KeyValueDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat } it should "stop container" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } } object KeyValueDatabaseSpec { - private def getFailureRegex(databaseSystem: DatabaseSystem): String = { + private def getFailureRegex(databaseSystem: DatabaseSystem): String = databaseSystem.platform match { case HsqldbDatabasePlatform => """integrity constraint violation: NOT NULL check constraint; """ + @@ -133,14 +139,12 @@ object KeyValueDatabaseSpec { """ERROR: null value in column "STORE_VALUE" """ + """(of relation "JOB_KEY_VALUE_ENTRY" )?violates not-null constraint""" } - } - private def getFailureClass(databaseSystem: DatabaseSystem): Class[_ <: Exception] = { + private def getFailureClass(databaseSystem: DatabaseSystem): Class[_ <: Exception] = databaseSystem.platform match { case HsqldbDatabasePlatform => classOf[SQLIntegrityConstraintViolationException] case MariadbDatabasePlatform => 
classOf[BatchUpdateException] case MysqlDatabasePlatform => classOf[BatchUpdateException] case PostgresqlDatabasePlatform => classOf[PSQLException] } - } } diff --git a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueServiceActorSpec.scala b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueServiceActorSpec.scala index 1faee0ac315..781c163164b 100644 --- a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueServiceActorSpec.scala @@ -16,13 +16,13 @@ class KeyValueServiceActorSpec extends ServicesSpec with Eventually { val cromwellConfig: Config = ConfigFactory.parseString( s"""services: { - | KeyValue: { - | class: "cromwell.services.keyvalue.KeyValueServiceActor" - | config { - | option1: "value1" - | } - | } - |} + | KeyValue: { + | class: "cromwell.services.keyvalue.KeyValueServiceActor" + | config { + | option1: "value1" + | } + | } + |} """.stripMargin ) @@ -30,7 +30,7 @@ class KeyValueServiceActorSpec extends ServicesSpec with Eventually { val sqlKvServiceActor: ActorRef = system.actorOf( props = SqlKeyValueServiceActor.props(emptyConfig, emptyConfig, TestProbe("serviceRegistryActor").ref), - name = "sqlKvServiceActor", + name = "sqlKvServiceActor" ) val wfID: WorkflowId = WorkflowId.randomId() @@ -53,23 +53,23 @@ class KeyValueServiceActorSpec extends ServicesSpec with Eventually { "insert a key/value" in { val kvPut1 = KvPut(KvPair(ScopedKey(wfID, jobKey1, "k1"), "v1")) - (for { - putResult <- (sqlKvServiceActor ? kvPut1).mapTo[KvResponse] - _ = putResult shouldEqual KvPutSuccess(kvPut1) - putResult <- (sqlKvServiceActor ? KvPut(kvPair2)).mapTo[KvResponse] - _ = putResult shouldEqual KvPutSuccess(KvPut(kvPair2)) - putResult <- (sqlKvServiceActor ? KvPut(kvPair3)).mapTo[KvResponse] - _ = putResult shouldEqual KvPutSuccess(KvPut(kvPair3)) - } yield ()).futureValue + (for { + putResult <- (sqlKvServiceActor ? kvPut1).mapTo[KvResponse] + _ = putResult shouldEqual KvPutSuccess(kvPut1) + putResult <- (sqlKvServiceActor ? KvPut(kvPair2)).mapTo[KvResponse] + _ = putResult shouldEqual KvPutSuccess(KvPut(kvPair2)) + putResult <- (sqlKvServiceActor ? KvPut(kvPair3)).mapTo[KvResponse] + _ = putResult shouldEqual KvPutSuccess(KvPut(kvPair3)) + } yield ()).futureValue } "return error if key doesn't exist" in { val scopedKeyNeverPut = ScopedKey(wfID, jobKey2, "k2") - (for { - getResult <- (sqlKvServiceActor ? KvGet(scopedKeyNeverPut)).mapTo[KvResponse] - _ = getResult shouldEqual KvKeyLookupFailed(KvGet(scopedKeyNeverPut)) - } yield ()).futureValue + (for { + getResult <- (sqlKvServiceActor ? 
KvGet(scopedKeyNeverPut)).mapTo[KvResponse] + _ = getResult shouldEqual KvKeyLookupFailed(KvGet(scopedKeyNeverPut)) + } yield ()).futureValue } "be able to overwrite values" in { diff --git a/services/src/test/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActorSpec.scala b/services/src/test/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActorSpec.scala index 531f7106460..e5d6f9d9bfc 100644 --- a/services/src/test/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActorSpec.scala @@ -19,9 +19,14 @@ object LoadControllerServiceActorSpec { val Config = ConfigFactory.parseString("control-frequency = 1 second") } -class LoadControllerServiceActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually with ImplicitSender { +class LoadControllerServiceActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with Eventually + with ImplicitSender { behavior of "LoadControllerServiceActor" - override implicit val patienceConfig: PatienceConfig = PatienceConfig(scaled(2.seconds)) + implicit override val patienceConfig: PatienceConfig = PatienceConfig(scaled(2.seconds)) it should "record metrics" in { val loadActor = TestActorRef(new LoadControllerServiceActor(Config, Config, TestProbe().ref)) diff --git a/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala b/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala index 5a908aaf092..83737c37d11 100644 --- a/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala @@ -4,13 +4,22 @@ import akka.actor.{Actor, ActorRef, Props} import akka.testkit.TestProbe import com.typesafe.config.{Config, ConfigFactory} import cromwell.core.TestKitSuite -import cromwell.services.metadata.MetadataQuerySpec.{CannedResponseReadMetadataWorker, MetadataServiceActor_CustomizeRead} -import cromwell.services.metadata.MetadataService.{BuildMetadataJsonAction, MetadataServiceResponse, QueryForWorkflowsMatchingParameters, WorkflowQueryResponse, WorkflowQuerySuccess} +import cromwell.services.metadata.MetadataQuerySpec.{ + CannedResponseReadMetadataWorker, + MetadataServiceActor_CustomizeRead +} +import cromwell.services.metadata.MetadataService.{ + BuildMetadataJsonAction, + MetadataServiceResponse, + QueryForWorkflowsMatchingParameters, + WorkflowQueryResponse, + WorkflowQuerySuccess +} import cromwell.services.metadata.impl.{MetadataServiceActor, MetadataServiceActorSpec} import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -class MetadataQuerySpec extends TestKitSuite with AnyFlatSpecLike with Matchers { +class MetadataQuerySpec extends TestKitSuite with AnyFlatSpecLike with Matchers { it should "correctly forward requests to read workers and responses back to requesters" in { @@ -26,7 +35,9 @@ class MetadataQuerySpec extends TestKitSuite with AnyFlatSpecLike with Matchers val requester = TestProbe("MetadataServiceClientProbe") def readWorkerProps() = Props(new CannedResponseReadMetadataWorker(Map(request -> response))) val serviceRegistry = TestProbe("ServiceRegistryProbe") - val metadataService = system.actorOf(MetadataServiceActor_CustomizeRead.props(readWorkerProps, serviceRegistry), "MetadataServiceUnderTest") + val metadataService = system.actorOf(MetadataServiceActor_CustomizeRead.props(readWorkerProps, 
serviceRegistry), + "MetadataServiceUnderTest" + ) requester.send(metadataService, request) requester.expectMsg(response) @@ -35,12 +46,17 @@ class MetadataQuerySpec extends TestKitSuite with AnyFlatSpecLike with Matchers } - object MetadataQuerySpec { - final class MetadataServiceActor_CustomizeRead(config: Config, serviceRegistryActor: ActorRef, readWorkerMaker: () => Props) - extends MetadataServiceActor(MetadataServiceActorSpec.globalConfigToMetadataServiceConfig(config), config, serviceRegistryActor) { - - override def readMetadataWorkerActorProps(): Props = readWorkerMaker.apply().withDispatcher(cromwell.core.Dispatcher.ServiceDispatcher) + final class MetadataServiceActor_CustomizeRead(config: Config, + serviceRegistryActor: ActorRef, + readWorkerMaker: () => Props + ) extends MetadataServiceActor(MetadataServiceActorSpec.globalConfigToMetadataServiceConfig(config), + config, + serviceRegistryActor + ) { + + override def readMetadataWorkerActorProps(): Props = + readWorkerMaker.apply().withDispatcher(cromwell.core.Dispatcher.ServiceDispatcher) } object MetadataServiceActor_CustomizeRead { @@ -50,10 +66,10 @@ object MetadataQuerySpec { Props(new MetadataServiceActor_CustomizeRead(config, serviceRegistryProbe.ref, readActorProps)) } - - final class CannedResponseReadMetadataWorker(cannedResponses: Map[BuildMetadataJsonAction, MetadataServiceResponse]) extends Actor { - override def receive: Receive = { - case msg: BuildMetadataJsonAction => sender() ! cannedResponses.getOrElse(msg, throw new Exception(s"Unexpected inbound message: $msg")) + final class CannedResponseReadMetadataWorker(cannedResponses: Map[BuildMetadataJsonAction, MetadataServiceResponse]) + extends Actor { + override def receive: Receive = { case msg: BuildMetadataJsonAction => + sender() ! 
cannedResponses.getOrElse(msg, throw new Exception(s"Unexpected inbound message: $msg")) } } } diff --git a/services/src/test/scala/cromwell/services/metadata/MetadataServiceSpec.scala b/services/src/test/scala/cromwell/services/metadata/MetadataServiceSpec.scala index da4587c55f3..d3d596e45ce 100644 --- a/services/src/test/scala/cromwell/services/metadata/MetadataServiceSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/MetadataServiceSpec.scala @@ -32,12 +32,16 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val womArray = WomArray(WomArrayType(WomStringType), Seq(WomString("Hello"), WomString("world!"))) val emptyWomArray = WomArray(WomArrayType(WomStringType), Seq.empty) - womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), womArray).toList should contain theSameElementsInOrderAs List( + womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), + womArray + ).toList should contain theSameElementsInOrderAs List( MetadataEvent(MetadataKey(workflowId, None, "root[0]"), MetadataValue("Hello")), MetadataEvent(MetadataKey(workflowId, None, "root[1]"), MetadataValue("world!")) ) - womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), emptyWomArray).toList should contain theSameElementsAs List( + womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), + emptyWomArray + ).toList should contain theSameElementsAs List( MetadataEvent.empty(MetadataKey(workflowId, None, "root[]")) ) } @@ -45,18 +49,24 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc it should "convert a WomMap to MetadataEvents" in { import MetadataService._ val workflowId = WorkflowId.randomId() - val womMap = WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("Hello") -> WomString("world!"), - WomString("Goodbye") -> WomString("world!") - )) + val womMap = WomMap(WomMapType(WomStringType, WomStringType), + Map( + WomString("Hello") -> WomString("world!"), + WomString("Goodbye") -> WomString("world!") + ) + ) val emptyWomMap = WomMap(WomMapType(WomStringType, WomStringType), Map.empty) - womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), womMap).toList should contain theSameElementsInOrderAs List( + womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), + womMap + ).toList should contain theSameElementsInOrderAs List( MetadataEvent(MetadataKey(workflowId, None, "root:Hello"), MetadataValue("world!")), MetadataEvent(MetadataKey(workflowId, None, "root:Goodbye"), MetadataValue("world!")) ) - womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), emptyWomMap).toList should contain theSameElementsAs List( + womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), + emptyWomMap + ).toList should contain theSameElementsAs List( MetadataEvent.empty(MetadataKey(workflowId, None, "root")) ) } @@ -82,7 +92,9 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ) ) - womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), womMaybePopulatedFileOuter).toList should contain theSameElementsInOrderAs List( + womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), + womMaybePopulatedFileOuter + ).toList should contain theSameElementsInOrderAs List( MetadataEvent(MetadataKey(workflowId, None, "root:class"), MetadataValue("File")), MetadataEvent(MetadataKey(workflowId, None, "root:location"), MetadataValue("fileValue")), MetadataEvent(MetadataKey(workflowId, None, "root:checksum"), MetadataValue("checksum")), @@ -119,7 +131,9 @@ 
class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ) ) - womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), womMaybeListedDirectoryOuter).toList should contain theSameElementsInOrderAs List( + womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), + womMaybeListedDirectoryOuter + ).toList should contain theSameElementsInOrderAs List( MetadataEvent(MetadataKey(workflowId, None, "root:class"), MetadataValue("Directory")), MetadataEvent(MetadataKey(workflowId, None, "root:location"), MetadataValue("directoryValue")), MetadataEvent(MetadataKey(workflowId, None, "root:listing[0]:class"), MetadataValue("Directory")), @@ -137,12 +151,14 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ("womValue", "metadataValue"), (WomString("hi"), MetadataValue("hi", MetadataString)), (WomInteger(1), MetadataValue("1", MetadataInt)), - (WomFloat(1F), MetadataValue("1.0", MetadataNumber)), + (WomFloat(1f), MetadataValue("1.0", MetadataNumber)), (WomBoolean(true), MetadataValue("true", MetadataBoolean)) ) forAll(values) { (womValue, metadataValue) => - womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), womValue).toList should contain theSameElementsAs List( + womValueToMetadataEvents(MetadataKey(workflowId, None, "root"), + womValue + ).toList should contain theSameElementsAs List( metadata.MetadataEvent(MetadataKey(workflowId, None, "root"), metadataValue) ) } @@ -192,7 +208,8 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val (outerPrefix, outerCausedBys, outerFailureId) = validateExceptionMessage(events.head, workflowId, tMsg) val (cause1Prefix, cause1CausedBys, cause1FailureId) = validateExceptionMessage(events(1), workflowId, causeMsg) - val (cause2Prefix, cause2CausedBys, cause2FailureId) = validateExceptionMessage(events(2), workflowId, innerCauseMsg) + val (cause2Prefix, cause2CausedBys, cause2FailureId) = + validateExceptionMessage(events(2), workflowId, innerCauseMsg) events(3).key.key should be(s"$cause2Prefix[$cause2FailureId]$cause2CausedBys:causedBy[]") outerPrefix should be(pathToFailures) @@ -236,7 +253,8 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc runtimeCausedBys should be("") // Aggregate exception: - val (aggregatePrefix, aggregateCausedBys, aggregateFailureId) = validateExceptionMessage(events(1), workflowId, causeContext) + val (aggregatePrefix, aggregateCausedBys, aggregateFailureId) = + validateExceptionMessage(events(1), workflowId, causeContext) aggregatePrefix should be(pathToFailures) aggregateCausedBys should be(":causedBy[0]") aggregateFailureId should be(runtimeFailureId) @@ -250,7 +268,8 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc // cause2, caused by innerCause caused by [] val (cause2Prefix, cause2CausedBys, cause2FailureId) = validateExceptionMessage(events(4), workflowId, cause2Msg) - val (innerCausePrefix, innerCauseCausedBys, innerCauseFailureIds) = validateExceptionMessage(events(5), workflowId, innerCauseMsg) + val (innerCausePrefix, innerCauseCausedBys, innerCauseFailureIds) = + validateExceptionMessage(events(5), workflowId, innerCauseMsg) cause2Prefix should be(pathToFailures) cause2CausedBys should be(":causedBy[0]:causedBy[1]") cause2FailureId should be(runtimeFailureId) @@ -277,6 +296,9 @@ class MetadataServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc case failureMessageRegex(prefix, failureIndex, causedBys) => (prefix, causedBys, 
failureIndex) case _ => fail("Unexpected failure key format: " + k.key) } - case _ => fail("throwableToMetadataEvents generated a metadata event without a metadata value! Bad throwableToMetadataEvents! Very bad!") + case _ => + fail( + "throwableToMetadataEvents generated a metadata event without a metadata value! Bad throwableToMetadataEvents! Very bad!" + ) } } diff --git a/services/src/test/scala/cromwell/services/metadata/MetadataStatisticsRecorderSpec.scala b/services/src/test/scala/cromwell/services/metadata/MetadataStatisticsRecorderSpec.scala index 2096e3f9553..d10d8146250 100644 --- a/services/src/test/scala/cromwell/services/metadata/MetadataStatisticsRecorderSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/MetadataStatisticsRecorderSpec.scala @@ -16,10 +16,18 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { behavior of "MetadataStatisticsRecorder" - def uninterestingWriteEvent(workflowId: WorkflowId)(): MetadataEvent = MetadataEvent(MetadataKey(workflowId, None, "foo"), MetadataValue(WomInteger(1))) - def parentNotificationEvent(rootWorkflowId: WorkflowId, parentWorkflowId: WorkflowId, subWorkflowId: WorkflowId): Vector[MetadataEvent] = Vector( - MetadataEvent(MetadataKey(subWorkflowId, None, "rootWorkflowId"), MetadataValue(WomString(rootWorkflowId.toString))), - MetadataEvent(MetadataKey(subWorkflowId, None, "parentWorkflowId"), MetadataValue(WomString(parentWorkflowId.toString))) + def uninterestingWriteEvent(workflowId: WorkflowId)(): MetadataEvent = + MetadataEvent(MetadataKey(workflowId, None, "foo"), MetadataValue(WomInteger(1))) + def parentNotificationEvent(rootWorkflowId: WorkflowId, + parentWorkflowId: WorkflowId, + subWorkflowId: WorkflowId + ): Vector[MetadataEvent] = Vector( + MetadataEvent(MetadataKey(subWorkflowId, None, "rootWorkflowId"), + MetadataValue(WomString(rootWorkflowId.toString)) + ), + MetadataEvent(MetadataKey(subWorkflowId, None, "parentWorkflowId"), + MetadataValue(WomString(parentWorkflowId.toString)) + ) ) it should "count rows for one workflow and create alerts every interval" in { @@ -28,7 +36,9 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { (1 to 10) foreach { i => recorder.processEventsAndGenerateAlerts(9 of uninterestingWriteEvent(workflowId)) should be(Vector.empty) - recorder.processEventsAndGenerateAlerts(1 of uninterestingWriteEvent(workflowId)) should be(Vector(HeavyMetadataAlert(workflowId, 10L * i))) + recorder.processEventsAndGenerateAlerts(1 of uninterestingWriteEvent(workflowId)) should be( + Vector(HeavyMetadataAlert(workflowId, 10L * i)) + ) () } } @@ -42,7 +52,9 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { val bigMetadataDump = Math.abs(Random.nextInt(101)) + 10 val expectedCountAfterProcessing = runningCounter + bigMetadataDump - recorder.processEventsAndGenerateAlerts(bigMetadataDump of uninterestingWriteEvent(workflowId)) should be(Vector(HeavyMetadataAlert(workflowId, expectedCountAfterProcessing))) + recorder.processEventsAndGenerateAlerts(bigMetadataDump of uninterestingWriteEvent(workflowId)) should be( + Vector(HeavyMetadataAlert(workflowId, expectedCountAfterProcessing)) + ) runningCounter = expectedCountAfterProcessing () } @@ -60,11 +72,13 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { (7 of uninterestingWriteEvent(workflowId3)) ) should be(Vector.empty) - recorder.processEventsAndGenerateAlerts( - (3 of uninterestingWriteEvent(workflowId1)) ++ - (5 of 
uninterestingWriteEvent(workflowId2)) ++ - (7 of uninterestingWriteEvent(workflowId3)) - ).toSet should be(Set(HeavyMetadataAlert(workflowId2, 10), HeavyMetadataAlert(workflowId3, 14))) + recorder + .processEventsAndGenerateAlerts( + (3 of uninterestingWriteEvent(workflowId1)) ++ + (5 of uninterestingWriteEvent(workflowId2)) ++ + (7 of uninterestingWriteEvent(workflowId3)) + ) + .toSet should be(Set(HeavyMetadataAlert(workflowId2, 10), HeavyMetadataAlert(workflowId3, 14))) recorder.processEventsAndGenerateAlerts( (3 of uninterestingWriteEvent(workflowId1)) ++ @@ -72,11 +86,15 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { (7 of uninterestingWriteEvent(workflowId3)) ) should be(Vector.empty) - recorder.processEventsAndGenerateAlerts( - (3 of uninterestingWriteEvent(workflowId1)) ++ - (5 of uninterestingWriteEvent(workflowId2)) ++ - (7 of uninterestingWriteEvent(workflowId3)) - ).toSet should be(Set(HeavyMetadataAlert(workflowId1, 12), HeavyMetadataAlert(workflowId2, 20), HeavyMetadataAlert(workflowId3, 28))) + recorder + .processEventsAndGenerateAlerts( + (3 of uninterestingWriteEvent(workflowId1)) ++ + (5 of uninterestingWriteEvent(workflowId2)) ++ + (7 of uninterestingWriteEvent(workflowId3)) + ) + .toSet should be( + Set(HeavyMetadataAlert(workflowId1, 12), HeavyMetadataAlert(workflowId2, 20), HeavyMetadataAlert(workflowId3, 28)) + ) } it should "be able to accumulate counts from subworkflows" in { @@ -89,41 +107,55 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { // Recording SW1 parentage adds 2 events against subWorkflowId1 (and thus rootWorkflowId) withClue(recorder.statusString()) { - recorder.processEventsAndGenerateAlerts(parentNotificationEvent(rootWorkflowId, rootWorkflowId, subWorkflow1Id)) should be(Vector.empty) + recorder.processEventsAndGenerateAlerts( + parentNotificationEvent(rootWorkflowId, rootWorkflowId, subWorkflow1Id) + ) should be(Vector.empty) } // Recording SW2 parentage adds 2 events against subWorkflowId2 (and thus subWorkflow1Id and thus rootWorkflowId) withClue(recorder.statusString()) { - recorder.processEventsAndGenerateAlerts(parentNotificationEvent(rootWorkflowId, subWorkflow1Id, subWorkflow2Id)) should be(Vector.empty) + recorder.processEventsAndGenerateAlerts( + parentNotificationEvent(rootWorkflowId, subWorkflow1Id, subWorkflow2Id) + ) should be(Vector.empty) } // To get started, add 7 events to the root subworkflow: withClue(recorder.statusString()) { - recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(rootWorkflowId)) should be(Vector(HeavyMetadataAlert(rootWorkflowId, 11))) + recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(rootWorkflowId)) should be( + Vector(HeavyMetadataAlert(rootWorkflowId, 11)) + ) } // Current standing: root: 11, sub1: 4, sub2: 2 withClue(recorder.statusString()) { - recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow1Id)) should be(Vector(HeavyMetadataAlert(subWorkflow1Id, 11))) + recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow1Id)) should be( + Vector(HeavyMetadataAlert(subWorkflow1Id, 11)) + ) } // Current standing: root: 18, sub1: 11, sub2: 2 withClue(recorder.statusString()) { - recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow2Id)) should be(Vector(HeavyMetadataAlert(rootWorkflowId, 25))) + recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow2Id)) should be( + Vector(HeavyMetadataAlert(rootWorkflowId, 25)) + ) 
} // Current standing: root: 25, sub1: 18, sub2: 9 withClue(recorder.statusString()) { - recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow1Id)) should be(Vector(HeavyMetadataAlert(subWorkflow1Id, 25))) + recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow1Id)) should be( + Vector(HeavyMetadataAlert(subWorkflow1Id, 25)) + ) } // Current standing: root: 32, sub1: 25, sub2: 9 withClue(recorder.statusString()) { - recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow2Id)).toSet should be(Set(HeavyMetadataAlert(rootWorkflowId, 39), HeavyMetadataAlert(subWorkflow2Id, 16))) + recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow2Id)).toSet should be( + Set(HeavyMetadataAlert(rootWorkflowId, 39), HeavyMetadataAlert(subWorkflow2Id, 16)) + ) } // Current standing: root: 39, sub1: 32, sub2: 16 @@ -135,8 +167,12 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { val subWorkflow1Id = WorkflowId(UUID.randomUUID()) val subWorkflow2Id = WorkflowId(UUID.randomUUID()) - recorder.processEventsAndGenerateAlerts(parentNotificationEvent(rootWorkflowId, rootWorkflowId, subWorkflow1Id)) should be(Vector.empty) - recorder.processEventsAndGenerateAlerts(parentNotificationEvent(rootWorkflowId, subWorkflow1Id, subWorkflow2Id)) should be(Vector.empty) + recorder.processEventsAndGenerateAlerts( + parentNotificationEvent(rootWorkflowId, rootWorkflowId, subWorkflow1Id) + ) should be(Vector.empty) + recorder.processEventsAndGenerateAlerts( + parentNotificationEvent(rootWorkflowId, subWorkflow1Id, subWorkflow2Id) + ) should be(Vector.empty) // If we were accumulating these would alert, but we see nothing if not accumulating: recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow1Id)) should be(Vector.empty) @@ -144,7 +180,11 @@ class MetadataStatisticsRecorderSpec extends AnyFlatSpec with Matchers { // When we trip the limits, we should only see alerts for individual workflows. 
// Note: it's 16 not 14 because of the two parent notification entries above - recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow1Id)) should be(Vector(HeavyMetadataAlert(subWorkflow1Id, 16))) - recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow2Id)) should be(Vector(HeavyMetadataAlert(subWorkflow2Id, 16))) + recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow1Id)) should be( + Vector(HeavyMetadataAlert(subWorkflow1Id, 16)) + ) + recorder.processEventsAndGenerateAlerts(7 of uninterestingWriteEvent(subWorkflow2Id)) should be( + Vector(HeavyMetadataAlert(subWorkflow2Id, 16)) + ) } } diff --git a/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala b/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala index 344b01ef410..fff625d2500 100644 --- a/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala @@ -25,10 +25,10 @@ class QueryForWorkflowsMatchingParametersSpec extends AnyWordSpec with CromwellT r.endDate should be(Symbol("empty")) r.names should be(Symbol("empty")) r.statuses should be(Symbol("empty")) - r.labelsAnd should be (Symbol("empty")) - r.labelsOr should be (Symbol("empty")) - r.excludeLabelsAnd should be (Symbol("empty")) - r.excludeLabelsOr should be (Symbol("empty")) + r.labelsAnd should be(Symbol("empty")) + r.labelsOr should be(Symbol("empty")) + r.excludeLabelsAnd should be(Symbol("empty")) + r.excludeLabelsOr should be(Symbol("empty")) r.submissionTime should be(Symbol("empty")) case Invalid(fs) => throw new RuntimeException(fs.toList.mkString(", ")) @@ -200,7 +200,7 @@ class QueryForWorkflowsMatchingParametersSpec extends AnyWordSpec with CromwellT ) val result = WorkflowQueryParameters.runValidation(rawParameters) result match { - case Valid(_) => //good + case Valid(_) => // good case Invalid(fs) => fs.toList should have size 1 } @@ -230,7 +230,7 @@ class QueryForWorkflowsMatchingParametersSpec extends AnyWordSpec with CromwellT ) val result = WorkflowQueryParameters.runValidation(rawParameters) result match { - case Valid(_) => //good + case Valid(_) => // good case Invalid(fs) => fs.toList should have size 1 } diff --git a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala index 2a812419775..c97969abf02 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala @@ -38,8 +38,13 @@ object MetadataDatabaseAccessSpec { val Subworkflow2Name = "test_subworkflow_2" } -class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures - with BeforeAndAfterAll with Eventually { +class MetadataDatabaseAccessSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with ScalaFutures + with BeforeAndAfterAll + with Eventually { import MetadataDatabaseAccessSpec._ implicit val ec: ExecutionContext = ExecutionContext.global @@ -53,9 +58,11 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi lazy val dataAccess: MetadataDatabaseAccess with MetadataServicesStore = new MetadataDatabaseAccess with MetadataServicesStore { 
- override val metadataDatabaseInterface: MetadataSlickDatabase = { - DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem) - } + override val metadataDatabaseInterface: MetadataSlickDatabase = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, + MetadataDatabaseType, + databaseSystem + ) } def publishMetadataEvents(baseKey: MetadataKey, keyValues: Array[(String, String)]): Future[Unit] = { @@ -65,9 +72,13 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi dataAccess.addMetadataEvents(events) } - def baseWorkflowMetadata(name: String, labels: Set[Label] = Set.empty, workflowId: WorkflowId = WorkflowId.randomId()): Future[WorkflowId] = { + def baseWorkflowMetadata(name: String, + labels: Set[Label] = Set.empty, + workflowId: WorkflowId = WorkflowId.randomId() + ): Future[WorkflowId] = { val defaultLabels = Set(Label("cromwell-workflow-name", name)) - val labelMetadata = (labels ++ defaultLabels).map(label => (s"${WorkflowMetadataKeys.Labels}:${label.key}", label.value)).toArray + val labelMetadata = + (labels ++ defaultLabels).map(label => (s"${WorkflowMetadataKeys.Labels}:${label.key}", label.value)).toArray val workflowKey = MetadataKey(workflowId, jobKey = None, key = null) def keyAndValue(name: String) = Array( @@ -80,14 +91,17 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi publishMetadataEvents(workflowKey, keyAndValue(name)).map(_ => workflowId) } - def subworkflowMetadata(parentWorkflowId: WorkflowId, subworkflowName: String, workflowId: WorkflowId = WorkflowId.randomId()): Future[WorkflowId] = { + def subworkflowMetadata(parentWorkflowId: WorkflowId, + subworkflowName: String, + workflowId: WorkflowId = WorkflowId.randomId() + ): Future[WorkflowId] = { val workflowKey = MetadataKey(workflowId, jobKey = None, key = null) val metadataKeys = Array( (WorkflowMetadataKeys.Status, WorkflowRunning.toString), (WorkflowMetadataKeys.Name, subworkflowName), (WorkflowMetadataKeys.StartTime, OffsetDateTime.now.toUtcMilliString), (WorkflowMetadataKeys.ParentWorkflowId, parentWorkflowId.toString), - (WorkflowMetadataKeys.RootWorkflowId, parentWorkflowId.toString), + (WorkflowMetadataKeys.RootWorkflowId, parentWorkflowId.toString) ) publishMetadataEvents(workflowKey, metadataKeys).map(_ => workflowId) @@ -98,13 +112,14 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val keyAndValue = Array( (WorkflowMetadataKeys.Status, WorkflowRunning.toString), (WorkflowMetadataKeys.Status, WorkflowSucceeded.toString), - (WorkflowMetadataKeys.EndTime, OffsetDateTime.now.toUtcMilliString)) + (WorkflowMetadataKeys.EndTime, OffsetDateTime.now.toUtcMilliString) + ) publishMetadataEvents(workflowKey, keyAndValue).map(_ => id) } it should "start container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } it should "properly get next workflow to archive and number of workflows left to archive" taggedAs DbmsTest in { @@ -125,14 +140,21 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi // both workflows should be available for archiving eventually(Timeout(2.minutes)) { - val workflowsLeftToArchiveFuture = dataAccess.countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(terminalWorkflowStatuses, OffsetDateTime.now().minusSeconds(10)) + val workflowsLeftToArchiveFuture = 
dataAccess.countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp( + terminalWorkflowStatuses, + OffsetDateTime.now().minusSeconds(10) + ) val workflowsLeftToArchiveResponse = Await.result(workflowsLeftToArchiveFuture, defaultTimeout) workflowsLeftToArchiveResponse shouldBe 2 } // check that the first workflow should be the one to be archived first val workflowToArchiveResponse1 = eventually(Timeout(2.minutes)) { - val workflowToArchiveFuture = dataAccess.queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(terminalWorkflowStatuses, OffsetDateTime.now().minusSeconds(10), 1) + val workflowToArchiveFuture = + dataAccess.queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(terminalWorkflowStatuses, + OffsetDateTime.now().minusSeconds(10), + 1 + ) val workflowToArchiveResponse = Await.result(workflowToArchiveFuture, defaultTimeout) workflowToArchiveResponse should not be empty workflowToArchiveResponse @@ -142,19 +164,27 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi workflowToArchiveResponse1.head.workflowExecutionUuid shouldBe workflowId1.toString // assume first workflow has been archived - val updateMetadataArchiveStatusFuture = dataAccess.updateMetadataArchiveStatus(workflowId1, MetadataArchiveStatus.Archived) + val updateMetadataArchiveStatusFuture = + dataAccess.updateMetadataArchiveStatus(workflowId1, MetadataArchiveStatus.Archived) Await.result(updateMetadataArchiveStatusFuture, defaultTimeout) // only 1 workflow should be left for archiving eventually(Timeout(2.minutes)) { - val workflowsLeftToArchiveFuture = dataAccess.countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(terminalWorkflowStatuses, OffsetDateTime.now().minusSeconds(10)) + val workflowsLeftToArchiveFuture = dataAccess.countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp( + terminalWorkflowStatuses, + OffsetDateTime.now().minusSeconds(10) + ) val workflowsLeftToArchiveResponse = Await.result(workflowsLeftToArchiveFuture, defaultTimeout) workflowsLeftToArchiveResponse shouldBe 1 } // check that the second workflow should be the one to be archived next val workflowToArchiveResponse2 = eventually(Timeout(2.minutes)) { - val workflowToArchiveFuture = dataAccess.queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(terminalWorkflowStatuses, OffsetDateTime.now().minusSeconds(10), 1) + val workflowToArchiveFuture = + dataAccess.queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(terminalWorkflowStatuses, + OffsetDateTime.now().minusSeconds(10), + 1 + ) val workflowToArchiveResponse = Await.result(workflowToArchiveFuture, defaultTimeout) workflowToArchiveResponse should not be empty workflowToArchiveResponse @@ -167,23 +197,23 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi it should "return pagination metadata only when page and pagesize query params are specified" taggedAs DbmsTest in { (for { _ <- baseWorkflowMetadata(Workflow1Name) - //get metadata when page and pagesize are specified - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.Page.name -> "1", WorkflowQueryKey.PageSize.name -> "50"))) map { case (_, meta) => + // get metadata when page and pagesize are specified + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(WorkflowQueryKey.Page.name -> "1", WorkflowQueryKey.PageSize.name -> "50")) + ) map { case (_, meta) => meta match { case Some(_) => case None => fail("Should have metadata when page and pagesize are specified.") 
} } - //don't get metadata when page and pagesize are not specified - _ <- dataAccess.queryWorkflowSummaries( - WorkflowQueryParameters(Seq())) map { case (_, meta) => + // don't get metadata when page and pagesize are not specified + _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq())) map { case (_, meta) => meta match { case Some(_) => fail("Should not have metadata when page and pagesize are not specified") case None => } } - } yield()).futureValue + } yield ()).futureValue } it should "sort metadata events by timestamp from older to newer" taggedAs DbmsTest in { @@ -194,9 +224,14 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val yesterday = now.minusDays(1) val tomorrow = now.plusDays(1) - val yesterdayEvent = MetadataEvent(workflowKey.copy(key = WorkflowMetadataKeys.WorkflowRoot), Option(MetadataValue("A")), yesterday) - val nowEvent = MetadataEvent(workflowKey.copy(key = WorkflowMetadataKeys.WorkflowRoot), Option(MetadataValue("B")), now) - val tomorrowEvent = MetadataEvent(workflowKey.copy(key = WorkflowMetadataKeys.WorkflowRoot), Option(MetadataValue("C")), tomorrow) + val yesterdayEvent = MetadataEvent(workflowKey.copy(key = WorkflowMetadataKeys.WorkflowRoot), + Option(MetadataValue("A")), + yesterday + ) + val nowEvent = + MetadataEvent(workflowKey.copy(key = WorkflowMetadataKeys.WorkflowRoot), Option(MetadataValue("B")), now) + val tomorrowEvent = + MetadataEvent(workflowKey.copy(key = WorkflowMetadataKeys.WorkflowRoot), Option(MetadataValue("C")), tomorrow) val events = Vector(tomorrowEvent, yesterdayEvent, nowEvent) @@ -208,19 +243,26 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi (for { workflow1Id <- baseWorkflowMetadata(Workflow1Name) expected <- unorderedEvents(workflow1Id) - response <- dataAccess.queryMetadataEvents(MetadataQuery(workflow1Id, None, Option(WorkflowMetadataKeys.WorkflowRoot), None, None, expandSubWorkflows = false), 5.seconds) + response <- dataAccess.queryMetadataEvents(MetadataQuery(workflow1Id, + None, + Option(WorkflowMetadataKeys.WorkflowRoot), + None, + None, + expandSubWorkflows = false + ), + 5.seconds + ) _ = response shouldBe expected - } yield()).futureValue + } yield ()).futureValue } - def assertRowsProcessedAndSummarizationComplete(summaryResult: SummaryResult) = { + def assertRowsProcessedAndSummarizationComplete(summaryResult: SummaryResult) = withClue(s"asserting correctness of $summaryResult") { summaryResult.rowsProcessedIncreasing should be > 0L summaryResult.rowsProcessedDecreasing should be(0L) summaryResult.decreasingGap should be(0L) } - } it should "create and query a workflow" taggedAs DbmsTest in { @@ -242,14 +284,14 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi // Query with no filters (workflowQueryResult, workflowQueryResult2) <- - dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq.empty)) map { case (response, _) => - val result = response.results find { r => r.name.contains(Workflow1Name) && r.end.isDefined } getOrElse - fail(s"$Workflow1Name with an end not found in ${response.results}") - val result2 = response.results find { - _.name.contains(Workflow2Name) - } getOrElse fail(s"$Workflow2Name not found in ${response.results}") - (result, result2) - } + dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq.empty)) map { case (response, _) => + val result = response.results find { r => r.name.contains(Workflow1Name) && r.end.isDefined } getOrElse + fail(s"$Workflow1Name with 
an end not found in ${response.results}") + val result2 = response.results find { + _.name.contains(Workflow2Name) + } getOrElse fail(s"$Workflow2Name not found in ${response.results}") + (result, result2) + } // Filter by name _ <- dataAccess.queryWorkflowSummaries( WorkflowQueryParameters(Seq(WorkflowQueryKey.Name.name -> Workflow1Name)) @@ -257,95 +299,133 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val resultsByName = response.results groupBy { _.name } - withClue("Filter by name") { resultsByName.keys.toSet.flatten should equal(Set(Workflow1Name)) } + withClue("Filter by name")(resultsByName.keys.toSet.flatten should equal(Set(Workflow1Name))) } // Filter by multiple names _ <- dataAccess.queryWorkflowSummaries( - WorkflowQueryParameters(Seq( - WorkflowQueryKey.Name.name -> Workflow1Name, WorkflowQueryKey.Name.name -> Workflow2Name)) + WorkflowQueryParameters( + Seq(WorkflowQueryKey.Name.name -> Workflow1Name, WorkflowQueryKey.Name.name -> Workflow2Name) + ) ) map { case (response, _) => val resultsByName = response.results groupBy { _.name } - withClue("Filter by multiple names") { resultsByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) } + withClue("Filter by multiple names") { + resultsByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) + } } // Filter by workflow id - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters( - Seq(WorkflowQueryKey.Id.name -> workflow1Id.toString))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(WorkflowQueryKey.Id.name -> workflow1Id.toString)) + ) map { case (response, _) => val resultsById = response.results groupBy { _.name } - withClue("Filter by workflow ID") { resultsById.keys.toSet.flatten should equal(Set(Workflow1Name)) } + withClue("Filter by workflow ID")(resultsById.keys.toSet.flatten should equal(Set(Workflow1Name))) } // Filter by multiple workflow ids - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters( - Seq(workflow1Id, workflow2Id).map(id => WorkflowQueryKey.Id.name -> id.toString))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(workflow1Id, workflow2Id).map(id => WorkflowQueryKey.Id.name -> id.toString)) + ) map { case (response, _) => val resultsById = response.results groupBy { _.name } - withClue("Filter by multiple workflow IDs") { resultsById.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) } + withClue("Filter by multiple workflow IDs") { + resultsById.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) + } } // Filter by workflow id within random Ids - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters( - (randomIds :+ workflow1Id.toString).map(id => WorkflowQueryKey.Id.name -> id))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters((randomIds :+ workflow1Id.toString).map(id => WorkflowQueryKey.Id.name -> id)) + ) map { case (response, _) => val resultsById = response.results groupBy { _.name } - withClue("Filter by workflow ID within random IDs") { resultsById.keys.toSet.flatten should equal(Set(Workflow1Name)) } + withClue("Filter by workflow ID within random IDs") { + resultsById.keys.toSet.flatten should equal(Set(Workflow1Name)) + } } // Filter by status - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.Status.name -> "Submitted"))) map { case (response, _) => + _ <- 
dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(WorkflowQueryKey.Status.name -> "Submitted")) + ) map { case (response, _) => val resultsByStatus = response.results groupBy (_.status) - withClue("Filter by status") { resultsByStatus.keys.toSet.flatten should equal(Set("Submitted")) } + withClue("Filter by status")(resultsByStatus.keys.toSet.flatten should equal(Set("Submitted"))) } // Filter by multiple statuses - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.Status.name -> "Submitted", - WorkflowQueryKey.Status.name -> "Succeeded"))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.Status.name -> "Submitted", WorkflowQueryKey.Status.name -> "Succeeded") + ) + ) map { case (response, _) => val resultsByStatus = response.results groupBy (_.status) - withClue("Filter by multiple statuses") { resultsByStatus.keys.toSet.flatten should equal(Set("Submitted", "Succeeded")) } + withClue("Filter by multiple statuses") { + resultsByStatus.keys.toSet.flatten should equal(Set("Submitted", "Succeeded")) + } } // Filter by label using AND - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.LabelAndKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}"))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.LabelAndKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}") + ) + ) map { case (response, _) => val resultByName = response.results groupBy (_.name) - withClue("Filter by label using AND") { resultByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) } + withClue("Filter by label using AND") { + resultByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) + } } // Filter by multiple labels using AND - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters( - Seq(testLabel2, testLabel3) - .map(label => WorkflowQueryKey.LabelAndKeyValue.name -> s"${label.key}:${label.value}")) + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(testLabel2, testLabel3) + .map(label => WorkflowQueryKey.LabelAndKeyValue.name -> s"${label.key}:${label.value}") + ) ) map { case (response, _) => val resultByName = response.results groupBy (_.name) - withClue("Filter by multiple labels using AND") { resultByName.keys.toSet.flatten should equal(Set(Workflow2Name)) } + withClue("Filter by multiple labels using AND") { + resultByName.keys.toSet.flatten should equal(Set(Workflow2Name)) + } } // Filter by label using OR - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.LabelOrKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}"))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.LabelOrKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}") + ) + ) map { case (response, _) => val resultByName = response.results groupBy (_.name) - withClue("Filter by label using OR") { resultByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) } + withClue("Filter by label using OR") { + resultByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) + } } // Filter by multiple labels using OR - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters( - Seq(testLabel2, testLabel3) - .map(label => WorkflowQueryKey.LabelOrKeyValue.name -> s"${label.key}:${label.value}")) + _ <- 
dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(testLabel2, testLabel3) + .map(label => WorkflowQueryKey.LabelOrKeyValue.name -> s"${label.key}:${label.value}") + ) ) map { case (response, _) => val resultByName = response.results groupBy (_.name) - withClue("Filter by multiple label using OR") { resultByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) } + withClue("Filter by multiple label using OR") { + resultByName.keys.toSet.flatten should equal(Set(Workflow1Name, Workflow2Name)) + } } // Filter by exclude label using AND - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.ExcludeLabelAndKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}"))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.ExcludeLabelAndKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}") + ) + ) map { case (response, _) => val resultByName = response.results groupBy (_.name) withClue("Filter by exclude label using AND") { resultByName.keys.toSet.flatten should contain(Workflow1Name) } } // Filter by multiple exclude labels using AND - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters( - Seq(testLabel2, testLabel3) - .map(label => WorkflowQueryKey.ExcludeLabelAndKeyValue.name -> s"${label.key}:${label.value}")) + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(testLabel2, testLabel3) + .map(label => WorkflowQueryKey.ExcludeLabelAndKeyValue.name -> s"${label.key}:${label.value}") + ) ) map { case (response, _) => val resultByName = response.results groupBy (_.name) val ids = response.results.map(_.id) @@ -356,17 +436,22 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi } } // Filter by exclude label using OR - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.ExcludeLabelOrKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}"))) map { case (response, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.ExcludeLabelOrKeyValue.name -> s"${testLabel2.key}:${testLabel2.value}") + ) + ) map { case (response, _) => val resultByName = response.results groupBy (_.name) withClue("Filter to exclude label using OR") { resultByName.keys.toSet.flatten should contain(Workflow1Name) } } // Filter by multiple exclude labels using OR - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters( - Seq(testLabel2, testLabel3) - .map(label => WorkflowQueryKey.ExcludeLabelOrKeyValue.name -> s"${label.key}:${label.value}")) + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(testLabel2, testLabel3) + .map(label => WorkflowQueryKey.ExcludeLabelOrKeyValue.name -> s"${label.key}:${label.value}") + ) ) map { case (response, _) => // NOTE: On persistent databases other workflows will be returned. Just verify that our two workflows are not. 
val ids = response.results.map(_.id) @@ -376,49 +461,56 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi } } // Filter by start date - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.StartDate.name -> workflowQueryResult2.start.get.toUtcMilliString))) map { - case (response, _) => - response.results partition { - r => r.start.isDefined && r.start.get.compareTo(workflowQueryResult.start.get) >= 0 - } match { - case (y, n) if y.nonEmpty && n.isEmpty => // good - case (y, n) => fail(s"Found ${y.size} later workflows and ${n.size} earlier") - } + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.StartDate.name -> workflowQueryResult2.start.get.toUtcMilliString) + ) + ) map { case (response, _) => + response.results partition { r => + r.start.isDefined && r.start.get.compareTo(workflowQueryResult.start.get) >= 0 + } match { + case (y, n) if y.nonEmpty && n.isEmpty => // good + case (y, n) => fail(s"Found ${y.size} later workflows and ${n.size} earlier") + } } // Filter by end date - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.EndDate.name -> workflowQueryResult.end.get.toUtcMilliString))) map { - case (response, _) => - response.results partition { - r => r.end.isDefined && r.end.get.compareTo(workflowQueryResult.end.get) <= 0 - } match { - case (y, n) if y.nonEmpty && n.isEmpty => // good - case (y, n) => fail(s"Found ${y.size} earlier workflows and ${n.size} later") - } + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(WorkflowQueryKey.EndDate.name -> workflowQueryResult.end.get.toUtcMilliString)) + ) map { case (response, _) => + response.results partition { r => + r.end.isDefined && r.end.get.compareTo(workflowQueryResult.end.get) <= 0 + } match { + case (y, n) if y.nonEmpty && n.isEmpty => // good + case (y, n) => fail(s"Found ${y.size} earlier workflows and ${n.size} later") + } } // Filter by submission time - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.SubmissionTime.name -> workflowQueryResult2.submission.get.toUtcMilliString))) map { - case (response, _) => - response.results partition { r => - r.submission.isDefined && r.submission.get.compareTo(workflowQueryResult2.submission.get) <= 0 - } match { - case (y, n) if y.nonEmpty && n.isEmpty => // good - case (y, n) => - fail(s"Found ${y.size} earlier workflows and ${n.size} later while filtering by submission timestamp") - } + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.SubmissionTime.name -> workflowQueryResult2.submission.get.toUtcMilliString) + ) + ) map { case (response, _) => + response.results partition { r => + r.submission.isDefined && r.submission.get.compareTo(workflowQueryResult2.submission.get) <= 0 + } match { + case (y, n) if y.nonEmpty && n.isEmpty => // good + case (y, n) => + fail(s"Found ${y.size} earlier workflows and ${n.size} later while filtering by submission timestamp") + } } // Check for labels in query response - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.AdditionalQueryResultFields.name -> "labels"))) map { - case (response, _) => - response.results filter { - workflowQueryResult => List(workflow1Id.toString, workflow1Id.toString).contains(workflowQueryResult.id) - } partition { r => r.labels.isDefined } match { - case (y, n) if y.nonEmpty && n.isEmpty => //good - case (y, n) => fail(s"Something went 
horribly wrong since labels were populated for ${y.size} and were missing for ${n.size} workflow(s)!") - } + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(WorkflowQueryKey.AdditionalQueryResultFields.name -> "labels")) + ) map { case (response, _) => + response.results filter { workflowQueryResult => + List(workflow1Id.toString, workflow1Id.toString).contains(workflowQueryResult.id) + } partition { r => r.labels.isDefined } match { + case (y, n) if y.nonEmpty && n.isEmpty => // good + case (y, n) => + fail( + s"Something went horribly wrong since labels were populated for ${y.size} and were missing for ${n.size} workflow(s)!" + ) + } } } yield ()).futureValue(Timeout(scaled(Span(30, Seconds))), Interval(scaled(Span(500, Millis)))) } @@ -430,18 +522,28 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi _ <- baseWorkflowMetadata(uniqueWorkflow3Name) // refresh the metadata _ <- dataAccess.refreshWorkflowMetadataSummaries(1000) map assertRowsProcessedAndSummarizationComplete - //get totalResultsCount when page and pagesize are specified - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - // name being added to the query parameters so as to exclude workflows being populated by the other tests in this spec - WorkflowQueryKey.Name.name -> uniqueWorkflow3Name, - WorkflowQueryKey.Page.name -> "1", WorkflowQueryKey.PageSize.name -> "1"))) map { case (resp, _) => + // get totalResultsCount when page and pagesize are specified + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq( + // name being added to the query parameters so as to exclude workflows being populated by the other tests in this spec + WorkflowQueryKey.Name.name -> uniqueWorkflow3Name, + WorkflowQueryKey.Page.name -> "1", + WorkflowQueryKey.PageSize.name -> "1" + ) + ) + ) map { case (resp, _) => resp.totalResultsCount match { case 2 => - case 1 => fail("totalResultsCount is suspiciously equal to the pageSize and not the expected total results count. Please fix!") - case other => fail(s"totalResultsCount is expected to be 2 but is actually $other. Something has gone horribly wrong!") + case 1 => + fail( + "totalResultsCount is suspiciously equal to the pageSize and not the expected total results count. Please fix!" + ) + case other => + fail(s"totalResultsCount is expected to be 2 but is actually $other. 
Something has gone horribly wrong!") } } - } yield()).futureValue + } yield ()).futureValue } it should "revert to an prior label value" taggedAs DbmsTest in { @@ -488,10 +590,14 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi // refresh metadata _ <- dataAccess.refreshWorkflowMetadataSummaries(1000) // include subworkflows - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq(WorkflowQueryKey.IncludeSubworkflows.name -> true.toString))) map { case (resp, _) => + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(WorkflowQueryKey.IncludeSubworkflows.name -> true.toString)) + ) map { case (resp, _) => val resultByName = resp.results groupBy (_.name) withClue("include subworkflows:") { - List(ParentWorkflowName, SubworkflowName, ParentWorkflow2Name, Subworkflow2Name).foreach { n => resultByName.keys.toSet.flatten should contain(n) } + List(ParentWorkflowName, SubworkflowName, ParentWorkflow2Name, Subworkflow2Name).foreach { n => + resultByName.keys.toSet.flatten should contain(n) + } } } // exclude subworkflows - make sure we get 2 results in the right order: @@ -521,22 +627,29 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi case (resp, _) => val resultByName = resp.results groupBy (_.name) // This time we can do a strict equality check rather than 'contains', because we know these two should be the most recent (non-sub-) workflows: - withClue("exclude subworkflows and assert page size:") { resultByName.keys.flatten should be(Set(ParentWorkflowName, ParentWorkflow2Name)) } + withClue("exclude subworkflows and assert page size:") { + resultByName.keys.flatten should be(Set(ParentWorkflowName, ParentWorkflow2Name)) + } } - // check for parentWorkflow1 in query response - _ <- dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq( - WorkflowQueryKey.AdditionalQueryResultFields.name -> "parentWorkflowId", - WorkflowQueryKey.Name.name -> SubworkflowName))) map { - case (response, _) => - response.results partition { r => r.parentWorkflowId.isDefined} match { - case (y, n) if y.nonEmpty && n.isEmpty => //good - case (y, n) => fail(s"parentWorkflow1 should be populated for a subworkflow. It was populated correctly for ${y.size} " + - s"and was missing in ${n.size} subworkflow(s). Something went horribly wrong!") - } + _ <- dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters( + Seq(WorkflowQueryKey.AdditionalQueryResultFields.name -> "parentWorkflowId", + WorkflowQueryKey.Name.name -> SubworkflowName + ) + ) + ) map { case (response, _) => + response.results partition { r => r.parentWorkflowId.isDefined } match { + case (y, n) if y.nonEmpty && n.isEmpty => // good + case (y, n) => + fail( + s"parentWorkflow1 should be populated for a subworkflow. It was populated correctly for ${y.size} " + + s"and was missing in ${n.size} subworkflow(s). Something went horribly wrong!" 
+ ) + } } - } yield()).futureValue + } yield ()).futureValue } it should "properly query metadata summaries based on archived status and timestamp and update archive status after metadata deletion" taggedAs DbmsTest in { @@ -550,25 +663,37 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val refreshSummariesFuture = dataAccess.refreshWorkflowMetadataSummaries(1000) Await.result(refreshSummariesFuture, defaultTimeout) - val updateMetadataArchiveStatusFuture = dataAccess.updateMetadataArchiveStatus(workflowId1, MetadataArchiveStatus.Archived) + val updateMetadataArchiveStatusFuture = + dataAccess.updateMetadataArchiveStatus(workflowId1, MetadataArchiveStatus.Archived) Await.result(updateMetadataArchiveStatusFuture, defaultTimeout) - val responseEmptyFuture = dataAccess.queryWorkflowIdsByArchiveStatusAndOlderThanTimestamp(Option("Archived"), OffsetDateTime.now().minusMinutes(1), 200) + val responseEmptyFuture = + dataAccess.queryWorkflowIdsByArchiveStatusAndOlderThanTimestamp(Option("Archived"), + OffsetDateTime.now().minusMinutes(1), + 200 + ) val responseEmpty = Await.result(responseEmptyFuture, defaultTimeout) responseEmpty shouldBe empty eventually(Timeout(2.minutes)) { - val responseNonEmptyFuture = dataAccess.queryWorkflowIdsByArchiveStatusAndOlderThanTimestamp (Option ("Archived"), OffsetDateTime.now().minusMinutes(1), 200) + val responseNonEmptyFuture = + dataAccess.queryWorkflowIdsByArchiveStatusAndOlderThanTimestamp(Option("Archived"), + OffsetDateTime.now().minusMinutes(1), + 200 + ) val responseNonEmpty = Await.result(responseNonEmptyFuture, 10.seconds) responseNonEmpty should not be empty responseNonEmpty should contain allElementsOf Seq(workflowId1.toString) responseNonEmpty should contain noElementsOf Seq(workflowId2.toString) } - val deleteMetadataFuture = dataAccess.deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus(workflowId1, Option("ArchivedAndDeleted")) + val deleteMetadataFuture = + dataAccess.deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus(workflowId1, Option("ArchivedAndDeleted")) Await.result(deleteMetadataFuture, defaultTimeout) - val summaryResponseFuture = dataAccess.queryWorkflowSummaries(WorkflowQueryParameters(Seq(WorkflowQueryKey.Id.name -> workflowId1.toString))) + val summaryResponseFuture = dataAccess.queryWorkflowSummaries( + WorkflowQueryParameters(Seq(WorkflowQueryKey.Id.name -> workflowId1.toString)) + ) val summaryResponse = Await.result(summaryResponseFuture, defaultTimeout) summaryResponse._1.results should not be empty summaryResponse._1.results.head.metadataArchiveStatus.toString shouldBe "ArchivedAndDeleted" @@ -576,21 +701,30 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi it should "error when deleting metadata for a root workflow that does not exist" taggedAs DbmsTest in { dataAccess - .deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus(WorkflowId.fromString("00000000-0000-0000-0000-000000000000"), None) - .failed.futureValue(Timeout(10.seconds)) - .getMessage should be("""Metadata deletion precondition failed: workflow ID "00000000-0000-0000-0000-000000000000" did not have a status in the summary table""") + .deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus( + WorkflowId.fromString("00000000-0000-0000-0000-000000000000"), + None + ) + .failed + .futureValue(Timeout(10.seconds)) + .getMessage should be( + """Metadata deletion precondition failed: workflow ID "00000000-0000-0000-0000-000000000000" did not have a status in the summary table""" 
+ ) } it should "refuse to delete a non-terminal workflow" taggedAs DbmsTest in { val test = for { - rootWorkflowId <- baseWorkflowMetadata("root workflow name", workflowId = WorkflowId.fromString("33333333-3333-3333-3333-333333333333")) + rootWorkflowId <- baseWorkflowMetadata("root workflow name", + workflowId = + WorkflowId.fromString("33333333-3333-3333-3333-333333333333") + ) _ <- dataAccess.refreshWorkflowMetadataSummaries(1000) _ <- dataAccess.deleteAllMetadataEntriesForWorkflowAndUpdateArchiveStatus(rootWorkflowId, None) } yield () - test - .failed.futureValue(Timeout(10.seconds)) - .getMessage should be(s"""Metadata deletion precondition failed: workflow ID "33333333-3333-3333-3333-333333333333" was in non-terminal status "Submitted"""") + test.failed.futureValue(Timeout(10.seconds)).getMessage should be( + s"""Metadata deletion precondition failed: workflow ID "33333333-3333-3333-3333-333333333333" was in non-terminal status "Submitted"""" + ) } it should "close the database" taggedAs DbmsTest in { @@ -598,7 +732,7 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } } diff --git a/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala index 22ff8c78d5e..f738e56d395 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala @@ -26,20 +26,19 @@ class MetadataServiceActorSpec extends ServicesSpec { val config: Config = ConfigFactory.parseString(Config) lazy val actor: ActorRef = system.actorOf( - props = - MetadataServiceActor.props( - serviceConfig = globalConfigToMetadataServiceConfig(config), - globalConfig = config, - serviceRegistryActor = TestProbe("serviceRegistryActor").ref, - ), - name = "MetadataServiceActor-for-MetadataServiceActorSpec", + props = MetadataServiceActor.props( + serviceConfig = globalConfigToMetadataServiceConfig(config), + globalConfig = config, + serviceRegistryActor = TestProbe("serviceRegistryActor").ref + ), + name = "MetadataServiceActor-for-MetadataServiceActorSpec" ) val workflowId: WorkflowId = WorkflowId.randomId() - /* + /* Simple store / retrieve - */ + */ val key1: MetadataKey = MetadataKey(workflowId, None, "key1") val key2: MetadataKey = MetadataKey(workflowId, None, "key2") @@ -71,50 +70,64 @@ class MetadataServiceActorSpec extends ServicesSpec { val query4: MetadataQuery = MetadataQuery.forWorkflow(workflowId) val query5: MetadataQuery = MetadataQuery.forJob(workflowId, supJob) - val testCases: List[(String, MetadataQuery, String)] = List[(String, MetadataQuery, String)] ( - ("query1", query1, s"""{ - | "key1": "value2", - | "calls": {}, - | "id": "$workflowId" - |}""".stripMargin), - ("query2", query2, s"""{ - | "key2": "value1", - | "calls": {}, - | "id": "$workflowId" - |}""".stripMargin), - ("query3", query3, s"""{ - | "calls": { - | "sup.sup": [{ - | "dog": {}, - | "attempt": 1, - | "shardIndex": -1 - | }] - | }, - | "id": "$workflowId" - |}""".stripMargin), - ("query4", query4, s"""{ - | "key1": "value2", - | "key2": "value1", - | "calls": { - | "sup.sup": [{ - | "dog": {}, - | "attempt": 1, - | "shardIndex": -1 - | }] - | }, - | "id": "$workflowId" - |}""".stripMargin), - ("query5", query5, s"""{ - | "calls": { - | 
"sup.sup": [{ - | "dog": {}, - | "attempt": 1, - | "shardIndex": -1 - | }] - | }, - | "id": "$workflowId" - |}""".stripMargin), - + val testCases: List[(String, MetadataQuery, String)] = List[(String, MetadataQuery, String)]( + ("query1", + query1, + s"""{ + | "key1": "value2", + | "calls": {}, + | "id": "$workflowId" + |}""".stripMargin + ), + ("query2", + query2, + s"""{ + | "key2": "value1", + | "calls": {}, + | "id": "$workflowId" + |}""".stripMargin + ), + ("query3", + query3, + s"""{ + | "calls": { + | "sup.sup": [{ + | "dog": {}, + | "attempt": 1, + | "shardIndex": -1 + | }] + | }, + | "id": "$workflowId" + |}""".stripMargin + ), + ("query4", + query4, + s"""{ + | "key1": "value2", + | "key2": "value1", + | "calls": { + | "sup.sup": [{ + | "dog": {}, + | "attempt": 1, + | "shardIndex": -1 + | }] + | }, + | "id": "$workflowId" + |}""".stripMargin + ), + ("query5", + query5, + s"""{ + | "calls": { + | "sup.sup": [{ + | "dog": {}, + | "attempt": 1, + | "shardIndex": -1 + | }] + | }, + | "id": "$workflowId" + |}""".stripMargin + ) ) actorName should { @@ -126,10 +139,10 @@ class MetadataServiceActorSpec extends ServicesSpec { } testCases foreach { case (name, query, expectation) => - s"perform $name correctly" in { eventually(Timeout(10.seconds), Interval(2.seconds)) { - val response = Await.result((actor ? GetMetadataAction(query)).mapTo[SuccessfulMetadataJsonResponse], 1.seconds) + val response = + Await.result((actor ? GetMetadataAction(query)).mapTo[SuccessfulMetadataJsonResponse], 1.seconds) response.responseJson shouldBe expectation.parseJson } @@ -138,19 +151,26 @@ class MetadataServiceActorSpec extends ServicesSpec { "be able to query by Unarchived metadataArchiveStatus" in { - val summarizableStatusUpdate = PutMetadataAction(MetadataEvent( - MetadataKey(workflowId, None, WorkflowMetadataKeys.Status), - Option(MetadataValue("Running")), - moment.plusSeconds(1) - )) + val summarizableStatusUpdate = PutMetadataAction( + MetadataEvent( + MetadataKey(workflowId, None, WorkflowMetadataKeys.Status), + Option(MetadataValue("Running")), + moment.plusSeconds(1) + ) + ) actor ! summarizableStatusUpdate eventually(Timeout(10.seconds), Interval(2.seconds)) { - val queryEverythingResponse = Await.result((actor ? QueryForWorkflowsMatchingParameters(List.empty)).mapTo[WorkflowQuerySuccess], 1.seconds) + val queryEverythingResponse = + Await.result((actor ? QueryForWorkflowsMatchingParameters(List.empty)).mapTo[WorkflowQuerySuccess], 1.seconds) queryEverythingResponse.response.results.length should be(1) println(queryEverythingResponse) - val response1 = Await.result((actor ? QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "Unarchived")))).mapTo[WorkflowQuerySuccess], 1.seconds) + val response1 = + Await.result((actor ? QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "Unarchived")))) + .mapTo[WorkflowQuerySuccess], + 1.seconds + ) // We submitted one workflow, so we should should see one value here: response1.response.results.length should be(1) } @@ -158,21 +178,35 @@ class MetadataServiceActorSpec extends ServicesSpec { "be able to query by Archived metadataArchiveStatus" in { - val response2 = Await.result((actor ? QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "Archived")))).mapTo[WorkflowQuerySuccess], 1.seconds) + val response2 = + Await.result((actor ? 
QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "Archived")))) + .mapTo[WorkflowQuerySuccess], + 1.seconds + ) // That workflow hasn't been marked as archived so this result set is empty: response2.response.results.length should be(0) } "be able to query by ArchiveFailed metadataArchiveStatus" in { - val response3 = Await.result((actor ? QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "ArchiveFailed")))).mapTo[WorkflowQuerySuccess], 1.seconds) + val response3 = + Await.result((actor ? QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "ArchiveFailed")))) + .mapTo[WorkflowQuerySuccess], + 1.seconds + ) // That workflow hasn't been marked as archived so this result set is empty: response3.response.results.length should be(0) } "not be able to query by an invalid metadataArchiveStatus" in { - val response4 = Await.result((actor ? QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "!!OOPS!!")))).mapTo[WorkflowQueryFailure], 1.seconds) + val response4 = + Await.result((actor ? QueryForWorkflowsMatchingParameters(List(("metadataArchiveStatus", "!!OOPS!!")))) + .mapTo[WorkflowQueryFailure], + 1.seconds + ) // That workflow hasn't been marked as archived so this result set is empty: - response4.reason.getMessage should be("Unrecognized 'metadata archive status' value(s): No such MetadataArchiveStatus: !!OOPS!!") + response4.reason.getMessage should be( + "Unrecognized 'metadata archive status' value(s): No such MetadataArchiveStatus: !!OOPS!!" + ) } } } @@ -184,12 +218,15 @@ object MetadataServiceActorSpec { |services.MetadataService.config.db-flush-rate = 100 millis """.stripMargin - val ConfigWithoutSummarizer: String = Config + """ - |services.MetadataService.config.metadata-summary-refresh-interval = "Inf" + val ConfigWithoutSummarizer: String = + Config + """ + |services.MetadataService.config.metadata-summary-refresh-interval = "Inf" """.stripMargin // Use this to convert the above "global" configs into metadata service specific "service config"s: - def globalConfigToMetadataServiceConfig(config: Config): Config = if (config.hasPath("services.MetadataService.config")) { + def globalConfigToMetadataServiceConfig(config: Config): Config = if ( + config.hasPath("services.MetadataService.config") + ) { config.getConfig("services.MetadataService.config") } else { ConfigFactory.empty() diff --git a/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala b/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala index 96418047723..1a4783dee87 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala @@ -16,16 +16,15 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration._ class WriteMetadataActorBenchmark extends TestKitSuite with AnyFlatSpecLike with Eventually with Matchers { - override implicit val patienceConfig = PatienceConfig(scaled(30.seconds), 1.second) + implicit override val patienceConfig = PatienceConfig(scaled(30.seconds), 1.second) behavior of "WriteMetadataActor" val workflowId = WorkflowId.randomId() val registry = TestProbe().ref - def makeEvent = { + def makeEvent = MetadataEvent(MetadataKey(workflowId, None, "metadata_key"), MetadataValue("My Value")) - } def time[T](description: String)(thunk: => T): T = { val t1 = System.currentTimeMillis @@ -38,16 +37,18 @@ class 
WriteMetadataActorBenchmark extends TestKitSuite with AnyFlatSpecLike with private val databaseSystem = MysqlEarliestDatabaseSystem private val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem) - private lazy val dataAccess = DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem) + private lazy val dataAccess = + DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem) it should "start container if required" taggedAs IntegrationTest in { - containerOpt.foreach { _.start } + containerOpt.foreach(_.start) } it should "provide good throughput" taggedAs IntegrationTest in { - val writeActor = TestFSMRef(new WriteMetadataActor(1000, 5.seconds, registry, Int.MaxValue, MetadataStatisticsDisabled) { - override val metadataDatabaseInterface: MetadataSlickDatabase = dataAccess - }) + val writeActor = + TestFSMRef(new WriteMetadataActor(1000, 5.seconds, registry, Int.MaxValue, MetadataStatisticsDisabled) { + override val metadataDatabaseInterface: MetadataSlickDatabase = dataAccess + }) time("metadata write") { (0 to 1 * 1000 * 1000) @@ -67,6 +68,6 @@ class WriteMetadataActorBenchmark extends TestKitSuite with AnyFlatSpecLike with } it should "stop container if required" taggedAs IntegrationTest in { - containerOpt.foreach { _.stop() } + containerOpt.foreach(_.stop()) } } diff --git a/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorSpec.scala index 6ed7fa6c402..9b9e4f75ccd 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorSpec.scala @@ -10,7 +10,13 @@ import cromwell.core.{TestKitSuite, WorkflowId} import cromwell.database.sql.joins.MetadataJobQueryValue import cromwell.database.sql.tables.{InformationSchemaEntry, MetadataEntry, WorkflowMetadataSummaryEntry} import cromwell.database.sql.{MetadataSqlDatabase, SqlDatabase} -import cromwell.services.metadata.MetadataService.{MetadataWriteAction, MetadataWriteFailure, MetadataWriteSuccess, PutMetadataAction, PutMetadataActionAndRespond} +import cromwell.services.metadata.MetadataService.{ + MetadataWriteAction, + MetadataWriteFailure, + MetadataWriteSuccess, + PutMetadataAction, + PutMetadataActionAndRespond +} import cromwell.services.metadata.impl.MetadataStatisticsRecorder.MetadataStatisticsDisabled import cromwell.services.metadata.impl.WriteMetadataActorSpec.BatchSizeCountingWriteMetadataActor import cromwell.services.metadata.{MetadataEvent, MetadataKey, MetadataValue} @@ -36,17 +42,22 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc override val metadataDatabaseInterface = mockDatabaseInterface(0) }) - def metadataEvent(index: Int) = PutMetadataAction(MetadataEvent(MetadataKey(WorkflowId.randomId(), None, s"metadata_key_$index"), MetadataValue(s"hello_$index"))) + def metadataEvent(index: Int) = PutMetadataAction( + MetadataEvent(MetadataKey(WorkflowId.randomId(), None, s"metadata_key_$index"), MetadataValue(s"hello_$index")) + ) - val probes = (0 until 27).map({ _ => - val probe = TestProbe() - probe - }).zipWithIndex.map { - case (probe, index) => probe -> metadataEvent(index) - } + val probes = (0 until 27) + .map { _ => + val probe = TestProbe() + probe + } + .zipWithIndex + .map { case (probe, index) => + probe -> 
metadataEvent(index) + } - probes foreach { - case (probe, msg) => probe.send(writeActor, msg) + probes foreach { case (probe, msg) => + probe.send(writeActor, msg) } eventually { @@ -64,24 +75,34 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc override val metadataDatabaseInterface = mockDatabaseInterface(failureRate) }) - def metadataEvent(index: Int, probe: ActorRef) = PutMetadataActionAndRespond(List(MetadataEvent(MetadataKey(WorkflowId.randomId(), None, s"metadata_key_$index"), MetadataValue(s"hello_$index"))), probe) - - val probes = (0 until 43).map({ _ => - val probe = TestProbe() - probe - }).zipWithIndex.map { - case (probe, index) => probe -> metadataEvent(index, probe.ref) - } - - probes foreach { - case (probe, msg) => probe.send(writeActor, msg) + def metadataEvent(index: Int, probe: ActorRef) = + PutMetadataActionAndRespond(List( + MetadataEvent(MetadataKey(WorkflowId.randomId(), None, s"metadata_key_$index"), + MetadataValue(s"hello_$index") + ) + ), + probe + ) + + val probes = (0 until 43) + .map { _ => + val probe = TestProbe() + probe + } + .zipWithIndex + .map { case (probe, index) => + probe -> metadataEvent(index, probe.ref) + } + + probes foreach { case (probe, msg) => + probe.send(writeActor, msg) } - probes.foreach { - case (probe, msg) => probe.expectMsg(MetadataWriteSuccess(msg.events)) + probes.foreach { case (probe, msg) => + probe.expectMsg(MetadataWriteSuccess(msg.events)) } eventually { - writeActor.underlyingActor.failureCount should be (5 * failureRate) + writeActor.underlyingActor.failureCount should be(5 * failureRate) } writeActor.stop() @@ -94,24 +115,32 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc override val metadataDatabaseInterface = mockDatabaseInterface(100) }) - def metadataEvent(index: Int, probe: ActorRef) = PutMetadataActionAndRespond(List(MetadataEvent(MetadataKey(WorkflowId.randomId(), None, s"metadata_key_$index"), MetadataValue(s"hello_$index"))), probe) - - val probes = (0 until 43).map({ _ => - val probe = TestProbe() + def metadataEvent(index: Int, probe: ActorRef) = PutMetadataActionAndRespond( + List( + MetadataEvent(MetadataKey(WorkflowId.randomId(), None, s"metadata_key_$index"), MetadataValue(s"hello_$index")) + ), probe - }).zipWithIndex.map { - case (probe, index) => probe -> metadataEvent(index, probe.ref) - } + ) + + val probes = (0 until 43) + .map { _ => + val probe = TestProbe() + probe + } + .zipWithIndex + .map { case (probe, index) => + probe -> metadataEvent(index, probe.ref) + } - probes foreach { - case (probe, msg) => probe.send(writeActor, msg) + probes foreach { case (probe, msg) => + probe.send(writeActor, msg) } - probes.foreach { - case (probe, msg) => probe.expectMsg(MetadataWriteFailure(WriteMetadataActorSpec.IntermittentException, msg.events)) + probes.foreach { case (probe, msg) => + probe.expectMsg(MetadataWriteFailure(WriteMetadataActorSpec.IntermittentException, msg.events)) } eventually { - writeActor.underlyingActor.failureCount should be (5 * 10) + writeActor.underlyingActor.failureCount should be(5 * 10) } writeActor.stop() @@ -127,8 +156,7 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc override def connectionDescription: String = "Mock Database" - override def existsMetadataEntries()( - implicit ec: ExecutionContext): Nothing = notImplemented() + override def existsMetadataEntries()(implicit ec: ExecutionContext): Nothing = notImplemented() var requestsSinceLastSuccess = 0 // Return 
successful @@ -140,8 +168,8 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc submissionMetadataKey: String, parentWorkflowIdKey: String, rootWorkflowIdKey: String, - labelMetadataKey: String) - (implicit ec: ExecutionContext): Future[Unit] = { + labelMetadataKey: String + )(implicit ec: ExecutionContext): Future[Unit] = if (requestsSinceLastSuccess == failuresBetweenEachSuccess) { requestsSinceLastSuccess = 0 Future.successful(()) @@ -149,50 +177,50 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc requestsSinceLastSuccess += 1 Future.failed(WriteMetadataActorSpec.IntermittentException) } - } - override def metadataEntryExists(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Nothing = notImplemented() + override def metadataEntryExists(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Nothing = + notImplemented() - override def metadataSummaryEntryExists(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Nothing = notImplemented() + override def metadataSummaryEntryExists(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Nothing = + notImplemented() - override def queryMetadataEntries(workflowExecutionUuid: String, - timeout: Duration) - (implicit ec: ExecutionContext): Nothing = notImplemented() + override def queryMetadataEntries(workflowExecutionUuid: String, timeout: Duration)(implicit + ec: ExecutionContext + ): Nothing = notImplemented() override def streamMetadataEntries(workflowExecutionUuid: String): Nothing = notImplemented() - override def queryMetadataEntries(workflowExecutionUuid: String, - metadataKey: String, - timeout: Duration)(implicit ec: ExecutionContext): Nothing = notImplemented() + override def queryMetadataEntries(workflowExecutionUuid: String, metadataKey: String, timeout: Duration)(implicit + ec: ExecutionContext + ): Nothing = notImplemented() override def queryMetadataEntries(workflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], - timeout: Duration)(implicit ec: ExecutionContext): Nothing = notImplemented() + timeout: Duration + )(implicit ec: ExecutionContext): Nothing = notImplemented() override def queryMetadataEntries(workflowUuid: String, metadataKey: String, callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], - timeout: Duration)(implicit ec: ExecutionContext): Nothing = notImplemented() + timeout: Duration + )(implicit ec: ExecutionContext): Nothing = notImplemented() override def queryMetadataEntryWithKeyConstraints(workflowExecutionUuid: String, - metadataKeysToFilterFor: List[String], - metadataKeysToFilterAgainst: List[String], - metadataJobQueryValue: MetadataJobQueryValue, - timeout: Duration) - (implicit ec: ExecutionContext): Nothing = notImplemented() - - override def summarizeIncreasing(labelMetadataKey: String, - limit: Int, - buildUpdatedSummary: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) - => WorkflowMetadataSummaryEntry) - (implicit ec: ExecutionContext): Nothing = notImplemented() + metadataKeysToFilterFor: List[String], + metadataKeysToFilterAgainst: List[String], + metadataJobQueryValue: MetadataJobQueryValue, + timeout: Duration + )(implicit ec: ExecutionContext): Nothing = notImplemented() + + override def summarizeIncreasing( + labelMetadataKey: String, + limit: Int, + buildUpdatedSummary: (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry + )(implicit ec: 
ExecutionContext): Nothing = notImplemented() /** * Retrieves a window of summarizable metadata satisfying the specified criteria. @@ -200,23 +228,23 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc * @param buildUpdatedSummary Takes in the optional existing summary and the metadata, returns the new summary. * @return A `Future` with the maximum metadataEntryId summarized by the invocation of this method. */ - override def summarizeDecreasing(summaryNameDecreasing: String, - summaryNameIncreasing: String, - labelMetadataKey: String, - limit: Int, - buildUpdatedSummary: - (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) - => WorkflowMetadataSummaryEntry) - (implicit ec: ExecutionContext): Nothing = notImplemented() - - override def getWorkflowStatus(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Nothing = notImplemented() + override def summarizeDecreasing( + summaryNameDecreasing: String, + summaryNameIncreasing: String, + labelMetadataKey: String, + limit: Int, + buildUpdatedSummary: (Option[WorkflowMetadataSummaryEntry], Seq[MetadataEntry]) => WorkflowMetadataSummaryEntry + )(implicit ec: ExecutionContext): Nothing = notImplemented() + + override def getWorkflowStatus(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Nothing = + notImplemented() - override def getWorkflowLabels(workflowExecutionUuid: String) - (implicit ec: ExecutionContext): Nothing = notImplemented() + override def getWorkflowLabels(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Nothing = + notImplemented() - override def getRootAndSubworkflowLabels(rootWorkflowExecutionUuid: String) - (implicit ec: ExecutionContext): Nothing = notImplemented() + override def getRootAndSubworkflowLabels(rootWorkflowExecutionUuid: String)(implicit + ec: ExecutionContext + ): Nothing = notImplemented() override def queryWorkflowSummaries(parentWorkflowIdMetadataKey: String, workflowStatuses: Set[String], @@ -233,10 +261,9 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc includeSubworkflows: Boolean, page: Option[Int], pageSize: Option[Int], - newestFirst: Boolean) - (implicit ec: ExecutionContext): Nothing = { + newestFirst: Boolean + )(implicit ec: ExecutionContext): Nothing = notImplemented() - } override def countWorkflowSummaries(parentWorkflowIdMetadataKey: String, workflowStatuses: Set[String], @@ -250,66 +277,101 @@ class WriteMetadataActorSpec extends TestKitSuite with AnyFlatSpecLike with Matc startTimestampOption: Option[Timestamp], endTimestampOption: Option[Timestamp], metadataArchiveStatus: Set[Option[String]], - includeSubworkflows: Boolean) - (implicit ec: ExecutionContext): Nothing = { + includeSubworkflows: Boolean + )(implicit ec: ExecutionContext): Nothing = notImplemented() - } - override def updateMetadataArchiveStatus(workflowExecutionUuid: String, newArchiveStatus: Option[String]): Future[Int] = notImplemented() + override def updateMetadataArchiveStatus(workflowExecutionUuid: String, + newArchiveStatus: Option[String] + ): Future[Int] = notImplemented() - override def withConnection[A](block: Connection => A): Nothing = { + override def withConnection[A](block: Connection => A): Nothing = notImplemented() - } override def close(): Nothing = notImplemented() - override def deleteAllMetadataForWorkflowAndUpdateArchiveStatus(workflowId: String, newArchiveStatus: Option[String])(implicit ec: ExecutionContext): Future[Int] = { + override def 
deleteAllMetadataForWorkflowAndUpdateArchiveStatus(workflowId: String, + newArchiveStatus: Option[String] + )(implicit ec: ExecutionContext): Future[Int] = notImplemented() - } - override def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]] = { + override def getRootWorkflowId(workflowId: String)(implicit ec: ExecutionContext): Future[Option[String]] = notImplemented() - } - override def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], thresholdTimestamp: Timestamp, batchSize: Long)(implicit ec: ExecutionContext): Future[Seq[String]] = { + override def queryWorkflowIdsByArchiveStatusAndEndedOnOrBeforeThresholdTimestamp(archiveStatus: Option[String], + thresholdTimestamp: Timestamp, + batchSize: Long + )(implicit ec: ExecutionContext): Future[Seq[String]] = notImplemented() - } - override def getSummaryQueueSize()(implicit ec: ExecutionContext): Future[Int] = { + override def getSummaryQueueSize()(implicit ec: ExecutionContext): Future[Int] = notImplemented() - } - override def countMetadataEntries(workflowExecutionUuid: String, expandSubWorkflows: Boolean, timeout: Duration)(implicit ec: ExecutionContext): Future[Int] = { + override def countMetadataEntries(workflowExecutionUuid: String, expandSubWorkflows: Boolean, timeout: Duration)( + implicit ec: ExecutionContext + ): Future[Int] = notImplemented() - } - override def countMetadataEntries(workflowExecutionUuid: String, metadataKey: String, expandSubWorkflows: Boolean, timeout: Duration)(implicit ec: ExecutionContext): Future[Int] = { + override def countMetadataEntries(workflowExecutionUuid: String, + metadataKey: String, + expandSubWorkflows: Boolean, + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = notImplemented() - } - override def countMetadataEntries(workflowExecutionUuid: String, callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], expandSubWorkflows: Boolean, timeout: Duration)(implicit ec: ExecutionContext): Future[Int] = { + override def countMetadataEntries(workflowExecutionUuid: String, + callFullyQualifiedName: String, + jobIndex: Option[Int], + jobAttempt: Option[Int], + expandSubWorkflows: Boolean, + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = notImplemented() - } - override def countMetadataEntries(workflowUuid: String, metadataKey: String, callFullyQualifiedName: String, jobIndex: Option[Int], jobAttempt: Option[Int], expandSubWorkflows: Boolean, timeout: Duration)(implicit ec: ExecutionContext): Future[Int] = { + override def countMetadataEntries(workflowUuid: String, + metadataKey: String, + callFullyQualifiedName: String, + jobIndex: Option[Int], + jobAttempt: Option[Int], + expandSubWorkflows: Boolean, + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = notImplemented() - } - override def countMetadataEntryWithKeyConstraints(workflowExecutionUuid: String, metadataKeysToFilterFor: List[String], metadataKeysToFilterAgainst: List[String], metadataJobQueryValue: MetadataJobQueryValue, expandSubWorkflows: Boolean, timeout: Duration)(implicit ec: ExecutionContext): Future[Int] = { + override def countMetadataEntryWithKeyConstraints(workflowExecutionUuid: String, + metadataKeysToFilterFor: List[String], + metadataKeysToFilterAgainst: List[String], + metadataJobQueryValue: MetadataJobQueryValue, + expandSubWorkflows: Boolean, + timeout: Duration + )(implicit ec: ExecutionContext): Future[Int] = notImplemented() - } - override def 
getMetadataArchiveStatusAndEndTime(workflowId: String)(implicit ec: ExecutionContext): Future[(Option[String], Option[Timestamp])] = notImplemented() + override def getMetadataArchiveStatusAndEndTime(workflowId: String)(implicit + ec: ExecutionContext + ): Future[(Option[String], Option[Timestamp])] = notImplemented() - override def queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], workflowEndTimestampThreshold: Timestamp, batchSize: Long)(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = notImplemented() + override def queryWorkflowsToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], + workflowEndTimestampThreshold: Timestamp, + batchSize: Long + )(implicit ec: ExecutionContext): Future[Seq[WorkflowMetadataSummaryEntry]] = notImplemented() - override def countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp(workflowStatuses: List[String], workflowEndTimestampThreshold: Timestamp)(implicit ec: ExecutionContext): Future[Int] = notImplemented() + override def countWorkflowsLeftToArchiveThatEndedOnOrBeforeThresholdTimestamp( + workflowStatuses: List[String], + workflowEndTimestampThreshold: Timestamp + )(implicit ec: ExecutionContext): Future[Int] = notImplemented() - override def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp(workflowEndTimestampThreshold: Timestamp)(implicit ec: ExecutionContext): Future[Int] = notImplemented() + override def countWorkflowsLeftToDeleteThatEndedOnOrBeforeThresholdTimestamp( + workflowEndTimestampThreshold: Timestamp + )(implicit ec: ExecutionContext): Future[Int] = notImplemented() - override def getMetadataTableSizeInformation()(implicit ec: ExecutionContext): Future[Option[InformationSchemaEntry]] = notImplemented() + override def getMetadataTableSizeInformation()(implicit + ec: ExecutionContext + ): Future[Option[InformationSchemaEntry]] = notImplemented() - override def getFailedJobsMetadataWithWorkflowId(rootWorkflowId: String)(implicit ec: ExecutionContext): Future[Vector[MetadataEntry]] = notImplemented() + override def getFailedJobsMetadataWithWorkflowId(rootWorkflowId: String)(implicit + ec: ExecutionContext + ): Future[Vector[MetadataEntry]] = notImplemented() } } @@ -320,7 +382,8 @@ object WriteMetadataActorSpec { class BatchSizeCountingWriteMetadataActor(override val batchSize: Int, override val flushRate: FiniteDuration, override val serviceRegistryActor: ActorRef, - override val threshold: Int) extends WriteMetadataActor(batchSize, flushRate, serviceRegistryActor, threshold, MetadataStatisticsDisabled) { + override val threshold: Int + ) extends WriteMetadataActor(batchSize, flushRate, serviceRegistryActor, threshold, MetadataStatisticsDisabled) { var batchSizes: Vector[Int] = Vector.empty var failureCount: Int = 0 diff --git a/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala index 3caee16eb11..8b4219e1d5e 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala @@ -8,7 +8,12 @@ import com.google.api.services.pubsub.model.Topic import com.typesafe.config.{Config, ConfigFactory} import cromwell.core.WorkflowId import cromwell.services.ServicesSpec -import 
cromwell.services.metadata.MetadataService.{MetadataWriteFailure, MetadataWriteSuccess, PutMetadataAction, PutMetadataActionAndRespond} +import cromwell.services.metadata.MetadataService.{ + MetadataWriteFailure, + MetadataWriteSuccess, + PutMetadataAction, + PutMetadataActionAndRespond +} import cromwell.services.metadata.{MetadataEvent, MetadataKey, MetadataValue} import org.broadinstitute.dsde.workbench.google.GooglePubSubDAO import org.broadinstitute.dsde.workbench.google.GooglePubSubDAO.PubSubMessage @@ -177,29 +182,36 @@ class PubSubMetadataServiceActorSpec extends ServicesSpec { } object PubSubMetadataServiceActorSpec { + /** A variant of PubSubMetadataServiceActor with a GooglePubSubDAO which will always return success */ - class SuccessfulMockPubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { + class SuccessfulMockPubSubMetadataServiceActor(serviceConfig: Config, + globalConfig: Config, + serviceRegistryActor: ActorRef + ) extends PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { override def createPubSubConnection(): GooglePubSubDAO = new SuccessfulMockGooglePubSubDao } /** A variant of PubSubMetadataServiceActor with a GooglePubSubDAO which will always return failure */ - class FailingToCreateTopicMockPubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { + class FailingToCreateTopicMockPubSubMetadataServiceActor(serviceConfig: Config, + globalConfig: Config, + serviceRegistryActor: ActorRef + ) extends PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { override def createPubSubConnection(): GooglePubSubDAO = new FailingToCreateTopicMockGooglePubSubDao } /** A variant of PubSubMetadataServiceActor which will fail on message publication */ - class FailToPublishMockPubSubMetadataServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) - extends PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { + class FailToPublishMockPubSubMetadataServiceActor(serviceConfig: Config, + globalConfig: Config, + serviceRegistryActor: ActorRef + ) extends PubSubMetadataServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { override def createPubSubConnection(): GooglePubSubDAO = new FailToPublishMockGooglePubSubDao } trait MockGooglePubSubDao extends GooglePubSubDAO { - override implicit val executionContext: ExecutionContext = ExecutionContext.global + implicit override val executionContext: ExecutionContext = ExecutionContext.global override def createTopic(topicName: String): Future[Boolean] override def createSubscription(topicName: String, subscriptionName: String): Future[Boolean] @@ -207,26 +219,36 @@ object PubSubMetadataServiceActorSpec { // The following aren't used so leaving them empty override def deleteTopic(topicName: String): Future[Boolean] = throw new UnsupportedOperationException - override def getTopic(topicName: String)(implicit executionContext: ExecutionContext): Future[Option[Topic]] = throw new UnsupportedOperationException + override def getTopic(topicName: String)(implicit executionContext: ExecutionContext): Future[Option[Topic]] = + throw new UnsupportedOperationException override def deleteSubscription(subscriptionName: String): Future[Boolean] = throw new 
UnsupportedOperationException - override def acknowledgeMessages(subscriptionName: String, messages: scala.collection.Seq[PubSubMessage]): Future[Unit] = throw new UnsupportedOperationException - override def acknowledgeMessagesById(subscriptionName: String, ackIds: scala.collection.Seq[String]): Future[Unit] = throw new UnsupportedOperationException - override def pullMessages(subscriptionName: String, maxMessages: Int): Future[scala.collection.Seq[PubSubMessage]] = throw new UnsupportedOperationException - override def setTopicIamPermissions(topicName: String, permissions: Map[WorkbenchEmail, String]): Future[Unit] = throw new UnsupportedOperationException + override def acknowledgeMessages(subscriptionName: String, + messages: scala.collection.Seq[PubSubMessage] + ): Future[Unit] = throw new UnsupportedOperationException + override def acknowledgeMessagesById(subscriptionName: String, ackIds: scala.collection.Seq[String]): Future[Unit] = + throw new UnsupportedOperationException + override def pullMessages(subscriptionName: String, maxMessages: Int): Future[scala.collection.Seq[PubSubMessage]] = + throw new UnsupportedOperationException + override def setTopicIamPermissions(topicName: String, permissions: Map[WorkbenchEmail, String]): Future[Unit] = + throw new UnsupportedOperationException } class SuccessfulMockGooglePubSubDao extends MockGooglePubSubDao { override def createTopic(topicName: String): Future[Boolean] = Future.successful(true) - override def createSubscription(topicName: String, subscriptionName: String): Future[Boolean] = Future.successful(true) - override def publishMessages(topicName: String, messages: scala.collection.Seq[String]): Future[Unit] = Future.successful(()) + override def createSubscription(topicName: String, subscriptionName: String): Future[Boolean] = + Future.successful(true) + override def publishMessages(topicName: String, messages: scala.collection.Seq[String]): Future[Unit] = + Future.successful(()) } class FailingToCreateTopicMockGooglePubSubDao extends SuccessfulMockGooglePubSubDao { - override def createTopic(topicName: String): Future[Boolean] = Future.failed(new RuntimeException("Unable to create topic")) + override def createTopic(topicName: String): Future[Boolean] = + Future.failed(new RuntimeException("Unable to create topic")) } class FailToPublishMockGooglePubSubDao extends SuccessfulMockGooglePubSubDao { - override def publishMessages(topicName: String, messages: scala.collection.Seq[String]): Future[Unit] = Future.failed(new RuntimeException("sorry charlie")) + override def publishMessages(topicName: String, messages: scala.collection.Seq[String]): Future[Unit] = + Future.failed(new RuntimeException("sorry charlie")) } // This doesn't include a project so should be a failure diff --git a/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala b/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala index 95a0f64fd96..47fba0684fc 100644 --- a/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala +++ b/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala @@ -79,8 +79,10 @@ class DescriberSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { object DescriberSpec { sealed trait DescriberSpecTestCase { def expectedDescription: String } - final case class FileAndDescription(file: String, override val expectedDescription: String) extends DescriberSpecTestCase - final case class UrlAndDescription(url: String, override val expectedDescription: String) extends 
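The mocks above follow a success/failure pair pattern: a happy-path mock answers everything with completed Futures, and each failure variant overrides exactly one member. A minimal sketch under an invented PubSubDao trait standing in for GooglePubSubDAO:

import scala.concurrent.Future

trait PubSubDao {
  def createTopic(topicName: String): Future[Boolean]
  def publishMessages(topicName: String, messages: Seq[String]): Future[Unit]
}

class SuccessfulDao extends PubSubDao {
  override def createTopic(topicName: String): Future[Boolean] = Future.successful(true)
  override def publishMessages(topicName: String, messages: Seq[String]): Future[Unit] = Future.successful(())
}

// Overriding a single member of the happy-path mock yields a targeted failure case.
class FailToPublishDao extends SuccessfulDao {
  override def publishMessages(topicName: String, messages: Seq[String]): Future[Unit] =
    Future.failed(new RuntimeException("sorry charlie"))
}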
DescriberSpecTestCase + final case class FileAndDescription(file: String, override val expectedDescription: String) + extends DescriberSpecTestCase + final case class UrlAndDescription(url: String, override val expectedDescription: String) + extends DescriberSpecTestCase def interpretTestCase(caseDirectory: Path): DescriberSpecTestCase = { val description = caseDirectory.resolve("description.json").contentAsString diff --git a/services/src/test/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActorSpec.scala b/services/src/test/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActorSpec.scala index a37f4021650..08b9955dcc1 100644 --- a/services/src/test/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActorSpec.scala +++ b/services/src/test/scala/cromwell/services/womtool/impl/WomtoolServiceInCromwellActorSpec.scala @@ -19,13 +19,17 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec { val womtoolActor: ActorRef = system.actorOf( - props = WomtoolServiceInCromwellActor.props(ConfigFactory.empty(), ConfigFactory.empty(), TestProbe("serviceRegistryActor").ref), - name = "womtoolActor", + props = WomtoolServiceInCromwellActor.props(ConfigFactory.empty(), + ConfigFactory.empty(), + TestProbe("serviceRegistryActor").ref + ), + name = "womtoolActor" ) CromwellLanguages.initLanguages(LanguageConfiguration.AllLanguageEntries) object TestData { - val workflowUrlValid = "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl" + val workflowUrlValid = + "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/womtool/src/test/resources/validate/wdl_draft3/valid/callable_imports/my_workflow.wdl" val workflowUrlNotFound = "https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow" val workflowUrlBadHost = "https://zardoz.zardoz" val workflowUrlNotAUrl = "Zardoz" @@ -167,33 +171,40 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec { val wsfc = wsfcConjurer(workflowSource = Option(TestData.wdlValid), inputsJson = TestData.bogusInputs) - check(DescribeRequest(wsfc), DescribeSuccess( - description = WorkflowDescription( - valid = false, - errors = List("Required workflow input 'wf_hello.hello.addressee' not specified"), - validWorkflow = true, - name = "wf_hello", - inputs = List(InputDescription("hello.addressee", WomStringType, "String", optional = false, None)), - submittedDescriptorType = Map("descriptorType" -> "WDL", "descriptorTypeVersion" -> "1.0"), - isRunnableWorkflow = true, + check( + DescribeRequest(wsfc), + DescribeSuccess( + description = WorkflowDescription( + valid = false, + errors = List("Required workflow input 'wf_hello.hello.addressee' not specified"), + validWorkflow = true, + name = "wf_hello", + inputs = List(InputDescription("hello.addressee", WomStringType, "String", optional = false, None)), + submittedDescriptorType = Map("descriptorType" -> "WDL", "descriptorTypeVersion" -> "1.0"), + isRunnableWorkflow = true + ) ) - )) + ) } "return valid = false, validWorkflow = true for a valid inputs-requiring workflow with an empty inputs JSON" in { val wsfc = wsfcConjurer(workflowSource = Option(TestData.wdlValid), inputsJson = TestData.emptyInputs) - check(DescribeRequest(wsfc), DescribeSuccess( - description = WorkflowDescription( - valid = false, - errors = List("Required workflow input 'wf_hello.hello.addressee' not specified"), - validWorkflow = true, - name = "wf_hello", - inputs = 
List(InputDescription("hello.addressee", WomStringType, "String", optional = false, None)), - submittedDescriptorType = Map("descriptorType" -> "WDL", "descriptorTypeVersion" -> "1.0"), - isRunnableWorkflow = true), - )) + check( + DescribeRequest(wsfc), + DescribeSuccess( + description = WorkflowDescription( + valid = false, + errors = List("Required workflow input 'wf_hello.hello.addressee' not specified"), + validWorkflow = true, + name = "wf_hello", + inputs = List(InputDescription("hello.addressee", WomStringType, "String", optional = false, None)), + submittedDescriptorType = Map("descriptorType" -> "WDL", "descriptorTypeVersion" -> "1.0"), + isRunnableWorkflow = true + ) + ) + ) } "return valid for a valid no-inputs workflow with empty inputs" in { @@ -228,14 +239,27 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec { val wsfc = wsfcConjurer(workflowSource = Option(TestData.wdlValidNoInputs), inputsJson = TestData.bogusInputs) - check(DescribeRequest(wsfc), DescribeSuccess( - description = WorkflowDescription(valid = false, errors = List("WARNING: Unexpected input provided: foo.bar (expected inputs: [])"), validWorkflow = true, name = "wf_hello", inputs = List.empty, submittedDescriptorType = Map("descriptorType" -> "WDL", "descriptorTypeVersion" -> "1.0"), isRunnableWorkflow = true))) + check( + DescribeRequest(wsfc), + DescribeSuccess( + description = WorkflowDescription( + valid = false, + errors = List("WARNING: Unexpected input provided: foo.bar (expected inputs: [])"), + validWorkflow = true, + name = "wf_hello", + inputs = List.empty, + submittedDescriptorType = Map("descriptorType" -> "WDL", "descriptorTypeVersion" -> "1.0"), + isRunnableWorkflow = true + ) + ) + ) } // In draft-2 we allow extraneous inputs for legacy reasons - e.g. 
users put comments in them
    "return valid for a valid no-inputs draft-2 workflow with extraneous inputs" in {
-      val wsfc = wsfcConjurer(workflowSource = Option(TestData.wdlValidDraft2NoInputs), inputsJson = TestData.bogusInputs)
+      val wsfc =
+        wsfcConjurer(workflowSource = Option(TestData.wdlValidDraft2NoInputs), inputsJson = TestData.bogusInputs)

       check(
         DescribeRequest(wsfc),
@@ -298,7 +322,8 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec {

     "return an error when both workflow URL and workflow source specified" in {

-      val wsfc = wsfcConjurer(workflowSource = Option(TestData.wdlInvalid), workflowUrl = Option(TestData.workflowUrlValid))
+      val wsfc =
+        wsfcConjurer(workflowSource = Option(TestData.wdlInvalid), workflowUrl = Option(TestData.workflowUrlValid))

       check(DescribeRequest(wsfc), DescribeFailure("Both workflow source and url can't be supplied"))
     }
@@ -307,9 +332,12 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec {

       val wsfc = wsfcConjurer(workflowUrl = Option(TestData.workflowUrlNotFound))

-      check(DescribeRequest(wsfc),
+      check(
+        DescribeRequest(wsfc),
         DescribeFailure(
-          "Failed to resolve 'https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow (reason 1 of 1): 404: Not Found"))
+          "Failed to resolve 'https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://raw.githubusercontent.com/broadinstitute/cromwell/develop/my_workflow (reason 1 of 1): 404: Not Found"
+        )
+      )
     }

     "return an error when the workflow URL's host can't be resolved" in {
@@ -320,9 +348,17 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec {

      (for {
        result <- (womtoolActor ?
DescribeRequest(wsfc)).mapTo[DescribeResult] _ = result should ( - be(DescribeFailure("Failed to resolve 'https://zardoz.zardoz' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://zardoz.zardoz (reason 1 of 1): HTTP resolver with headers had an unexpected error (zardoz.zardoz: Name or service not known)")) - or - be(DescribeFailure("Failed to resolve 'https://zardoz.zardoz' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://zardoz.zardoz (reason 1 of 1): HTTP resolver with headers had an unexpected error (zardoz.zardoz: nodename nor servname provided, or not known)")) + be( + DescribeFailure( + "Failed to resolve 'https://zardoz.zardoz' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://zardoz.zardoz (reason 1 of 1): HTTP resolver with headers had an unexpected error (zardoz.zardoz: Name or service not known)" + ) + ) + or + be( + DescribeFailure( + "Failed to resolve 'https://zardoz.zardoz' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Failed to download https://zardoz.zardoz (reason 1 of 1): HTTP resolver with headers had an unexpected error (zardoz.zardoz: nodename nor servname provided, or not known)" + ) + ) ) } yield ()).futureValue } @@ -332,7 +368,12 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec { val wsfc = wsfcConjurer(workflowUrl = Option(TestData.workflowUrlNotAUrl)) // The HTTP resolver has figured out that you have not given it a URL and assumes it's a relative path - check(DescribeRequest(wsfc), DescribeFailure("Failed to resolve 'Zardoz' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Relative path")) + check( + DescribeRequest(wsfc), + DescribeFailure( + "Failed to resolve 'Zardoz' using resolver: 'http importer (no 'relative-to' origin)' (reason 1 of 1): Relative path" + ) + ) } } @@ -346,7 +387,8 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec { workflowUrl: Option[WorkflowUrl] = None, workflowType: Option[WorkflowType] = None, workflowTypeVersion: Option[WorkflowTypeVersion] = None, - inputsJson: WorkflowJson = ""): WorkflowSourceFilesCollection = { + inputsJson: WorkflowJson = "" + ): WorkflowSourceFilesCollection = WorkflowSourceFilesCollection( workflowSource = workflowSource, workflowUrl = workflowUrl, @@ -361,6 +403,5 @@ class WomtoolServiceInCromwellActorSpec extends ServicesSpec { warnings = Seq.empty, requestedWorkflowId = None ) - } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala index fed3b2c09e0..bbd552b4325 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala @@ -81,21 +81,27 @@ object AwsBatchAsyncBackendJobExecutionActor { } class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends BackendJobLifecycleActor with StandardAsyncExecutionActor with AwsBatchJobCachingActorHelper - with KvClient with AskSupport { + extends BackendJobLifecycleActor + with StandardAsyncExecutionActor + with AwsBatchJobCachingActorHelper + with KvClient + with AskSupport { /** * The builder for `IoCommands` to the storage system 
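The `be(...) or be(...)` combination above accepts either OS-specific DNS error text. A minimal sketch of that ScalaTest matcher composition, as a fragment for any spec mixing in should-Matchers (the string values are invented for illustration):

import org.scalatest.matchers.should.Matchers._

val dnsError = "zardoz.zardoz: nodename nor servname provided, or not known"
dnsError should (include("Name or service not known") or include("nodename nor servname"))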
used by jobs executed by this backend */ - override lazy val ioCommandBuilder: IoCommandBuilder = configuration.fileSystem match { + override lazy val ioCommandBuilder: IoCommandBuilder = configuration.fileSystem match { case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder - case _ => DefaultIoCommandBuilder + case _ => DefaultIoCommandBuilder } // the cromwell backend Actor val backendSingletonActor: ActorRef = standardParams.backendSingletonActorOption.getOrElse( - throw new RuntimeException(s"AWS Backend actor cannot exist without its backend singleton (of type ${AwsBatchSingletonActor.getClass.getSimpleName})")) + throw new RuntimeException( + s"AWS Backend actor cannot exist without its backend singleton (of type ${AwsBatchSingletonActor.getClass.getSimpleName})" + ) + ) import AwsBatchAsyncBackendJobExecutionActor._ @@ -115,11 +121,12 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar override lazy val pollBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(1.second, 5.minutes, 1.1) - override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff( - initialInterval = 3 seconds, maxInterval = 20 seconds, multiplier = 1.1) + override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff = + SimpleExponentialBackoff(initialInterval = 3 seconds, maxInterval = 20 seconds, multiplier = 1.1) - //the name (String) of the docker image that will be used to contain this job - private lazy val jobDockerImage = jobDescriptor.maybeCallCachingEligible.dockerHash.getOrElse(runtimeAttributes.dockerImage) + // the name (String) of the docker image that will be used to contain this job + private lazy val jobDockerImage = + jobDescriptor.maybeCallCachingEligible.dockerHash.getOrElse(runtimeAttributes.dockerImage) override lazy val dockerImageUsed: Option[String] = Option(jobDockerImage) @@ -128,7 +135,6 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar |$jobShell ${jobPaths.script.pathWithoutScheme} |""".stripMargin - /* Batch job object (see AwsBatchJob). This has the configuration necessary * to perform all operations with the AWS Batch infrastructure. 
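The backoff configured above starts at 1 second, multiplies by 1.1 per poll, and caps at 5 minutes. A sketch of how such a schedule plays out, as plain arithmetic rather than cromwell's SimpleExponentialBackoff class:

def backoffIntervals(initialSeconds: Double, maxSeconds: Double, multiplier: Double, n: Int): Seq[Double] =
  Iterator.iterate(initialSeconds)(i => math.min(i * multiplier, maxSeconds)).take(n).toSeq

backoffIntervals(1.0, 300.0, 1.1, 5) // Seq(1.0, 1.1, 1.21, 1.331, 1.4641), later capped at 300s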
This is * where the real work happens @@ -163,23 +169,26 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar */ lazy val cmdScript = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => commandScriptContents.toEither.toOption.get - case _ => execScript + case AWSBatchStorageSystems.s3 => commandScriptContents.toEither.toOption.get + case _ => execScript } - lazy val batchJob: AwsBatchJob = { + lazy val batchJob: AwsBatchJob = AwsBatchJob( jobDescriptor, runtimeAttributes, instantiatedCommand.commandString, cmdScript, - rcPath.toString, executionStdout, executionStderr, + rcPath.toString, + executionStdout, + executionStderr, generateAwsBatchInputs(jobDescriptor), generateAwsBatchOutputs(jobDescriptor), - jobPaths, Seq.empty[AwsBatchParameter], + jobPaths, + Seq.empty[AwsBatchParameter], configuration.awsConfig.region, - Option(configuration.awsAuth)) - } + Option(configuration.awsAuth) + ) /* Tries to abort the job in flight * * @param job A StandardAsyncJob object (has jobId value) to cancel @@ -188,7 +197,9 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar */ override def tryAbort(job: StandardAsyncJob): Unit = { batchJob.abort(job.jobId) // job.JobId should be the AWS Batch Job Id based on analysis of other backends - Log.info(s"Attempted CancelJob operation in AWS Batch for Job ID ${job.jobId}. There were no errors during the operation") + Log.info( + s"Attempted CancelJob operation in AWS Batch for Job ID ${job.jobId}. There were no errors during the operation" + ) Log.info(s"We have normality. Anything you still can't cope with is therefore your own problem") Log.info(s"https://www.youtube.com/watch?v=YCRxnjE7JVs") () @@ -200,14 +211,19 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar * Takes two arrays of remote and local WOM File paths and generates the necessary AwsBatchInputs. */ private def inputsFromWomFiles(namePrefix: String, - remotePathArray: Seq[WomFile], - localPathArray: Seq[WomFile], - jobDescriptor: BackendJobDescriptor): Iterable[AwsBatchInput] = { - (remotePathArray zip localPathArray zipWithIndex) flatMap { - case ((remotePath, localPath), index) => - Seq(AwsBatchFileInput(s"$namePrefix-$index", remotePath.valueString, DefaultPathBuilder.get(localPath.valueString), workingDisk)) + remotePathArray: Seq[WomFile], + localPathArray: Seq[WomFile], + jobDescriptor: BackendJobDescriptor + ): Iterable[AwsBatchInput] = + (remotePathArray zip localPathArray zipWithIndex) flatMap { case ((remotePath, localPath), index) => + Seq( + AwsBatchFileInput(s"$namePrefix-$index", + remotePath.valueString, + DefaultPathBuilder.get(localPath.valueString), + workingDisk + ) + ) } - } /** * Turns WomFiles into relative paths. These paths are relative to the working disk. 
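inputsFromWomFiles above pairs the remote and local path arrays positionally and tags each pair with its index. A minimal sketch of that zip-with-index pairing, with plain strings standing in for WomFile and an invented FileInput case class standing in for AwsBatchFileInput:

final case class FileInput(name: String, remotePath: String, localPath: String)

def pairInputs(prefix: String, remote: Seq[String], local: Seq[String]): Seq[FileInput] =
  remote.zip(local).zipWithIndex.map { case ((r, l), i) => FileInput(s"$prefix-$i", r, l) }

pairInputs("in", Seq("s3://bucket/a.txt", "s3://bucket/b.txt"), Seq("a.txt", "b.txt"))
// Seq(FileInput("in-0", "s3://bucket/a.txt", "a.txt"), FileInput("in-1", "s3://bucket/b.txt", "b.txt"))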
@@ -215,18 +231,17 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar * relativeLocalizationPath("foo/bar.txt") -> "foo/bar.txt" * relativeLocalizationPath("s3://some/bucket/foo.txt") -> "some/bucket/foo.txt" */ - override protected def relativeLocalizationPath(file: WomFile): WomFile = { + override protected def relativeLocalizationPath(file: WomFile): WomFile = file.mapFile(value => getPath(value) match { case Success(path) => - configuration.fileSystem match { + configuration.fileSystem match { case AWSBatchStorageSystems.s3 => path.pathWithoutScheme - case _ => path.toString + case _ => path.toString } case _ => value } ) - } /** * Generate a set of inputs based on a job description @@ -236,28 +251,28 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar private[aws] def generateAwsBatchInputs(jobDescriptor: BackendJobDescriptor): Set[AwsBatchInput] = { val writeFunctionFiles = instantiatedCommand.createdFiles map { f => f.file.value.md5SumShort -> List(f) } toMap - val writeFunctionInputs = writeFunctionFiles flatMap { - case (name, files) => inputsFromWomFiles(name, files.map(_.file), files.map(localizationPath), jobDescriptor) + val writeFunctionInputs = writeFunctionFiles flatMap { case (name, files) => + inputsFromWomFiles(name, files.map(_.file), files.map(localizationPath), jobDescriptor) } // Collect all WomFiles from inputs to the call. val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = jobDescriptor.fullyQualifiedInputs safeMapValues { womFile => - val arrays: Seq[WomArray] = womFile collectAsSeq { - case womFile: WomFile => - val files: List[WomSingleFile] = DirectoryFunctions - .listWomSingleFiles(womFile, callPaths.workflowPaths) - .toTry(s"Error getting single files for $womFile").get - WomArray(WomArrayType(WomSingleFileType), files) + val arrays: Seq[WomArray] = womFile collectAsSeq { case womFile: WomFile => + val files: List[WomSingleFile] = DirectoryFunctions + .listWomSingleFiles(womFile, callPaths.workflowPaths) + .toTry(s"Error getting single files for $womFile") + .get + WomArray(WomArrayType(WomSingleFileType), files) } - arrays.flatMap(_.value).collect { - case womFile: WomFile => womFile + arrays.flatMap(_.value).collect { case womFile: WomFile => + womFile } } - val callInputInputs = callInputFiles flatMap { - case (name, files) => inputsFromWomFiles(name, files, files.map(relativeLocalizationPath), jobDescriptor) + val callInputInputs = callInputFiles flatMap { case (name, files) => + inputsFromWomFiles(name, files, files.map(relativeLocalizationPath), jobDescriptor) } val scriptInput: AwsBatchInput = AwsBatchFileInput( @@ -278,12 +293,11 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar */ private def relativePathAndVolume(path: String, disks: Seq[AwsBatchVolume]): (Path, AwsBatchVolume) = { - def getAbsolutePath(path: Path) = { + def getAbsolutePath(path: Path) = configuration.fileSystem match { case AWSBatchStorageSystems.s3 => AwsBatchWorkingDisk.MountPoint.resolve(path) case _ => DefaultPathBuilder.get(configuration.root).resolve(path) } - } val absolutePath = DefaultPathBuilder.get(path) match { case p if !p.isAbsolute => getAbsolutePath(p) case p => p @@ -292,7 +306,9 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar disks.find(d => absolutePath.startsWith(d.mountPoint)) match { case Some(disk) => (disk.mountPoint.relativize(absolutePath), disk) case None => - throw new Exception(s"Absolute path $path doesn't 
appear to be under any mount points: ${disks.map(_.toString).mkString(", ")}") + throw new Exception( + s"Absolute path $path doesn't appear to be under any mount points: ${disks.map(_.toString).mkString(", ")}" + ) } } @@ -301,18 +317,18 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar * @param referenceName the name to make safe * @return the name or the MD5sum of that name if the name is >= 128 characters */ - private def makeSafeAwsBatchReferenceName(referenceName: String) = { + private def makeSafeAwsBatchReferenceName(referenceName: String) = if (referenceName.length <= 127) referenceName else referenceName.md5Sum - } private[aws] def generateAwsBatchOutputs(jobDescriptor: BackendJobDescriptor): Set[AwsBatchFileOutput] = { import cats.syntax.validated._ - def evaluateFiles(output: OutputDefinition): List[WomFile] = { + def evaluateFiles(output: OutputDefinition): List[WomFile] = Try( - output.expression.evaluateFiles(jobDescriptor.localInputs, NoIoFunctionSet, output.womType).map(_.toList map { _.file }) + output.expression + .evaluateFiles(jobDescriptor.localInputs, NoIoFunctionSet, output.womType) + .map(_.toList map { _.file }) ).getOrElse(List.empty[WomFile].validNel) - .getOrElse(List.empty) - } + .getOrElse(List.empty) val womFileOutputs = jobDescriptor.taskCall.callable.outputs.flatMap(evaluateFiles) map relativeLocalizationPath @@ -324,7 +340,8 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar } } - val additionalGlobOutput = jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(generateAwsBatchGlobFileOutputs).toSet + val additionalGlobOutput = + jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(generateAwsBatchGlobFileOutputs).toSet outputs.toSet ++ additionalGlobOutput } @@ -360,11 +377,12 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar // used by generateAwsBatchOutputs, could potentially move this def within that function private def generateAwsBatchSingleFileOutputs(womFile: WomSingleFile): List[AwsBatchFileOutput] = { val destination = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => callRootPath.resolve(womFile.value.stripPrefix("/")).pathAsString - case _ => DefaultPathBuilder.get(womFile.valueString) match { - case p if !p.isAbsolute => callRootPath.resolve(womFile.value.stripPrefix("/")).pathAsString - case p => p.pathAsString - } + case AWSBatchStorageSystems.s3 => callRootPath.resolve(womFile.value.stripPrefix("/")).pathAsString + case _ => + DefaultPathBuilder.get(womFile.valueString) match { + case p if !p.isAbsolute => callRootPath.resolve(womFile.value.stripPrefix("/")).pathAsString + case p => p.pathAsString + } } val (relpath, disk) = relativePathAndVolume(womFile.value, runtimeAttributes.disks) @@ -385,64 +403,79 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar // We need both the glob directory and the glob list: List( // The glob directory: - AwsBatchFileOutput(makeSafeAwsBatchReferenceName(globDirectory), globDirectoryDestinationPath, DefaultPathBuilder.get(globDirectory + "*"), globDirectoryDisk), + AwsBatchFileOutput(makeSafeAwsBatchReferenceName(globDirectory), + globDirectoryDestinationPath, + DefaultPathBuilder.get(globDirectory + "*"), + globDirectoryDisk + ), // The glob list file: - AwsBatchFileOutput(makeSafeAwsBatchReferenceName(globListFile), globListFileDestinationPath, DefaultPathBuilder.get(globListFile), globDirectoryDisk) + 
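makeSafeAwsBatchReferenceName above caps reference names by substituting the MD5 digest once a name reaches 128 characters, since a digest is always 32 hex characters. A self-contained sketch of the same trick (md5Hex here is hand-rolled; cromwell gets md5Sum from a string enrichment):

import java.security.MessageDigest

def md5Hex(s: String): String =
  MessageDigest.getInstance("MD5").digest(s.getBytes("UTF-8")).map("%02x".format(_)).mkString

def safeReferenceName(name: String): String =
  if (name.length <= 127) name else md5Hex(name) // long names collapse to 32 chars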
AwsBatchFileOutput(makeSafeAwsBatchReferenceName(globListFile), + globListFileDestinationPath, + DefaultPathBuilder.get(globListFile), + globDirectoryDisk + ) ) } - override lazy val commandDirectory: Path = configuration.fileSystem match { + override lazy val commandDirectory: Path = configuration.fileSystem match { case AWSBatchStorageSystems.s3 => AwsBatchWorkingDisk.MountPoint - case _ => jobPaths.callExecutionRoot + case _ => jobPaths.callExecutionRoot } override def globParentDirectory(womGlobFile: WomGlobFile): Path = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => + case AWSBatchStorageSystems.s3 => val (_, disk) = relativePathAndVolume(womGlobFile.value, runtimeAttributes.disks) disk.mountPoint case _ => commandDirectory } - override def isTerminal(runStatus: RunStatus): Boolean = { + override def isTerminal(runStatus: RunStatus): Boolean = runStatus match { case _: TerminalRunStatus => true case _ => false } - } /** * Asynchronously upload the command script to the script path * @return a `Future` for the asynch operation */ - def uploadScriptFile(): Future[Unit] = { + def uploadScriptFile(): Future[Unit] = commandScriptContents.fold( errors => Future.failed(new RuntimeException(errors.toList.mkString(", "))), asyncIo.writeAsync(jobPaths.script, _, Seq.empty) ) - } // Primary entry point for cromwell to actually run something - override def executeAsync(): Future[ExecutionHandle] = { - + override def executeAsync(): Future[ExecutionHandle] = for { - //upload the command script + // upload the command script _ <- uploadScriptFile() completionPromise = Promise[SubmitJobResponse]() - //send a message to the Actor requesting a job submission + // send a message to the Actor requesting a job submission _ = backendSingletonActor ! SubmitAwsJobRequest(batchJob, attributes, completionPromise) - //the future response of the submit job request + // the future response of the submit job request submitJobResponse <- completionPromise.future - //send a notify of status method to the Actor - _ = backendSingletonActor ! NotifyOfStatus(runtimeAttributes.queueArn, submitJobResponse.jobId, Option(Initializing)) - } yield PendingExecutionHandle(jobDescriptor, StandardAsyncJob(submitJobResponse.jobId), Option(batchJob), previousState = None) - } - + // send a notify of status method to the Actor + _ = backendSingletonActor ! 
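Every glob above yields two outputs: the directory of matched files and a list file enumerating the matches. A sketch of that pairing with an invented GlobOutput case class (the destination layout is illustrative):

final case class GlobOutput(name: String, destination: String, pattern: String)

def globOutputs(globDirectory: String, globListFile: String, destRoot: String): List[GlobOutput] =
  List(
    GlobOutput(globDirectory, s"$destRoot/$globDirectory/", globDirectory + "*"), // the matched files
    GlobOutput(globListFile, s"$destRoot/$globListFile", globListFile) // the file listing the matches
  )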
NotifyOfStatus(runtimeAttributes.queueArn, + submitJobResponse.jobId, + Option(Initializing) + ) + } yield PendingExecutionHandle(jobDescriptor, + StandardAsyncJob(submitJobResponse.jobId), + Option(batchJob), + previousState = None + ) - override def recoverAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = reconnectAsync(jobId) + override def recoverAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = reconnectAsync(jobId) override def reconnectAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = { - val handle = PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState](jobDescriptor, jobId, Option(batchJob), previousState = None) + val handle = PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState](jobDescriptor, + jobId, + Option(batchJob), + previousState = + None + ) Future.successful(handle) } @@ -475,7 +508,8 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar jobLogger.info("Having to fall back to AWS query for status") Future.fromTry(job.status(jobId)) case other => - val message = s"Programmer Error (please report this): Received an unexpected message from the OccasionalPollingActor: $other" + val message = + s"Programmer Error (please report this): Received an unexpected message from the OccasionalPollingActor: $other" jobLogger.error(message) Future.failed(new Exception(message) with NoStackTrace) } @@ -492,69 +526,66 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar case _ => super.isFatal(throwable) } - override lazy val startMetadataKeyValues: Map[String, Any] = super[AwsBatchJobCachingActorHelper].startMetadataKeyValues + override lazy val startMetadataKeyValues: Map[String, Any] = + super[AwsBatchJobCachingActorHelper].startMetadataKeyValues - //opportunity to send custom metadata when the run is in a terminal state, currently we don't - override def getTerminalMetadata(runStatus: RunStatus): Map[String, Any] = { + // opportunity to send custom metadata when the run is in a terminal state, currently we don't + override def getTerminalMetadata(runStatus: RunStatus): Map[String, Any] = runStatus match { case _: TerminalRunStatus => Map() case unknown => throw new RuntimeException(s"Attempt to get terminal metadata from non terminal status: $unknown") } - } def hostAbsoluteFilePath(jobPaths: JobPaths, pathString: String): Path = { - val pathBuilders:List[PathBuilder] = List(DefaultPathBuilder) + val pathBuilders: List[PathBuilder] = List(DefaultPathBuilder) val path = PathFactory.buildPath(pathString, pathBuilders) if (!path.isAbsolute) jobPaths.callExecutionRoot.resolve(path).toAbsolutePath - else if(jobPaths.isInExecution(path.pathAsString)) + else if (jobPaths.isInExecution(path.pathAsString)) jobPaths.hostPathFromContainerPath(path.pathAsString) else jobPaths.hostPathFromContainerInputs(path.pathAsString) } override def mapOutputWomFile(womFile: WomFile): WomFile = { - val wfile = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => + val wfile = configuration.fileSystem match { + case AWSBatchStorageSystems.s3 => womFile case _ => val hostPath = hostAbsoluteFilePath(jobPaths, womFile.valueString) - if (!hostPath.exists) throw new FileNotFoundException(s"Could not process output, file not found: ${hostPath.pathAsString}") + if (!hostPath.exists) + throw new FileNotFoundException(s"Could not process output, file not found: ${hostPath.pathAsString}") womFile mapFile { _ => hostPath.pathAsString } } 
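executeAsync above hands a Promise to the singleton actor and awaits only the Future side, so the submitter and requester never share mutable state. A minimal standalone sketch of that handshake (a plain thread stands in for the actor; SubmitJobResponseSketch is invented):

import scala.concurrent.{Await, Promise}
import scala.concurrent.duration._

final case class SubmitJobResponseSketch(jobId: String)

val completionPromise = Promise[SubmitJobResponseSketch]()
// Whoever receives the promise completes it when submission finishes:
new Thread(() => completionPromise.success(SubmitJobResponseSketch("job-123"))).start()
// The requester sees only the read-only Future side:
Await.result(completionPromise.future, 5.seconds) // SubmitJobResponseSketch("job-123")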
womFileToPath(generateAwsBatchOutputs(jobDescriptor))(wfile) } - private[aws] def womFileToPath(outputs: Set[AwsBatchFileOutput])(womFile: WomFile): WomFile = { + private[aws] def womFileToPath(outputs: Set[AwsBatchFileOutput])(womFile: WomFile): WomFile = womFile mapFile { path => outputs collectFirst { case output if output.name == makeSafeAwsBatchReferenceName(path) => output.s3key } getOrElse path } - } - override def getTerminalEvents(runStatus: RunStatus): Seq[ExecutionEvent] = { + override def getTerminalEvents(runStatus: RunStatus): Seq[ExecutionEvent] = runStatus match { case successStatus: RunStatus.Succeeded => successStatus.eventList case unknown => throw new RuntimeException(s"handleExecutionSuccess not called with RunStatus.Success. Instead got $unknown") } - } - override def retryEvaluateOutputs(exception: Exception): Boolean = { + override def retryEvaluateOutputs(exception: Exception): Boolean = exception match { case aggregated: CromwellAggregatedException => aggregated.throwables.collectFirst { case s: SocketTimeoutException => s }.isDefined case _ => false } - } - override def mapCommandLineWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineWomFile(womFile: WomFile): WomFile = womFile.mapFile(value => getPath(value) match { case Success(path: S3Path) => workingDisk.mountPoint.resolve(path.pathWithoutScheme).pathAsString case _ => value } ) - } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala index e0dd8951550..3b05654d879 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala @@ -39,7 +39,11 @@ import common.exception.MessageAggregation import common.validation.ErrorOr._ import common.validation.Validation._ import cromwell.backend.CommonBackendConfigurationAttributes -import cromwell.backend.impl.aws.callcaching.{AwsBatchCacheHitDuplicationStrategy, CopyCachedOutputs, UseOriginalCachedOutputs} +import cromwell.backend.impl.aws.callcaching.{ + AwsBatchCacheHitDuplicationStrategy, + CopyCachedOutputs, + UseOriginalCachedOutputs +} import cromwell.cloudsupport.aws.AwsConfiguration import cromwell.cloudsupport.aws.auth.AwsAuthMode import eu.timepit.refined._ @@ -57,7 +61,8 @@ case class AwsBatchAttributes(fileSystem: String, executionBucket: String, duplicationStrategy: AwsBatchCacheHitDuplicationStrategy, submitAttempts: Int Refined Positive, - createDefinitionAttempts: Int Refined Positive) + createDefinitionAttempts: Int Refined Positive +) object AwsBatchAttributes { lazy val Logger = LoggerFactory.getLogger(this.getClass) @@ -77,49 +82,54 @@ object AwsBatchAttributes { private val context = "AwsBatch" - implicit val urlReader: ValueReader[URL] = StringReader.stringValueReader.map { URI.create(_).toURL } + implicit val urlReader: ValueReader[URL] = StringReader.stringValueReader.map(URI.create(_).toURL) def fromConfigs(awsConfig: AwsConfiguration, backendConfig: Config): AwsBatchAttributes = { - val configKeys = backendConfig.entrySet().asScala.toSet map { entry: java.util.Map.Entry[String, ConfigValue] => entry.getKey } + val configKeys = backendConfig.entrySet().asScala.toSet map { entry: java.util.Map.Entry[String, ConfigValue] => + entry.getKey + } warnNotRecognized(configKeys, availableConfigKeys, context, Logger) def warnDeprecated(keys: Set[String], deprecated: 
Map[String, String], context: String, logger: Logger) = { val deprecatedKeys = keys.intersect(deprecated.keySet) - deprecatedKeys foreach { key => logger.warn(s"Found deprecated configuration key $key, replaced with ${deprecated.get(key)}") } + deprecatedKeys foreach { key => + logger.warn(s"Found deprecated configuration key $key, replaced with ${deprecated.get(key)}") + } } warnDeprecated(configKeys, deprecatedAwsBatchKeys, context, Logger) - val executionBucket: ErrorOr[String] = validate { backendConfig.as[String]("root") } + val executionBucket: ErrorOr[String] = validate(backendConfig.as[String]("root")) - val fileSysStr:ErrorOr[String] = validate {backendConfig.hasPath("filesystems.s3") match { - case true => "s3" - case false => "local" - }} + val fileSysStr: ErrorOr[String] = validate { + backendConfig.hasPath("filesystems.s3") match { + case true => "s3" + case false => "local" + } + } val fileSysPath = backendConfig.hasPath("filesystems.s3") match { case true => "filesystems.s3" case false => "filesystems.local" } - val filesystemAuthMode: ErrorOr[AwsAuthMode] = { + val filesystemAuthMode: ErrorOr[AwsAuthMode] = (for { authName <- validate { backendConfig.as[String](s"${fileSysPath}.auth") }.toEither validAuth <- awsConfig.auth(authName).toEither } yield validAuth).toValidated - } - val duplicationStrategy: ErrorOr[AwsBatchCacheHitDuplicationStrategy] = validate { - backendConfig. - as[Option[String]](s"${fileSysPath}.caching.duplication-strategy"). - getOrElse("copy") match { - case "copy" => CopyCachedOutputs - case "reference" => UseOriginalCachedOutputs - case other => throw new IllegalArgumentException(s"Unrecognized caching duplication strategy: $other. Supported strategies are copy and reference. See reference.conf for more details.") - } + backendConfig.as[Option[String]](s"${fileSysPath}.caching.duplication-strategy").getOrElse("copy") match { + case "copy" => CopyCachedOutputs + case "reference" => UseOriginalCachedOutputs + case other => + throw new IllegalArgumentException( + s"Unrecognized caching duplication strategy: $other. Supported strategies are copy and reference. See reference.conf for more details." 
+ ) + } } ( @@ -144,8 +154,6 @@ object AwsBatchAttributes { override def read(config: Config, path: String): ErrorOr[Refined[Int, Positive]] = { val int = config.getInt(path) refineV[Positive](int).toValidatedNel + } } - } } - - diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendInitializationData.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendInitializationData.scala index edfbd560362..ae34cd958f8 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendInitializationData.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendInitializationData.scala @@ -34,20 +34,23 @@ package cromwell.backend.impl.aws import cromwell.backend.standard.{StandardInitializationData, StandardValidatedRuntimeAttributesBuilder} import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider -case class AwsBatchBackendInitializationData -( +case class AwsBatchBackendInitializationData( override val workflowPaths: AwsBatchWorkflowPaths, override val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, configuration: AwsBatchConfiguration, - provider: AwsCredentialsProvider, + provider: AwsCredentialsProvider // TODO: We'll need something specific for batch probably, but I need to // understand more about the genomics node first - //genomics: Genomics -) extends StandardInitializationData(workflowPaths, runtimeAttributesBuilder, AwsBatchBackendInitializationDataUtility.getExpressionFunctionsClass(configuration.fileSystem)) + // genomics: Genomics +) extends StandardInitializationData( + workflowPaths, + runtimeAttributesBuilder, + AwsBatchBackendInitializationDataUtility.getExpressionFunctionsClass(configuration.fileSystem) + ) -object AwsBatchBackendInitializationDataUtility { - def getExpressionFunctionsClass(fs: String) = fs match { - case AWSBatchStorageSystems.s3 => classOf[AwsBatchExpressionFunctions] - case _ => classOf[AwsBatchExpressionFunctionsForFS] - } +object AwsBatchBackendInitializationDataUtility { + def getExpressionFunctionsClass(fs: String) = fs match { + case AWSBatchStorageSystems.s3 => classOf[AwsBatchExpressionFunctions] + case _ => classOf[AwsBatchExpressionFunctionsForFS] + } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendLifecycleActorFactory.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendLifecycleActorFactory.scala index c0ae9210852..498bee626b1 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendLifecycleActorFactory.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchBackendLifecycleActorFactory.scala @@ -32,8 +32,20 @@ package cromwell.backend.impl.aws import akka.actor.{ActorRef, Props} -import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor, JobExecutionMap} -import cromwell.backend.standard.{StandardAsyncExecutionActor, StandardFinalizationActor, StandardFinalizationActorParams, StandardInitializationActor, StandardInitializationActorParams, StandardLifecycleActorFactory} +import cromwell.backend.{ + BackendConfigurationDescriptor, + BackendInitializationData, + BackendWorkflowDescriptor, + JobExecutionMap +} +import cromwell.backend.standard.{ + StandardAsyncExecutionActor, + StandardFinalizationActor, + StandardFinalizationActorParams, + StandardInitializationActor, + StandardInitializationActorParams, + 
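The ValueReader above turns a raw config Int into a compile-time-tagged positive value with refineV. A minimal sketch of that refined-types validation (the Left message text is approximate):

import eu.timepit.refined.api.Refined
import eu.timepit.refined.numeric.Positive
import eu.timepit.refined.refineV

def positiveOrError(n: Int): Either[String, Int Refined Positive] = refineV[Positive](n)

positiveOrError(3) // Right(3)
positiveOrError(0) // Left with a predicate-failure message, e.g. "Predicate failed: (0 > 0)."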
StandardLifecycleActorFactory +} import cromwell.core.CallOutputs import wom.graph.CommandCallNode @@ -43,41 +55,38 @@ import wom.graph.CommandCallNode * @param name Factory name * @param configurationDescriptor configuration descriptor for the backend */ -case class AwsBatchBackendLifecycleActorFactory( - name: String, - configurationDescriptor: BackendConfigurationDescriptor) +case class AwsBatchBackendLifecycleActorFactory(name: String, configurationDescriptor: BackendConfigurationDescriptor) extends StandardLifecycleActorFactory { - override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] - = classOf[AwsBatchInitializationActor] + override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = + classOf[AwsBatchInitializationActor] - override lazy val asyncExecutionActorClass: Class[_ <: StandardAsyncExecutionActor] - = classOf[AwsBatchAsyncBackendJobExecutionActor] + override lazy val asyncExecutionActorClass: Class[_ <: StandardAsyncExecutionActor] = + classOf[AwsBatchAsyncBackendJobExecutionActor] - override lazy val finalizationActorClassOption: Option[Class[_ <: StandardFinalizationActor]] - = Option(classOf[AwsBatchFinalizationActor]) + override lazy val finalizationActorClassOption: Option[Class[_ <: StandardFinalizationActor]] = Option( + classOf[AwsBatchFinalizationActor] + ) - override lazy val jobIdKey: String - = AwsBatchAsyncBackendJobExecutionActor.AwsBatchOperationIdKey + override lazy val jobIdKey: String = AwsBatchAsyncBackendJobExecutionActor.AwsBatchOperationIdKey val configuration = new AwsBatchConfiguration(configurationDescriptor) - override def workflowInitializationActorParams( - workflowDescriptor: BackendWorkflowDescriptor, - ioActor: ActorRef, - calls: Set[CommandCallNode], - serviceRegistryActor: ActorRef, - restart: Boolean): StandardInitializationActorParams = { + override def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + serviceRegistryActor: ActorRef, + restart: Boolean + ): StandardInitializationActorParams = AwsBatchInitializationActorParams(workflowDescriptor, ioActor, calls, configuration, serviceRegistryActor, restart) - } - override def workflowFinalizationActorParams( - workflowDescriptor: BackendWorkflowDescriptor, - ioActor: ActorRef, - calls: Set[CommandCallNode], - jobExecutionMap: JobExecutionMap, - workflowOutputs: CallOutputs, - initializationDataOption: Option[BackendInitializationData]): StandardFinalizationActorParams = { + override def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + jobExecutionMap: JobExecutionMap, + workflowOutputs: CallOutputs, + initializationDataOption: Option[BackendInitializationData] + ): StandardFinalizationActorParams = // The `AwsBatchInitializationActor` will only return a non-`Empty` // `AwsBatchBackendInitializationData` from a successful `beforeAll` // invocation. HOWEVER, the finalization actor is created regardless @@ -86,10 +95,15 @@ case class AwsBatchBackendLifecycleActorFactory( // `AwsBatchBackendInitializationData` option, and there is no `.get` // on the initialization data as there is with the execution or cache // hit copying actor methods. 
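A pattern that recurs across these AWS backend classes is a two-way dispatch on the configured filesystem: one implementation for S3, one default for everything else. A minimal sketch with invented stand-in objects:

sealed trait CommandBuilderChoice
case object S3Builder extends CommandBuilderChoice
case object DefaultBuilder extends CommandBuilderChoice

def builderFor(fileSystem: String): CommandBuilderChoice = fileSystem match {
  case "s3" => S3Builder
  case _ => DefaultBuilder
}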
- AwsBatchFinalizationActorParams(workflowDescriptor, ioActor, calls, configuration, jobExecutionMap, workflowOutputs, initializationDataOption) - } + AwsBatchFinalizationActorParams(workflowDescriptor, + ioActor, + calls, + configuration, + jobExecutionMap, + workflowOutputs, + initializationDataOption + ) - override def backendSingletonActorProps(serviceRegistryActor: ActorRef): Option[Props] = { + override def backendSingletonActorProps(serviceRegistryActor: ActorRef): Option[Props] = Option(AwsBatchSingletonActor.props(configuration.awsConfig.region, Option(configuration.awsAuth))) - } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchConfiguration.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchConfiguration.scala index 2bc76d4bb0b..a5ef13cdf23 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchConfiguration.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchConfiguration.scala @@ -33,7 +33,7 @@ package cromwell.backend.impl.aws import cromwell.filesystems.s3.S3PathBuilderFactory import cromwell.backend.BackendConfigurationDescriptor -import cromwell.core.{BackendDockerConfiguration} +import cromwell.core.BackendDockerConfiguration import cromwell.core.path.PathBuilderFactory import cromwell.cloudsupport.aws.AwsConfiguration @@ -48,20 +48,19 @@ class AwsBatchConfiguration(val configurationDescriptor: BackendConfigurationDes val dockerCredentials = BackendDockerConfiguration.build(configurationDescriptor.backendConfig).dockerCredentials val fileSystem = configurationDescriptor.backendConfig.hasPath("filesystems.s3") match { - case true => "s3" + case true => "s3" case false => "local" - } + } val pathBuilderFactory = configurationDescriptor.backendConfig.hasPath("filesystems.s3") match { case true => S3PathBuilderFactory(configurationDescriptor.globalConfig, configurationDescriptor.backendConfig) case false => - PathBuilderFactory + PathBuilderFactory } } object AWSBatchStorageSystems { - val s3:String = "s3" - val efs:String = "efs" - val ebs:String = "ebs" - val local:String = "local" + val s3: String = "s3" + val efs: String = "efs" + val ebs: String = "ebs" + val local: String = "local" } - diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchExpressionFunctions.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchExpressionFunctions.scala index b75d4d6b00f..5cf2ef424bd 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchExpressionFunctions.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchExpressionFunctions.scala @@ -39,24 +39,22 @@ import cromwell.filesystems.s3.batch.S3BatchCommandBuilder import cromwell.core.path.{DefaultPath, Path} class AwsBatchExpressionFunctions(standardParams: StandardExpressionFunctionsParams) - extends StandardExpressionFunctions(standardParams) { + extends StandardExpressionFunctions(standardParams) { override lazy val ioCommandBuilder: IoCommandBuilder = S3BatchCommandBuilder - override def preMapping(str: String) = { + override def preMapping(str: String) = S3PathBuilder.validatePath(str) match { case _: ValidFullS3Path => str case PossiblyValidRelativeS3Path => callContext.root.resolve(str.stripPrefix("/")).pathAsString case invalid: InvalidS3Path => throw new IllegalArgumentException(invalid.errorMessage) } - } } class AwsBatchExpressionFunctionsForFS(standardParams: 
StandardExpressionFunctionsParams) - extends StandardExpressionFunctions(standardParams) { - override def postMapping(path: Path) = { + extends StandardExpressionFunctions(standardParams) { + override def postMapping(path: Path) = path match { case _: DefaultPath if !path.isAbsolute => callContext.root.resolve(path) case _ => path } - } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchFinalizationActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchFinalizationActor.scala index 6fdccf995eb..ba09f871abd 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchFinalizationActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchFinalizationActor.scala @@ -40,8 +40,7 @@ import cromwell.core.io.DefaultIoCommandBuilder import wom.graph.CommandCallNode import cromwell.filesystems.s3.batch.S3BatchCommandBuilder -case class AwsBatchFinalizationActorParams -( +case class AwsBatchFinalizationActorParams( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], @@ -54,13 +53,14 @@ case class AwsBatchFinalizationActorParams } class AwsBatchFinalizationActor(val params: AwsBatchFinalizationActorParams) - extends StandardFinalizationActor(params) with AsyncIoActorClient { + extends StandardFinalizationActor(params) + with AsyncIoActorClient { lazy val configuration: AwsBatchConfiguration = params.configuration - override lazy val ioCommandBuilder = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder - case _ => DefaultIoCommandBuilder + override lazy val ioCommandBuilder = configuration.fileSystem match { + case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder + case _ => DefaultIoCommandBuilder } override def ioActor: ActorRef = params.ioActor diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala index fe3668f9b00..aadf35bff14 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala @@ -36,8 +36,12 @@ import java.io.IOException import akka.actor.ActorRef import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider import cromwell.filesystems.s3.batch.S3BatchCommandBuilder -import cromwell.backend.standard.{StandardInitializationActor, StandardInitializationActorParams, StandardValidatedRuntimeAttributesBuilder} -import cromwell.backend.{BackendConfigurationDescriptor, BackendWorkflowDescriptor, BackendInitializationData} +import cromwell.backend.standard.{ + StandardInitializationActor, + StandardInitializationActorParams, + StandardValidatedRuntimeAttributesBuilder +} +import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor} import cromwell.core.io.DefaultIoCommandBuilder import cromwell.core.io.AsyncIoActorClient import cromwell.core.path.Path @@ -45,8 +49,7 @@ import wom.graph.CommandCallNode import scala.concurrent.Future -case class AwsBatchInitializationActorParams -( +case class AwsBatchInitializationActorParams( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], @@ -58,34 +61,36 @@ case class AwsBatchInitializationActorParams } object AwsBatchInitializationActor { - private case 
class AuthFileAlreadyExistsException(path: Path) extends IOException(s"Failed to upload authentication file at $path:" + - s" there was already a file at the same location and this workflow was not being restarted.") + private case class AuthFileAlreadyExistsException(path: Path) + extends IOException( + s"Failed to upload authentication file at $path:" + + s" there was already a file at the same location and this workflow was not being restarted." + ) } class AwsBatchInitializationActor(params: AwsBatchInitializationActorParams) - extends StandardInitializationActor(params) with AsyncIoActorClient { + extends StandardInitializationActor(params) + with AsyncIoActorClient { override lazy val ioActor = params.ioActor private val configuration = params.configuration - override implicit val system = context.system + implicit override val system = context.system - override def beforeAll(): Future[Option[BackendInitializationData]] = { + override def beforeAll(): Future[Option[BackendInitializationData]] = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => super.beforeAll() - case _ => { + case AWSBatchStorageSystems.s3 => super.beforeAll() + case _ => initializationData map { data => publishWorkflowRoot(data.workflowPaths.workflowRoot.pathAsString) Option(data) } - } } - } override lazy val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder = AwsBatchRuntimeAttributes.runtimeAttributesBuilder(configuration) private lazy val provider: Future[AwsCredentialsProvider] = - Future { configuration.awsAuth.provider() } + Future(configuration.awsAuth.provider()) override lazy val workflowPaths: Future[AwsBatchWorkflowPaths] = for { prov <- provider @@ -96,14 +101,14 @@ class AwsBatchInitializationActor(params: AwsBatchInitializationActorParams) prov <- provider } yield AwsBatchBackendInitializationData(workflowPaths, runtimeAttributesBuilder, configuration, prov) - override lazy val ioCommandBuilder = { + override lazy val ioCommandBuilder = { val conf = Option(configuration) match { case Some(cf) => cf - case None => new AwsBatchConfiguration(params.configurationDescriptor) + case None => new AwsBatchConfiguration(params.configurationDescriptor) } conf.fileSystem match { - case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder - case _ => DefaultIoCommandBuilder + case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder + case _ => DefaultIoCommandBuilder } } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala index de4617ad786..3e2ba2d482b 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala @@ -50,7 +50,12 @@ import software.amazon.awssdk.services.batch.model._ import software.amazon.awssdk.services.cloudwatchlogs.CloudWatchLogsClient import software.amazon.awssdk.services.cloudwatchlogs.model.{GetLogEventsRequest, OutputLogEvent} import software.amazon.awssdk.services.s3.S3Client -import software.amazon.awssdk.services.s3.model.{GetObjectRequest, HeadObjectRequest, NoSuchKeyException, PutObjectRequest} +import software.amazon.awssdk.services.s3.model.{ + GetObjectRequest, + HeadObjectRequest, + NoSuchKeyException, + PutObjectRequest +} import wdl4s.parser.MemoryUnit import scala.jdk.CollectionConverters._ @@ -67,20 +72,21 @@ import scala.util.{Random, Try} * @param commandLine command line to be passed to the job * 
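The initialization data above is assembled from independently computed Futures via a for-comprehension, so the credentials provider is resolved once and threaded into both the paths and the final data. A minimal standalone sketch with invented case classes:

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

final case class PathsSketch(root: String)
final case class InitDataSketch(paths: PathsSketch, provider: String)

val provider: Future[String] = Future("credentials-provider")
val workflowPaths: Future[PathsSketch] = provider.map(_ => PathsSketch("s3://bucket/cromwell-execution"))

val initializationData: Future[InitDataSketch] = for {
  paths <- workflowPaths
  prov <- provider
} yield InitDataSketch(paths, prov)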
@param commandScript the `commandLine` with additional commands to setup the context and localize/ de-localize files */ -final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL - runtimeAttributes: AwsBatchRuntimeAttributes, // config or WDL/CWL - commandLine: String, // WDL/CWL - commandScript: String, // WDL/CWL - dockerRc: String, // Calculated from StandardAsyncExecutionActor - dockerStdout: String, // Calculated from StandardAsyncExecutionActor - dockerStderr: String, // Calculated from StandardAsyncExecutionActor - inputs: Set[AwsBatchInput], - outputs: Set[AwsBatchFileOutput], - jobPaths: JobPaths, // Based on config, calculated in Job Paths, key to all things outside container - parameters: Seq[AwsBatchParameter], - configRegion: Option[Region], - optAwsAuthMode: Option[AwsAuthMode] = None - ) { +final case class AwsBatchJob( + jobDescriptor: BackendJobDescriptor, // WDL/CWL + runtimeAttributes: AwsBatchRuntimeAttributes, // config or WDL/CWL + commandLine: String, // WDL/CWL + commandScript: String, // WDL/CWL + dockerRc: String, // Calculated from StandardAsyncExecutionActor + dockerStdout: String, // Calculated from StandardAsyncExecutionActor + dockerStderr: String, // Calculated from StandardAsyncExecutionActor + inputs: Set[AwsBatchInput], + outputs: Set[AwsBatchFileOutput], + jobPaths: JobPaths, // Based on config, calculated in Job Paths, key to all things outside container + parameters: Seq[AwsBatchParameter], + configRegion: Option[Region], + optAwsAuthMode: Option[AwsAuthMode] = None +) { // values for container environment val AWS_MAX_ATTEMPTS: String = "AWS_MAX_ATTEMPTS" @@ -90,7 +96,7 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL val Log: Logger = LoggerFactory.getLogger(AwsBatchJob.getClass) - //this will be the "folder" that scripts will live in (underneath the script bucket) + // this will be the "folder" that scripts will live in (underneath the script bucket) val scriptKeyPrefix = "scripts/" // TODO: Auth, endpoint @@ -115,38 +121,40 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL * 3. at the end of the script sync all content with the s3 bucket */ lazy val reconfiguredScript: String = { - //this is the location of the aws cli mounted into the container by the ec2 launch template + // this is the location of the aws cli mounted into the container by the ec2 launch template val s3Cmd = "/usr/local/aws-cli/v2/current/bin/aws s3" - //internal to the container, therefore not mounted + // internal to the container, therefore not mounted val workDir = "/tmp/scratch" - //working in a mount will cause collisions in long running workers + // working in a mount will cause collisions in long running workers val replaced = commandScript.replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) - val insertionPoint = replaced.indexOf("\n", replaced.indexOf("#!")) +1 //just after the new line after the shebang! + val insertionPoint = + replaced.indexOf("\n", replaced.indexOf("#!")) + 1 // just after the new line after the shebang! /* generate a series of s3 copy statements to copy any s3 files into the container. We randomize the order so that large scatters don't all attempt to copy the same thing at the same time. 
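       As a rough illustration (hedged; the command strings are hypothetical, not from this patch):
       Random.shuffle(List("aws s3 cp a .", "aws s3 cp b .", "aws s3 cp c .")) yields the same
       commands in an arbitrary order, which de-correlates which object each scatter shard
       fetches first.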
*/ - val inputCopyCommand = Random.shuffle(inputs.map { - case input: AwsBatchFileInput if input.s3key.startsWith("s3://") && input.s3key.endsWith(".tmp") => - //we are localizing a tmp file which may contain workdirectory paths that need to be reconfigured - s""" - |$s3Cmd cp --no-progress ${input.s3key} $workDir/${input.local} - |sed -i 's#${AwsBatchWorkingDisk.MountPoint.pathAsString}#$workDir#g' $workDir/${input.local} - |""".stripMargin - - - case input: AwsBatchFileInput if input.s3key.startsWith("s3://") => - s"$s3Cmd cp --no-progress ${input.s3key} ${input.mount.mountPoint.pathAsString}/${input.local}" - .replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) - - case input: AwsBatchFileInput => - //here we don't need a copy command but the centaurTests expect us to verify the existence of the file - val filePath = s"${input.mount.mountPoint.pathAsString}/${input.local.pathAsString}" - .replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) - - s"test -e $filePath || echo 'input file: $filePath does not exist' && exit 1" - - case _ => "" - }.toList).mkString("\n") + val inputCopyCommand = Random + .shuffle(inputs.map { + case input: AwsBatchFileInput if input.s3key.startsWith("s3://") && input.s3key.endsWith(".tmp") => + // we are localizing a tmp file which may contain workdirectory paths that need to be reconfigured + s""" + |$s3Cmd cp --no-progress ${input.s3key} $workDir/${input.local} + |sed -i 's#${AwsBatchWorkingDisk.MountPoint.pathAsString}#$workDir#g' $workDir/${input.local} + |""".stripMargin + + case input: AwsBatchFileInput if input.s3key.startsWith("s3://") => + s"$s3Cmd cp --no-progress ${input.s3key} ${input.mount.mountPoint.pathAsString}/${input.local}" + .replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) + + case input: AwsBatchFileInput => + // here we don't need a copy command but the centaurTests expect us to verify the existence of the file + val filePath = s"${input.mount.mountPoint.pathAsString}/${input.local.pathAsString}" + .replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) + + s"test -e $filePath || echo 'input file: $filePath does not exist' && exit 1" + + case _ => "" + }.toList) + .mkString("\n") // this goes at the start of the script after the #! 
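    // as a hedged aside on the splice below (index and strings here are illustrative only):
    //   "#!/bin/bash\necho hi".patch(12, "set -e\n", 0) == "#!/bin/bash\nset -e\necho hi"
    // i.e. replaced.patch(insertionPoint, preamble, 0) inserts the preamble just under the shebang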
val preamble = @@ -166,42 +174,45 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL val stdOut = dockerStdout.replace("/cromwell_root", workDir) val stdErr = dockerStderr.replace("/cromwell_root", workDir) - //generate a series of s3 commands to delocalize artifacts from the container to storage at the end of the task - val outputCopyCommand = outputs.map { - case output: AwsBatchFileOutput if output.local.pathAsString.contains("*") => "" //filter out globs - case output: AwsBatchFileOutput if output.name.endsWith(".list") && output.name.contains("glob-") => - - val s3GlobOutDirectory = output.s3key.replace(".list", "") - val globDirectory = output.name.replace(".list", "") - /* - * Need to process this list and de-localize each file if the list file actually exists - * if it doesn't exist then 'touch' it so that it can be copied otherwise later steps will get upset - * about the missing file - */ - s""" - |touch ${output.name} - |$s3Cmd cp --no-progress ${output.name} ${output.s3key} - |if [ -e $globDirectory ]; then $s3Cmd cp --no-progress $globDirectory $s3GlobOutDirectory --recursive --exclude "cromwell_glob_control_file"; fi - |""".stripMargin - - case output: AwsBatchFileOutput if output.s3key.startsWith("s3://") && output.mount.mountPoint.pathAsString == AwsBatchWorkingDisk.MountPoint.pathAsString => - //output is on working disk mount - s""" - |$s3Cmd cp --no-progress $workDir/${output.local.pathAsString} ${output.s3key} - |""".stripMargin - case output: AwsBatchFileOutput => - //output on a different mount - s"$s3Cmd cp --no-progress ${output.mount.mountPoint.pathAsString}/${output.local.pathAsString} ${output.s3key}" - case _ => "" - }.mkString("\n") + "\n" + + // generate a series of s3 commands to delocalize artifacts from the container to storage at the end of the task + val outputCopyCommand = outputs + .map { + case output: AwsBatchFileOutput if output.local.pathAsString.contains("*") => "" // filter out globs + case output: AwsBatchFileOutput if output.name.endsWith(".list") && output.name.contains("glob-") => + val s3GlobOutDirectory = output.s3key.replace(".list", "") + val globDirectory = output.name.replace(".list", "") + /* + * Need to process this list and de-localize each file if the list file actually exists + * if it doesn't exist then 'touch' it so that it can be copied otherwise later steps will get upset + * about the missing file + */ + s""" + |touch ${output.name} + |$s3Cmd cp --no-progress ${output.name} ${output.s3key} + |if [ -e $globDirectory ]; then $s3Cmd cp --no-progress $globDirectory $s3GlobOutDirectory --recursive --exclude "cromwell_glob_control_file"; fi + |""".stripMargin + + case output: AwsBatchFileOutput + if output.s3key.startsWith( + "s3://" + ) && output.mount.mountPoint.pathAsString == AwsBatchWorkingDisk.MountPoint.pathAsString => + // output is on working disk mount + s""" + |$s3Cmd cp --no-progress $workDir/${output.local.pathAsString} ${output.s3key} + |""".stripMargin + case output: AwsBatchFileOutput => + // output on a different mount + s"$s3Cmd cp --no-progress ${output.mount.mountPoint.pathAsString}/${output.local.pathAsString} ${output.s3key}" + case _ => "" + } + .mkString("\n") + "\n" + s""" |if [ -f $workDir/${jobPaths.returnCodeFilename} ]; then $s3Cmd cp --no-progress $workDir/${jobPaths.returnCodeFilename} ${jobPaths.callRoot.pathAsString}/${jobPaths.returnCodeFilename} ; fi\n |if [ -f $stdErr ]; then $s3Cmd cp --no-progress $stdErr ${jobPaths.standardPaths.error.pathAsString}; fi |if [ -f 
$stdOut ]; then $s3Cmd cp --no-progress $stdOut ${jobPaths.standardPaths.output.pathAsString}; fi |""".stripMargin - - //insert the preamble at the insertion point and the postscript copy command at the end + // insert the preamble at the insertion point and the postscript copy command at the end replaced.patch(insertionPoint, preamble, 0) + s""" |{ @@ -212,87 +223,106 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL |} |""".stripMargin } - private def batch_file_s3_url(scriptBucketName: String, scriptKeyPrefix: String, scriptKey: String): String = runtimeAttributes.fileSystem match { - case AWSBatchStorageSystems.s3 => s"s3://${runtimeAttributes.scriptS3BucketName}/$scriptKeyPrefix$scriptKey" - case _ => "" - } + private def batch_file_s3_url(scriptBucketName: String, scriptKeyPrefix: String, scriptKey: String): String = + runtimeAttributes.fileSystem match { + case AWSBatchStorageSystems.s3 => s"s3://${runtimeAttributes.scriptS3BucketName}/$scriptKeyPrefix$scriptKey" + case _ => "" + } - private def generateEnvironmentKVPairs(scriptBucketName: String, scriptKeyPrefix: String, scriptKey: String): List[KeyValuePair] = { - List(buildKVPair(AWS_MAX_ATTEMPTS, AWS_MAX_ATTEMPTS_DEFAULT_VALUE), + private def generateEnvironmentKVPairs(scriptBucketName: String, + scriptKeyPrefix: String, + scriptKey: String + ): List[KeyValuePair] = + List( + buildKVPair(AWS_MAX_ATTEMPTS, AWS_MAX_ATTEMPTS_DEFAULT_VALUE), buildKVPair(AWS_RETRY_MODE, AWS_RETRY_MODE_DEFAULT_VALUE), buildKVPair("BATCH_FILE_TYPE", "script"), - buildKVPair("BATCH_FILE_S3_URL",batch_file_s3_url(scriptBucketName,scriptKeyPrefix,scriptKey))) - } + buildKVPair("BATCH_FILE_S3_URL", batch_file_s3_url(scriptBucketName, scriptKeyPrefix, scriptKey)) + ) - def submitJob[F[_]]()( implicit timer: Timer[F], async: Async[F]): Aws[F, SubmitJobResponse] = { + def submitJob[F[_]]()(implicit timer: Timer[F], async: Async[F]): Aws[F, SubmitJobResponse] = { - val taskId = jobDescriptor.key.call.fullyQualifiedName + "-" + jobDescriptor.key.index + "-" + jobDescriptor.key.attempt + val taskId = + jobDescriptor.key.call.fullyQualifiedName + "-" + jobDescriptor.key.index + "-" + jobDescriptor.key.attempt - //find or create the script in s3 to execute for s3 fileSystem - val scriptKey = runtimeAttributes.fileSystem match { - case AWSBatchStorageSystems.s3 => findOrCreateS3Script(reconfiguredScript, runtimeAttributes.scriptS3BucketName) - case _ => "" + // find or create the script in s3 to execute for s3 fileSystem + val scriptKey = runtimeAttributes.fileSystem match { + case AWSBatchStorageSystems.s3 => findOrCreateS3Script(reconfiguredScript, runtimeAttributes.scriptS3BucketName) + case _ => "" } - if(runtimeAttributes.fileSystem == AWSBatchStorageSystems.s3) { - val regex = "s3://([^/]*)/(.*)".r - val regex(bucketName, key) = jobPaths.callExecutionRoot.toString - writeReconfiguredScriptForAudit(reconfiguredScript, bucketName, key+"/reconfigured-script.sh") + if (runtimeAttributes.fileSystem == AWSBatchStorageSystems.s3) { + val regex = "s3://([^/]*)/(.*)".r + val regex(bucketName, key) = jobPaths.callExecutionRoot.toString + writeReconfiguredScriptForAudit(reconfiguredScript, bucketName, key + "/reconfigured-script.sh") } - val batch_script = runtimeAttributes.fileSystem match { - case AWSBatchStorageSystems.s3 => s"s3://${runtimeAttributes.scriptS3BucketName}/$scriptKeyPrefix$scriptKey" - case _ => commandScript + case AWSBatchStorageSystems.s3 => s"s3://${runtimeAttributes.scriptS3BucketName}/$scriptKeyPrefix$scriptKey" + 
case _ => commandScript } - //calls the client to submit the job + // calls the client to submit the job def callClient(definitionArn: String, awsBatchAttributes: AwsBatchAttributes): Aws[F, SubmitJobResponse] = { Log.info(s"Submitting taskId: $taskId, job definition : $definitionArn, script: $batch_script") val submit: F[SubmitJobResponse] = - async.delay(batchClient.submitJob( - SubmitJobRequest.builder() - .jobName(sanitize(jobDescriptor.taskCall.fullyQualifiedName)) - .parameters(parameters.collect({ case i: AwsBatchInput => i.toStringString }).toMap.asJava) - - //provide job environment variables, vcpu and memory - .containerOverrides( - ContainerOverrides.builder - .environment( - - generateEnvironmentKVPairs(runtimeAttributes.scriptS3BucketName, scriptKeyPrefix, scriptKey): _* - ) - .resourceRequirements( - ResourceRequirement.builder() - .`type`(ResourceType.VCPU) - .value(runtimeAttributes.cpu.value.toString) - .build(), - ResourceRequirement.builder() - .`type`(ResourceType.MEMORY) - .value(runtimeAttributes.memory.to(MemoryUnit.MB).amount.toInt.toString) - .build(), - ) - .build() - ) - .jobQueue(runtimeAttributes.queueArn) - .jobDefinition(definitionArn) - .build - )) + async.delay( + batchClient.submitJob( + SubmitJobRequest + .builder() + .jobName(sanitize(jobDescriptor.taskCall.fullyQualifiedName)) + .parameters(parameters.collect { case i: AwsBatchInput => i.toStringString }.toMap.asJava) + + // provide job environment variables, vcpu and memory + .containerOverrides( + ContainerOverrides.builder + .environment( + generateEnvironmentKVPairs(runtimeAttributes.scriptS3BucketName, scriptKeyPrefix, scriptKey): _* + ) + .resourceRequirements( + ResourceRequirement + .builder() + .`type`(ResourceType.VCPU) + .value(runtimeAttributes.cpu.value.toString) + .build(), + ResourceRequirement + .builder() + .`type`(ResourceType.MEMORY) + .value(runtimeAttributes.memory.to(MemoryUnit.MB).amount.toInt.toString) + .build() + ) + .build() + ) + .jobQueue(runtimeAttributes.queueArn) + .jobDefinition(definitionArn) + .build + ) + ) ReaderT.liftF( - Stream.retry(submit, 0.millis, duration => duration.plus(duration), awsBatchAttributes.submitAttempts.value, { - // RegisterJobDefinition is eventually consistent, so it may not be there - case e: ClientException => e.statusCode() == 404 - case _ => false - }).compile.last.map(_.get)) //if successful there is guaranteed to be a value emitted, hence we can .get this option + Stream + .retry( + submit, + 0.millis, + duration => duration.plus(duration), + awsBatchAttributes.submitAttempts.value, + { + // RegisterJobDefinition is eventually consistent, so it may not be there + case e: ClientException => e.statusCode() == 404 + case _ => false + } + ) + .compile + .last + .map(_.get) + ) // if successful there is guaranteed to be a value emitted, hence we can .get this option } (findOrCreateDefinition[F]() product Kleisli.ask[F, AwsBatchAttributes]).flatMap((callClient _).tupled) } - /** * Performs an md5 digest the script, checks in s3 bucket for that script, if it's not already there then persists it. 
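   * As a sketch of the content-addressing used below (the `md5Hex` helper name is hypothetical):
   * {{{
   * import java.security.MessageDigest
   * def md5Hex(s: String): String =
   *   MessageDigest.getInstance("MD5").digest(s.getBytes()).foldLeft("")(_ + "%02x".format(_))
   * // identical scripts produce the same 32-char hex key, so a re-run finds and reuses the stored object
   * }}}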
* @@ -300,31 +330,38 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL * @param scriptS3BucketName the bucket that stores the scripts * @return the name of the script that was found or created */ - private def findOrCreateS3Script(commandLine :String, scriptS3BucketName: String) :String = { + private def findOrCreateS3Script(commandLine: String, scriptS3BucketName: String): String = { val bucketName = scriptS3BucketName - val key = MessageDigest.getInstance("MD5") + val key = MessageDigest + .getInstance("MD5") .digest(commandLine.getBytes()) .foldLeft("")(_ + "%02x".format(_)) Log.debug(s"s3 object name for script is calculated to be s3://$bucketName/$scriptKeyPrefix$key") - try { //try and get the object + try { // try and get the object - s3Client.getObject( GetObjectRequest.builder().bucket(bucketName).key( scriptKeyPrefix + key).build ) - s3Client.headObject(HeadObjectRequest.builder() - .bucket(bucketName) - .key( scriptKeyPrefix + key ) - .build - ).eTag().equals(key) + s3Client.getObject(GetObjectRequest.builder().bucket(bucketName).key(scriptKeyPrefix + key).build) + s3Client + .headObject( + HeadObjectRequest + .builder() + .bucket(bucketName) + .key(scriptKeyPrefix + key) + .build + ) + .eTag() + .equals(key) // if there's no exception then the script already exists Log.debug(s"""Found script $bucketName/$scriptKeyPrefix$key""") } catch { - case _: NoSuchKeyException => //this happens if there is no object with that key in the bucket - val putRequest = PutObjectRequest.builder() - .bucket(bucketName) //remove the "s3://" prefix + case _: NoSuchKeyException => // this happens if there is no object with that key in the bucket + val putRequest = PutObjectRequest + .builder() + .bucket(bucketName) // remove the "s3://" prefix .key(scriptKeyPrefix + key) .build @@ -335,7 +372,7 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL key } - private def writeReconfiguredScriptForAudit( reconfiguredScript: String, bucketName: String, key: String) = { + private def writeReconfiguredScriptForAudit(reconfiguredScript: String, bucketName: String, key: String) = { val putObjectRequest = PutObjectRequest.builder().bucket(bucketName).key(key).build() s3Client.putObject(putObjectRequest, RequestBody.fromString(reconfiguredScript)) } @@ -345,95 +382,100 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL * @return Arn for newly created job definition * */ - private def findOrCreateDefinition[F[_]]() - (implicit async: Async[F], timer: Timer[F]): Aws[F, String] = ReaderT { awsBatchAttributes => - - // this is a call back that is executed below by the async.recoverWithRetry(retry) - val submit = async.delay({ - - val commandStr = awsBatchAttributes.fileSystem match { - case AWSBatchStorageSystems.s3 => reconfiguredScript - case _ => commandScript - } - val jobDefinitionContext = AwsBatchJobDefinitionContext( - runtimeAttributes = runtimeAttributes, - commandText = commandStr, - dockerRcPath = dockerRc, - dockerStdoutPath = dockerStdout, - dockerStderrPath = dockerStderr, - jobDescriptor = jobDescriptor, - jobPaths = jobPaths, - inputs = inputs, - outputs = outputs) - - val jobDefinitionBuilder = StandardAwsBatchJobDefinitionBuilder - val jobDefinition = jobDefinitionBuilder.build(jobDefinitionContext) - - - //check if there is already a suitable definition based on the calculated job definition name - val jobDefinitionName = jobDefinition.name - - Log.debug(s"Checking for existence of job definition called: 
$jobDefinitionName") - - val describeJobDefinitionRequest = DescribeJobDefinitionsRequest.builder() - .jobDefinitionName( jobDefinitionName ) - .status("ACTIVE") - .build() - - val describeJobDefinitionResponse = batchClient.describeJobDefinitions(describeJobDefinitionRequest) - - if ( !describeJobDefinitionResponse.jobDefinitions.isEmpty ) { - //sort the definitions so that the latest revision is at the head - val definitions = describeJobDefinitionResponse.jobDefinitions().asScala.toList.sortWith(_.revision > _.revision) - - //return the arn of the job - definitions.head.jobDefinitionArn() - } else { - Log.debug(s"No job definition found. Creating job definition: $jobDefinitionName") - - // See: - // - // http://aws-java-sdk-javadoc.s3-website-us-west-2.amazonaws.com/latest/software/amazon/awssdk/services/batch/model/RegisterJobDefinitionRequest.Builder.html - val definitionRequest = RegisterJobDefinitionRequest.builder - .containerProperties(jobDefinition.containerProperties) + private def findOrCreateDefinition[F[_]]()(implicit async: Async[F], timer: Timer[F]): Aws[F, String] = ReaderT { + awsBatchAttributes => + // this is a call back that is executed below by the async.recoverWithRetry(retry) + val submit = async.delay { + + val commandStr = awsBatchAttributes.fileSystem match { + case AWSBatchStorageSystems.s3 => reconfiguredScript + case _ => commandScript + } + val jobDefinitionContext = AwsBatchJobDefinitionContext( + runtimeAttributes = runtimeAttributes, + commandText = commandStr, + dockerRcPath = dockerRc, + dockerStdoutPath = dockerStdout, + dockerStderrPath = dockerStderr, + jobDescriptor = jobDescriptor, + jobPaths = jobPaths, + inputs = inputs, + outputs = outputs + ) + + val jobDefinitionBuilder = StandardAwsBatchJobDefinitionBuilder + val jobDefinition = jobDefinitionBuilder.build(jobDefinitionContext) + + // check if there is already a suitable definition based on the calculated job definition name + val jobDefinitionName = jobDefinition.name + + Log.debug(s"Checking for existence of job definition called: $jobDefinitionName") + + val describeJobDefinitionRequest = DescribeJobDefinitionsRequest + .builder() .jobDefinitionName(jobDefinitionName) - // See https://stackoverflow.com/questions/24349517/scala-method-named-type - .`type`(JobDefinitionType.CONTAINER) - .build + .status("ACTIVE") + .build() + + val describeJobDefinitionResponse = batchClient.describeJobDefinitions(describeJobDefinitionRequest) + + if (!describeJobDefinitionResponse.jobDefinitions.isEmpty) { + // sort the definitions so that the latest revision is at the head + val definitions = + describeJobDefinitionResponse.jobDefinitions().asScala.toList.sortWith(_.revision > _.revision) + + // return the arn of the job + definitions.head.jobDefinitionArn() + } else { + Log.debug(s"No job definition found. 
Creating job definition: $jobDefinitionName") + + // See: + // + // http://aws-java-sdk-javadoc.s3-website-us-west-2.amazonaws.com/latest/software/amazon/awssdk/services/batch/model/RegisterJobDefinitionRequest.Builder.html + val definitionRequest = RegisterJobDefinitionRequest.builder + .containerProperties(jobDefinition.containerProperties) + .jobDefinitionName(jobDefinitionName) + // See https://stackoverflow.com/questions/24349517/scala-method-named-type + .`type`(JobDefinitionType.CONTAINER) + .build - Log.debug(s"Submitting definition request: $definitionRequest") + Log.debug(s"Submitting definition request: $definitionRequest") - val response: RegisterJobDefinitionResponse = batchClient.registerJobDefinition(definitionRequest) - Log.info(s"Definition created: $response") - response.jobDefinitionArn() + val response: RegisterJobDefinitionResponse = batchClient.registerJobDefinition(definitionRequest) + Log.info(s"Definition created: $response") + response.jobDefinitionArn() + } } - }) - - - // a function to retry submissions, returns a higher kind parameterized on a String (where the String is an arn) - val retry: F[String] = Stream.retry( - fo = submit, //the value to attempt to get - delay = 0.millis, //how long to wait - nextDelay = _ * 2, //how long to back off after a failure - maxAttempts = awsBatchAttributes.createDefinitionAttempts.value, //how many times to try - retriable = { //a function to say if we should retry or not - // RegisterJobDefinition throws 404s every once in a while - case e: ClientException => e.statusCode() == 404 || e.statusCode() == 409 - // a 409 means an eventual consistency collision has happened, most likely during a scatter. - // Just wait and retry as job definition names are canonical and if another thread succeeds in making one then - // that will be used and if there really isn't one, then the definition will be created. - case _ => false //don't retry other cases - } - ).compile.last.map(_.get) - // attempt to register the job definition - async.recoverWith(submit){ - case e: ClientException if e.statusCode == 404 || - e.statusCode == 409 || e.statusCode == 429 => retry //probably worth trying again - } + // a function to retry submissions, returns a higher kind parameterized on a String (where the String is an arn) + val retry: F[String] = Stream + .retry( + fo = submit, // the value to attempt to get + delay = 0.millis, // how long to wait + nextDelay = _ * 2, // how long to back off after a failure + maxAttempts = awsBatchAttributes.createDefinitionAttempts.value, // how many times to try + retriable = { // a function to say if we should retry or not + // RegisterJobDefinition throws 404s every once in a while + case e: ClientException => e.statusCode() == 404 || e.statusCode() == 409 + // a 409 means an eventual consistency collision has happened, most likely during a scatter. + // Just wait and retry as job definition names are canonical and if another thread succeeds in making one then + // that will be used and if there really isn't one, then the definition will be created. 
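          // (hedged sketch of fs2 retry timing: waits run delay, nextDelay(delay), nextDelay(nextDelay(delay)), ...
          //  e.g. Stream.retry(io, 100.millis, _ * 2, maxAttempts = 4, retriable) sleeps 100, 200, 400 ms
          //  between attempts; note the 0.millis base used above never grows, so these retries stay immediate)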
+ case _ => false // don't retry other cases + } + ) + .compile + .last + .map(_.get) + + // attempt to register the job definition + async.recoverWith(submit) { + case e: ClientException + if e.statusCode == 404 || + e.statusCode == 409 || e.statusCode == 429 => + retry // probably worth trying again + } } - /** Gets the status of a job by its Id, converted to a RunStatus * * @param jobId Job ID as defined in AWS Batch @@ -446,54 +488,62 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL } yield runStatus def detail(jobId: String): JobDetail = { - //TODO: This client call should be wrapped in a cats Effect + // TODO: This client call should be wrapped in a cats Effect val describeJobsResponse = batchClient.describeJobs(DescribeJobsRequest.builder.jobs(jobId).build) - val jobDetail = describeJobsResponse.jobs.asScala.headOption. - getOrElse(throw new RuntimeException(s"Expected a job Detail to be present from this request: $describeJobsResponse and this response: $describeJobsResponse ")) + val jobDetail = describeJobsResponse.jobs.asScala.headOption.getOrElse( + throw new RuntimeException( + s"Expected a job Detail to be present from this request: $describeJobsResponse and this response: $describeJobsResponse " + ) + ) jobDetail } - def rc(detail: JobDetail): Integer = { + def rc(detail: JobDetail): Integer = detail.container.exitCode - } def output(detail: JobDetail): String = { - val events: Seq[OutputLogEvent] = cloudWatchLogsClient.getLogEvents(GetLogEventsRequest.builder - // http://aws-java-sdk-javadoc.s3-website-us-west-2.amazonaws.com/latest/software/amazon/awssdk/services/batch/model/ContainerDetail.html#logStreamName-- - .logGroupName("/aws/batch/job") - .logStreamName(detail.container.logStreamName) - .startFromHead(true) - .build).events.asScala.toList - val eventMessages = for ( event <- events ) yield event.message + val events: Seq[OutputLogEvent] = cloudWatchLogsClient + .getLogEvents( + GetLogEventsRequest.builder + // http://aws-java-sdk-javadoc.s3-website-us-west-2.amazonaws.com/latest/software/amazon/awssdk/services/batch/model/ContainerDetail.html#logStreamName-- + .logGroupName("/aws/batch/job") + .logStreamName(detail.container.logStreamName) + .startFromHead(true) + .build + ) + .events + .asScala + .toList + val eventMessages = for (event <- events) yield event.message eventMessages mkString "\n" } - def abort(jobId: String): TerminateJobResponse = { + def abort(jobId: String): TerminateJobResponse = /* * Using Terminate here because it will work on jobs at any stage of their lifecycle whereas cancel will only work * on jobs that are not yet at the STARTING or RUNNING phase */ batchClient.terminateJob(TerminateJobRequest.builder.jobId(jobId).reason("cromwell abort called").build()) - } /** * Generate a `String` describing the instance. 
Mainly for debugging * @return a description of the instance */ - override def toString: String = { + override def toString: String = new ToStringBuilder(this, ToStringStyle.JSON_STYLE) .append("jobDescriptor", jobDescriptor) .append("runtimeAttributes", runtimeAttributes) .append("commandLine", commandLine) .append("commandScript", commandScript) - .append("dockerRc", dockerRc).append("dockerStderr", dockerStderr).append("dockerStdout", dockerStdout) + .append("dockerRc", dockerRc) + .append("dockerStderr", dockerStderr) + .append("dockerStdout", dockerStdout) .append("inputs", inputs) .append("outputs", outputs) .append("jobPaths", jobPaths) .append("configRegion", configRegion) .append("awsAuthMode", optAwsAuthMode) .build - } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobCachingActorHelper.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobCachingActorHelper.scala index 903f88a1cc4..c1d99620ac7 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobCachingActorHelper.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobCachingActorHelper.scala @@ -44,22 +44,25 @@ import cromwell.core.path.Path trait AwsBatchJobCachingActorHelper extends StandardCachingActorHelper { this: Actor with JobLogging => - lazy val initializationData: AwsBatchBackendInitializationData = { + lazy val initializationData: AwsBatchBackendInitializationData = backendInitializationDataAs[AwsBatchBackendInitializationData] - } lazy val configuration: AwsBatchConfiguration = initializationData.configuration // TODO: Determine if call paths are relevant lazy val callPaths: AwsBatchJobPaths = jobPaths.asInstanceOf[AwsBatchJobPaths] - lazy val runtimeAttributes: AwsBatchRuntimeAttributes = AwsBatchRuntimeAttributes(validatedRuntimeAttributes, configuration.runtimeConfig, configuration.fileSystem) - - lazy val workingDisk: AwsBatchVolume = runtimeAttributes.disks.find(x => configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => x.name == AwsBatchWorkingDisk.Name - case _ => configuration.root.startsWith(x.mountPoint.pathAsString) - }).get + lazy val runtimeAttributes: AwsBatchRuntimeAttributes = + AwsBatchRuntimeAttributes(validatedRuntimeAttributes, configuration.runtimeConfig, configuration.fileSystem) + lazy val workingDisk: AwsBatchVolume = runtimeAttributes.disks + .find(x => + configuration.fileSystem match { + case AWSBatchStorageSystems.s3 => x.name == AwsBatchWorkingDisk.Name + case _ => configuration.root.startsWith(x.mountPoint.pathAsString) + } + ) + .get lazy val callRootPath: Path = callPaths.callExecutionRoot lazy val returnCodeFilename: String = callPaths.returnCodeFilename diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala index 549d3c65185..1dab976fb70 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala @@ -34,7 +34,15 @@ package cromwell.backend.impl.aws import scala.collection.mutable.ListBuffer import cromwell.backend.BackendJobDescriptor import cromwell.backend.io.JobPaths -import software.amazon.awssdk.services.batch.model.{ContainerProperties, Host, KeyValuePair, MountPoint, ResourceRequirement, ResourceType, Volume} +import 
software.amazon.awssdk.services.batch.model.{ + ContainerProperties, + Host, + KeyValuePair, + MountPoint, + ResourceRequirement, + ResourceType, + Volume +} import cromwell.backend.impl.aws.io.AwsBatchVolume import scala.jdk.CollectionConverters._ @@ -43,7 +51,6 @@ import org.apache.commons.lang3.builder.{ToStringBuilder, ToStringStyle} import org.slf4j.{Logger, LoggerFactory} import wdl4s.parser.MemoryUnit - /** * Responsible for the creation of the job definition. * @@ -62,18 +69,16 @@ sealed trait AwsBatchJobDefinition { def containerProperties: ContainerProperties def name: String - override def toString: String = { + override def toString: String = new ToStringBuilder(this, ToStringStyle.JSON_STYLE) .append("name", name) .append("containerProperties", containerProperties) .build - } } trait AwsBatchJobDefinitionBuilder { val Log: Logger = LoggerFactory.getLogger(StandardAwsBatchJobDefinitionBuilder.getClass) - /** Gets a builder, seeded with appropriate portions of the container properties * * @param dockerImage docker image with which to run @@ -83,71 +88,77 @@ trait AwsBatchJobDefinitionBuilder { def builder(dockerImage: String): ContainerProperties.Builder = ContainerProperties.builder().image(dockerImage) - def buildResources(builder: ContainerProperties.Builder, - context: AwsBatchJobDefinitionContext): (ContainerProperties.Builder, String) = { + context: AwsBatchJobDefinitionContext + ): (ContainerProperties.Builder, String) = { // The initial buffer should only contain one item - the hostpath of the // local disk mount point, which will be needed by the docker container // that copies data around val environment = List.empty[KeyValuePair] - - def buildVolumes(disks: Seq[AwsBatchVolume]): List[Volume] = { - - //all the configured disks plus the fetch and run volume and the aws-cli volume + def buildVolumes(disks: Seq[AwsBatchVolume]): List[Volume] = + // all the configured disks plus the fetch and run volume and the aws-cli volume disks.map(d => d.toVolume()).toList ++ List( - Volume.builder() - .name("fetchAndRunScript") - .host(Host.builder().sourcePath("/usr/local/bin/fetch_and_run.sh").build()) - .build(), - //the aws-cli location on the EC2 - Volume.builder() + Volume + .builder() + .name("fetchAndRunScript") + .host(Host.builder().sourcePath("/usr/local/bin/fetch_and_run.sh").build()) + .build(), + // the aws-cli location on the EC2 + Volume + .builder() .name("awsCliHome") .host(Host.builder().sourcePath("/usr/local/aws-cli").build()) .build() ) - } - - def buildMountPoints(disks: Seq[AwsBatchVolume]): List[MountPoint] = { - //all the configured disks plus the fetch and run mount point and the AWS cli mount point + def buildMountPoints(disks: Seq[AwsBatchVolume]): List[MountPoint] = + // all the configured disks plus the fetch and run mount point and the AWS cli mount point disks.map(_.toMountPoint).toList ++ List( - MountPoint.builder() + MountPoint + .builder() .readOnly(true) .sourceVolume("fetchAndRunScript") .containerPath("/var/scratch/fetch_and_run.sh") .build(), - - MountPoint.builder() + MountPoint + .builder() .readOnly(true) .sourceVolume("awsCliHome") - //where the aws-cli will be on the container + // where the aws-cli will be on the container .containerPath("/usr/local/aws-cli") .build() ) - } - def buildName(imageName: String, packedCommand: String, volumes: List[Volume], mountPoints: List[MountPoint], env: Seq[KeyValuePair]): String = { - val str = 
s"$imageName:$packedCommand:${volumes.map(_.toString).mkString(",")}:${mountPoints.map(_.toString).mkString(",")}:${env.map(_.toString).mkString(",")}" + def buildName(imageName: String, + packedCommand: String, + volumes: List[Volume], + mountPoints: List[MountPoint], + env: Seq[KeyValuePair] + ): String = { + val str = s"$imageName:$packedCommand:${volumes.map(_.toString).mkString(",")}:${mountPoints + .map(_.toString) + .mkString(",")}:${env.map(_.toString).mkString(",")}" - val sha1 = MessageDigest.getInstance("SHA-1") - .digest( str.getBytes("UTF-8") ) - .map("%02x".format(_)).mkString + val sha1 = MessageDigest + .getInstance("SHA-1") + .digest(str.getBytes("UTF-8")) + .map("%02x".format(_)) + .mkString - val prefix = s"cromwell_$imageName".slice(0,88) // will be joined to a 40 character SHA1 for total length of 128 + val prefix = s"cromwell_$imageName".slice(0, 88) // will be joined to a 40 character SHA1 for total length of 128 sanitize(prefix + sha1) } - val cmdName = context.runtimeAttributes.fileSystem match { - case AWSBatchStorageSystems.s3 => "/var/scratch/fetch_and_run.sh" - case _ => context.commandText + case AWSBatchStorageSystems.s3 => "/var/scratch/fetch_and_run.sh" + case _ => context.commandText } val packedCommand = packCommand("/bin/bash", "-c", cmdName) - val volumes = buildVolumes( context.runtimeAttributes.disks ) - val mountPoints = buildMountPoints( context.runtimeAttributes.disks) + val volumes = buildVolumes(context.runtimeAttributes.disks) + val mountPoints = buildMountPoints(context.runtimeAttributes.disks) val jobDefinitionName = buildName( context.runtimeAttributes.dockerImage, packedCommand.mkString(","), @@ -158,21 +169,23 @@ trait AwsBatchJobDefinitionBuilder { (builder .command(packedCommand.asJava) - .resourceRequirements( - ResourceRequirement.builder() - .`type`(ResourceType.MEMORY) - .value(context.runtimeAttributes.memory.to(MemoryUnit.MB).amount.toInt.toString) - .build(), - ResourceRequirement.builder() - .`type`(ResourceType.VCPU) - .value(context.runtimeAttributes.cpu.value.toString) - .build(), - ) - .volumes( volumes.asJava) - .mountPoints( mountPoints.asJava) - .environment(environment.asJava), - - jobDefinitionName) + .resourceRequirements( + ResourceRequirement + .builder() + .`type`(ResourceType.MEMORY) + .value(context.runtimeAttributes.memory.to(MemoryUnit.MB).amount.toInt.toString) + .build(), + ResourceRequirement + .builder() + .`type`(ResourceType.VCPU) + .value(context.runtimeAttributes.cpu.value.toString) + .build() + ) + .volumes(volumes.asJava) + .mountPoints(mountPoints.asJava) + .environment(environment.asJava), + jobDefinitionName + ) } private def packCommand(shell: String, options: String, mainCommand: String): Seq[String] = { @@ -180,10 +193,9 @@ trait AwsBatchJobDefinitionBuilder { val lim = 1024 val packedCommand = mainCommand.length() match { case len if len <= lim => mainCommand - case len if len > lim => { + case len if len > lim => rc += "gzipdata" // This is hard coded in our agent and must be the first item gzip(mainCommand) - } } rc += shell rc += options @@ -196,7 +208,7 @@ trait AwsBatchJobDefinitionBuilder { object StandardAwsBatchJobDefinitionBuilder extends AwsBatchJobDefinitionBuilder { def build(context: AwsBatchJobDefinitionContext): AwsBatchJobDefinition = { - //instantiate a builder with the name of the docker image + // instantiate a builder with the name of the docker image val builderInst = builder(context.runtimeAttributes.dockerImage) val (b, name) = buildResources(builderInst, context) @@ -204,32 
+216,32 @@ object StandardAwsBatchJobDefinitionBuilder extends AwsBatchJobDefinitionBuilder } } -case class StandardAwsBatchJobDefinitionBuilder private(containerProperties: ContainerProperties, name: String) extends AwsBatchJobDefinition +case class StandardAwsBatchJobDefinitionBuilder private (containerProperties: ContainerProperties, name: String) + extends AwsBatchJobDefinition object AwsBatchJobDefinitionContext -case class AwsBatchJobDefinitionContext( - runtimeAttributes: AwsBatchRuntimeAttributes, - commandText: String, - dockerRcPath: String, - dockerStdoutPath: String, - dockerStderrPath: String, - jobDescriptor: BackendJobDescriptor, - jobPaths: JobPaths, - inputs: Set[AwsBatchInput], - outputs: Set[AwsBatchFileOutput]){ - - override def toString: String = { +case class AwsBatchJobDefinitionContext(runtimeAttributes: AwsBatchRuntimeAttributes, + commandText: String, + dockerRcPath: String, + dockerStdoutPath: String, + dockerStderrPath: String, + jobDescriptor: BackendJobDescriptor, + jobPaths: JobPaths, + inputs: Set[AwsBatchInput], + outputs: Set[AwsBatchFileOutput] +) { + + override def toString: String = new ToStringBuilder(this, ToStringStyle.JSON_STYLE) .append("runtimeAttributes", runtimeAttributes) .append("commandText", commandText) .append("dockerRcPath", dockerRcPath) - .append("dockerStderrPath",dockerStderrPath) + .append("dockerStderrPath", dockerStderrPath) .append("dockerStdoutPath", dockerStdoutPath) .append("jobDescriptor", jobDescriptor) .append("jobPaths", jobPaths) .append("inputs", inputs) .append("outputs", outputs) .build - } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobPaths.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobPaths.scala index e8d93570dbc..5725bea6001 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobPaths.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobPaths.scala @@ -45,7 +45,10 @@ object AwsBatchJobPaths { * @param workflowPaths the paths for the job * @param jobKey the job */ -final case class AwsBatchJobPaths(override val workflowPaths: AwsBatchWorkflowPaths, jobKey: BackendJobDescriptorKey, override val isCallCacheCopyAttempt: Boolean = false) extends JobPaths { +final case class AwsBatchJobPaths(override val workflowPaths: AwsBatchWorkflowPaths, + jobKey: BackendJobDescriptorKey, + override val isCallCacheCopyAttempt: Boolean = false +) extends JobPaths { def logBasename = { val index = jobKey.index.map(s => s"-$s").getOrElse("") diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchMetadataKeys.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchMetadataKeys.scala index 1a884fd886b..cf7ec9bea94 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchMetadataKeys.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchMetadataKeys.scala @@ -31,7 +31,6 @@ package cromwell.backend.impl.aws - object AwsBatchMetadataKeys { val ExecutionBucket = "batch:executionBucket" // TODO: This will be needed later diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchParameters.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchParameters.scala index ff9a75156f7..61a61e60a5f 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchParameters.scala +++ 
b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchParameters.scala @@ -42,12 +42,13 @@ import software.amazon.awssdk.services.batch.model.KeyValuePair sealed trait AwsBatchParameter { def name: String def toKeyValuePair: KeyValuePair - def toStringString: (String,String) + def toStringString: (String, String) } sealed trait AwsBatchInput extends AwsBatchParameter -final case class AwsBatchFileInput(name: String, s3key: String, local: Path, mount: AwsBatchVolume) extends AwsBatchInput { +final case class AwsBatchFileInput(name: String, s3key: String, local: Path, mount: AwsBatchVolume) + extends AwsBatchInput { def toKeyValuePair = KeyValuePair.builder.name(name).value(s3key).build def toStringString = (name, s3key) def containerPath: Path = mount.mountPoint.resolve(local) @@ -58,7 +59,8 @@ final case class AwsBatchLiteralInput(name: String, value: String) extends AwsBa def toStringString = (name, value) } -final case class AwsBatchFileOutput(name: String, s3key: String, local: Path, mount: AwsBatchVolume) extends AwsBatchParameter { +final case class AwsBatchFileOutput(name: String, s3key: String, local: Path, mount: AwsBatchVolume) + extends AwsBatchParameter { def toKeyValuePair = KeyValuePair.builder.name(name).value(s3key).build def toStringString = (name, s3key) } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala index ca0eeb9b10c..87f04cc44dc 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala @@ -71,7 +71,8 @@ case class AwsBatchRuntimeAttributes(cpu: Int Refined Positive, continueOnReturnCode: ContinueOnReturnCode, noAddress: Boolean, scriptS3BucketName: String, - fileSystem:String= "s3") + fileSystem: String = "s3" +) object AwsBatchRuntimeAttributes { @@ -92,96 +93,125 @@ object AwsBatchRuntimeAttributes { private val MemoryDefaultValue = "2 GB" - private def cpuValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = CpuValidation.instance - .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) + private def cpuValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = + CpuValidation.instance + .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) - private def cpuMinValidation(runtimeConfig: Option[Config]):RuntimeAttributesValidation[Int Refined Positive] = CpuValidation.instanceMin - .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) + private def cpuMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = + CpuValidation.instanceMin + .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) private def failOnStderrValidation(runtimeConfig: Option[Config]) = FailOnStderrValidation.default(runtimeConfig) - private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = ContinueOnReturnCodeValidation.default(runtimeConfig) + private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = + ContinueOnReturnCodeValidation.default(runtimeConfig) - private def disksValidation(runtimeConfig: Option[Config]): 
RuntimeAttributesValidation[Seq[AwsBatchVolume]] = DisksValidation - .withDefault(DisksValidation.configDefaultWomValue(runtimeConfig) getOrElse DisksDefaultValue) + private def disksValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Seq[AwsBatchVolume]] = + DisksValidation + .withDefault(DisksValidation.configDefaultWomValue(runtimeConfig) getOrElse DisksDefaultValue) - private def zonesValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Vector[String]] = ZonesValidation - .withDefault(ZonesValidation.configDefaultWomValue(runtimeConfig) getOrElse ZonesDefaultValue) + private def zonesValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Vector[String]] = + ZonesValidation + .withDefault(ZonesValidation.configDefaultWomValue(runtimeConfig) getOrElse ZonesDefaultValue) - private def memoryValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = { + private def memoryValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = MemoryValidation.withDefaultMemory( RuntimeAttributesKeys.MemoryKey, - MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, runtimeConfig) getOrElse MemoryDefaultValue) - } + MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, runtimeConfig) getOrElse MemoryDefaultValue + ) - private def memoryMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = { - MemoryValidation.withDefaultMemory( - RuntimeAttributesKeys.MemoryMinKey, - MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryMinKey, runtimeConfig) getOrElse MemoryDefaultValue) - } + private def memoryMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = + MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryMinKey, + MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryMinKey, + runtimeConfig + ) getOrElse MemoryDefaultValue + ) - private def noAddressValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = noAddressValidationInstance - .withDefault(noAddressValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse NoAddressDefaultValue) + private def noAddressValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = + noAddressValidationInstance + .withDefault(noAddressValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse NoAddressDefaultValue) - private def scriptS3BucketNameValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[String] = { - ScriptS3BucketNameValidation(scriptS3BucketKey).withDefault(ScriptS3BucketNameValidation(scriptS3BucketKey) - .configDefaultWomValue(runtimeConfig).getOrElse( throw new RuntimeException( "scriptBucketName is required" ))) - } + private def scriptS3BucketNameValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[String] = + ScriptS3BucketNameValidation(scriptS3BucketKey).withDefault( + ScriptS3BucketNameValidation(scriptS3BucketKey) + .configDefaultWomValue(runtimeConfig) + .getOrElse(throw new RuntimeException("scriptBucketName is required")) + ) private val dockerValidation: RuntimeAttributesValidation[String] = DockerValidation.instance private def queueArnValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[String] = - QueueArnValidation.withDefault(QueueArnValidation.configDefaultWomValue(runtimeConfig) getOrElse - (throw new RuntimeException("queueArn is required"))) + QueueArnValidation.withDefault( + 
QueueArnValidation.configDefaultWomValue(runtimeConfig) getOrElse + (throw new RuntimeException("queueArn is required")) + ) def runtimeAttributesBuilder(configuration: AwsBatchConfiguration): StandardValidatedRuntimeAttributesBuilder = { val runtimeConfig = configuration.runtimeConfig - def validationsS3backend = StandardValidatedRuntimeAttributesBuilder.default(runtimeConfig).withValidation( - cpuValidation(runtimeConfig), - cpuMinValidation(runtimeConfig), - disksValidation(runtimeConfig), - zonesValidation(runtimeConfig), - memoryValidation(runtimeConfig), - memoryMinValidation(runtimeConfig), - noAddressValidation(runtimeConfig), - dockerValidation, - queueArnValidation(runtimeConfig), - scriptS3BucketNameValidation(runtimeConfig) - ) - def validationsLocalBackend = StandardValidatedRuntimeAttributesBuilder.default(runtimeConfig).withValidation( - cpuValidation(runtimeConfig), - cpuMinValidation(runtimeConfig), - disksValidation(runtimeConfig), - zonesValidation(runtimeConfig), - memoryValidation(runtimeConfig), - memoryMinValidation(runtimeConfig), - noAddressValidation(runtimeConfig), - dockerValidation, - queueArnValidation(runtimeConfig) - ) - - configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => validationsS3backend - - case _ => validationsLocalBackend + def validationsS3backend = StandardValidatedRuntimeAttributesBuilder + .default(runtimeConfig) + .withValidation( + cpuValidation(runtimeConfig), + cpuMinValidation(runtimeConfig), + disksValidation(runtimeConfig), + zonesValidation(runtimeConfig), + memoryValidation(runtimeConfig), + memoryMinValidation(runtimeConfig), + noAddressValidation(runtimeConfig), + dockerValidation, + queueArnValidation(runtimeConfig), + scriptS3BucketNameValidation(runtimeConfig) + ) + def validationsLocalBackend = StandardValidatedRuntimeAttributesBuilder + .default(runtimeConfig) + .withValidation( + cpuValidation(runtimeConfig), + cpuMinValidation(runtimeConfig), + disksValidation(runtimeConfig), + zonesValidation(runtimeConfig), + memoryValidation(runtimeConfig), + memoryMinValidation(runtimeConfig), + noAddressValidation(runtimeConfig), + dockerValidation, + queueArnValidation(runtimeConfig) + ) + + configuration.fileSystem match { + case AWSBatchStorageSystems.s3 => validationsS3backend + + case _ => validationsLocalBackend } } - def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, runtimeAttrsConfig: Option[Config], fileSystem:String): AwsBatchRuntimeAttributes = { - val cpu: Int Refined Positive = RuntimeAttributesValidation.extract(cpuValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, + runtimeAttrsConfig: Option[Config], + fileSystem: String + ): AwsBatchRuntimeAttributes = { + val cpu: Int Refined Positive = + RuntimeAttributesValidation.extract(cpuValidation(runtimeAttrsConfig), validatedRuntimeAttributes) val zones: Vector[String] = RuntimeAttributesValidation.extract(ZonesValidation, validatedRuntimeAttributes) - val memory: MemorySize = RuntimeAttributesValidation.extract(memoryValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val disks: Seq[AwsBatchVolume] = RuntimeAttributesValidation.extract(disksValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val memory: MemorySize = + RuntimeAttributesValidation.extract(memoryValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val disks: Seq[AwsBatchVolume] = + RuntimeAttributesValidation.extract(disksValidation(runtimeAttrsConfig), 
validatedRuntimeAttributes) val docker: String = RuntimeAttributesValidation.extract(dockerValidation, validatedRuntimeAttributes) - val queueArn: String = RuntimeAttributesValidation.extract(queueArnValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val failOnStderr: Boolean = RuntimeAttributesValidation.extract(failOnStderrValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract(continueOnReturnCodeValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val noAddress: Boolean = RuntimeAttributesValidation.extract(noAddressValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val scriptS3BucketName = fileSystem match { - case AWSBatchStorageSystems.s3 => RuntimeAttributesValidation.extract(scriptS3BucketNameValidation(runtimeAttrsConfig) , validatedRuntimeAttributes) - case _ => "" - } - + val queueArn: String = + RuntimeAttributesValidation.extract(queueArnValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val failOnStderr: Boolean = + RuntimeAttributesValidation.extract(failOnStderrValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract( + continueOnReturnCodeValidation(runtimeAttrsConfig), + validatedRuntimeAttributes + ) + val noAddress: Boolean = + RuntimeAttributesValidation.extract(noAddressValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val scriptS3BucketName = fileSystem match { + case AWSBatchStorageSystems.s3 => + RuntimeAttributesValidation.extract(scriptS3BucketNameValidation(runtimeAttrsConfig), + validatedRuntimeAttributes + ) + case _ => "" + } new AwsBatchRuntimeAttributes( cpu, @@ -203,23 +233,21 @@ object ScriptS3BucketNameValidation { def apply(key: String): ScriptS3BucketNameValidation = new ScriptS3BucketNameValidation(key) } -class ScriptS3BucketNameValidation( key: String ) extends StringRuntimeAttributesValidation(key) { +class ScriptS3BucketNameValidation(key: String) extends StringRuntimeAttributesValidation(key) { - //a reasonable but not perfect regex for a bucket. see https://stackoverflow.com/a/50484916/3573553 - protected val s3BucketNameRegex: Regex = "(?=^.{3,63}$)(?!^(\\d+\\.)+\\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])\\.)*([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])$)" - .r + // a reasonable but not perfect regex for a bucket. 
see https://stackoverflow.com/a/50484916/3573553 + protected val s3BucketNameRegex: Regex = + "(?=^.{3,63}$)(?!^(\\d+\\.)+\\d+$)(^(([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])\\.)*([a-z0-9]|[a-z0-9][a-z0-9\\-]*[a-z0-9])$)".r - - override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { - case WomString(s) => validateBucketName(s) + override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { case WomString(s) => + validateBucketName(s) } - private def validateBucketName(possibleBucketName: String): ErrorOr[String] = { + private def validateBucketName(possibleBucketName: String): ErrorOr[String] = possibleBucketName match { - case s3BucketNameRegex(_@_*) => possibleBucketName.validNel + case s3BucketNameRegex(_ @_*) => possibleBucketName.validNel case _ => "The Script Bucket name has an invalid s3 bucket format".invalidNel } - } } object QueueArnValidation extends ArnValidation(AwsBatchRuntimeAttributes.QueueArnKey) { @@ -227,7 +255,7 @@ object QueueArnValidation extends ArnValidation(AwsBatchRuntimeAttributes.QueueA // https://docs.aws.amazon.com/en_us/general/latest/gr/aws-arns-and-namespaces.html#arn-syntax-batch // arn:aws:batch:region:account-id:job-queue/queue-name override protected val arnRegex: Regex = - s""" + s""" (?x) # Turn on comments and whitespace insensitivity (arn) # Every AWS ARN starts with "arn" : @@ -259,22 +287,21 @@ object ArnValidation { } class ArnValidation(override val key: String) extends StringRuntimeAttributesValidation(key) { - override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { - case WomString(s) => validateArn(s) + override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { case WomString(s) => + validateArn(s) } - private def validateArn(possibleArn: String): ErrorOr[String] = { + private def validateArn(possibleArn: String): ErrorOr[String] = possibleArn match { - case arnRegex(_@_*) => possibleArn.validNel + case arnRegex(_ @_*) => possibleArn.validNel case _ => "ARN has invalid format".invalidNel } - } // Possible ARN formats can be found here // https://docs.aws.amazon.com/en_us/general/latest/gr/aws-arns-and-namespaces.html // This is quite vague regex, but it allows to support a lot of ARN formats protected val arnRegex: Regex = - s""" + s""" (?x) # Turn on comments and whitespace insensitivity (arn) # Every ARN starts with "arn" : @@ -347,27 +374,25 @@ object DisksValidation extends RuntimeAttributesValidation[Seq[AwsBatchVolume]] defaulted } - private def validateLocalDisk(disk: String): ErrorOr[AwsBatchVolume] = { + private def validateLocalDisk(disk: String): ErrorOr[AwsBatchVolume] = AwsBatchVolume.parse(disk) match { case scala.util.Success(attachedDisk) => attachedDisk.validNel case scala.util.Failure(ex) => ex.getMessage.invalidNel } - } private def sequenceNels(nels: Seq[ErrorOr[AwsBatchVolume]]): ErrorOr[Seq[AwsBatchVolume]] = { val emptyDiskNel: ErrorOr[Vector[AwsBatchVolume]] = Vector.empty[AwsBatchVolume].validNel - val disksNel: ErrorOr[Vector[AwsBatchVolume]] = nels.foldLeft(emptyDiskNel) { - (acc, v) => (acc, v) mapN { (a, v) => a :+ v } + val disksNel: ErrorOr[Vector[AwsBatchVolume]] = nels.foldLeft(emptyDiskNel) { (acc, v) => + (acc, v) mapN { (a, v) => a :+ v } } disksNel } - private def addDefault(disksNel: ErrorOr[Seq[AwsBatchVolume]]): ErrorOr[Seq[AwsBatchVolume]] = { + private def addDefault(disksNel: ErrorOr[Seq[AwsBatchVolume]]): ErrorOr[Seq[AwsBatchVolume]] = disksNel map { case disks if disks.exists(_.name == 
AwsBatchWorkingDisk.Name) || disks.exists(_.fsType == "efs") => disks case disks => disks :+ AwsBatchWorkingDisk.Default } - } override protected def missingValueMessage: String = s"Expecting $key runtime attribute to be a comma separated String or Array[String]" diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchSingletonActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchSingletonActor.scala index ed32d0bd3d8..185a8fcac90 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchSingletonActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchSingletonActor.scala @@ -7,9 +7,17 @@ import cromwell.cloudsupport.aws.auth.AwsAuthMode import cromwell.core.Mailbox import software.amazon.awssdk.regions.Region -class AwsBatchSingletonActor(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) extends Actor with ActorLogging { - val awsOccasionalStatusPoller: ActorRef = context.actorOf(OccasionalStatusPollingActor.props(configRegion, optAwsAuthMode).withMailbox(Mailbox.PriorityMailbox), "OccasionalStatusPollingActor") - val awsIntervalLimitedSubmitActor: ActorRef = context.actorOf(IntervalLimitedAwsJobSubmitActor.props(configRegion).withMailbox(Mailbox.PriorityMailbox), "IntervalLimitedAWSSubmitActor") +class AwsBatchSingletonActor(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) + extends Actor + with ActorLogging { + val awsOccasionalStatusPoller: ActorRef = context.actorOf( + OccasionalStatusPollingActor.props(configRegion, optAwsAuthMode).withMailbox(Mailbox.PriorityMailbox), + "OccasionalStatusPollingActor" + ) + val awsIntervalLimitedSubmitActor: ActorRef = context.actorOf( + IntervalLimitedAwsJobSubmitActor.props(configRegion).withMailbox(Mailbox.PriorityMailbox), + "IntervalLimitedAWSSubmitActor" + ) override def receive = { @@ -23,5 +31,7 @@ class AwsBatchSingletonActor(configRegion: Option[Region], optAwsAuthMode: Optio } object AwsBatchSingletonActor { - def props(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) = Props(new AwsBatchSingletonActor(configRegion, optAwsAuthMode)) + def props(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) = Props( + new AwsBatchSingletonActor(configRegion, optAwsAuthMode) + ) } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPaths.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPaths.scala index 45d3eafe75d..1212aa24100 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPaths.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPaths.scala @@ -53,25 +53,28 @@ object AwsBatchWorkflowPaths { */ case class AwsBatchWorkflowPaths(workflowDescriptor: BackendWorkflowDescriptor, provider: AwsCredentialsProvider, - configuration: AwsBatchConfiguration)(implicit actorSystem: ActorSystem) extends WorkflowPaths { + configuration: AwsBatchConfiguration +)(implicit actorSystem: ActorSystem) + extends WorkflowPaths { - override lazy val executionRootString: String = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => workflowDescriptor.workflowOptions.getOrElse(AwsBatchWorkflowPaths.RootOptionKey, configuration.root) + override lazy val executionRootString: String = configuration.fileSystem match { + case AWSBatchStorageSystems.s3 => + 
workflowDescriptor.workflowOptions.getOrElse(AwsBatchWorkflowPaths.RootOptionKey, configuration.root) case _ => configuration.root } private val workflowOptions: WorkflowOptions = workflowDescriptor.workflowOptions - override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): AwsBatchJobPaths = { + override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): AwsBatchJobPaths = new AwsBatchJobPaths(workflowPaths.asInstanceOf[AwsBatchWorkflowPaths], jobKey) - } - override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = this.copy(workflowDescriptor = workflowDescriptor) + override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = + this.copy(workflowDescriptor = workflowDescriptor) override def config: Config = configuration.configurationDescriptor.backendConfig - override def pathBuilders: List[PathBuilder] = { + override def pathBuilders: List[PathBuilder] = if (configuration.fileSystem == "s3") { List(configuration.pathBuilderFactory.asInstanceOf[S3PathBuilderFactory].fromProvider(workflowOptions, provider)) } else { - WorkflowPaths.DefaultPathBuilders} - } + WorkflowPaths.DefaultPathBuilders + } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/IntervalLimitedAwsJobSubmitActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/IntervalLimitedAwsJobSubmitActor.scala index f16aa95eb12..3c4cbfe6457 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/IntervalLimitedAwsJobSubmitActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/IntervalLimitedAwsJobSubmitActor.scala @@ -20,7 +20,8 @@ import scala.util.{Failure, Success} class IntervalLimitedAwsJobSubmitActor(configRegion: Option[Region]) extends Actor with ActorLogging { implicit val ec: ExecutionContext = context.dispatcher - implicit val timer: Timer[IO] = cats.effect.IO.timer(ExecutionContext.fromExecutor(Executors.newSingleThreadScheduledExecutor)) + implicit val timer: Timer[IO] = + cats.effect.IO.timer(ExecutionContext.fromExecutor(Executors.newSingleThreadScheduledExecutor)) val WorkInterval = 100.millis @@ -33,30 +34,30 @@ class IntervalLimitedAwsJobSubmitActor(configRegion: Option[Region]) extends Act // Maps job ID to status private var workQueue: Vector[SubmitAwsJobRequest] = Vector.empty - override def receive = { case sfm: SubmitAwsJobRequest => workQueue :+= sfm - case CheckForWork => workQueue.headOption match { - case Some(SubmitAwsJobRequest(batchJob, attributes, completionPromise)) => - batchJob.submitJob[IO]().run(attributes).unsafeToFuture() onComplete { - case Success(value) => - completionPromise.success(value) - scheduleWorkCheck(WorkInterval) - case Failure(error) => - completionPromise.failure(error) - log.error(error, s"Submission of new AWS job failed") - scheduleWorkCheck(WorkInterval) - } - workQueue = workQueue.tail - case None => - scheduleWorkCheck(WorkInterval) - } + case CheckForWork => + workQueue.headOption match { + case Some(SubmitAwsJobRequest(batchJob, attributes, completionPromise)) => + batchJob.submitJob[IO]().run(attributes).unsafeToFuture() onComplete { + case Success(value) => + completionPromise.success(value) + scheduleWorkCheck(WorkInterval) + case Failure(error) => + completionPromise.failure(error) + log.error(error, s"Submission of new AWS job failed") + scheduleWorkCheck(WorkInterval) + } + workQueue = workQueue.tail + case None => + 
scheduleWorkCheck(WorkInterval) + } } def scheduleWorkCheck(in: FiniteDuration): Unit = { - context.system.scheduler.scheduleOnce(in) { self ! CheckForWork } + context.system.scheduler.scheduleOnce(in)(self ! CheckForWork) () } @@ -67,7 +68,10 @@ object IntervalLimitedAwsJobSubmitActor { sealed trait IntervalLimitedAwsJobSubmitActorMessage case object CheckForWork - final case class SubmitAwsJobRequest(job: AwsBatchJob, attributes: AwsBatchAttributes, completionPromise: Promise[SubmitJobResponse]) extends IntervalLimitedAwsJobSubmitActorMessage + final case class SubmitAwsJobRequest(job: AwsBatchJob, + attributes: AwsBatchAttributes, + completionPromise: Promise[SubmitJobResponse] + ) extends IntervalLimitedAwsJobSubmitActorMessage def props(configRegion: Option[Region]) = Props(new IntervalLimitedAwsJobSubmitActor(configRegion)) } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala index 152130745f5..dda0e614ef4 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala @@ -26,7 +26,9 @@ import scala.util.{Failure, Success, Try} * - The 'queuesToMonitor' get added to but never cleared out (so on a multi-tenant system, will grow indefinitely) * - We don't track completed jobs - so when a job completes the caller will get a None, and have to fall back to an AWS query anyway. */ -class OccasionalStatusPollingActor(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) extends Actor with ActorLogging { +class OccasionalStatusPollingActor(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) + extends Actor + with ActorLogging { implicit val ec: ExecutionContext = context.dispatcher @@ -61,7 +63,10 @@ class OccasionalStatusPollingActor(configRegion: Option[Region], optAwsAuthMode: statuses += jobId -> status queuesToMonitor += queueArn // Set addition so expectation is a no-op almost every time case NotifyOfStatus(_, jobId, None) => - log.error("Programmer Error: OccasionalStatusPollerActor was given an empty status update for {}. It was probably intended to be filled.", jobId) + log.error( + "Programmer Error: OccasionalStatusPollerActor was given an empty status update for {}. 
It was probably intended to be filled.", + jobId + ) } private def updateStatuses() = { @@ -69,14 +74,18 @@ class OccasionalStatusPollingActor(configRegion: Option[Region], optAwsAuthMode: final case class PageAccumulation(nextPageToken: String, currentList: Vector[String]) @tailrec - def findJobsInStatus(awsStatusName: String, queueName: String, pageAccumulation: Option[PageAccumulation]): Vector[String] = { + def findJobsInStatus(awsStatusName: String, + queueName: String, + pageAccumulation: Option[PageAccumulation] + ): Vector[String] = { - val requestBuilder = ListJobsRequest.builder() + val requestBuilder = ListJobsRequest + .builder() .jobStatus(awsStatusName) .jobQueue(queueName) - .maxResults(100) + .maxResults(100) - pageAccumulation.foreach { pa => requestBuilder.nextToken(pa.nextPageToken) } + pageAccumulation.foreach(pa => requestBuilder.nextToken(pa.nextPageToken)) val request = requestBuilder.build() @@ -103,19 +112,18 @@ class OccasionalStatusPollingActor(configRegion: Option[Region], optAwsAuthMode: // Remove the old values and add the new values statuses = statuses.filterNot(_._2 == mapToRunStatus) ++ jobIdsInStatus.map(_ -> mapToRunStatus) - } recover { - case e => - log.error(e, s"Failure fetching statuses for AWS jobs in $mapToRunStatus. No updates will occur.") + } recover { case e => + log.error(e, s"Failure fetching statuses for AWS jobs in $mapToRunStatus. No updates will occur.") } () } - updateForStatusNames(List("SUBMITTED", "PENDING", "RUNNABLE"), Initializing) - updateForStatusNames(List("STARTING", "RUNNING"), Running) + updateForStatusNames(List("SUBMITTED", "PENDING", "RUNNABLE"), Initializing) + updateForStatusNames(List("STARTING", "RUNNING"), Running) } def scheduleStatusUpdate(in: FiniteDuration): Unit = { - context.system.scheduler.scheduleOnce(in) { self ! UpdateStatuses } + context.system.scheduler.scheduleOnce(in)(self ! 
UpdateStatuses) () } @@ -129,8 +137,11 @@ object OccasionalStatusPollingActor { case object UpdateStatuses extends OccasionalStatusPollingActorMessage case class StatusUpdates(newState: Map[String, RunStatus]) extends OccasionalStatusPollingActorMessage - final case class NotifyOfStatus(queueArn: String, jobId: String, runStatus: Option[RunStatus]) extends OccasionalStatusPollingActorMessage + final case class NotifyOfStatus(queueArn: String, jobId: String, runStatus: Option[RunStatus]) + extends OccasionalStatusPollingActorMessage final case class WhatsMyStatus(queueArn: String, jobId: String) extends OccasionalStatusPollingActorMessage - def props(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) = Props(new OccasionalStatusPollingActor(configRegion, optAwsAuthMode)) + def props(configRegion: Option[Region], optAwsAuthMode: Option[AwsAuthMode] = None) = Props( + new OccasionalStatusPollingActor(configRegion, optAwsAuthMode) + ) } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/RunStatus.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/RunStatus.scala index f3218c9ca47..bdf34d38063 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/RunStatus.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/RunStatus.scala @@ -50,8 +50,11 @@ sealed trait RunStatus { } object RunStatus { - def fromJobStatus(status: JobStatus, jobId: String, errorMessage: Option[String] = None, - eventList: Seq[ExecutionEvent] = Seq.empty): Try[RunStatus] = { + def fromJobStatus(status: JobStatus, + jobId: String, + errorMessage: Option[String] = None, + eventList: Seq[ExecutionEvent] = Seq.empty + ): Try[RunStatus] = status match { case JobStatus.FAILED => Success(Failed(jobId, errorMessage, eventList)) case JobStatus.PENDING => Success(Initializing) @@ -63,7 +66,6 @@ object RunStatus { // JobStatus.UNKNOWN_TO_SDK_VERSION case _ => Failure(new RuntimeException(s"job {$jobId} has an unknown status {$status}")) } - } case object Initializing extends RunStatus case object Running extends RunStatus @@ -82,33 +84,30 @@ object RunStatus { } object UnsuccessfulRunStatus { - def apply(jobId: String, status: String, errorMessage: Option[String], eventList: Seq[ExecutionEvent]): UnsuccessfulRunStatus = { + def apply(jobId: String, + status: String, + errorMessage: Option[String], + eventList: Seq[ExecutionEvent] + ): UnsuccessfulRunStatus = if (status == "Stopped") { // TODO: Verify this Stopped(jobId, errorMessage, eventList) } else { Failed(jobId, errorMessage, eventList) } - } } - final case class Stopped(jobId: String, - errorMessage: Option[String], - eventList: Seq[ExecutionEvent], - ) extends UnsuccessfulRunStatus { + final case class Stopped(jobId: String, errorMessage: Option[String], eventList: Seq[ExecutionEvent]) + extends UnsuccessfulRunStatus { override def toString = "Stopped" } - final case class Failed(jobId: String, - errorMessage: Option[String], - eventList: Seq[ExecutionEvent], - ) extends UnsuccessfulRunStatus { + final case class Failed(jobId: String, errorMessage: Option[String], eventList: Seq[ExecutionEvent]) + extends UnsuccessfulRunStatus { override def toString = "Failed" } - final case class Cancelled(jobId: String, - errorMessage: Option[String], - eventList: Seq[ExecutionEvent], - ) extends UnsuccessfulRunStatus { + final case class Cancelled(jobId: String, errorMessage: Option[String], eventList: Seq[ExecutionEvent]) + extends UnsuccessfulRunStatus { override def toString = 
"Cancelled" } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendCacheHitCopyingActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendCacheHitCopyingActor.scala index 86bfd46e754..568f4d5a0a3 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendCacheHitCopyingActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendCacheHitCopyingActor.scala @@ -33,7 +33,7 @@ package cromwell.backend.impl.aws.callcaching import com.google.cloud.storage.contrib.nio.CloudStorageOptions import common.util.TryUtil import cromwell.backend.BackendInitializationData -import cromwell.backend.impl.aws.{AWSBatchStorageSystems, AwsBatchBackendInitializationData} +import cromwell.backend.impl.aws.{AwsBatchBackendInitializationData, AWSBatchStorageSystems} import cromwell.backend.io.JobPaths import cromwell.backend.standard.callcaching.{StandardCacheHitCopyingActor, StandardCacheHitCopyingActorParams} import cromwell.core.CallOutputs @@ -46,20 +46,22 @@ import wom.values.WomFile import scala.language.postfixOps import scala.util.Try -class AwsBatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) extends StandardCacheHitCopyingActor(standardParams) { +class AwsBatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) + extends StandardCacheHitCopyingActor(standardParams) { private val batchAttributes = BackendInitializationData .as[AwsBatchBackendInitializationData](standardParams.backendInitializationDataOption) - .configuration.batchAttributes + .configuration + .batchAttributes override protected val commandBuilder: IoCommandBuilder = batchAttributes.fileSystem match { - case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder - case _ => DefaultIoCommandBuilder + case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder + case _ => DefaultIoCommandBuilder } private val cachingStrategy = batchAttributes.duplicationStrategy override def processSimpletons(womValueSimpletons: Seq[WomValueSimpleton], - sourceCallRootPath: Path, - ): Try[(CallOutputs, Set[IoCommand[_]])] = { + sourceCallRootPath: Path + ): Try[(CallOutputs, Set[IoCommand[_]])] = (batchAttributes.fileSystem, cachingStrategy) match { case (AWSBatchStorageSystems.s3, UseOriginalCachedOutputs) => val touchCommands: Seq[Try[IoTouchCommand]] = womValueSimpletons collect { @@ -72,10 +74,10 @@ class AwsBatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyin } case (_, _) => super.processSimpletons(womValueSimpletons, sourceCallRootPath) } - } - override def processDetritus(sourceJobDetritusFiles: Map[String, String] - ): Try[(Map[String, Path], Set[IoCommand[_]])] = + override def processDetritus( + sourceJobDetritusFiles: Map[String, String] + ): Try[(Map[String, Path], Set[IoCommand[_]])] = (batchAttributes.fileSystem, cachingStrategy) match { case (AWSBatchStorageSystems.s3, UseOriginalCachedOutputs) => // apply getPath on each detritus string file @@ -92,33 +94,40 @@ class AwsBatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyin } } case (_, _) => super.processDetritus(sourceJobDetritusFiles) - } + } override protected def additionalIoCommands(sourceCallRootPath: Path, originalSimpletons: Seq[WomValueSimpleton], newOutputs: CallOutputs, - originalDetritus: Map[String, String], - newDetritus: Map[String, Path]): Try[List[Set[IoCommand[_]]]] = Try { - 
(batchAttributes.fileSystem, cachingStrategy) match { + originalDetritus: Map[String, String], + newDetritus: Map[String, Path] + ): Try[List[Set[IoCommand[_]]]] = Try { + (batchAttributes.fileSystem, cachingStrategy) match { case (AWSBatchStorageSystems.s3, UseOriginalCachedOutputs) => - val content = - s""" - |This directory does not contain any output files because this job matched an identical job that was previously run, thus it was a cache-hit. - |Cromwell is configured to not copy outputs during call caching. To change this, edit the filesystems.aws.caching.duplication-strategy field in your backend configuration. - |The original outputs can be found at this location: ${sourceCallRootPath.pathAsString} + val content = + s""" + |This directory does not contain any output files because this job matched an identical job that was previously run, thus it was a cache-hit. + |Cromwell is configured to not copy outputs during call caching. To change this, edit the filesystems.aws.caching.duplication-strategy field in your backend configuration. + |The original outputs can be found at this location: ${sourceCallRootPath.pathAsString} """.stripMargin // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap - List(Set( - S3BatchCommandBuilder.writeCommand( - path = jobPaths.forCallCacheCopyAttempts.callExecutionRoot / "call_caching_placeholder.txt", - content = content, - options = Seq(CloudStorageOptions.withMimeType("text/plain")), - ).get - )) - case (AWSBatchStorageSystems.s3, CopyCachedOutputs) => List.empty - case (_, _) => - super.additionalIoCommands(sourceCallRootPath,originalSimpletons, newOutputs, originalDetritus,newDetritus).get + List( + Set( + S3BatchCommandBuilder + .writeCommand( + path = jobPaths.forCallCacheCopyAttempts.callExecutionRoot / "call_caching_placeholder.txt", + content = content, + options = Seq(CloudStorageOptions.withMimeType("text/plain")) + ) + .get + ) + ) + case (AWSBatchStorageSystems.s3, CopyCachedOutputs) => List.empty + case (_, _) => + super + .additionalIoCommands(sourceCallRootPath, originalSimpletons, newOutputs, originalDetritus, newDetritus) + .get } } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendFileHashingActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendFileHashingActor.scala index f731fb1b44a..0a3e781bdb3 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendFileHashingActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchBackendFileHashingActor.scala @@ -37,11 +37,15 @@ import cromwell.backend.impl.aws.AwsBatchBackendInitializationData import cromwell.backend.impl.aws.AWSBatchStorageSystems import cromwell.core.io.DefaultIoCommandBuilder -class AwsBatchBackendFileHashingActor(standardParams: StandardFileHashingActorParams) extends StandardFileHashingActor(standardParams) { +class AwsBatchBackendFileHashingActor(standardParams: StandardFileHashingActorParams) + extends StandardFileHashingActor(standardParams) { - override val ioCommandBuilder = BackendInitializationData.as[AwsBatchBackendInitializationData](standardParams.backendInitializationDataOption) - .configuration.batchAttributes.fileSystem match { - case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder - case _ => DefaultIoCommandBuilder + override val ioCommandBuilder = BackendInitializationData + 
.as[AwsBatchBackendInitializationData](standardParams.backendInitializationDataOption) + .configuration + .batchAttributes + .fileSystem match { + case AWSBatchStorageSystems.s3 => S3BatchCommandBuilder + case _ => DefaultIoCommandBuilder } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchCacheHitDuplicationStrategy.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchCacheHitDuplicationStrategy.scala index 24b92d5b62f..9efec4b9330 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchCacheHitDuplicationStrategy.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/callcaching/AwsBatchCacheHitDuplicationStrategy.scala @@ -33,4 +33,4 @@ package cromwell.backend.impl.aws.callcaching sealed trait AwsBatchCacheHitDuplicationStrategy case object CopyCachedOutputs extends AwsBatchCacheHitDuplicationStrategy -case object UseOriginalCachedOutputs extends AwsBatchCacheHitDuplicationStrategy \ No newline at end of file +case object UseOriginalCachedOutputs extends AwsBatchCacheHitDuplicationStrategy diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/errors/AwsBatchKnownJobFailure.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/errors/AwsBatchKnownJobFailure.scala index 654fc81be49..0ea8c7a7fd6 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/errors/AwsBatchKnownJobFailure.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/errors/AwsBatchKnownJobFailure.scala @@ -6,11 +6,11 @@ import cromwell.core.path.Path sealed trait AwsBatchKnownJobFailure extends KnownJobFailureException case class FailedToDelocalizeFailure(message: String, jobTag: String, stderrPath: Option[Path]) - extends AwsBatchKnownJobFailure { + extends AwsBatchKnownJobFailure { lazy val stderrMessage = stderrPath map { p => s"3) Look into the stderr (${p.pathAsString}) file for evidence that some of the output files the command is expected to create were not created." } getOrElse "" - + lazy val missingFilesMessage = if (message.contains("No URLs matched")) { s"""It appears that some of the expected output files for task $jobTag did not exist when the command exited. |A few things to try @@ -19,6 +19,6 @@ case class FailedToDelocalizeFailure(message: String, jobTag: String, stderrPath |$stderrMessage """.stripMargin } else "" - + override def getMessage = missingFilesMessage + message } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/io/AwsBatchVolume.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/io/AwsBatchVolume.scala index 769c2c2f5d8..9f9003e2dcd 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/io/AwsBatchVolume.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/io/AwsBatchVolume.scala @@ -42,7 +42,6 @@ import wom.values._ import scala.util.Try import scala.util.matching.Regex - /* * This will handle volumes that are defined in the configuration. 
It will * *not* attach new block storage, as that is handled a priori as part of the @@ -66,7 +65,7 @@ object AwsBatchVolume { case DiskPatterns.WorkingDiskPattern(_, _) => Valid(AwsBatchWorkingDisk()) case DiskPatterns.MountedDiskPattern(mountPoint, _, fsType) => - Valid(AwsBatchEmptyMountedDisk(DefaultPathBuilder.get(mountPoint),fsType)) + Valid(AwsBatchEmptyMountedDisk(DefaultPathBuilder.get(mountPoint), fsType)) case _ => s"Disk strings should be of the format 'local-disk' or '/mount/point' but got: '$s'".invalidNel } @@ -86,29 +85,24 @@ trait AwsBatchVolume { def name: String def mountPoint: Path def fsType: String - def getHostPath(id: Option[String]) : String = { + def getHostPath(id: Option[String]): String = id match { case Some(id) => mountPoint.toAbsolutePath.pathAsString + "/" + id - case None => mountPoint.toAbsolutePath.pathAsString + case None => mountPoint.toAbsolutePath.pathAsString } - } - def toVolume(id: Option[String]=None): Volume = { - Volume - .builder + def toVolume(id: Option[String] = None): Volume = + Volume.builder .name(name) .host(Host.builder.sourcePath(getHostPath(id)).build) .build - } - def toMountPoint: MountPoint = { - MountPoint - .builder + def toMountPoint: MountPoint = + MountPoint.builder .containerPath(mountPoint.toAbsolutePath.pathAsString) .sourceVolume(name) .build - } } -case class AwsBatchEmptyMountedDisk(mountPoint: Path, ftype:String="ebs") extends AwsBatchVolume { +case class AwsBatchEmptyMountedDisk(mountPoint: Path, ftype: String = "ebs") extends AwsBatchVolume { val name = s"d-${mountPoint.pathAsString.md5Sum}" val fsType = ftype.toLowerCase override def toString: String = s"$name $mountPoint" @@ -117,7 +111,7 @@ case class AwsBatchEmptyMountedDisk(mountPoint: Path, ftype:String="ebs") extend object AwsBatchWorkingDisk { val MountPoint: Path = DefaultPathBuilder.get("/cromwell_root") val Name = "local-disk" - val fsType= "ebs" + val fsType = "ebs" val Default = AwsBatchWorkingDisk() } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala index 9b171741471..18dbb9d17b3 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala @@ -14,11 +14,10 @@ package object aws { type Aws[F[_], A] = ReaderT[F, AwsBatchAttributes, A] - def sanitize(name: String): String ={ + def sanitize(name: String): String = // Up to 128 letters (uppercase and lowercase), numbers, hyphens, and underscores are allowed. 
// We'll replace all invalid characters with an underscore name.replaceAll("[^A-Za-z0-9_\\-]", "_").slice(0, 128) - } def buildKVPair(key: String, value: String): KeyValuePair = KeyValuePair.builder.name(key).value(value).build @@ -67,9 +66,11 @@ package object aws { * @tparam ClientT the type of the client that you will get back * @return a configured client for the AWS service */ - def configureClient[BuilderT <: AwsClientBuilder[BuilderT, ClientT], ClientT](builder: AwsClientBuilder[BuilderT, ClientT], - awsAuthMode: Option[AwsAuthMode], - configRegion: Option[Region]): ClientT = { + def configureClient[BuilderT <: AwsClientBuilder[BuilderT, ClientT], ClientT]( + builder: AwsClientBuilder[BuilderT, ClientT], + awsAuthMode: Option[AwsAuthMode], + configRegion: Option[Region] + ): ClientT = { awsAuthMode.foreach { awsAuthMode => builder.credentialsProvider(awsAuthMode.provider()) } diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala index ba2e5580b3f..089ac4257cf 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala @@ -43,7 +43,13 @@ import cromwell.backend.impl.aws.AwsBatchAsyncBackendJobExecutionActor.AwsBatchP import cromwell.backend.impl.aws.RunStatus.UnsuccessfulRunStatus import cromwell.backend.impl.aws.io.AwsBatchWorkingDisk import cromwell.backend.io.JobPathsSpecHelper._ -import cromwell.backend.standard.{DefaultStandardAsyncExecutionActorParams, StandardAsyncExecutionActorParams, StandardAsyncJob, StandardExpressionFunctions, StandardExpressionFunctionsParams} +import cromwell.backend.standard.{ + DefaultStandardAsyncExecutionActorParams, + StandardAsyncExecutionActorParams, + StandardAsyncJob, + StandardExpressionFunctions, + StandardExpressionFunctionsParams +} import cromwell.cloudsupport.aws.s3.S3Storage import cromwell.core.Tags.AwsTest import cromwell.core._ @@ -80,8 +86,14 @@ import scala.concurrent.{Await, ExecutionContext, Future, Promise} import scala.language.postfixOps import scala.util.Success -class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite - with AnyFlatSpecLike with Matchers with ImplicitSender with BackendSpec with BeforeAndAfter with DefaultJsonProtocol { +class AwsBatchAsyncBackendJobExecutionActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with ImplicitSender + with BackendSpec + with BeforeAndAfter + with DefaultJsonProtocol { lazy val mockPathBuilderS3: S3PathBuilder = S3PathBuilder.fromProvider( AnonymousCredentialsProvider.create, S3Storage.DefaultConfiguration, @@ -98,23 +110,23 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val YoSup: String = s""" - |task sup { - | String addressee - | command { - | echo "yo sup $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | runtime { - | docker: "alpine:latest" - | queueArn: "arn:aws:batch:us-east-1:111222333444:job-queue/job-queue" - | } - |} - | - |workflow wf_sup { - | call sup - |} + |task sup { + | String addressee + | command { + | echo "yo sup $${addressee}!" 
+ | } + | output { + | String salutation = read_string(stdout()) + | } + | runtime { + | docker: "alpine:latest" + | queueArn: "arn:aws:batch:us-east-1:111222333444:job-queue/job-queue" + | } + |} + | + |workflow wf_sup { + | call sup + |} """.stripMargin private val Inputs: Map[FullyQualifiedName, WomValue] = Map("wf_sup.sup.addressee" -> WomString("dog")) @@ -128,11 +140,11 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite lazy val TestableStandardExpressionFunctionsParamsS3: StandardExpressionFunctionsParams = new StandardExpressionFunctionsParams { - override lazy val pathBuilders: List[PathBuilder] = List(mockPathBuilderS3) - override lazy val callContext: CallContext = TestableCallContextS3 - override val ioActorProxy: ActorRef = simpleIoActor - override val executionContext: ExecutionContext = system.dispatcher - } + override lazy val pathBuilders: List[PathBuilder] = List(mockPathBuilderS3) + override lazy val callContext: CallContext = TestableCallContextS3 + override val ioActorProxy: ActorRef = simpleIoActor + override val executionContext: ExecutionContext = system.dispatcher + } lazy val TestableStandardExpressionFunctionsParamsLocal: StandardExpressionFunctionsParams = new StandardExpressionFunctionsParams { override lazy val pathBuilders: List[PathBuilder] = List(mockPathBuilderLocal) @@ -140,12 +152,10 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite override val ioActorProxy: ActorRef = simpleIoActor override val executionContext: ExecutionContext = system.dispatcher } - lazy val TestableAwsBatchExpressionFunctions: AwsBatchExpressionFunctions = { + lazy val TestableAwsBatchExpressionFunctions: AwsBatchExpressionFunctions = new AwsBatchExpressionFunctions(TestableStandardExpressionFunctionsParamsS3) - } - lazy val TestableAwsBatchExpressionFunctionsLocal: AwsBatchExpressionFunctions = { + lazy val TestableAwsBatchExpressionFunctionsLocal: AwsBatchExpressionFunctions = new AwsBatchExpressionFunctions(TestableStandardExpressionFunctionsParamsLocal) - } private def buildInitializationData(jobDescriptor: BackendJobDescriptor, configuration: AwsBatchConfiguration) = { val workflowPaths = AwsBatchWorkflowPaths( jobDescriptor.workflowDescriptor, @@ -156,17 +166,18 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite AwsBatchBackendInitializationData(workflowPaths, runtimeAttributesBuilder, configuration, null) } - class TestableAwsBatchJobExecutionActor(params: StandardAsyncExecutionActorParams, functions: StandardExpressionFunctions) - extends AwsBatchAsyncBackendJobExecutionActor(params) { + class TestableAwsBatchJobExecutionActor(params: StandardAsyncExecutionActorParams, + functions: StandardExpressionFunctions + ) extends AwsBatchAsyncBackendJobExecutionActor(params) { def this(jobDescriptor: BackendJobDescriptor, promise: Promise[BackendJobExecutionResponse], configuration: AwsBatchConfiguration, - //functions: AwsBatchExpressionFunctions = TestableAwsBatchExpressionFunctions, + // functions: AwsBatchExpressionFunctions = TestableAwsBatchExpressionFunctions, functions: StandardExpressionFunctions = TestableAwsBatchExpressionFunctions, singletonActor: ActorRef = emptyActor, - ioActor: ActorRef = mockIoActor) = { - + ioActor: ActorRef = mockIoActor + ) = this( DefaultStandardAsyncExecutionActorParams( jobIdKey = AwsBatchAsyncBackendJobExecutionActor.AwsBatchOperationIdKey, @@ -181,7 +192,6 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite ), functions ) - } override lazy val jobLogger: 
JobLogger = new JobLogger( "TestLogger", @@ -213,7 +223,9 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite private def buildJobDescriptor(): BackendJobDescriptor = { val attempt = 1 val wdlNamespace = WdlNamespaceWithWorkflow.load(YoSup, Seq.empty[Draft2ImportResolver]).get - val womDefinition = wdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + val womDefinition = wdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) wdlNamespace.toWomExecutable(Option(Inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { case Right(womExecutable) => @@ -238,7 +250,14 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val key = BackendJobDescriptorKey(job, None, attempt) val runtimeAttributes = makeRuntimeAttributes(job) val prefetchedKvEntries = Map[String, KvResponse]() - BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(Inputs), NoDocker, None, prefetchedKvEntries) + BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(Inputs), + NoDocker, + None, + prefetchedKvEntries + ) case Left(badtimes) => fail(badtimes.toList.mkString(", ")) } } @@ -282,15 +301,19 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite // Await.result(promise.future, Timeout) // } - def buildTestActorRef(functions:StandardExpressionFunctions): TestActorRef[TestableAwsBatchJobExecutionActor] = { + def buildTestActorRef(functions: StandardExpressionFunctions): TestActorRef[TestableAwsBatchJobExecutionActor] = { // For this test we say that all previous attempts were preempted: val jobDescriptor = buildJobDescriptor() - val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), - configuration, - //TestableAwsBatchExpressionFunctions, - functions, - emptyActor, - failIoActor)) + val props = Props( + new TestableAwsBatchJobExecutionActor(jobDescriptor, + Promise(), + configuration, + // TestableAwsBatchExpressionFunctions, + functions, + emptyActor, + failIoActor + ) + ) TestActorRef(props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") } @@ -427,7 +450,7 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite checkFailedResult(None).isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true // TODO: Determine the actual error message that comes back and special-case it in handlExecutionFailure // in AwsBatchBackendJobExecutionActor - //checkFailedResult(Option("Operation canceled at")) shouldBe AbortedExecutionHandle + // checkFailedResult(Option("Operation canceled at")) shouldBe AbortedExecutionHandle actorRef.stop() } @@ -446,12 +469,15 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite fileKey -> fileVal ) - val wdlNamespace = WdlNamespaceWithWorkflow.load(YoSup, - Seq.empty[Draft2ImportResolver]).get - val womWorkflow = wdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + val wdlNamespace = WdlNamespaceWithWorkflow.load(YoSup, Seq.empty[Draft2ImportResolver]).get + val womWorkflow = wdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) wdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = 
true) match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), @@ -464,19 +490,34 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite None ) - val call: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t }).get + val call: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => + t + }.get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) - val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration, TestableAwsBatchExpressionFunctions)) + val props = Props( + new TestableAwsBatchJobExecutionActor(jobDescriptor, + Promise(), + configuration, + TestableAwsBatchExpressionFunctions + ) + ) val testActorRef = TestActorRef[TestableAwsBatchJobExecutionActor]( - props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") - + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) - def pathToLocal(womValue: WomValue): WomValue = { + def pathToLocal(womValue: WomValue): WomValue = WomFileMapper.mapWomFiles(testActorRef.underlyingActor.mapCommandLineWomFile)(womValue).get - } val mappedInputs = jobDescriptor.localInputs safeMapValues pathToLocal @@ -498,37 +539,59 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite } } - private val dockerAndDiskWdlNamespace = WdlNamespaceWithWorkflow.load(SampleWdl.CurrentDirectory.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get + private val dockerAndDiskWdlNamespace = WdlNamespaceWithWorkflow + .load(SampleWdl.CurrentDirectory.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get it should "generate correct AwsBatchFileInputs from a WdlMap" taggedAs AwsTest ignore { val inputs: Map[String, WomValue] = Map( - "stringToFileMap" -> WomMap(WomMapType(WomStringType, WomSingleFileType), Map( - WomString("stringTofile1") -> WomSingleFile("s3://path/to/stringTofile1"), - WomString("stringTofile2") -> WomSingleFile("s3://path/to/stringTofile2") - )), - "fileToStringMap" -> WomMap(WomMapType(WomSingleFileType, WomStringType), Map( - WomSingleFile("s3://path/to/fileToString1") -> WomString("fileToString1"), - WomSingleFile("s3://path/to/fileToString2") -> WomString("fileToString2") - )), - "fileToFileMap" -> WomMap(WomMapType(WomSingleFileType, WomSingleFileType), Map( - WomSingleFile("s3://path/to/fileToFile1Key") -> WomSingleFile("s3://path/to/fileToFile1Value"), - WomSingleFile("s3://path/to/fileToFile2Key") -> WomSingleFile("s3://path/to/fileToFile2Value") - )), - "stringToString" -> WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("stringToString1") -> WomString("path/to/stringToString1"), - WomString("stringToString2") -> WomString("path/to/stringToString2") - )) + 
"stringToFileMap" -> WomMap( + WomMapType(WomStringType, WomSingleFileType), + Map( + WomString("stringTofile1") -> WomSingleFile("s3://path/to/stringTofile1"), + WomString("stringTofile2") -> WomSingleFile("s3://path/to/stringTofile2") + ) + ), + "fileToStringMap" -> WomMap( + WomMapType(WomSingleFileType, WomStringType), + Map( + WomSingleFile("s3://path/to/fileToString1") -> WomString("fileToString1"), + WomSingleFile("s3://path/to/fileToString2") -> WomString("fileToString2") + ) + ), + "fileToFileMap" -> WomMap( + WomMapType(WomSingleFileType, WomSingleFileType), + Map( + WomSingleFile("s3://path/to/fileToFile1Key") -> WomSingleFile("s3://path/to/fileToFile1Value"), + WomSingleFile("s3://path/to/fileToFile2Key") -> WomSingleFile("s3://path/to/fileToFile2Value") + ) + ), + "stringToString" -> WomMap( + WomMapType(WomStringType, WomStringType), + Map( + WomString("stringToString1") -> WomString("path/to/stringToString1"), + WomString("stringToString2") -> WomString("path/to/stringToString2") + ) + ) ) - val workflowInputs = inputs map { - case (k, v) => s"wf_whereami.whereami$k" -> v + val workflowInputs = inputs map { case (k, v) => + s"wf_whereami.whereami$k" -> v } - val womWorkflow = dockerAndDiskWdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) - dockerAndDiskWdlNamespace.toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { + val womWorkflow = dockerAndDiskWdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + dockerAndDiskWdlNamespace.toWomExecutable(Option(workflowInputs.toJson.compactPrint), + NoIoFunctionSet, + strictValidation = true + ) match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -543,43 +606,103 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration)) val testActorRef = TestActorRef[TestableAwsBatchJobExecutionActor]( - props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val batchInputs = testActorRef.underlyingActor.generateAwsBatchInputs(jobDescriptor) batchInputs should have size 8 - batchInputs should contain(AwsBatchFileInput( - "stringToFileMap-0", "s3://path/to/stringTofile1", DefaultPathBuilder.get("path/to/stringTofile1"), workingDisk)) - batchInputs should contain(AwsBatchFileInput( - "stringToFileMap-1", "s3://path/to/stringTofile2", DefaultPathBuilder.get("path/to/stringTofile2"), workingDisk)) - 
batchInputs should contain(AwsBatchFileInput( - "fileToStringMap-0", "s3://path/to/fileToString1", DefaultPathBuilder.get("path/to/fileToString1"), workingDisk)) - batchInputs should contain(AwsBatchFileInput( - "fileToStringMap-1", "s3://path/to/fileToString2", DefaultPathBuilder.get("path/to/fileToString2"), workingDisk)) - batchInputs should contain(AwsBatchFileInput( - "fileToFileMap-0", "s3://path/to/fileToFile1Key", DefaultPathBuilder.get("path/to/fileToFile1Key"), workingDisk)) - batchInputs should contain(AwsBatchFileInput( - "fileToFileMap-1", "s3://path/to/fileToFile1Value", DefaultPathBuilder.get("path/to/fileToFile1Value"), workingDisk)) - batchInputs should contain(AwsBatchFileInput( - "fileToFileMap-2", "s3://path/to/fileToFile2Key", DefaultPathBuilder.get("path/to/fileToFile2Key"), workingDisk)) - batchInputs should contain(AwsBatchFileInput( - "fileToFileMap-3", "s3://path/to/fileToFile2Value", DefaultPathBuilder.get("path/to/fileToFile2Value"), workingDisk)) + batchInputs should contain( + AwsBatchFileInput("stringToFileMap-0", + "s3://path/to/stringTofile1", + DefaultPathBuilder.get("path/to/stringTofile1"), + workingDisk + ) + ) + batchInputs should contain( + AwsBatchFileInput("stringToFileMap-1", + "s3://path/to/stringTofile2", + DefaultPathBuilder.get("path/to/stringTofile2"), + workingDisk + ) + ) + batchInputs should contain( + AwsBatchFileInput("fileToStringMap-0", + "s3://path/to/fileToString1", + DefaultPathBuilder.get("path/to/fileToString1"), + workingDisk + ) + ) + batchInputs should contain( + AwsBatchFileInput("fileToStringMap-1", + "s3://path/to/fileToString2", + DefaultPathBuilder.get("path/to/fileToString2"), + workingDisk + ) + ) + batchInputs should contain( + AwsBatchFileInput("fileToFileMap-0", + "s3://path/to/fileToFile1Key", + DefaultPathBuilder.get("path/to/fileToFile1Key"), + workingDisk + ) + ) + batchInputs should contain( + AwsBatchFileInput("fileToFileMap-1", + "s3://path/to/fileToFile1Value", + DefaultPathBuilder.get("path/to/fileToFile1Value"), + workingDisk + ) + ) + batchInputs should contain( + AwsBatchFileInput("fileToFileMap-2", + "s3://path/to/fileToFile2Key", + DefaultPathBuilder.get("path/to/fileToFile2Key"), + workingDisk + ) + ) + batchInputs should contain( + AwsBatchFileInput("fileToFileMap-3", + "s3://path/to/fileToFile2Value", + DefaultPathBuilder.get("path/to/fileToFile2Value"), + workingDisk + ) + ) case Left(badness) => fail(badness.toList.mkString(", ")) } } - def makeAwsBatchActorRef(sampleWdl: SampleWdl, callName: LocallyQualifiedName, inputs: Map[FullyQualifiedName, WomValue], - functions: StandardExpressionFunctions = TestableAwsBatchExpressionFunctions): - TestActorRef[TestableAwsBatchJobExecutionActor] = { - val womWorkflow = WdlNamespaceWithWorkflow.load(sampleWdl.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) - dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { + def makeAwsBatchActorRef(sampleWdl: SampleWdl, + callName: LocallyQualifiedName, + inputs: Map[FullyQualifiedName, WomValue], + functions: StandardExpressionFunctions = TestableAwsBatchExpressionFunctions + ): TestActorRef[TestableAwsBatchJobExecutionActor] = { + val womWorkflow = WdlNamespaceWithWorkflow + .load(sampleWdl.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, 
Seq.empty[Draft2ImportResolver]) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), + NoIoFunctionSet, + strictValidation = true + ) match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -594,10 +717,20 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == callName).get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration, functions)) - TestActorRef[TestableAwsBatchJobExecutionActor](props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + TestActorRef[TestableAwsBatchJobExecutionActor]( + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) case Left(badness) => fail(badness.toList.mkString(", ")) } } @@ -611,11 +744,18 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val workflowId = backend.workflowDescriptor.id val batchInputs = backend.generateAwsBatchInputs(jobDescriptor) batchInputs should have size 1 - batchInputs should contain(AwsBatchFileInput("in-0", "s3://blah/b/c.txt", DefaultPathBuilder.get("blah/b/c.txt"), workingDisk)) + batchInputs should contain( + AwsBatchFileInput("in-0", "s3://blah/b/c.txt", DefaultPathBuilder.get("blah/b/c.txt"), workingDisk) + ) val outputs = backend.generateAwsBatchOutputs(jobDescriptor) outputs should have size 1 - outputs should contain(AwsBatchFileOutput("out", - s"s3://my-cromwell-workflows-bucket/file_passing/$workflowId/call-a/out", DefaultPathBuilder.get("out"), workingDisk)) + outputs should contain( + AwsBatchFileOutput("out", + s"s3://my-cromwell-workflows-bucket/file_passing/$workflowId/call-a/out", + DefaultPathBuilder.get("out"), + workingDisk + ) + ) } it should "generate correct AwsBatchInputs when a command line contains a write_lines call in it" taggedAs AwsTest ignore { @@ -623,23 +763,28 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite "strs" -> WomArray(WomArrayType(WomStringType), Seq("A", "B", "C").map(WomString)) ) - class TestAwsBatchExpressionFunctions extends AwsBatchExpressionFunctions(TestableStandardExpressionFunctionsParamsS3) { - override def writeFile(path: String, content: String): Future[WomSingleFile] = { + class TestAwsBatchExpressionFunctions + extends AwsBatchExpressionFunctions(TestableStandardExpressionFunctionsParamsS3) { + override def writeFile(path: String, content: String): Future[WomSingleFile] = Future.fromTry(Success(WomSingleFile(s"s3://some/path/file.txt"))) - } } - class TestAwsBatchExpressionFunctionsLocal extends 
AwsBatchExpressionFunctionsForFS(TestableStandardExpressionFunctionsParamsLocal) { - override def writeFile(path: String, content: String): Future[WomSingleFile] = { + class TestAwsBatchExpressionFunctionsLocal + extends AwsBatchExpressionFunctionsForFS(TestableStandardExpressionFunctionsParamsLocal) { + override def writeFile(path: String, content: String): Future[WomSingleFile] = Future.fromTry(Success(WomSingleFile(s"/some/path/file.txt"))) - } } val functions = new TestAwsBatchExpressionFunctions val backend = makeAwsBatchActorRef(SampleWdl.ArrayIO, "serialize", inputs, functions).underlyingActor val jobDescriptor = backend.jobDescriptor val batchInputs = backend.generateAwsBatchInputs(jobDescriptor) batchInputs should have size 1 - batchInputs should contain(AwsBatchFileInput( - "c6fd5c91-0", "s3://some/path/file.txt", DefaultPathBuilder.get("some/path/file.txt"), workingDisk)) + batchInputs should contain( + AwsBatchFileInput("c6fd5c91-0", + "s3://some/path/file.txt", + DefaultPathBuilder.get("some/path/file.txt"), + workingDisk + ) + ) val outputs = backend.generateAwsBatchOutputs(jobDescriptor) outputs should have size 0 @@ -648,8 +793,9 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val jobDescriptorLocal = backendLocal.jobDescriptor val batchInputsLocal = backend.generateAwsBatchInputs(jobDescriptorLocal) batchInputsLocal should have size 1 - batchInputsLocal should contain(AwsBatchFileInput( - "c6fd5c91-0", "/some/path/file.txt", DefaultPathBuilder.get("some/path/file.txt"), workingDisk)) + batchInputsLocal should contain( + AwsBatchFileInput("c6fd5c91-0", "/some/path/file.txt", DefaultPathBuilder.get("some/path/file.txt"), workingDisk) + ) val outputsLocal = backendLocal.generateAwsBatchOutputs(jobDescriptorLocal) outputsLocal should have size 0 } @@ -657,13 +803,22 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "generate correct AwsBatchFileInputs from a WdlArray" taggedAs AwsTest ignore { val inputs: Map[String, WomValue] = Map( "fileArray" -> - WomArray(WomArrayType(WomSingleFileType), Seq(WomSingleFile("s3://path/to/file1"), WomSingleFile("s3://path/to/file2"))) + WomArray(WomArrayType(WomSingleFileType), + Seq(WomSingleFile("s3://path/to/file1"), WomSingleFile("s3://path/to/file2")) + ) ) - val womWorkflow = dockerAndDiskWdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) - dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { + val womWorkflow = dockerAndDiskWdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), + NoIoFunctionSet, + strictValidation = true + ) match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -678,16 +833,29 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val 
jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration)) val testActorRef = TestActorRef[TestableAwsBatchJobExecutionActor]( - props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val batchInputs = testActorRef.underlyingActor.generateAwsBatchInputs(jobDescriptor) batchInputs should have size 2 - batchInputs should contain(AwsBatchFileInput("fileArray-0", "s3://path/to/file1", DefaultPathBuilder.get("path/to/file1"), workingDisk)) - batchInputs should contain(AwsBatchFileInput("fileArray-1", "s3://path/to/file2", DefaultPathBuilder.get("path/to/file2"), workingDisk)) + batchInputs should contain( + AwsBatchFileInput("fileArray-0", "s3://path/to/file1", DefaultPathBuilder.get("path/to/file1"), workingDisk) + ) + batchInputs should contain( + AwsBatchFileInput("fileArray-1", "s3://path/to/file2", DefaultPathBuilder.get("path/to/file2"), workingDisk) + ) case Left(badness) => fail(badness.toList.mkString(", ")) } } @@ -698,10 +866,17 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite "file2" -> WomSingleFile("s3://path/to/file2") ) - val womWorkflow = dockerAndDiskWdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) - dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { + val womWorkflow = dockerAndDiskWdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), + NoIoFunctionSet, + strictValidation = true + ) match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -716,16 +891,29 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration)) val testActorRef = TestActorRef[TestableAwsBatchJobExecutionActor]( - props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val batchInputs = testActorRef.underlyingActor.generateAwsBatchInputs(jobDescriptor) batchInputs should have size 2 - 
batchInputs should contain(AwsBatchFileInput("file1-0", "s3://path/to/file1", DefaultPathBuilder.get("path/to/file1"), workingDisk)) - batchInputs should contain(AwsBatchFileInput("file2-0", "s3://path/to/file2", DefaultPathBuilder.get("path/to/file2"), workingDisk)) + batchInputs should contain( + AwsBatchFileInput("file1-0", "s3://path/to/file1", DefaultPathBuilder.get("path/to/file1"), workingDisk) + ) + batchInputs should contain( + AwsBatchFileInput("file2-0", "s3://path/to/file2", DefaultPathBuilder.get("path/to/file2"), workingDisk) + ) case Left(badness) => fail(badness.toList.mkString(", ")) } @@ -733,30 +921,54 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "convert local Paths back to corresponding foreign paths in AwsBatchOutputs" taggedAs AwsTest in { val outputs = Set( - AwsBatchFileOutput("/cromwell_root/path/to/file1", "s3://path/to/file1", - DefaultPathBuilder.get("/cromwell_root/path/to/file1"), workingDisk), - AwsBatchFileOutput("/cromwell_root/path/to/file2", "s3://path/to/file2", - DefaultPathBuilder.get("/cromwell_root/path/to/file2"), workingDisk), - AwsBatchFileOutput("/cromwell_root/path/to/file3", "s3://path/to/file3", - DefaultPathBuilder.get("/cromwell_root/path/to/file3"), workingDisk), - AwsBatchFileOutput("/cromwell_root/path/to/file4", "s3://path/to/file4", - DefaultPathBuilder.get("/cromwell_root/path/to/file4"), workingDisk), - AwsBatchFileOutput("/cromwell_root/path/to/file5", "s3://path/to/file5", - DefaultPathBuilder.get("/cromwell_root/path/to/file5"), workingDisk) + AwsBatchFileOutput("/cromwell_root/path/to/file1", + "s3://path/to/file1", + DefaultPathBuilder.get("/cromwell_root/path/to/file1"), + workingDisk + ), + AwsBatchFileOutput("/cromwell_root/path/to/file2", + "s3://path/to/file2", + DefaultPathBuilder.get("/cromwell_root/path/to/file2"), + workingDisk + ), + AwsBatchFileOutput("/cromwell_root/path/to/file3", + "s3://path/to/file3", + DefaultPathBuilder.get("/cromwell_root/path/to/file3"), + workingDisk + ), + AwsBatchFileOutput("/cromwell_root/path/to/file4", + "s3://path/to/file4", + DefaultPathBuilder.get("/cromwell_root/path/to/file4"), + workingDisk + ), + AwsBatchFileOutput("/cromwell_root/path/to/file5", + "s3://path/to/file5", + DefaultPathBuilder.get("/cromwell_root/path/to/file5"), + workingDisk + ) ) val outputValues = Seq( WomSingleFile("/cromwell_root/path/to/file1"), - WomArray(WomArrayType(WomSingleFileType), Seq( - WomSingleFile("/cromwell_root/path/to/file2"), WomSingleFile("/cromwell_root/path/to/file3"))), - WomMap(WomMapType(WomSingleFileType, WomSingleFileType), Map( - WomSingleFile("/cromwell_root/path/to/file4") -> WomSingleFile("/cromwell_root/path/to/file5") - )) + WomArray(WomArrayType(WomSingleFileType), + Seq(WomSingleFile("/cromwell_root/path/to/file2"), WomSingleFile("/cromwell_root/path/to/file3")) + ), + WomMap(WomMapType(WomSingleFileType, WomSingleFileType), + Map( + WomSingleFile("/cromwell_root/path/to/file4") -> WomSingleFile("/cromwell_root/path/to/file5") + ) + ) ) val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), - WdlNamespaceWithWorkflow.load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + 
.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, NoOptions, Labels.empty, @@ -768,33 +980,44 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration)) val testActorRef = TestActorRef[TestableAwsBatchJobExecutionActor]( - props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) - def wdlValueToS3Path(outputs: Set[AwsBatchFileOutput])(womValue: WomValue): WomValue = { + def wdlValueToS3Path(outputs: Set[AwsBatchFileOutput])(womValue: WomValue): WomValue = WomFileMapper.mapWomFiles(testActorRef.underlyingActor.womFileToPath(outputs))(womValue).get - } val result = outputValues map wdlValueToS3Path(outputs) result should have size 3 result should contain(WomSingleFile("s3://path/to/file1")) - result should contain(WomArray(WomArrayType(WomSingleFileType), - Seq(WomSingleFile("s3://path/to/file2"), WomSingleFile("s3://path/to/file3"))) + result should contain( + WomArray(WomArrayType(WomSingleFileType), + Seq(WomSingleFile("s3://path/to/file2"), WomSingleFile("s3://path/to/file3")) + ) ) - result should contain(WomMap(WomMapType(WomSingleFileType, WomSingleFileType), - Map(WomSingleFile("s3://path/to/file4") -> WomSingleFile("s3://path/to/file5"))) + result should contain( + WomMap(WomMapType(WomSingleFileType, WomSingleFileType), + Map(WomSingleFile("s3://path/to/file4") -> WomSingleFile("s3://path/to/file5")) + ) ) } it should "return log paths for non-scattered call" taggedAs AwsTest in { val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId(UUID.fromString("e6236763-c518-41d0-9688-432549a8bf7c")), - WdlNamespaceWithWorkflow.load( - SampleWdl.HelloWorld.asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.HelloWorld.asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, WorkflowOptions.fromJsonString(""" {"aws_s3_root": "s3://path/to/root"} """).get, Labels.empty, @@ -806,11 +1029,14 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "hello").get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val 
props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration)) val testActorRef = TestActorRef[TestableAwsBatchJobExecutionActor]( - props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val backend = testActorRef.underlyingActor @@ -825,9 +1051,15 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "return log paths for scattered call" taggedAs AwsTest ignore { val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId(UUID.fromString("e6236763-c518-41d0-9688-432549a8bf7d")), - WdlNamespaceWithWorkflow.load( - new SampleWdl.ScatterWdl().asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load( + new SampleWdl.ScatterWdl().asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, WorkflowOptions.fromJsonString(""" {"root": "s3://path/to/root"} """).get, Labels.empty, @@ -839,11 +1071,14 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "B").get val key = BackendJobDescriptorKey(call, Option(2), 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestableAwsBatchJobExecutionActor(jobDescriptor, Promise(), configuration)) val testActorRef = TestActorRef[TestableAwsBatchJobExecutionActor]( - props, s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableAwsBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val backend = testActorRef.underlyingActor @@ -862,11 +1097,12 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val jobId = "the-job-id" - def execute:Future[ExecutionHandle] ={ backend.recoverAsync(StandardAsyncJob(jobId)) } + def execute: Future[ExecutionHandle] = backend.recoverAsync(StandardAsyncJob(jobId)) whenReady(execute, PatienceConfiguration.Timeout(10.seconds.dilated)) { executionResponse => - executionResponse should be(a[PendingExecutionHandle[_,_,_]]) - val pendingExecutionResponse = executionResponse.asInstanceOf[PendingExecutionHandle[StandardAsyncJob, AwsBatchJob, RunStatus]] + executionResponse should be(a[PendingExecutionHandle[_, _, _]]) + val pendingExecutionResponse = + executionResponse.asInstanceOf[PendingExecutionHandle[StandardAsyncJob, AwsBatchJob, RunStatus]] pendingExecutionResponse.isDone should be(false) pendingExecutionResponse.jobDescriptor should be(jobDescriptor) pendingExecutionResponse.pendingJob should be(StandardAsyncJob(jobId)) @@ -876,8 +1112,10 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite } private def makeRuntimeAttributes(job: CommandCallNode) = { - val evaluatedAttributes = 
RuntimeAttributeDefinition.evaluateRuntimeAttributes(job.callable.runtimeAttributes, null, Map.empty) - RuntimeAttributeDefinition.addDefaultsToAttributes( - runtimeAttributesBuilder.definitions.toSet, NoOptions)(evaluatedAttributes.getOrElse(fail("Failed to evaluate runtime attributes"))) + val evaluatedAttributes = + RuntimeAttributeDefinition.evaluateRuntimeAttributes(job.callable.runtimeAttributes, null, Map.empty) + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributesBuilder.definitions.toSet, NoOptions)( + evaluatedAttributes.getOrElse(fail("Failed to evaluate runtime attributes")) + ) } } diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttachedDiskSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttachedDiskSpec.scala index f2022fe3456..d76a745c4bc 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttachedDiskSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttachedDiskSpec.scala @@ -40,15 +40,16 @@ import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks._ import org.scalatest.prop.Tables.Table - class AwsBatchAttachedDiskSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TryValues { val validTable = Table( ("unparsed", "parsed"), // AwsBatchEmptyMountedDisk has a toString override that uses the MD5sum of // the mount path in the return value, so these values are deterministic ("d-39de0dbcfb68c8735bd088c62fa061a4 /mnt", AwsBatchEmptyMountedDisk(DefaultPathBuilder.get("/mnt"))), - ("d-753b3ff55ce6e29b10951ad6190f7c84 /mnt/my_path", AwsBatchEmptyMountedDisk(DefaultPathBuilder.get("/mnt/my_path"))), - ("local-disk /cromwell_root", AwsBatchWorkingDisk()), + ("d-753b3ff55ce6e29b10951ad6190f7c84 /mnt/my_path", + AwsBatchEmptyMountedDisk(DefaultPathBuilder.get("/mnt/my_path")) + ), + ("local-disk /cromwell_root", AwsBatchWorkingDisk()) ) // TODO: Work through this syntax diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttributesSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttributesSpec.scala index e3a6d099e66..aeb9d825f83 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttributesSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAttributesSpec.scala @@ -58,10 +58,9 @@ class AwsBatchAttributesSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "not parse invalid config" taggedAs IntegrationTest in { val nakedConfig = - ConfigFactory.parseString( - """ - |{ - |} + ConfigFactory.parseString(""" + |{ + |} """.stripMargin) val exception = intercept[IllegalArgumentException with MessageAggregation] { @@ -76,21 +75,21 @@ class AwsBatchAttributesSpec extends AnyFlatSpec with CromwellTimeoutSpec with M def configString(): String = s""" - |{ - | root = "s3://myBucket" - | maximum-polling-interval = 600 - | numSubmitAttempts = 6 - | numCreateDefinitionAttempts = 6 - | - | - | filesystems = { - | local { - | // A reference to a potentially different auth for manipulating files via engine functions. - | auth = "default" - | } - | } - |} - | """.stripMargin + |{ + | root = "s3://myBucket" + | maximum-polling-interval = 600 + | numSubmitAttempts = 6 + | numCreateDefinitionAttempts = 6 + | + | + | filesystems = { + | local { + | // A reference to a potentially different auth for manipulating files via engine functions. 
+ | auth = "default" + | } + | } + |} + | """.stripMargin def configStringS3(): String = s""" |{ diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchConfigurationSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchConfigurationSpec.scala index 829c0b0514f..0e7ff6c8a56 100755 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchConfigurationSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchConfigurationSpec.scala @@ -40,7 +40,12 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -class AwsBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks with BeforeAndAfterAll { +class AwsBatchConfigurationSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks + with BeforeAndAfterAll { behavior of "AwsBatchConfigurationSpec" @@ -51,32 +56,31 @@ class AwsBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec wit () } - val globalConfig = ConfigFactory.parseString( - s""" - |aws { - | - | application-name = "cromwell" - | numSubmitAttempts = 6 - | numCreateDefinitionAttempts = 6 - | - | auths = [ - | { - | name = "application-default" - | scheme = "default" - | }, - | { - | name = "user-via-refresh" - | scheme = "custom_keys" - | access-key = "secret_key" - | secret-key = "${mockFile.pathAsString}" - | }, - | { - | name = "service-account" - | scheme = "default" - | } - | ] - |} - | + val globalConfig = ConfigFactory.parseString(s""" + |aws { + | + | application-name = "cromwell" + | numSubmitAttempts = 6 + | numCreateDefinitionAttempts = 6 + | + | auths = [ + | { + | name = "application-default" + | scheme = "default" + | }, + | { + | name = "user-via-refresh" + | scheme = "custom_keys" + | access-key = "secret_key" + | secret-key = "${mockFile.pathAsString}" + | }, + | { + | name = "service-account" + | scheme = "default" + | } + | ] + |} + | """.stripMargin) val backendConfig = ConfigFactory.parseString( @@ -110,7 +114,8 @@ class AwsBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec wit | } | } | - """.stripMargin) + """.stripMargin + ) it should "fail to instantiate if any required configuration is missing" in { @@ -131,11 +136,14 @@ class AwsBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec wit } it should "have correct root" in { - new AwsBatchConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig)).root shouldBe "s3://my-cromwell-workflows-bucket" + new AwsBatchConfiguration( + BackendConfigurationDescriptor(backendConfig, globalConfig) + ).root shouldBe "s3://my-cromwell-workflows-bucket" } it should "have correct docker" in { - val dockerConf = new AwsBatchConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig)).dockerCredentials + val dockerConf = + new AwsBatchConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig)).dockerCredentials dockerConf shouldBe defined dockerConf.get.token shouldBe "dockerToken" } diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala index f1e2eb40bf9..762830d78dc 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala +++ 
b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala @@ -50,45 +50,43 @@ import wom.graph.CommandCallNode import scala.concurrent.duration._ -class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers - with ImplicitSender { +class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender { val Timeout: FiniteDuration = 30.second.dilated import BackendSpec._ val HelloWorld: String = s""" - |task hello { - | String addressee = "you" - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | - | RUNTIME - |} - | - |workflow wf_hello { - | call hello - |} + |task hello { + | String addressee = "you" + | command { + | echo "Hello $${addressee}!" + | } + | output { + | String salutation = read_string(stdout()) + | } + | + | RUNTIME + |} + | + |workflow wf_hello { + | call hello + |} """.stripMargin - val globalConfig: Config = ConfigFactory.parseString( - """ - |aws { - | - | application-name = "cromwell" - | - | auths = [ - | { - | name = "default" - | scheme = "default" - | } - | ] - |} - |""".stripMargin) + val globalConfig: Config = ConfigFactory.parseString(""" + |aws { + | + | application-name = "cromwell" + | + | auths = [ + | { + | name = "default" + | scheme = "default" + | } + | ] + |} + |""".stripMargin) val backendConfigTemplate: String = """ @@ -157,28 +155,42 @@ class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike val backendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[DOCKERHUBCONFIG]", "")) - val dockerBackendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[DOCKERHUBCONFIG]", - """ - |dockerhub { - | account = "my@docker.account" - | # no secrets here guys this is just `echo -n username:password | base64` - | token = "dXNlcm5hbWU6cGFzc3dvcmQ=" - |} - | """.stripMargin)) + val dockerBackendConfig: Config = ConfigFactory.parseString( + backendConfigTemplate.replace( + "[DOCKERHUBCONFIG]", + """ + |dockerhub { + | account = "my@docker.account" + | # no secrets here guys this is just `echo -n username:password | base64` + | token = "dXNlcm5hbWU6cGFzc3dvcmQ=" + |} + | """.stripMargin + ) + ) val defaultBackendConfig: BackendConfigurationDescriptor = BackendConfigurationDescriptor(backendConfig, globalConfig) val refreshTokenConfig: Config = ConfigFactory.parseString(refreshTokenConfigTemplate) private def getAwsBatchBackendProps(workflowDescriptor: BackendWorkflowDescriptor, - calls: Set[CommandCallNode], - configuration: AwsBatchConfiguration): Props = { + calls: Set[CommandCallNode], + configuration: AwsBatchConfiguration + ): Props = { val ioActor = mockIoActor - val params = AwsBatchInitializationActorParams(workflowDescriptor, ioActor, calls, configuration, emptyActor, restarting = false) + val params = AwsBatchInitializationActorParams(workflowDescriptor, + ioActor, + calls, + configuration, + emptyActor, + restarting = false + ) Props(new AwsBatchInitializationActor(params)).withDispatcher(BackendDispatcher) } - private def getAwsBatchBackend(workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], conf: BackendConfigurationDescriptor) = { + private def getAwsBatchBackend(workflowDescriptor: BackendWorkflowDescriptor, + calls: Set[CommandCallNode], + conf: BackendConfigurationDescriptor + ) = { val props = getAwsBatchBackendProps(workflowDescriptor, calls, new 
AwsBatchConfiguration(conf)) system.actorOf(props, "TestableAwsBatchInitializationActor-" + UUID.randomUUID) } @@ -188,17 +200,17 @@ class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike it should "log a warning message when there are unsupported runtime attributes" taggedAs IntegrationTest in { within(Timeout) { - val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, - runtime = """runtime { docker: "ubuntu/latest" test: true }""") - val backend = getAwsBatchBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, - defaultBackendConfig) + val workflowDescriptor = + buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { docker: "ubuntu/latest" test: true }""") + val backend = + getAwsBatchBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, defaultBackendConfig) val eventPattern = "Key/s [test] is/are not supported by backend. Unsupported attributes will not be part of job executions." EventFilter.warning(pattern = escapePattern(eventPattern), occurrences = 1) intercept { backend ! Initialize } expectMsgPF() { - case InitializationSuccess(_) => //Docker entry is present. + case InitializationSuccess(_) => // Docker entry is present. case InitializationFailed(failure) => fail(s"InitializationSuccess was expected but got $failure") } } @@ -207,26 +219,32 @@ class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike it should "return InitializationFailed when docker runtime attribute key is not present" taggedAs IntegrationTest in { within(Timeout) { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { }""") - val backend = getAwsBatchBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, - defaultBackendConfig) + val backend = + getAwsBatchBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, defaultBackendConfig) backend ! Initialize - expectMsgPF() { - case InitializationFailed(failure) => - failure match { - case exception: RuntimeAttributeValidationFailures => - if (!exception.getMessage.equals("Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!")) - fail("Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'.") - } + expectMsgPF() { case InitializationFailed(failure) => + failure match { + case exception: RuntimeAttributeValidationFailures => + if ( + !exception.getMessage.equals( + "Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!" + ) + ) + fail( + "Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'." 
+ ) + } } } } - private case class TestingBits(actorRef: TestActorRef[AwsBatchInitializationActor], configuration: AwsBatchConfiguration) + private case class TestingBits(actorRef: TestActorRef[AwsBatchInitializationActor], + configuration: AwsBatchConfiguration + ) } object AwsBatchInitializationActorSpec { - def normalize(str: String): String = { + def normalize(str: String): String = str.parseJson.prettyPrint - } } diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala index be384d2af77..37b9ebaef99 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala @@ -35,7 +35,11 @@ import akka.actor.{Actor, ActorRef, Props} import akka.testkit._ import cromwell.backend.BackendJobExecutionActor.{ExecuteJobCommand, JobFailedNonRetryableResponse} import cromwell.backend.impl.aws.ControllableFailingJabjea.JabjeaExplode -import cromwell.backend.standard.{DefaultStandardSyncExecutionActorParams, StandardSyncExecutionActor, StandardSyncExecutionActorParams} +import cromwell.backend.standard.{ + DefaultStandardSyncExecutionActorParams, + StandardSyncExecutionActor, + StandardSyncExecutionActorParams +} import cromwell.backend.{BackendJobDescriptor, MinimumRuntimeSettings} import cromwell.core.TestKitSuite import org.scalatest.flatspec.AnyFlatSpecLike @@ -67,12 +71,21 @@ class AwsBatchJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val parent = TestProbe("parent") val deathwatch = TestProbe("deathwatch") - val params = DefaultStandardSyncExecutionActorParams(AwsBatchAsyncBackendJobExecutionActor.AwsBatchOperationIdKey, serviceRegistryActor, ioActor, - jobDescriptor, null, Option(initializationData), backendSingletonActor, - classOf[AwsBatchAsyncBackendJobExecutionActor], MinimumRuntimeSettings()) + val params = DefaultStandardSyncExecutionActorParams( + AwsBatchAsyncBackendJobExecutionActor.AwsBatchOperationIdKey, + serviceRegistryActor, + ioActor, + jobDescriptor, + null, + Option(initializationData), + backendSingletonActor, + classOf[AwsBatchAsyncBackendJobExecutionActor], + MinimumRuntimeSettings() + ) val testJJEA = TestActorRef[TestAwsBatchJobExecutionActor]( props = Props(new TestAwsBatchJobExecutionActor(params, Props(new ConstructorFailingJABJEA))), - supervisor = parent.ref) + supervisor = parent.ref + ) deathwatch watch testJJEA // Nothing happens: @@ -81,9 +94,10 @@ class AwsBatchJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi testJJEA.tell(msg = ExecuteJobCommand, sender = parent.ref) - parent.expectMsgPF(max = TimeoutDuration) { - case JobFailedNonRetryableResponse(_, throwable, _) => - throwable.getMessage should be("AwsBatchAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed.") + parent.expectMsgPF(max = TimeoutDuration) { case JobFailedNonRetryableResponse(_, throwable, _) => + throwable.getMessage should be( + "AwsBatchAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed." 
+ ) } } @@ -100,13 +114,22 @@ class AwsBatchJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi val parent = TestProbe("parent") val deathwatch = TestProbe("deathwatch") val constructionPromise = Promise[ActorRef]() - val params = DefaultStandardSyncExecutionActorParams(AwsBatchAsyncBackendJobExecutionActor.AwsBatchOperationIdKey, serviceRegistryActor, ioActor, - jobDescriptor, null, Option(initializationData), backendSingletonActor, + val params = DefaultStandardSyncExecutionActorParams( + AwsBatchAsyncBackendJobExecutionActor.AwsBatchOperationIdKey, + serviceRegistryActor, + ioActor, + jobDescriptor, + null, + Option(initializationData), + backendSingletonActor, classOf[AwsBatchAsyncBackendJobExecutionActor], - MinimumRuntimeSettings()) + MinimumRuntimeSettings() + ) val testJJEA = TestActorRef[TestAwsBatchJobExecutionActor]( - props = Props(new TestAwsBatchJobExecutionActor(params, Props(new ControllableFailingJabjea(constructionPromise)))), - supervisor = parent.ref) + props = + Props(new TestAwsBatchJobExecutionActor(params, Props(new ControllableFailingJabjea(constructionPromise)))), + supervisor = parent.ref + ) deathwatch watch testJJEA // Nothing happens: @@ -127,15 +150,16 @@ class AwsBatchJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi throw exception } - parent.expectMsgPF(max = TimeoutDuration) { - case JobFailedNonRetryableResponse(_, throwable, _) => - throwable.getMessage should be("AwsBatchAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed.") + parent.expectMsgPF(max = TimeoutDuration) { case JobFailedNonRetryableResponse(_, throwable, _) => + throwable.getMessage should be( + "AwsBatchAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed." + ) } } } -class TestAwsBatchJobExecutionActor(params: StandardSyncExecutionActorParams, - fakeJabjeaProps: Props) extends StandardSyncExecutionActor(params) { +class TestAwsBatchJobExecutionActor(params: StandardSyncExecutionActorParams, fakeJabjeaProps: Props) + extends StandardSyncExecutionActor(params) { override def createAsyncProps(): Props = fakeJabjeaProps } @@ -147,12 +171,12 @@ class ConstructorFailingJABJEA extends ControllableFailingJabjea(Promise[ActorRe class ControllableFailingJabjea(constructionPromise: Promise[ActorRef]) extends Actor { def explode(): Unit = { val boom = 1 == 1 - if (boom) throw new RuntimeException("Test Exception! Don't panic if this appears during a test run!") - with NoStackTrace + if (boom) + throw new RuntimeException("Test Exception! 
Don't panic if this appears during a test run!") with NoStackTrace } constructionPromise.trySuccess(self) - override def receive: Receive = { - case JabjeaExplode => explode() + override def receive: Receive = { case JabjeaExplode => + explode() } } diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala index e9a5f1f86d9..32d0cfab6bb 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala @@ -102,30 +102,51 @@ class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wi val cpu: Int Refined Positive = 2 val runtimeAttributes: AwsBatchRuntimeAttributes = new AwsBatchRuntimeAttributes( - cpu = cpu, - zones = Vector("us-east-1"), - memory = MemorySize(2.0, MemoryUnit.GB), - disks = Seq.empty, - dockerImage = "ubuntu:latest", - queueArn = "arn:aws:batch:us-east-1:123456789:job-queue/default-gwf-core", - failOnStderr = true, - continueOnReturnCode = ContinueOnReturnCodeFlag(false), - noAddress = false, - scriptS3BucketName = "script-bucket", - fileSystem = "s3") + cpu = cpu, + zones = Vector("us-east-1"), + memory = MemorySize(2.0, MemoryUnit.GB), + disks = Seq.empty, + dockerImage = "ubuntu:latest", + queueArn = "arn:aws:batch:us-east-1:123456789:job-queue/default-gwf-core", + failOnStderr = true, + continueOnReturnCode = ContinueOnReturnCodeFlag(false), + noAddress = false, + scriptS3BucketName = "script-bucket", + fileSystem = "s3" + ) private def generateBasicJob: AwsBatchJob = { - val job = AwsBatchJob(null, runtimeAttributes, "commandLine", script, - "/cromwell_root/hello-rc.txt", "/cromwell_root/hello-stdout.log", "/cromwell_root/hello-stderr.log", - Seq.empty[AwsBatchInput].toSet, Seq.empty[AwsBatchFileOutput].toSet, - jobPaths, Seq.empty[AwsBatchParameter], None) + val job = AwsBatchJob( + null, + runtimeAttributes, + "commandLine", + script, + "/cromwell_root/hello-rc.txt", + "/cromwell_root/hello-stdout.log", + "/cromwell_root/hello-stderr.log", + Seq.empty[AwsBatchInput].toSet, + Seq.empty[AwsBatchFileOutput].toSet, + jobPaths, + Seq.empty[AwsBatchParameter], + None + ) job } private def generateBasicJobForLocalFS: AwsBatchJob = { - val job = AwsBatchJob(null, runtimeAttributes.copy(fileSystem="local"), "commandLine", script, - "/cromwell_root/hello-rc.txt", "/cromwell_root/hello-stdout.log", "/cromwell_root/hello-stderr.log", - Seq.empty[AwsBatchInput].toSet, Seq.empty[AwsBatchFileOutput].toSet, - jobPaths, Seq.empty[AwsBatchParameter], None) + val job = AwsBatchJob( + null, + runtimeAttributes.copy(fileSystem = "local"), + "commandLine", + script, + "/cromwell_root/hello-rc.txt", + "/cromwell_root/hello-stdout.log", + "/cromwell_root/hello-stderr.log", + Seq.empty[AwsBatchInput].toSet, + Seq.empty[AwsBatchFileOutput].toSet, + jobPaths, + Seq.empty[AwsBatchParameter], + None + ) job } @@ -136,10 +157,10 @@ class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wi val job: AwsBatchJob = generateBasicJob - job.AWS_RETRY_MODE should be ("AWS_RETRY_MODE") - job.AWS_RETRY_MODE_DEFAULT_VALUE should be ("adaptive") - job.AWS_MAX_ATTEMPTS should be ("AWS_MAX_ATTEMPTS") - job.AWS_MAX_ATTEMPTS_DEFAULT_VALUE should be ("14") + job.AWS_RETRY_MODE should be("AWS_RETRY_MODE") + job.AWS_RETRY_MODE_DEFAULT_VALUE should be("adaptive") + job.AWS_MAX_ATTEMPTS should be("AWS_MAX_ATTEMPTS") + 
job.AWS_MAX_ATTEMPTS_DEFAULT_VALUE should be("14") } it should "generate appropriate KV pairs for the container environment for S3" in { @@ -149,10 +170,10 @@ class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wi // testing a private method see https://www.scalatest.org/user_guide/using_PrivateMethodTester val kvPairs = job invokePrivate generateEnvironmentKVPairs("script-bucket", "prefix-", "key") - kvPairs should contain (buildKVPair(job.AWS_MAX_ATTEMPTS, job.AWS_MAX_ATTEMPTS_DEFAULT_VALUE)) - kvPairs should contain (buildKVPair(job.AWS_RETRY_MODE, "adaptive")) - kvPairs should contain (buildKVPair("BATCH_FILE_TYPE", "script")) - kvPairs should contain (buildKVPair("BATCH_FILE_S3_URL", "s3://script-bucket/prefix-key")) + kvPairs should contain(buildKVPair(job.AWS_MAX_ATTEMPTS, job.AWS_MAX_ATTEMPTS_DEFAULT_VALUE)) + kvPairs should contain(buildKVPair(job.AWS_RETRY_MODE, "adaptive")) + kvPairs should contain(buildKVPair("BATCH_FILE_TYPE", "script")) + kvPairs should contain(buildKVPair("BATCH_FILE_S3_URL", "s3://script-bucket/prefix-key")) } it should "generate appropriate KV pairs for the container environment for Local FS" in { @@ -162,9 +183,9 @@ class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wi // testing a private method see https://www.scalatest.org/user_guide/using_PrivateMethodTester val kvPairs = job invokePrivate generateEnvironmentKVPairs("script-bucket", "prefix-", "key") - kvPairs should contain (buildKVPair(job.AWS_MAX_ATTEMPTS, job.AWS_MAX_ATTEMPTS_DEFAULT_VALUE)) - kvPairs should contain (buildKVPair(job.AWS_RETRY_MODE, "adaptive")) - kvPairs should contain (buildKVPair("BATCH_FILE_TYPE", "script")) - kvPairs should contain (buildKVPair("BATCH_FILE_S3_URL", "")) + kvPairs should contain(buildKVPair(job.AWS_MAX_ATTEMPTS, job.AWS_MAX_ATTEMPTS_DEFAULT_VALUE)) + kvPairs should contain(buildKVPair(job.AWS_RETRY_MODE, "adaptive")) + kvPairs should contain(buildKVPair("BATCH_FILE_TYPE", "script")) + kvPairs should contain(buildKVPair("BATCH_FILE_S3_URL", "")) } } diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala index fbab5a81a23..957126da9ad 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala @@ -50,32 +50,41 @@ import wom.values._ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers { - def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]): WorkflowOptions = { - WorkflowOptions(JsObject(Map( - "default_runtime_attributes" -> JsObject(defaults) - ))) - } - - val expectedDefaults = new AwsBatchRuntimeAttributes(refineMV[Positive](1), Vector("us-east-1a", "us-east-1b"), + def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]): WorkflowOptions = + WorkflowOptions( + JsObject( + Map( + "default_runtime_attributes" -> JsObject(defaults) + ) + ) + ) - MemorySize(2, MemoryUnit.GB), Vector(AwsBatchWorkingDisk()), + val expectedDefaults = new AwsBatchRuntimeAttributes( + refineMV[Positive](1), + Vector("us-east-1a", "us-east-1b"), + MemorySize(2, MemoryUnit.GB), + Vector(AwsBatchWorkingDisk()), "ubuntu:latest", "arn:aws:batch:us-east-1:111222333444:job-queue/job-queue", false, ContinueOnReturnCodeSet(Set(0)), false, - "my-stuff") - - val 
expectedDefaultsLocalFS = new AwsBatchRuntimeAttributes(refineMV[Positive](1), Vector("us-east-1a", "us-east-1b"), - - MemorySize(2, MemoryUnit.GB), Vector(AwsBatchWorkingDisk()), + "my-stuff" + ) + + val expectedDefaultsLocalFS = new AwsBatchRuntimeAttributes( + refineMV[Positive](1), + Vector("us-east-1a", "us-east-1b"), + MemorySize(2, MemoryUnit.GB), + Vector(AwsBatchWorkingDisk()), "ubuntu:latest", "arn:aws:batch:us-east-1:111222333444:job-queue/job-queue", false, ContinueOnReturnCodeSet(Set(0)), false, "", - "local") + "local" + ) "AwsBatchRuntimeAttributes" should { @@ -106,12 +115,16 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout "fail to validate a scriptBucketName with trailing slash entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff/")) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "The Script Bucket name has an invalid s3 bucket format") + assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, + "The Script Bucket name has an invalid s3 bucket format" + ) } "fail to validate an invalid scriptBucketName entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my**Bucket")) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "The Script Bucket name has an invalid s3 bucket format") + assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, + "The Script Bucket name has an invalid s3 bucket format" + ) } "validate a valid Docker entry" in { @@ -122,113 +135,196 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout "fail to validate an invalid Docker entry" in { val runtimeAttributes = Map("docker" -> WomInteger(1)) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting docker runtime attribute to be a String") + assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, + "Expecting docker runtime attribute to be a String" + ) } "validate a valid failOnStderr entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "failOnStderr" -> WomBoolean(true)) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "failOnStderr" -> WomBoolean(true) + ) val expectedRuntimeAttributes = expectedDefaults.copy(failOnStderr = true) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid failOnStderr entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "failOnStderr" -> WomString("yes")) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'") + assertAwsBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'" + ) } "validate a valid continueOnReturnCode integer entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomInteger(1)) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "continueOnReturnCode" -> WomInteger(1) + ) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = 
ContinueOnReturnCodeSet(Set(1))) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "validate a valid continueOnReturnCode boolean entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomBoolean(false)) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "continueOnReturnCode" -> WomBoolean(false) + ) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeFlag(false)) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) + val runtimeAttributes = Map( + "docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))) + ) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "coerce then validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2")))) + val runtimeAttributes = Map( + "docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2"))) + ) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid continueOnReturnCode entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomString("value")) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "continueOnReturnCode" -> WomString("value") + ) + assertAwsBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" + ) } "validate a valid cpu entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomInteger(2)) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomInteger(2)) val expectedRuntimeAttributes = expectedDefaults.copy(cpu = refineMV[Positive](2)) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "validate a valid cpu string entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), 
"scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomString("2")) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "cpu" -> WomString("2") + ) val expectedRuntimeAttributes = expectedDefaults.copy(cpu = refineMV[Positive](2)) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "validate a valid Filesystem string entry S3" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "filesystem" -> WomString("s3")) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "filesystem" -> WomString("s3") + ) val expectedRuntimeAttributes = expectedDefaults assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "validate a valid Filesystem string entry local Filesystem" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"),"scriptBucketName" -> WomString(""), "filesystem" -> WomString("local")) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString(""), + "filesystem" -> WomString("local") + ) val expectedRuntimeAttributes = expectedDefaultsLocalFS - assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes,WorkflowOptions.fromMap(Map.empty).get, - NonEmptyList.of("us-east-1a", "us-east-1b"), new AwsBatchConfiguration(AwsBatchTestConfigForLocalFS.AwsBatchBackendConfigurationDescriptor)) + assertAwsBatchRuntimeAttributesSuccessfulCreation( + runtimeAttributes, + expectedRuntimeAttributes, + WorkflowOptions.fromMap(Map.empty).get, + NonEmptyList.of("us-east-1a", "us-east-1b"), + new AwsBatchConfiguration(AwsBatchTestConfigForLocalFS.AwsBatchBackendConfigurationDescriptor) + ) } "fail to validate an invalid cpu entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomString("value")) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting cpu runtime attribute to be an Integer") + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "cpu" -> WomString("value") + ) + assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, + "Expecting cpu runtime attribute to be an Integer" + ) } "validate a valid zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomString("us-east-1a")) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "zones" -> WomString("us-east-1a") + ) val expectedRuntimeAttributes = expectedDefaults.copy(zones = Vector("us-east-1a")) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomInteger(1)) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "zones" -> WomInteger(1) + ) + assertAwsBatchRuntimeAttributesFailedCreation( + 
runtimeAttributes, + "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]" + ) } "validate a valid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomArray(WomArrayType(WomStringType), List(WomString("us-east-1a"), WomString("us-east-1b")))) + val runtimeAttributes = Map( + "docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "zones" -> WomArray(WomArrayType(WomStringType), List(WomString("us-east-1a"), WomString("us-east-1b"))) + ) val expectedRuntimeAttributes = expectedDefaults.copy(zones = Vector("us-east-1a", "us-east-1b")) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") + val runtimeAttributes = Map( + "docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))) + ) + assertAwsBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]" + ) } "validate a valid disks entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "disks" -> WomString("local-disk")) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "disks" -> WomString("local-disk") + ) val expectedRuntimeAttributes = expectedDefaults.copy(disks = Seq(AwsBatchVolume.parse("local-disk").get)) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid disks entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "disks" -> WomInteger(10)) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting disks runtime attribute to be a comma separated String or Array[String]") + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "disks" -> WomInteger(10) + ) + assertAwsBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting disks runtime attribute to be a comma separated String or Array[String]" + ) } "validate a valid disks array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("local-disk"), WomString("local-disk")))) - val expectedRuntimeAttributes = expectedDefaults.copy(disks = Seq(AwsBatchVolume.parse("local-disk").get, AwsBatchVolume.parse("local-disk").get)) + val runtimeAttributes = Map( + "docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("local-disk"), WomString("local-disk"))) + ) + val expectedRuntimeAttributes = expectedDefaults.copy(disks = + 
Seq(AwsBatchVolume.parse("local-disk").get, AwsBatchVolume.parse("local-disk").get) + ) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } @@ -239,26 +335,42 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout // } "validate a valid memory entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "memory" -> WomString("1 GB")) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "memory" -> WomString("1 GB") + ) val expectedRuntimeAttributes = expectedDefaults.copy(memory = MemorySize(1, MemoryUnit.GB)) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid memory entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "memory" -> WomString("blah")) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting memory runtime attribute to be an Integer or String with format '8 GB'") + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "memory" -> WomString("blah") + ) + assertAwsBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting memory runtime attribute to be an Integer or String with format '8 GB'" + ) } "validate a valid noAddress entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "noAddress" -> WomBoolean(true)) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "noAddress" -> WomBoolean(true) + ) val expectedRuntimeAttributes = expectedDefaults.copy(noAddress = true) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid noAddress entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "noAddress" -> WomInteger(1)) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "noAddress" -> WomInteger(1) + ) assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, - "Expecting noAddress runtime attribute to be a Boolean") + "Expecting noAddress runtime attribute to be a Boolean" + ) } "validate a valid queueArn entry" in { @@ -269,10 +381,13 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout "arn:aws-cn:batch:us-west-2:123456789012:job-queue/default-a4e50e00-b850-11e9", "arn:aws-cn:batch:us-gov-west-1:123456789012:job-queue/default-a4e50e00-b850-11e9", "arn:aws-us-gov:batch:us-west-2:123456789012:job-queue/default-a4e50e00-b850-11e9", - "arn:aws:batch:us-east-1:123456789012:job-queue/my_queue", + "arn:aws:batch:us-east-1:123456789012:job-queue/my_queue" ) validArnsAsStrings foreach { validArn => - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "queueArn" -> WomString(validArn)) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "scriptBucketName" -> WomString("my-stuff"), + "queueArn" -> WomString(validArn) + ) val expectedRuntimeAttributes = expectedDefaults.copy(queueArn = validArn) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, 
expectedRuntimeAttributes) } @@ -292,13 +407,12 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout ) invalidArnsAsStrings foreach { invalidArn => val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "queueArn" -> WomString(invalidArn)) - assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, - "ARN has invalid format") + assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "ARN has invalid format") } } "override config default attributes with default attributes declared in workflow options" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff") ) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff")) val workflowOptionsJson = """{ @@ -312,7 +426,8 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } "override config default runtime attributes with task runtime attributes" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomInteger(4)) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomInteger(4)) val workflowOptionsJson = """{ @@ -326,7 +441,8 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } "override invalid config default attributes with task runtime attributes" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"),"scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomInteger(4)) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "cpu" -> WomInteger(4)) val workflowOptionsJson = """{ @@ -341,10 +457,11 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } private def assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes: Map[String, WomValue], - expectedRuntimeAttributes: AwsBatchRuntimeAttributes, - workflowOptions: WorkflowOptions = emptyWorkflowOptions, - defaultZones: NonEmptyList[String] = defaultZones, - configuration: AwsBatchConfiguration = configuration): Unit = { + expectedRuntimeAttributes: AwsBatchRuntimeAttributes, + workflowOptions: WorkflowOptions = emptyWorkflowOptions, + defaultZones: NonEmptyList[String] = defaultZones, + configuration: AwsBatchConfiguration = configuration + ): Unit = { try { val actualRuntimeAttributes = toAwsBatchRuntimeAttributes(runtimeAttributes, workflowOptions, configuration) assert(actualRuntimeAttributes == expectedRuntimeAttributes) @@ -355,8 +472,9 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } private def assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes: Map[String, WomValue], - exMsg: String, - workflowOptions: WorkflowOptions = emptyWorkflowOptions): Unit = { + exMsg: String, + workflowOptions: WorkflowOptions = emptyWorkflowOptions + ): Unit = { try { toAwsBatchRuntimeAttributes(runtimeAttributes, workflowOptions, configuration) fail(s"A RuntimeException was expected with message: $exMsg") @@ -367,11 +485,14 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } private def toAwsBatchRuntimeAttributes(runtimeAttributes: Map[String, WomValue], - workflowOptions: WorkflowOptions, - configuration: AwsBatchConfiguration): AwsBatchRuntimeAttributes = { + workflowOptions: WorkflowOptions, + configuration: 
-                                          configuration: AwsBatchConfiguration): AwsBatchRuntimeAttributes = {
+                                          workflowOptions: WorkflowOptions,
+                                          configuration: AwsBatchConfiguration
+  ): AwsBatchRuntimeAttributes = {
     val runtimeAttributesBuilder = AwsBatchRuntimeAttributes.runtimeAttributesBuilder(configuration)
-    val defaultedAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes(
-      staticRuntimeAttributeDefinitions, workflowOptions)(runtimeAttributes)
+    val defaultedAttributes =
+      RuntimeAttributeDefinition.addDefaultsToAttributes(staticRuntimeAttributeDefinitions, workflowOptions)(
+        runtimeAttributes
+      )
     val validatedRuntimeAttributes = runtimeAttributesBuilder.build(defaultedAttributes, NOPLogger.NOP_LOGGER)
     AwsBatchRuntimeAttributes(validatedRuntimeAttributes, configuration.runtimeConfig, configuration.fileSystem)
   }
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchTestConfig.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchTestConfig.scala
index 38545c7e472..d5095989c2c 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchTestConfig.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchTestConfig.scala
@@ -109,8 +109,10 @@ object AwsBatchTestConfig {
   val AwsBatchBackendConfig = ConfigFactory.parseString(AwsBatchBackendConfigString)
   val AwsBatchGlobalConfig = ConfigFactory.parseString(AwsBatchGlobalConfigString)
   val AwsBatchBackendNoDefaultConfig = ConfigFactory.parseString(NoDefaultsConfigString)
-  val AwsBatchBackendConfigurationDescriptor = BackendConfigurationDescriptor(AwsBatchBackendConfig, AwsBatchGlobalConfig)
-  val NoDefaultsConfigurationDescriptor = BackendConfigurationDescriptor(AwsBatchBackendNoDefaultConfig, AwsBatchGlobalConfig)
+  val AwsBatchBackendConfigurationDescriptor =
+    BackendConfigurationDescriptor(AwsBatchBackendConfig, AwsBatchGlobalConfig)
+  val NoDefaultsConfigurationDescriptor =
+    BackendConfigurationDescriptor(AwsBatchBackendNoDefaultConfig, AwsBatchGlobalConfig)
 }
 
 object AwsBatchTestConfigForLocalFS {
@@ -188,6 +190,8 @@ object AwsBatchTestConfigForLocalFS {
   val AwsBatchBackendConfig = ConfigFactory.parseString(AwsBatchBackendConfigString)
   val AwsBatchGlobalConfig = ConfigFactory.parseString(AwsBatchGlobalConfigString)
   val AwsBatchBackendNoDefaultConfig = ConfigFactory.parseString(NoDefaultsConfigString)
-  val AwsBatchBackendConfigurationDescriptor = BackendConfigurationDescriptor(AwsBatchBackendConfig, AwsBatchGlobalConfig)
-  val NoDefaultsConfigurationDescriptor = BackendConfigurationDescriptor(AwsBatchBackendNoDefaultConfig, AwsBatchGlobalConfig)
-}
\ No newline at end of file
+  val AwsBatchBackendConfigurationDescriptor =
+    BackendConfigurationDescriptor(AwsBatchBackendConfig, AwsBatchGlobalConfig)
+  val NoDefaultsConfigurationDescriptor =
+    BackendConfigurationDescriptor(AwsBatchBackendNoDefaultConfig, AwsBatchGlobalConfig)
+}
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/GcpBatchBackendLifecycleActorFactory.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/GcpBatchBackendLifecycleActorFactory.scala
index 1bf708e8f8c..456fd822e7b 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/GcpBatchBackendLifecycleActorFactory.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/GcpBatchBackendLifecycleActorFactory.scala
@@ -3,29 +3,40 @@ package cromwell.backend.google.batch
 import akka.actor.{ActorRef, Props}
 import com.google.api.client.util.ExponentialBackOff
 import com.typesafe.scalalogging.StrictLogging
-import cromwell.backend.google.batch.GcpBatchBackendLifecycleActorFactory.{preemptionCountKey, robustBuildAttributes, unexpectedRetryCountKey}
+import cromwell.backend.google.batch.GcpBatchBackendLifecycleActorFactory.{
+  preemptionCountKey,
+  robustBuildAttributes,
+  unexpectedRetryCountKey
+}
 import cromwell.backend.google.batch.actors._
 import cromwell.backend.google.batch.api.{GcpBatchApiRequestHandler, GcpBatchRequestFactoryImpl}
 import cromwell.backend.google.batch.models.{GcpBatchConfiguration, GcpBatchConfigurationAttributes}
 import cromwell.backend.google.batch.callcaching.{BatchBackendCacheHitCopyingActor, BatchBackendFileHashingActor}
 import cromwell.backend.standard._
 import cromwell.backend.standard.callcaching.{StandardCacheHitCopyingActor, StandardFileHashingActor}
-import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor, JobExecutionMap}
+import cromwell.backend.{
+  BackendConfigurationDescriptor,
+  BackendInitializationData,
+  BackendWorkflowDescriptor,
+  JobExecutionMap
+}
 import cromwell.cloudsupport.gcp.GoogleConfiguration
 import cromwell.core.CallOutputs
 import wom.graph.CommandCallNode
 
 import scala.util.{Failure, Try}
 
-class GcpBatchBackendLifecycleActorFactory(override val name: String, override val configurationDescriptor: BackendConfigurationDescriptor)
-  extends StandardLifecycleActorFactory {
+class GcpBatchBackendLifecycleActorFactory(override val name: String,
+                                           override val configurationDescriptor: BackendConfigurationDescriptor
+) extends StandardLifecycleActorFactory {
 
   override val requestedKeyValueStoreKeys: Seq[String] = Seq(preemptionCountKey, unexpectedRetryCountKey)
 
   override def jobIdKey: String = "__gcp_batch"
 
   protected val googleConfig: GoogleConfiguration = GoogleConfiguration(configurationDescriptor.globalConfig)
 
-  override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = classOf[GcpBatchInitializationActor]
+  override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] =
+    classOf[GcpBatchInitializationActor]
 
   override def asyncExecutionActorClass: Class[_ <: StandardAsyncExecutionActor] =
     classOf[GcpBatchAsyncBackendJobExecutionActor]
@@ -33,47 +44,58 @@ class GcpBatchBackendLifecycleActorFactory(override val name: String, override v
 
   override lazy val finalizationActorClassOption: Option[Class[_ <: StandardFinalizationActor]] =
     Option(classOf[GcpBatchFinalizationActor])
-
-
   protected val batchAttributes: GcpBatchConfigurationAttributes = {
-    def defaultBuildAttributes()=
+    def defaultBuildAttributes() =
       GcpBatchConfigurationAttributes(googleConfig, configurationDescriptor.backendConfig, "batchConfig")
     robustBuildAttributes(defaultBuildAttributes)
   }
 
   val batchConfiguration = new GcpBatchConfiguration(configurationDescriptor, googleConfig, batchAttributes)
 
-  override def workflowInitializationActorParams(
-
-    workflowDescriptor: BackendWorkflowDescriptor,
-    ioActor: ActorRef,
-    calls: Set[CommandCallNode],
-    serviceRegistryActor: ActorRef,
-    restart: Boolean): StandardInitializationActorParams = {
-    GcpBatchInitializationActorParams(workflowDescriptor, ioActor , calls, batchConfiguration, serviceRegistryActor, restart)
-  }
-
-  override def workflowFinalizationActorParams(
-    workflowDescriptor: BackendWorkflowDescriptor,
-    ioActor: ActorRef,
-    //batchConfiguration: GcpBatchConfiguration,
-    calls: Set[CommandCallNode],
-    jobExecutionMap: JobExecutionMap,
-    workflowOutputs: CallOutputs,
-    initializationDataOption: Option[BackendInitializationData]): StandardFinalizationActorParams = {
-    GcpBatchFinalizationActorParams(workflowDescriptor, ioActor, batchConfiguration, calls, jobExecutionMap, workflowOutputs, initializationDataOption)
-  }
-
-  override lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = {
+  override def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor,
+                                                 ioActor: ActorRef,
+                                                 calls: Set[CommandCallNode],
+                                                 serviceRegistryActor: ActorRef,
+                                                 restart: Boolean
+  ): StandardInitializationActorParams =
+    GcpBatchInitializationActorParams(workflowDescriptor,
+                                      ioActor,
+                                      calls,
+                                      batchConfiguration,
+                                      serviceRegistryActor,
+                                      restart
+    )
+
+  override def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor,
+                                               ioActor: ActorRef,
+                                               // batchConfiguration: GcpBatchConfiguration,
+                                               calls: Set[CommandCallNode],
+                                               jobExecutionMap: JobExecutionMap,
+                                               workflowOutputs: CallOutputs,
+                                               initializationDataOption: Option[BackendInitializationData]
+  ): StandardFinalizationActorParams =
+    GcpBatchFinalizationActorParams(workflowDescriptor,
+                                    ioActor,
+                                    batchConfiguration,
+                                    calls,
+                                    jobExecutionMap,
+                                    workflowOutputs,
+                                    initializationDataOption
+    )
+
+  override lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] =
     Option(classOf[BatchBackendCacheHitCopyingActor])
-  }
 
-  override lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option(classOf[BatchBackendFileHashingActor])
+  override lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option(
+    classOf[BatchBackendFileHashingActor]
+  )
 
   override def backendSingletonActorProps(serviceRegistryActor: ActorRef): Option[Props] = {
     val requestHandler = new GcpBatchApiRequestHandler
     val requestFactory = new GcpBatchRequestFactoryImpl()(batchConfiguration.batchAttributes.gcsTransferConfiguration)
-    Option(GcpBatchBackendSingletonActor.props(requestFactory, serviceRegistryActor = serviceRegistryActor)(requestHandler))
+    Option(
+      GcpBatchBackendSingletonActor.props(requestFactory, serviceRegistryActor = serviceRegistryActor)(requestHandler)
+    )
   }
 }
 
@@ -81,13 +103,13 @@ object GcpBatchBackendLifecycleActorFactory extends StrictLogging {
   val preemptionCountKey = "PreemptionCount"
   val unexpectedRetryCountKey = "UnexpectedRetryCount"
 
-
-  private [batch] def robustBuildAttributes(buildAttributes: () => GcpBatchConfigurationAttributes,
-                                            maxAttempts: Int = 3,
-                                            initialIntervalMillis: Int = 5000,
-                                            maxIntervalMillis: Int = 10000,
-                                            multiplier: Double = 1.5,
-                                            randomizationFactor: Double = 0.5): GcpBatchConfigurationAttributes = {
+  private[batch] def robustBuildAttributes(buildAttributes: () => GcpBatchConfigurationAttributes,
+                                           maxAttempts: Int = 3,
+                                           initialIntervalMillis: Int = 5000,
+                                           maxIntervalMillis: Int = 10000,
+                                           multiplier: Double = 1.5,
+                                           randomizationFactor: Double = 0.5
+  ): GcpBatchConfigurationAttributes = {
     val backoff = new ExponentialBackOff.Builder()
       .setInitialIntervalMillis(initialIntervalMillis)
      .setMaxIntervalMillis(maxIntervalMillis)
@@ -96,7 +118,7 @@ object GcpBatchBackendLifecycleActorFactory extends StrictLogging {
       .build()
 
     // `attempt` is 1-based
-    def build(attempt: Int): Try[GcpBatchConfigurationAttributes] = {
+    def build(attempt: Int): Try[GcpBatchConfigurationAttributes] =
       Try {
         buildAttributes()
       } recoverWith {
@@ -106,9 +128,13 @@ object GcpBatchBackendLifecycleActorFactory extends StrictLogging {
             .warn(s"Failed to build GcpBatchConfigurationAttributes on attempt $attempt of $maxAttempts, retrying.", ex)
           Thread.sleep(backoff.nextBackOffMillis())
           build(attempt + 1)
-        case e => Failure(new RuntimeException(s"Failed to build GcpBatchConfigurationAttributes on attempt $attempt of $maxAttempts", e))
+        case e =>
+          Failure(
+            new RuntimeException(s"Failed to build GcpBatchConfigurationAttributes on attempt $attempt of $maxAttempts",
+                                 e
+            )
+          )
       }
-    }
 
     // This intentionally throws if the final result of `build` is a `Failure`.
     build(attempt = 1).get
   }
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiAbortClient.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiAbortClient.scala
index f025ae05282..892980441bf 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiAbortClient.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiAbortClient.scala
@@ -6,9 +6,8 @@ import cromwell.backend.google.batch.monitoring.BatchInstrumentation
 
 trait BatchApiAbortClient { this: Actor with ActorLogging with BatchInstrumentation =>
 
-  def abortJob(jobName: JobName, backendSingletonActor: ActorRef): Unit = {
+  def abortJob(jobName: JobName, backendSingletonActor: ActorRef): Unit =
     backendSingletonActor ! GcpBatchBackendSingletonActor.Action.AbortJob(jobName)
-  }
 
   def abortActorClientReceive: Actor.Receive = {
     case GcpBatchBackendSingletonActor.Event.JobAbortRequestSent(job) =>
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiFetchJobClient.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiFetchJobClient.scala
index e502bb18c2e..b18bdc23f4e 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiFetchJobClient.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiFetchJobClient.scala
@@ -33,7 +33,7 @@ trait BatchApiFetchJobClient { this: Actor with ActorLogging with BatchInstrumen
     pollingActorClientPromise = None
   }
 
-  def fetchJob(jobName: JobName, backendSingletonActor: ActorRef): Future[Job] = {
+  def fetchJob(jobName: JobName, backendSingletonActor: ActorRef): Future[Job] =
     pollingActorClientPromise match {
       case Some(p) => p.future
       case None =>
@@ -43,5 +43,4 @@ trait BatchApiFetchJobClient { this: Actor with ActorLogging with BatchInstrumen
         pollingActorClientPromise = Option(newPromise)
         newPromise.future
     }
-  }
 }
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiRunCreationClient.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiRunCreationClient.scala
index 0763369dc50..0825f865f77 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiRunCreationClient.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/BatchApiRunCreationClient.scala
@@ -35,7 +35,7 @@ trait BatchApiRunCreationClient { this: Actor with ActorLogging with BatchInstru
     runCreationClientPromise = None
   }
 
-  def runBatchJob(request: GcpBatchRequest, backendSingletonActor: ActorRef): Future[StandardAsyncJob] = {
+  def runBatchJob(request: GcpBatchRequest, backendSingletonActor: ActorRef): Future[StandardAsyncJob] =
     runCreationClientPromise match {
       case Some(p) =>
         p.future
@@ -46,5 +46,4 @@ trait BatchApiRunCreationClient { this: Actor with ActorLogging with BatchInstru
         runCreationClientPromise = Option(newPromise)
         newPromise.future
     }
-  }
 }
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala
index b3f9d47d316..eba54bdc4c3 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala
@@ -25,7 +25,13 @@ import cromwell.filesystems.gcs.GcsPathBuilder
 import cromwell.filesystems.gcs.GcsPathBuilder.ValidFullGcsPath
 
 import java.io.FileNotFoundException
-import cromwell.backend.standard.{ScriptPreambleData, StandardAdHocValue, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob}
+import cromwell.backend.standard.{
+  ScriptPreambleData,
+  StandardAdHocValue,
+  StandardAsyncExecutionActor,
+  StandardAsyncExecutionActorParams,
+  StandardAsyncJob
+}
 import cromwell.core._
 import cromwell.core.io.IoCommandBuilder
 import cromwell.core.path.{DefaultPathBuilder, Path}
@@ -84,7 +90,9 @@ object GcpBatchAsyncBackendJobExecutionActor {
 
   val plainTextContentType: Option[ContentType.WithCharset] = Option(ContentTypes.`text/plain(UTF-8)`)
 
-  private[batch] def groupParametersByGcsBucket[T <: BatchParameter](parameters: List[T]): Map[String, NonEmptyList[T]] = {
+  private[batch] def groupParametersByGcsBucket[T <: BatchParameter](
+    parameters: List[T]
+  ): Map[String, NonEmptyList[T]] =
     parameters.map { param =>
       def pathTypeString = if (param.isFileParameter) "File" else "Directory"
       val regexToUse = if (param.isFileParameter) gcsFilePathMatcher else gcsDirectoryPathMatcher
       param.cloudPath.pathAsString match {
         case regexToUse(bucket) => Map(bucket -> NonEmptyList.of(param))
         case regexToUse(bucket, _) => Map(bucket -> NonEmptyList.of(param))
         case other =>
-          throw new Exception(s"$pathTypeString path '$other' did not match the expected regex: ${regexToUse.pattern.toString}") with NoStackTrace
+          throw new Exception(
+            s"$pathTypeString path '$other' did not match the expected regex: ${regexToUse.pattern.toString}"
+          ) with NoStackTrace
       }
     } combineAll
-  }
 
   private[batch] def generateDrsLocalizerManifest(inputs: List[GcpBatchInput]): String = {
     val outputStream = new ByteArrayOutputStream()
     val csvPrinter = new CSVPrinter(new OutputStreamWriter(outputStream), CSVFormat.DEFAULT)
-    val drsFileInputs = inputs collect {
-      case drsInput@GcpBatchFileInput(_, drsPath: DrsPath, _, _) => (drsInput, drsPath)
+    val drsFileInputs = inputs collect { case drsInput @ GcpBatchFileInput(_, drsPath: DrsPath, _, _) =>
+      (drsInput, drsPath)
     }
     drsFileInputs foreach { case (drsInput, drsPath) =>
       csvPrinter.printRecord(drsPath.pathAsString, drsInput.containerPath.pathAsString)
@@ -114,7 +123,7 @@ object GcpBatchAsyncBackendJobExecutionActor {
 }
 
 class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams)
-  extends BackendJobLifecycleActor
+    extends BackendJobLifecycleActor
     with StandardAsyncExecutionActor
     with BatchApiRunCreationClient
     with BatchApiFetchJobClient
@@ -137,7 +146,8 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
   /** The type of the run status returned during each poll.
     */
   override type StandardAsyncRunState = RunStatus
 
-  override def receive: Receive = runCreationClientReceive orElse pollingActorClientReceive orElse abortActorClientReceive orElse kvClientReceive orElse super.receive
+  override def receive: Receive =
+    runCreationClientReceive orElse pollingActorClientReceive orElse abortActorClientReceive orElse kvClientReceive orElse super.receive
 
   /** Should return true if the status contained in `thiz` is equivalent to `that`, delta any other data that might be carried around
     * in the state type.
@@ -145,17 +155,22 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
   def statusEquivalentTo(thiz: StandardAsyncRunState)(that: StandardAsyncRunState): Boolean = thiz == that
 
   protected lazy val cmdInput: GcpBatchFileInput =
-    GcpBatchFileInput(GcpBatchJobPaths.BatchExecParamName, gcpBatchCallPaths.script, DefaultPathBuilder.get(gcpBatchCallPaths.scriptFilename), workingDisk)
+    GcpBatchFileInput(GcpBatchJobPaths.BatchExecParamName,
+                      gcpBatchCallPaths.script,
+                      DefaultPathBuilder.get(gcpBatchCallPaths.scriptFilename),
+                      workingDisk
+    )
 
   private lazy val jobDockerImage = jobDescriptor.maybeCallCachingEligible.dockerHash
     .getOrElse(runtimeAttributes.dockerImage)
 
   override def dockerImageUsed: Option[String] = Option(jobDockerImage)
 
-  //noinspection ActorMutableStateInspection
+  // noinspection ActorMutableStateInspection
   // Need to add previousRetryReasons and preemptible in order to get preemptible to work in the tests
-  protected val previousRetryReasons: ErrorOr[PreviousRetryReasons] = PreviousRetryReasons.tryApply(jobDescriptor.prefetchedKvStoreEntries, jobDescriptor.key.attempt)
+  protected val previousRetryReasons: ErrorOr[PreviousRetryReasons] =
+    PreviousRetryReasons.tryApply(jobDescriptor.prefetchedKvStoreEntries, jobDescriptor.key.attempt)
 
   lazy val preemptible: Boolean = previousRetryReasons match {
     case Valid(PreviousRetryReasons(p, _)) => p < maxPreemption
@@ -167,32 +182,56 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
   val backendSingletonActor: ActorRef = standardParams.backendSingletonActorOption
     .getOrElse(throw new RuntimeException("GCP Batch actor cannot exist without its backend singleton 2"))
 
-
   /**
     * Takes two arrays of remote and local WOM File paths and generates the necessary `GcpBatchInput`s.
     */
   protected def gcpBatchInputsFromWomFiles(inputName: String,
                                            remotePathArray: Seq[WomFile],
                                            localPathArray: Seq[WomFile],
-                                           jobDescriptor: BackendJobDescriptor): Iterable[GcpBatchInput] = {
-    (remotePathArray zip localPathArray) flatMap {
-      case (remotePath: WomMaybeListedDirectory, localPath) =>
-        maybeListedDirectoryToBatchParameters(inputName, remotePath, localPath.valueString)
-      case (remotePath: WomUnlistedDirectory, localPath) =>
-        Seq(GcpBatchDirectoryInput(inputName, getPath(remotePath.valueString).get, DefaultPathBuilder.get(localPath.valueString), workingDisk))
-      case (remotePath: WomMaybePopulatedFile, localPath) =>
-        maybePopulatedFileToBatchParameters(inputName, remotePath, localPath.valueString)
-      case (remotePath, localPath) =>
-        Seq(GcpBatchFileInput(inputName, getPath(remotePath.valueString).get, DefaultPathBuilder.get(localPath.valueString), workingDisk))
-    }
-  }
+                                           jobDescriptor: BackendJobDescriptor
+  ): Iterable[GcpBatchInput] =
+    (remotePathArray zip localPathArray) flatMap {
+      case (remotePath: WomMaybeListedDirectory, localPath) =>
+        maybeListedDirectoryToBatchParameters(inputName, remotePath, localPath.valueString)
+      case (remotePath: WomUnlistedDirectory, localPath) =>
+        Seq(
+          GcpBatchDirectoryInput(inputName,
+                                 getPath(remotePath.valueString).get,
+                                 DefaultPathBuilder.get(localPath.valueString),
+                                 workingDisk
+          )
+        )
+      case (remotePath: WomMaybePopulatedFile, localPath) =>
+        maybePopulatedFileToBatchParameters(inputName, remotePath, localPath.valueString)
+      case (remotePath, localPath) =>
+        Seq(
+          GcpBatchFileInput(inputName,
+                            getPath(remotePath.valueString).get,
+                            DefaultPathBuilder.get(localPath.valueString),
+                            workingDisk
+          )
+        )
+    }
 
-  private def maybePopulatedFileToBatchParameters(inputName: String, maybePopulatedFile: WomMaybePopulatedFile, localPath: String) = {
-    val secondaryFiles = maybePopulatedFile.secondaryFiles.flatMap({ secondaryFile =>
-      gcpBatchInputsFromWomFiles(secondaryFile.valueString, List(secondaryFile), List(relativeLocalizationPath(secondaryFile)), jobDescriptor)
-    })
+  private def maybePopulatedFileToBatchParameters(inputName: String,
+                                                  maybePopulatedFile: WomMaybePopulatedFile,
+                                                  localPath: String
+  ) = {
+    val secondaryFiles = maybePopulatedFile.secondaryFiles.flatMap { secondaryFile =>
+      gcpBatchInputsFromWomFiles(secondaryFile.valueString,
+                                 List(secondaryFile),
+                                 List(relativeLocalizationPath(secondaryFile)),
+                                 jobDescriptor
+      )
+    }
 
-    Seq(GcpBatchFileInput(inputName, getPath(maybePopulatedFile.valueString).get, DefaultPathBuilder.get(localPath), workingDisk)) ++ secondaryFiles
+    Seq(
+      GcpBatchFileInput(inputName,
+                        getPath(maybePopulatedFile.valueString).get,
+                        DefaultPathBuilder.get(localPath),
+                        workingDisk
+      )
+    ) ++ secondaryFiles
   }
 
   /**
@@ -201,7 +240,7 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     * relativeLocalizationPath("foo/bar.txt") -> "foo/bar.txt"
     * relativeLocalizationPath("gs://some/bucket/foo.txt") -> "some/bucket/foo.txt"
     */
-  override protected def relativeLocalizationPath(file: WomFile): WomFile = {
+  override protected def relativeLocalizationPath(file: WomFile): WomFile =
     file.mapFile(value =>
       getPath(value) match {
         case Success(drsPath: DrsPath) => DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync()
         case _ => value
       }
     )
-  }
 
   lazy val localMonitoringImageScriptPath: Path =
     DefaultPathBuilder.get(gcpBatchCallPaths.batchMonitoringImageScriptFilename)
 
-  override protected def fileName(file: WomFile): WomFile = {
+  override protected def fileName(file: WomFile): WomFile =
     file.mapFile(value =>
       getPath(value) match {
-        case Success(drsPath: DrsPath) => DefaultPathBuilder
-          .get(DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync()).name
+        case Success(drsPath: DrsPath) =>
+          DefaultPathBuilder
+            .get(DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync())
+            .name
         case Success(path) => path.name
         case _ => value
       }
     )
-  }
 
   override lazy val inputsToNotLocalize: Set[WomFile] = {
     val localizeOptional = jobDescriptor.findInputFilesByParameterMeta {
@@ -252,21 +291,20 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
   }
 
   // The original implementation recursively finds all non directory files, in V2 we can keep directory as is
-  protected lazy val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = {
-
+  protected lazy val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] =
     // NOTE: This causes the tests to fail
-    jobDescriptor.localInputs map {
-      case (key, womFile) =>
-        key -> womFile.collectAsSeq({
-          case womFile: WomFile if !inputsToNotLocalize.contains(womFile) => womFile
-        })
+    jobDescriptor.localInputs map { case (key, womFile) =>
+      key -> womFile.collectAsSeq {
+        case womFile: WomFile if !inputsToNotLocalize.contains(womFile) => womFile
       }
-  }
+    }
 
   private lazy val gcsTransferLibrary =
     Source.fromInputStream(Thread.currentThread.getContextClassLoader.getResourceAsStream("gcs_transfer.sh")).mkString
 
-  private def gcsLocalizationTransferBundle[T <: GcpBatchInput](gcsTransferConfiguration: GcsTransferConfiguration)(bucket: String, inputs: NonEmptyList[T]): String = {
+  private def gcsLocalizationTransferBundle[T <: GcpBatchInput](
+    gcsTransferConfiguration: GcsTransferConfiguration
+  )(bucket: String, inputs: NonEmptyList[T]): String = {
     val project = inputs.head.cloudPath.asInstanceOf[GcsPath].projectId
     val maxAttempts = gcsTransferConfiguration.transferAttempts
@@ -285,25 +323,27 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     // Deduplicate any inputs since parallel localization can't deal with this.
     val uniqueFilesByContainerParentDirectory = filesByContainerParentDirectory map { case (p, fs) => p -> fs.toSet }
 
-    val filesWithSameNamesTransferBundles: List[String] = uniqueFilesByContainerParentDirectory.toList map { case (containerParent, filesWithSameParent) =>
-      val arrayIdentifier = s"files_to_localize_" + DigestUtils.md5Hex(bucket + containerParent)
-      val entries = filesWithSameParent.map(_.cloudPath) mkString("\"", "\"\n| \"", "\"")
+    val filesWithSameNamesTransferBundles: List[String] = uniqueFilesByContainerParentDirectory.toList map {
+      case (containerParent, filesWithSameParent) =>
+        val arrayIdentifier = s"files_to_localize_" + DigestUtils.md5Hex(bucket + containerParent)
+        val entries = filesWithSameParent.map(_.cloudPath) mkString ("\"", "\"\n| \"", "\"")
 
-      s"""
-         |# Localize files from source bucket '$bucket' to container parent directory '$containerParent'.
-         |$arrayIdentifier=(
-         |  "$project" # project to use if requester pays
-         |  "$maxAttempts" # max transfer attempts
-         |  "${containerParent.ensureSlashed}" # container parent directory
-         |  $entries
-         |)
-         |
-         |localize_files "$${$arrayIdentifier[@]}"
+        s"""
+           |# Localize files from source bucket '$bucket' to container parent directory '$containerParent'.
+           |$arrayIdentifier=(
+           |  "$project" # project to use if requester pays
+           |  "$maxAttempts" # max transfer attempts
+           |  "${containerParent.ensureSlashed}" # container parent directory
+           |  $entries
+           |)
+           |
+           |localize_files "$${$arrayIdentifier[@]}"
       """.stripMargin
     }
 
     val filesWithDifferentNamesTransferBundles = filesWithDifferentNames map { f =>
-      val arrayIdentifier = s"singleton_file_to_localize_" + DigestUtils.md5Hex(f.cloudPath.pathAsString + f.containerPath.pathAsString)
+      val arrayIdentifier =
+        s"singleton_file_to_localize_" + DigestUtils.md5Hex(f.cloudPath.pathAsString + f.containerPath.pathAsString)
       s"""
          |# Localize singleton file '${f.cloudPath.pathAsString}' to '${f.containerPath.pathAsString}'.
         |$arrayIdentifier=(
@@ -319,27 +359,31 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
 
     // Only write a transfer bundle for directories if there are directories to be localized. Emptiness isn't a concern
     // for files since there is always at least the command script to be localized.
-    val directoryTransferBundle = if (directories.isEmpty) "" else {
-      val entries = directories flatMap { i => List(i.cloudPath, i.containerPath) } mkString("\"", "\"\n| \"", "\"")
+    val directoryTransferBundle =
+      if (directories.isEmpty) ""
+      else {
+        val entries = directories flatMap { i => List(i.cloudPath, i.containerPath) } mkString ("\"", "\"\n| \"", "\"")
 
-      val arrayIdentifier = s"directories_to_localize_" + DigestUtils.md5Hex(bucket)
+        val arrayIdentifier = s"directories_to_localize_" + DigestUtils.md5Hex(bucket)
 
-      s"""
-         |# Directories from source bucket '$bucket'.
-         |$arrayIdentifier=(
-         |  "$project" # project to use if requester pays
-         |  "$maxAttempts" # max transfer attempts
-         |  $entries
-         |)
-         |
-         |localize_directories "$${$arrayIdentifier[@]}"
+        s"""
+           |# Directories from source bucket '$bucket'.
+           |$arrayIdentifier=(
+           |  "$project" # project to use if requester pays
+           |  "$maxAttempts" # max transfer attempts
+           |  $entries
+           |)
+           |
+           |localize_directories "$${$arrayIdentifier[@]}"
       """.stripMargin
-    }
+      }
 
     (directoryTransferBundle :: (filesWithSameNamesTransferBundles ++ filesWithDifferentNamesTransferBundles)) mkString "\n\n"
   }
 
-  private def gcsDelocalizationTransferBundle[T <: GcpBatchOutput](transferConfiguration: GcsTransferConfiguration)(bucket: String, outputs: NonEmptyList[T]): String = {
+  private def gcsDelocalizationTransferBundle[T <: GcpBatchOutput](
+    transferConfiguration: GcsTransferConfiguration
+  )(bucket: String, outputs: NonEmptyList[T]): String = {
     val project = outputs.head.cloudPath.asInstanceOf[GcsPath].projectId
     val maxAttempts = transferConfiguration.transferAttempts
@@ -350,14 +394,16 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       case _: GcpBatchDirectoryOutput => "directory" // a primary directory
     }
 
-      val optional = Option(output) collectFirst { case o: GcpBatchFileOutput if o.secondary || o.optional => "optional" } getOrElse "required"
+      val optional = Option(output) collectFirst {
+        case o: GcpBatchFileOutput if o.secondary || o.optional => "optional"
+      } getOrElse "required"
       val contentType = output.contentType.map(_.toString).getOrElse("")
 
       List(kind, output.cloudPath.toString, output.containerPath.toString, optional, contentType)
-    } mkString("\"", "\"\n| \"", "\"")
+    } mkString ("\"", "\"\n| \"", "\"")
 
-    val parallelCompositeUploadThreshold = jobDescriptor.workflowDescriptor.workflowOptions.getOrElse(
-      "parallel_composite_upload_threshold", transferConfiguration.parallelCompositeUploadThreshold)
+    val parallelCompositeUploadThreshold = jobDescriptor.workflowDescriptor.workflowOptions
+      .getOrElse("parallel_composite_upload_threshold", transferConfiguration.parallelCompositeUploadThreshold)
 
     // Use a digest as bucket names can contain characters that are not legal in bash identifiers.
     val arrayIdentifier = s"delocalize_" + DigestUtils.md5Hex(bucket)
@@ -374,13 +420,12 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       """.stripMargin
   }
 
-  private def bracketTransfersWithMessages(activity: String)(transferBody: String): String = {
+  private def bracketTransfersWithMessages(activity: String)(transferBody: String): String =
     List(
       s"timestamped_message '$activity script execution started...'",
       transferBody,
       s"timestamped_message '$activity script execution complete.'"
     ) mkString "\n"
-  }
 
   def uploadDrsLocalizationManifest(createParameters: CreateBatchJobParameters, cloudPath: Path): Future[Unit] = {
     val content = generateDrsLocalizerManifest(createParameters.inputOutputParameters.fileInputParameters)
 
     Future.unit
   }
 
-  def uploadScriptFile(): Future[Unit] = {
+  def uploadScriptFile(): Future[Unit] =
     commandScriptContents
       .fold(
-        errors => Future
-          .failed(new RuntimeException(errors
-            .toList
-            .mkString(", "))),
+        errors =>
+          Future
+            .failed(
+              new RuntimeException(
+                errors.toList
+                  .mkString(", ")
+              )
+            ),
         asyncIo
-          .writeAsync(jobPaths
-            .script, _, Seq(CloudStorageOptions.withMimeType("text/plain")))
+          .writeAsync(jobPaths.script, _, Seq(CloudStorageOptions.withMimeType("text/plain")))
       )
-  }
 
   def sendGoogleLabelsToMetadata(customLabels: Seq[GcpLabel]): Unit = {
-    lazy val backendLabelEvents: Map[String, String] = ((backendLabels ++ customLabels) map { l => s"${CallMetadataKeys.BackendLabels}:${l.key}" -> l.value }).toMap
+    lazy val backendLabelEvents: Map[String, String] =
+      ((backendLabels ++ customLabels) map { l => s"${CallMetadataKeys.BackendLabels}:${l.key}" -> l.value }).toMap
     tellMetadata(backendLabelEvents)
   }
 
@@ -415,34 +463,37 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       case Failure(OptionNotFoundException(_)) => false
       case Failure(f) =>
         // Should not happen, this case should have been screened for and fast-failed during workflow materialization.
-        log.error(f, s"Programmer error: unexpected failure attempting to read value for workflow option '$optionName' as a Boolean")
+        log.error(
+          f,
+          s"Programmer error: unexpected failure attempting to read value for workflow option '$optionName' as a Boolean"
+        )
         false
     }
   }
 
-
-  def getReferenceInputsToMountedPathsOpt(createParameters: CreateBatchJobParameters): Option[Map[GcpBatchInput, String]] = {
+  def getReferenceInputsToMountedPathsOpt(
+    createParameters: CreateBatchJobParameters
+  ): Option[Map[GcpBatchInput, String]] =
     if (useReferenceDisks) {
-      batchAttributes
-        .referenceFileToDiskImageMappingOpt
+      batchAttributes.referenceFileToDiskImageMappingOpt
        .map(getReferenceInputsToMountedPathMappings(_, createParameters.inputOutputParameters.fileInputParameters))
     } else {
       None
     }
-  }
 
   private def generateGcsLocalizationScript(inputs: List[GcpBatchInput],
-                                            referenceInputsToMountedPathsOpt: Option[Map[GcpBatchInput, String]])
-                                           (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = {
+                                            referenceInputsToMountedPathsOpt: Option[Map[GcpBatchInput, String]]
+  )(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = {
     // Generate a mapping of reference inputs to their mounted paths and a section of the localization script to
     // "faux localize" these reference inputs with symlinks to their locations on mounted reference disks.
     import cromwell.backend.google.batch.runnable.RunnableUtils.shellEscaped
 
     val referenceFilesLocalizationScript = {
       val symlinkCreationCommandsOpt = referenceInputsToMountedPathsOpt map { referenceInputsToMountedPaths =>
-        referenceInputsToMountedPaths map {
-          case (input, absolutePathOnRefDisk) =>
-            s"mkdir -p ${shellEscaped(input.containerPath.parent.pathAsString)} && ln -s ${shellEscaped(absolutePathOnRefDisk)} ${shellEscaped(input.containerPath.pathAsString)}"
+        referenceInputsToMountedPaths map { case (input, absolutePathOnRefDisk) =>
+          s"mkdir -p ${shellEscaped(input.containerPath.parent.pathAsString)} && ln -s ${shellEscaped(
+            absolutePathOnRefDisk
+          )} ${shellEscaped(input.containerPath.pathAsString)}"
         }
       }
@@ -464,9 +515,9 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     }
 
     val regularFilesLocalizationScript = {
-      val regularFiles = referenceInputsToMountedPathsOpt.map(maybeReferenceInputsToMountedPaths =>
-        inputs diff maybeReferenceInputsToMountedPaths.keySet.toList
-      ).getOrElse(inputs)
+      val regularFiles = referenceInputsToMountedPathsOpt
+        .map(maybeReferenceInputsToMountedPaths => inputs diff maybeReferenceInputsToMountedPaths.keySet.toList)
+        .getOrElse(inputs)
       if (regularFiles.nonEmpty) {
         val bundleFunction = (gcsLocalizationTransferBundle(gcsTransferConfiguration) _).tupled
         generateGcsTransferScript(regularFiles, bundleFunction)
@@ -485,12 +536,16 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     combinedLocalizationScript |> bracketTransfersWithMessages("Localization")
   }
 
-  private def generateGcsDelocalizationScript(outputs: List[GcpBatchOutput])(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = {
+  private def generateGcsDelocalizationScript(
+    outputs: List[GcpBatchOutput]
+  )(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = {
     val bundleFunction = (gcsDelocalizationTransferBundle(gcsTransferConfiguration) _).tupled
     generateGcsTransferScript(outputs, bundleFunction) |> bracketTransfersWithMessages("Delocalization")
   }
 
-  private def generateGcsTransferScript[T <: BatchParameter](items: List[T], bundleFunction: ((String, NonEmptyList[T])) => String): String = {
+  private def generateGcsTransferScript[T <: BatchParameter](items: List[T],
+                                                             bundleFunction: ((String, NonEmptyList[T])) => String
+  ): String = {
     val gcsItems = items collect { case i if i.cloudPath.isInstanceOf[GcsPath] => i }
     groupParametersByGcsBucket(gcsItems) map bundleFunction mkString "\n"
   }
@@ -499,24 +554,35 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
                                        cloudPath: Path,
                                        transferLibraryContainerPath: Path,
                                        gcsTransferConfiguration: GcsTransferConfiguration,
-                                       referenceInputsToMountedPathsOpt: Option[Map[GcpBatchInput, String]]): Future[Unit] = {
-    val content = generateGcsLocalizationScript(createParameters.inputOutputParameters.fileInputParameters, referenceInputsToMountedPathsOpt)(gcsTransferConfiguration)
-    asyncIo.writeAsync(cloudPath, s"source '$transferLibraryContainerPath'\n\n" + content, Seq(CloudStorageOptions.withMimeType("text/plain")))
+                                       referenceInputsToMountedPathsOpt: Option[Map[GcpBatchInput, String]]
+  ): Future[Unit] = {
+    val content = generateGcsLocalizationScript(createParameters.inputOutputParameters.fileInputParameters,
+                                                referenceInputsToMountedPathsOpt
+    )(gcsTransferConfiguration)
+    asyncIo.writeAsync(cloudPath,
+                       s"source '$transferLibraryContainerPath'\n\n" + content,
+                       Seq(CloudStorageOptions.withMimeType("text/plain"))
+    )
   }
 
   def uploadGcsDelocalizationScript(createParameters: CreateBatchJobParameters,
                                     cloudPath: Path,
                                     transferLibraryContainerPath: Path,
-                                    gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = {
-    val content = generateGcsDelocalizationScript(createParameters.inputOutputParameters.fileOutputParameters)(gcsTransferConfiguration)
-    asyncIo.writeAsync(cloudPath, s"source '$transferLibraryContainerPath'\n\n" + content, Seq(CloudStorageOptions.withMimeType("text/plain")))
+                                    gcsTransferConfiguration: GcsTransferConfiguration
+  ): Future[Unit] = {
+    val content = generateGcsDelocalizationScript(createParameters.inputOutputParameters.fileOutputParameters)(
+      gcsTransferConfiguration
+    )
+    asyncIo.writeAsync(cloudPath,
+                       s"source '$transferLibraryContainerPath'\n\n" + content,
+                       Seq(CloudStorageOptions.withMimeType("text/plain"))
+    )
   }
 
-
   // TAG DISK
   private def createBatchParameters(inputOutputParameters: InputOutputParameters,
-                                    customLabels: Seq[GcpLabel],
-                                   ): CreateBatchJobParameters = {
+                                    customLabels: Seq[GcpLabel]
+  ): CreateBatchJobParameters =
     standardParams.backendInitializationDataOption match {
       case Some(data: GcpBackendInitializationData) =>
         val dockerKeyAndToken: Option[CreateBatchDockerKeyAndToken] = for {
           key <- data.privateDockerEncryptionKeyName
           token <- data.privateDockerEncryptedToken
         } yield CreateBatchDockerKeyAndToken(key, token)
 
         /*
-         * Right now this doesn't cost anything, because sizeOption returns the size if it was previously already fetched
-         * for some reason (expression evaluation for instance), but otherwise does not retrieve it and returns None.
-         * In CWL-land we tend to be aggressive in pre-fetching the size in order to be able to evaluate JS expressions,
-         * but less in WDL as we can get it last minute and on demand because size is a WDL function, whereas in CWL
-         * we don't inspect the JS to know if size is called and therefore always pre-fetch it.
-         *
-         * We could decide to call withSize before in which case we would retrieve the size for all files and have
-         * a guaranteed more accurate total size, but there might be performance impacts ?
-         */
+         * Right now this doesn't cost anything, because sizeOption returns the size if it was previously already fetched
+         * for some reason (expression evaluation for instance), but otherwise does not retrieve it and returns None.
+         * In CWL-land we tend to be aggressive in pre-fetching the size in order to be able to evaluate JS expressions,
+         * but less in WDL as we can get it last minute and on demand because size is a WDL function, whereas in CWL
+         * we don't inspect the JS to know if size is called and therefore always pre-fetch it.
+         *
+         * We could decide to call withSize before in which case we would retrieve the size for all files and have
+         * a guaranteed more accurate total size, but there might be performance impacts ?
+         */
        val inputFileSize = Option(callInputFiles.values.flatMap(_.flatMap(_.sizeOption)).sum)
 
        // Attempt to adjust the disk size by taking into account the size of input files
-        val adjustedSizeDisks = inputFileSize.map(size => MemorySize.apply(size.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB)) map { inputFileSizeInformation =>
-          runtimeAttributes.disks.adjustWorkingDiskWithNewMin(
-            inputFileSizeInformation,
-            jobLogger.info(s"Adjusted working disk size to ${inputFileSizeInformation.amount} GB to account for input files")
-          )
-        } getOrElse runtimeAttributes.disks
+        val adjustedSizeDisks =
+          inputFileSize.map(size => MemorySize.apply(size.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB)) map {
+            inputFileSizeInformation =>
+              runtimeAttributes.disks.adjustWorkingDiskWithNewMin(
+                inputFileSizeInformation,
+                jobLogger.info(
+                  s"Adjusted working disk size to ${inputFileSizeInformation.amount} GB to account for input files"
+                )
+              )
+          } getOrElse runtimeAttributes.disks
 
         val inputFilePaths = inputOutputParameters.jobInputParameters.map(_.cloudPath.pathAsString).toSet
 
         val referenceDisksToMount =
           batchAttributes.referenceFileToDiskImageMappingOpt.map(getReferenceDisksToMount(_, inputFilePaths))
 
-        val dockerhubCredentials: (String, String) = {
+        val dockerhubCredentials: (String, String) =
           new String(Base64.getDecoder.decode(batchAttributes.dockerhubToken), "UTF-8").split(":", 2) match {
             case Array(username, password) => (username, password)
             case _ => ("", "")
           }
-        }
 
         val workflowOptions = workflowDescriptor.workflowOptions
@@ -564,7 +633,7 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
           workflowPaths = workflowPaths,
           commandDirectory = commandDirectory,
           workingDisk = workingDisk,
-          localMonitoringImageScriptPath = localMonitoringImageScriptPath,
+          localMonitoringImageScriptPath = localMonitoringImageScriptPath
         )
 
         val checkpointingConfiguration =
@@ -614,9 +683,10 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       case None => throw new RuntimeException("No batch backend initialization data found?")
     }
-  }
 
-  protected def relativePathAndAttachedDisk(path: String, disks: Seq[GcpBatchAttachedDisk]): (Path, GcpBatchAttachedDisk) = {
+  protected def relativePathAndAttachedDisk(path: String,
+                                            disks: Seq[GcpBatchAttachedDisk]
+  ): (Path, GcpBatchAttachedDisk) = {
     val absolutePath = DefaultPathBuilder.get(path) match {
       case p if !p.isAbsolute => GcpBatchWorkingDisk.MountPoint.resolve(p)
       case p => p
@@ -625,13 +695,14 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     disks.find(d => absolutePath.startsWith(d.mountPoint)) match {
       case Some(disk) => (disk.mountPoint.relativize(absolutePath), disk)
       case None =>
-        throw new Exception(s"Absolute path $path doesn't appear to be under any mount points: ${disks.map(_.toString).mkString(", ")}")
+        throw new Exception(
+          s"Absolute path $path doesn't appear to be under any mount points: ${disks.map(_.toString).mkString(", ")}"
+        )
     }
   }
 
-  protected def makeSafeReferenceName(referenceName: String): String = {
+  protected def makeSafeReferenceName(referenceName: String): String =
     if (referenceName.length <= 127) referenceName else referenceName.md5Sum
-  }
 
   // De-localize the glob directory as a GcpBatchDirectoryOutput instead of using * pattern match
   protected def generateGlobFileOutputs(womFile: WomGlobFile): List[GcpBatchOutput] = {
@@ -646,9 +717,23 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     // We need both the glob directory and the glob list:
     List(
       // The glob directory:
-      GcpBatchDirectoryOutput(makeSafeReferenceName(globDirectory), gcsGlobDirectoryDestinationPath, DefaultPathBuilder.get(globDirectory), globDirectoryDisk, optional = false, secondary = false),
+      GcpBatchDirectoryOutput(
+        makeSafeReferenceName(globDirectory),
+        gcsGlobDirectoryDestinationPath,
+        DefaultPathBuilder.get(globDirectory),
+        globDirectoryDisk,
+        optional = false,
+        secondary = false
+      ),
       // The glob list file:
-      GcpBatchFileOutput(makeSafeReferenceName(globListFile), gcsGlobListFileDestinationPath, DefaultPathBuilder.get(globListFile), globDirectoryDisk, optional = false, secondary = false)
+      GcpBatchFileOutput(
+        makeSafeReferenceName(globListFile),
+        gcsGlobListFileDestinationPath,
+        DefaultPathBuilder.get(globListFile),
+        globDirectoryDisk,
+        optional = false,
+        secondary = false
+      )
     )
   }
 
@@ -656,23 +741,22 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
   lazy val localMonitoringLogPath: Path = DefaultPathBuilder.get(gcpBatchCallPaths.batchMonitoringLogFilename)
   lazy val localMonitoringScriptPath: Path = DefaultPathBuilder.get(gcpBatchCallPaths.batchMonitoringScriptFilename)
 
-  lazy val monitoringScript: Option[GcpBatchFileInput] = {
+  lazy val monitoringScript: Option[GcpBatchFileInput] =
     gcpBatchCallPaths.workflowPaths.monitoringScriptPath map { path =>
       GcpBatchFileInput(s"$batchMonitoringParamName-in", path, localMonitoringScriptPath, workingDisk)
     }
-  }
 
-  private val DockerMonitoringLogPath: Path = GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringLogFilename)
-  private val DockerMonitoringScriptPath: Path = GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringScriptFilename)
+  private val DockerMonitoringLogPath: Path =
+    GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringLogFilename)
+  private val DockerMonitoringScriptPath: Path =
+    GcpBatchWorkingDisk.MountPoint.resolve(gcpBatchCallPaths.batchMonitoringScriptFilename)
 
-  override def scriptPreamble: ErrorOr[ScriptPreambleData] = {
+  override def scriptPreamble: ErrorOr[ScriptPreambleData] =
     if (monitoringOutput.isDefined)
-      ScriptPreambleData(
-        s"""|touch $DockerMonitoringLogPath
-            |chmod u+x $DockerMonitoringScriptPath
-            |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin).valid
+      ScriptPreambleData(s"""|touch $DockerMonitoringLogPath
+                             |chmod u+x $DockerMonitoringScriptPath
+                             |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin).valid
     else ScriptPreambleData("").valid
-  }
 
   private[actors] def generateInputs(jobDescriptor: BackendJobDescriptor): Set[GcpBatchInput] = {
     // We need to tell PAPI about files that were created as part of command instantiation (these need to be defined
     // md5's of their paths.
     val writeFunctionFiles = instantiatedCommand.createdFiles map { f => f.file.value.md5SumShort -> List(f) } toMap
 
-    val writeFunctionInputs = writeFunctionFiles flatMap {
-      case (name, files) => gcpBatchInputsFromWomFiles(name, files.map(_.file), files.map(localizationPath), jobDescriptor)
+    val writeFunctionInputs = writeFunctionFiles flatMap { case (name, files) =>
+      gcpBatchInputsFromWomFiles(name, files.map(_.file), files.map(localizationPath), jobDescriptor)
     }
 
-    val callInputInputs = callInputFiles flatMap {
-      case (name, files) => gcpBatchInputsFromWomFiles(name, files, files.map(relativeLocalizationPath), jobDescriptor)
+    val callInputInputs = callInputFiles flatMap { case (name, files) =>
+      gcpBatchInputsFromWomFiles(name, files, files.map(relativeLocalizationPath), jobDescriptor)
     }
 
     (writeFunctionInputs ++ callInputInputs).toSet
   }
 
   // Simply create a GcpBatchDirectoryOutput instead of globbing
-  protected def generateUnlistedDirectoryOutputs(unlistedDirectory: WomUnlistedDirectory, fileEvaluation: FileEvaluation): List[GcpBatchOutput] = {
+  protected def generateUnlistedDirectoryOutputs(unlistedDirectory: WomUnlistedDirectory,
+                                                 fileEvaluation: FileEvaluation
+  ): List[GcpBatchOutput] = {
    val destination = callRootPath.resolve(unlistedDirectory.value.stripPrefix("/"))
    val (relpath, disk) = relativePathAndAttachedDisk(unlistedDirectory.value, runtimeAttributes.disks)
-    val directoryOutput = GcpBatchDirectoryOutput(makeSafeReferenceName(unlistedDirectory.value), destination, relpath, disk, fileEvaluation.optional, fileEvaluation.secondary)
+    val directoryOutput = GcpBatchDirectoryOutput(makeSafeReferenceName(unlistedDirectory.value),
+                                                  destination,
+                                                  relpath,
+                                                  disk,
+                                                  fileEvaluation.optional,
+                                                  fileEvaluation.secondary
+    )
     List(directoryOutput)
   }
 
-  private def maybeListedDirectoryToBatchParameters(inputName: String, womMaybeListedDirectory: WomMaybeListedDirectory, localPath: String) = womMaybeListedDirectory match {
+  private def maybeListedDirectoryToBatchParameters(inputName: String,
+                                                    womMaybeListedDirectory: WomMaybeListedDirectory,
+                                                    localPath: String
+  ) = womMaybeListedDirectory match {
     // If there is a path, simply localize as a directory
     case WomMaybeListedDirectory(Some(path), _, _, _) =>
       List(GcpBatchDirectoryInput(inputName, getPath(path).get, DefaultPathBuilder.get(localPath), workingDisk))
 
     // If there is a listing, recurse and call gcpBatchInputsFromWomFiles on all the listed files
     case WomMaybeListedDirectory(_, Some(listing), _, _) if listing.nonEmpty =>
-      listing.flatMap({
+      listing.flatMap {
         case womFile: WomFile if isAdHocFile(womFile) =>
-          gcpBatchInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), List(womFile), List(fileName(womFile)), jobDescriptor)
+          gcpBatchInputsFromWomFiles(makeSafeReferenceName(womFile.valueString),
+                                     List(womFile),
+                                     List(fileName(womFile)),
+                                     jobDescriptor
+          )
         case womFile: WomFile =>
-          gcpBatchInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), List(womFile), List(relativeLocalizationPath(womFile)), jobDescriptor)
-      })
+          gcpBatchInputsFromWomFiles(makeSafeReferenceName(womFile.valueString),
+                                     List(womFile),
+                                     List(relativeLocalizationPath(womFile)),
+                                     jobDescriptor
+          )
+      }
     case _ => List.empty
   }
 
     val (relpath, disk) = relativePathAndAttachedDisk(womFile.value, runtimeAttributes.disks)
     // If the file is on a custom mount point, resolve it so that the full mount path will show up in the cloud path
     // For the default one (cromwell_root), the expectation is that it does not appear
-    val mountedPath = if (!disk.mountPoint.isSamePathAs(GcpBatchWorkingDisk.Default.mountPoint)) disk.mountPoint.resolve(relpath) else relpath
+    val mountedPath =
+      if (!disk.mountPoint.isSamePathAs(GcpBatchWorkingDisk.Default.mountPoint)) disk.mountPoint.resolve(relpath)
+      else relpath
     // Normalize the local path (to get rid of ".." and "."). Also strip any potential leading / so that it gets appended to the call root
     val normalizedPath = mountedPath.normalize().pathAsString.stripPrefix("/")
     val destination = callRootPath.resolve(normalizedPath)
-    val batchFileOutput = GcpBatchFileOutput(makeSafeReferenceName(womFile.value), destination, relpath, disk, fileEvaluation.optional, fileEvaluation.secondary)
+    val batchFileOutput = GcpBatchFileOutput(makeSafeReferenceName(womFile.value),
+                                             destination,
+                                             relpath,
+                                             disk,
+                                             fileEvaluation.optional,
+                                             fileEvaluation.secondary
+    )
     List(batchFileOutput)
   }
 
   private[actors] def generateOutputs(jobDescriptor: BackendJobDescriptor): Set[GcpBatchOutput] = {
-    def evaluateFiles(output: OutputDefinition): List[FileEvaluation] = {
+    def evaluateFiles(output: OutputDefinition): List[FileEvaluation] =
       Try(
         output.expression.evaluateFiles(jobDescriptor.localInputs, NoIoFunctionSet, output.womType).map(_.toList)
       ).getOrElse(List.empty[FileEvaluation].validNel)
         .getOrElse(List.empty)
-    }
 
-    def relativeFileEvaluation(evaluation: FileEvaluation): FileEvaluation = {
+    def relativeFileEvaluation(evaluation: FileEvaluation): FileEvaluation =
       evaluation.copy(file = relativeLocalizationPath(evaluation.file))
-    }
 
     val womFileOutputs = jobDescriptor.taskCall.callable.outputs.flatMap(evaluateFiles) map relativeFileEvaluation
 
     val outputs: Seq[GcpBatchOutput] = womFileOutputs.distinct flatMap { fileEvaluation =>
       fileEvaluation.file.flattenFiles flatMap {
-        case unlistedDirectory: WomUnlistedDirectory => generateUnlistedDirectoryOutputs(unlistedDirectory, fileEvaluation)
+        case unlistedDirectory: WomUnlistedDirectory =>
+          generateUnlistedDirectoryOutputs(unlistedDirectory, fileEvaluation)
         case singleFile: WomSingleFile => generateSingleFileOutputs(singleFile, fileEvaluation)
         case globFile: WomGlobFile => generateGlobFileOutputs(globFile) // Assumes optional = false for globs.
       }
     }
 
-    val additionalGlobOutput = jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(generateGlobFileOutputs).toSet
+    val additionalGlobOutput =
+      jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(generateGlobFileOutputs).toSet
 
     outputs.toSet ++ additionalGlobOutput
   }
 
   protected def uploadGcsTransferLibrary(createBatchParameters: CreateBatchJobParameters,
                                          cloudPath: Path,
-                                         gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = {
-
+                                         gcsTransferConfiguration: GcsTransferConfiguration
+  ): Future[Unit] =
     asyncIo.writeAsync(cloudPath, gcsTransferLibrary, Seq(CloudStorageOptions.withMimeType("text/plain")))
-  }
-
   lazy val monitoringOutput: Option[GcpBatchFileOutput] = monitoringScript map { _ =>
-    GcpBatchFileOutput(s"$batchMonitoringParamName-out",
-      gcpBatchCallPaths.batchMonitoringLogPath, localMonitoringLogPath, workingDisk, optional = false, secondary = false,
-      contentType = plainTextContentType)
+    GcpBatchFileOutput(
+      s"$batchMonitoringParamName-out",
+      gcpBatchCallPaths.batchMonitoringLogPath,
+      localMonitoringLogPath,
+      workingDisk,
+      optional = false,
+      secondary = false,
+      contentType = plainTextContentType
+    )
   }
 
   override lazy val commandDirectory: Path = GcpBatchWorkingDisk.MountPoint
-
 
   // Primary entry point for cromwell to run GCP Batch job
   override def executeAsync(): Future[ExecutionHandle] = {
@@ -778,8 +892,15 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     def evaluateRuntimeAttributes = Future.fromTry(Try(runtimeAttributes))
 
     def generateInputOutputParameters: Future[InputOutputParameters] = Future.fromTry(Try {
-      val rcFileOutput = GcpBatchFileOutput(returnCodeFilename, returnCodeGcsPath, DefaultPathBuilder.get(returnCodeFilename), workingDisk, optional = false, secondary = false,
-        contentType = plainTextContentType)
+      val rcFileOutput = GcpBatchFileOutput(
+        returnCodeFilename,
+        returnCodeGcsPath,
+        DefaultPathBuilder.get(returnCodeFilename),
+        workingDisk,
+        optional = false,
+        secondary = false,
+        contentType = plainTextContentType
+      )
 
       val memoryRetryRCFileOutput = GcpBatchFileOutput(
         memoryRetryRCFilename,
@@ -799,8 +920,16 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
         StandardStream("stdout", _.output),
         StandardStream("stderr", _.error)
       ) map { s =>
-        GcpBatchFileOutput(s.name, returnCodeGcsPath.sibling(s.filename), DefaultPathBuilder.get(s.filename),
-          workingDisk, optional = false, secondary = false, uploadPeriod = batchAttributes.logFlushPeriod, contentType = plainTextContentType)
+        GcpBatchFileOutput(
+          s.name,
+          returnCodeGcsPath.sibling(s.filename),
+          DefaultPathBuilder.get(s.filename),
+          workingDisk,
+          optional = false,
+          secondary = false,
+          uploadPeriod = batchAttributes.logFlushPeriod,
+          contentType = plainTextContentType
+        )
       }
 
       InputOutputParameters(
@@ -820,13 +949,13 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
     })
 
-
     val gcpBatchParameters = CreateGcpBatchParameters(
       jobDescriptor = jobDescriptor,
       runtimeAttributes = runtimeAttributes,
       batchAttributes = batchAttributes,
      projectId = batchAttributes.project,
-      region = batchAttributes.location)
+      region = batchAttributes.location
+    )
 
     val runBatchResponse = for {
       _ <- evaluateRuntimeAttributes
      _ <- uploadScriptFile()
      _ <- uploadGcsTransferLibrary(createParameters, gcsTransferLibraryCloudPath, gcsTransferConfiguration)
       gcsLocalizationScriptCloudPath = jobPaths.callExecutionRoot / GcpBatchJobPaths.GcsLocalizationScriptName
       referenceInputsToMountedPathsOpt = getReferenceInputsToMountedPathsOpt(createParameters)
-      _ <- uploadGcsLocalizationScript(createParameters, gcsLocalizationScriptCloudPath, transferLibraryContainerPath, gcsTransferConfiguration, referenceInputsToMountedPathsOpt)
+      _ <- uploadGcsLocalizationScript(createParameters,
+                                       gcsLocalizationScriptCloudPath,
+                                       transferLibraryContainerPath,
+                                       gcsTransferConfiguration,
+                                       referenceInputsToMountedPathsOpt
+      )
       gcsDelocalizationScriptCloudPath = jobPaths.callExecutionRoot / GcpBatchJobPaths.GcsDelocalizationScriptName
-      _ <- uploadGcsDelocalizationScript(createParameters, gcsDelocalizationScriptCloudPath, transferLibraryContainerPath, gcsTransferConfiguration)
+      _ <- uploadGcsDelocalizationScript(createParameters,
+                                         gcsDelocalizationScriptCloudPath,
+                                         transferLibraryContainerPath,
+                                         gcsTransferConfiguration
+      )
       _ = createParameters.privateDockerKeyAndEncryptedToken.isDefined
       jobName = "job-" + java.util.UUID.randomUUID.toString
       request = GcpBatchRequest(workflowId, createParameters, jobName = jobName, gcpBatchParameters)
       response <- runBatchJob(request, backendSingletonActor)
     } yield response
 
     // TODO: Handle when the job gets aborted before it starts being processed
     runBatchResponse.map { runId =>
-      PendingExecutionHandle(
-        jobDescriptor = jobDescriptor,
-        pendingJob = runId,
-        runInfo = Option(Run(runId)),
-        previousState = None)
+      PendingExecutionHandle(jobDescriptor = jobDescriptor,
+                             pendingJob = runId,
+                             runInfo = Option(Run(runId)),
+                             previousState = None
+      )
     }
   }
 
   override def reconnectAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = {
     log.info("reconnect async runs") // in for debugging remove later
-    val handle = PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState](jobDescriptor, jobId, Option(Run(jobId)), previousState = None)
+    val handle = PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState](
+      jobDescriptor,
+      jobId,
+      Option(Run(jobId)),
+      previousState = None
+    )
     Future.successful(handle)
   }
 
-  override lazy val pollBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(5
-    .second, 5
-    .minutes, 1.1)
+  override lazy val pollBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(5.second, 5.minutes, 1.1)
 
-  override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(
-    initialInterval = 5
-      .seconds, maxInterval = 20
-      .seconds, multiplier = 1.1)
+  override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff =
+    SimpleExponentialBackoff(initialInterval = 5.seconds, maxInterval = 20.seconds, multiplier = 1.1)
 
-  override lazy val runtimeEnvironment: RuntimeEnvironment = {
-    RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes, GcpBatchWorkingDisk.MountPoint, GcpBatchWorkingDisk.MountPoint)(standardParams.minimumRuntimeSettings)
-  }
+  override lazy val runtimeEnvironment: RuntimeEnvironment =
+    RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes,
+                              GcpBatchWorkingDisk.MountPoint,
+                              GcpBatchWorkingDisk.MountPoint
+    )(standardParams.minimumRuntimeSettings)
 
-  protected def sendIncrementMetricsForReferenceFiles(referenceInputFilesOpt: Option[Set[GcpBatchInput]]): Unit = {
+  protected def sendIncrementMetricsForReferenceFiles(referenceInputFilesOpt: Option[Set[GcpBatchInput]]): Unit =
     referenceInputFilesOpt match {
       case Some(referenceInputFiles) =>
        referenceInputFiles.foreach { referenceInputFile =>
@@ -892,18 +1033,20 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar case _ => // do nothing - reference disks feature is either not configured in Cromwell or disabled in workflow options } - } protected def sendIncrementMetricsForDockerImageCache(dockerImageCacheDiskOpt: Option[String], dockerImageAsSpecifiedByUser: String, - isDockerImageCacheUsageRequested: Boolean): Unit = { + isDockerImageCacheUsageRequested: Boolean + ): Unit = (isDockerImageCacheUsageRequested, dockerImageCacheDiskOpt) match { - case (true, None) => increment(NonEmptyList("docker", List("image", "cache", "image_not_in_cache", dockerImageAsSpecifiedByUser))) - case (true, Some(_)) => increment(NonEmptyList("docker", List("image", "cache", "used_image_from_cache", dockerImageAsSpecifiedByUser))) - case (false, Some(_)) => increment(NonEmptyList("docker", List("image", "cache", "cached_image_not_used", dockerImageAsSpecifiedByUser))) + case (true, None) => + increment(NonEmptyList("docker", List("image", "cache", "image_not_in_cache", dockerImageAsSpecifiedByUser))) + case (true, Some(_)) => + increment(NonEmptyList("docker", List("image", "cache", "used_image_from_cache", dockerImageAsSpecifiedByUser))) + case (false, Some(_)) => + increment(NonEmptyList("docker", List("image", "cache", "cached_image_not_used", dockerImageAsSpecifiedByUser))) case _ => // docker image cache not requested and image is not in cache anyway - do nothing } - } override def pollStatusAsync(handle: GcpBatchPendingExecutionHandle): Future[RunStatus] = { // yes, we use the whole jobName as the id @@ -917,7 +1060,7 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar } yield RunStatus.fromJobStatus(job.getStatus.getState) } - override def isTerminal(runStatus: RunStatus): Boolean = { + override def isTerminal(runStatus: RunStatus): Boolean = runStatus match { case _: RunStatus.TerminalRunStatus => log.info(s"isTerminal match terminal run status with $runStatus") @@ -926,9 +1069,8 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar log.info(f"isTerminal match _ running with status $other") false } - } - override def isDone(runStatus: RunStatus): Boolean = { + override def isDone(runStatus: RunStatus): Boolean = runStatus match { case _: RunStatus.Succeeded => log.info("GCP batch job succeeded matched isDone") @@ -938,11 +1080,12 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar false case _ => log.info(s"did not match isDone: $runStatus") - throw new RuntimeException(s"Cromwell programmer blunder: isDone was called on an incomplete RunStatus ($runStatus).") + throw new RuntimeException( + s"Cromwell programmer blunder: isDone was called on an incomplete RunStatus ($runStatus)." + ) } - } - override def getTerminalEvents(runStatus: RunStatus): Seq[ExecutionEvent] = { + override def getTerminalEvents(runStatus: RunStatus): Seq[ExecutionEvent] = runStatus match { case t: RunStatus.TerminalRunStatus => log.warning(s"Tried to get terminal events on a terminal status without events: $runStatus") @@ -951,46 +1094,40 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar case unknown => throw new RuntimeException(s"handleExecutionSuccess not called with RunStatus.Success. 
Instead got $unknown") } - } - override lazy val startMetadataKeyValues: Map[String, Any] = super[GcpBatchJobCachingActorHelper].startMetadataKeyValues + override lazy val startMetadataKeyValues: Map[String, Any] = + super[GcpBatchJobCachingActorHelper].startMetadataKeyValues // TODO: review sending machine and Instance type - override def getTerminalMetadata(runStatus: RunStatus): Map[String, Any] = { + override def getTerminalMetadata(runStatus: RunStatus): Map[String, Any] = runStatus match { case _: TerminalRunStatus => Map() case unknown => throw new RuntimeException(s"Attempt to get terminal metadata from non terminal status: $unknown") } - } - override def mapOutputWomFile(womFile: WomFile): WomFile = { + override def mapOutputWomFile(womFile: WomFile): WomFile = womFileToGcsPath(generateOutputs(jobDescriptor))(womFile) - } - override def globParentDirectory(womGlobFile: WomGlobFile): Path = { val (_, disk) = relativePathAndAttachedDisk(womGlobFile.value, runtimeAttributes.disks) disk.mountPoint } - - protected def googleProject(descriptor: BackendWorkflowDescriptor): String = { + protected def googleProject(descriptor: BackendWorkflowDescriptor): String = descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleProject, batchAttributes.project) - } - protected def computeServiceAccount(descriptor: BackendWorkflowDescriptor): String = { - descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleComputeServiceAccount, batchAttributes.computeServiceAccount) - } + protected def computeServiceAccount(descriptor: BackendWorkflowDescriptor): String = + descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleComputeServiceAccount, + batchAttributes.computeServiceAccount + ) - protected def fuseEnabled(descriptor: BackendWorkflowDescriptor): Boolean = { + protected def fuseEnabled(descriptor: BackendWorkflowDescriptor): Boolean = descriptor.workflowOptions.getBoolean(WorkflowOptionKeys.EnableFuse).toOption.getOrElse(batchAttributes.enableFuse) - } - protected def useDockerImageCache(descriptor: BackendWorkflowDescriptor): Boolean = { + protected def useDockerImageCache(descriptor: BackendWorkflowDescriptor): Boolean = descriptor.workflowOptions.getBoolean(WorkflowOptionKeys.UseDockerImageCache).getOrElse(false) - } - override def cloudResolveWomFile(womFile: WomFile): WomFile = { + override def cloudResolveWomFile(womFile: WomFile): WomFile = womFile.mapFile { value => getPath(value) match { case Success(drsPath: DrsPath) => DrsResolver.getSimpleGsUri(drsPath).unsafeRunSync().getOrElse(value) @@ -998,16 +1135,15 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar case _ => value } } - } - override def mapCommandLineWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineWomFile(womFile: WomFile): WomFile = womFile.mapFile { value => (getPath(value), asAdHocFile(womFile)) match { case (Success(gcsPath: GcsPath), Some(adHocFile)) => // Ad hoc files will be placed directly at the root ("/cromwell_root/ad_hoc_file.txt") unlike other input files // for which the full path is being propagated ("/cromwell_root/path/to/input_file.txt") workingDisk.mountPoint.resolve(adHocFile.alternativeName.getOrElse(gcsPath.name)).pathAsString - case (Success(path@(_: GcsPath | _: HttpPath)), _) => + case (Success(path @ (_: GcsPath | _: HttpPath)), _) => workingDisk.mountPoint.resolve(path.pathWithoutScheme).pathAsString case (Success(drsPath: DrsPath), _) => val filePath = DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() @@ -1017,9 +1153,8 
@@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar case _ => value } } - } - override def mapCommandLineJobInputWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineJobInputWomFile(womFile: WomFile): WomFile = womFile.mapFile(value => getPath(value) match { case Success(gcsPath: GcsPath) => workingDisk.mountPoint.resolve(gcsPath.pathWithoutScheme).pathAsString @@ -1029,9 +1164,8 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar case _ => value } ) - } - def womFileToGcsPath(batchOutputs: Set[GcpBatchOutput])(womFile: WomFile): WomFile = { + def womFileToGcsPath(batchOutputs: Set[GcpBatchOutput])(womFile: WomFile): WomFile = womFile mapFile { path => batchOutputs collectFirst { case batchOutput if batchOutput.name == makeSafeReferenceName(path) => @@ -1048,37 +1182,32 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar case _: ValidFullGcsPath => path /* - * Strip the prefixes in RuntimeOutputMapping.prefixFilters from the path, one at a time. - * For instance - * file:///cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt will progressively become - * - * /cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt - * bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt - * call-A/file.txt - * - * This code is called as part of a path mapper that will be applied to the WOMified cwl.output.json. - * The cwl.output.json when it's being read by Cromwell from the bucket still contains local paths - * (as they were created by the cwl tool). - * In order to keep things working we need to map those local paths to where they were actually delocalized, - * which is determined in cromwell.backend.google.pipelines.v2beta.api.Delocalization. - */ - case _ => (callRootPath / - RuntimeOutputMapping - .prefixFilters(workflowPaths.workflowRoot) - .foldLeft(path)({ - case (newPath, prefix) => newPath.stripPrefix(prefix) - }) - ).pathAsString + * Strip the prefixes in RuntimeOutputMapping.prefixFilters from the path, one at a time. + * For instance + * file:///cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt will progressively become + * + * /cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt + * bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt + * call-A/file.txt + * + * This code is called as part of a path mapper that will be applied to the WOMified cwl.output.json. + * The cwl.output.json when it's being read by Cromwell from the bucket still contains local paths + * (as they were created by the cwl tool). + * In order to keep things working we need to map those local paths to where they were actually delocalized, + * which is determined in cromwell.backend.google.pipelines.v2beta.api.Delocalization. + */ + case _ => + (callRootPath / + RuntimeOutputMapping + .prefixFilters(workflowPaths.workflowRoot) + .foldLeft(path) { case (newPath, prefix) => + newPath.stripPrefix(prefix) + }).pathAsString } } } - } // No need for Cromwell-performed localization in the PAPI backend, ad hoc values are localized directly from GCS to the VM by PAPI. 
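(The block comment above walks one concrete path through the prefix-stripping fold. Below is a minimal, self-contained sketch of that idiom, assuming plain String prefixes stand in for the real Path handling and for the actual output of RuntimeOutputMapping.prefixFilters; the sample path and UUID are the ones the comment itself uses.)

    object PrefixStripSketch extends App {
      // Illustrative stand-ins for RuntimeOutputMapping.prefixFilters(workflowPaths.workflowRoot)
      val prefixFilters: List[String] = List(
        "file://",
        "/cromwell_root/",
        "bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/"
      )

      val localPath =
        "file:///cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt"

      // One prefix is removed per pass; stripPrefix is a no-op when the prefix is
      // absent, so unmatched filters simply fall through unchanged.
      val stripped = prefixFilters.foldLeft(localPath) { case (path, prefix) =>
        path.stripPrefix(prefix)
      }

      println(stripped) // call-A/file.txt
    }
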
- override lazy val localizeAdHocValues: List[AdHocValue] => ErrorOr[List[StandardAdHocValue]] = _.map(Coproduct[StandardAdHocValue](_)).validNel + override lazy val localizeAdHocValues: List[AdHocValue] => ErrorOr[List[StandardAdHocValue]] = + _.map(Coproduct[StandardAdHocValue](_)).validNel } - - - - - diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchBackendSingletonActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchBackendSingletonActor.scala index 66e74b3f2c8..1e17767f029 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchBackendSingletonActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchBackendSingletonActor.scala @@ -14,10 +14,11 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} object GcpBatchBackendSingletonActor { - def props(requestFactory: GcpBatchRequestFactory, serviceRegistryActor: ActorRef)(implicit requestHandler: GcpBatchApiRequestHandler): Props = { + def props(requestFactory: GcpBatchRequestFactory, serviceRegistryActor: ActorRef)(implicit + requestHandler: GcpBatchApiRequestHandler + ): Props = Props(new GcpBatchBackendSingletonActor(requestFactory, serviceRegistryActor = serviceRegistryActor)) .withDispatcher(BackendDispatcher) - } // This is the only type of messages that can be processed by this actor from this actor sealed trait Action extends Product with Serializable @@ -38,8 +39,10 @@ object GcpBatchBackendSingletonActor { } -final class GcpBatchBackendSingletonActor(requestFactory: GcpBatchRequestFactory, override val serviceRegistryActor: ActorRef)(implicit requestHandler: GcpBatchApiRequestHandler) - extends Actor +final class GcpBatchBackendSingletonActor(requestFactory: GcpBatchRequestFactory, + override val serviceRegistryActor: ActorRef +)(implicit requestHandler: GcpBatchApiRequestHandler) + extends Actor with ActorLogging with BatchInstrumentation with CromwellInstrumentationScheduler @@ -47,7 +50,7 @@ final class GcpBatchBackendSingletonActor(requestFactory: GcpBatchRequestFactory import GcpBatchBackendSingletonActor._ - private implicit val ec: ExecutionContext = context.dispatcher + implicit private val ec: ExecutionContext = context.dispatcher override def preStart() = { startInstrumentationTimer() @@ -81,8 +84,8 @@ final class GcpBatchBackendSingletonActor(requestFactory: GcpBatchRequestFactory replyTo ! Event.JobStatusRetrieved(job) case Failure(exception) => - log.error(exception, s"Failed to query job status ($jobName) from GCP") - replyTo ! Event.ActionFailed(jobName.toString ,exception) + log.error(exception, s"Failed to query job status ($jobName) from GCP") + replyTo ! Event.ActionFailed(jobName.toString, exception) } case Action.AbortJob(jobName) => @@ -108,7 +111,8 @@ final class GcpBatchBackendSingletonActor(requestFactory: GcpBatchRequestFactory // that are on a final state. 
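(A compressed sketch of the query-and-reply protocol visible in the singleton actor hunk above, assuming classic Akka actors; the message names and the synchronous fetchStatus stub are illustrative, not the backend's real API. The detail worth noting is capturing sender() into replyTo before any further work, since sender() is only reliable while the original message is being processed.)

    import akka.actor.{Actor, ActorLogging}
    import scala.util.{Failure, Success, Try}

    object QueryJobSketch {
      final case class QueryJob(jobName: String)
      final case class JobStatusRetrieved(status: String)
      final case class ActionFailed(jobName: String, cause: Throwable)
    }

    class QueryJobSketchActor extends Actor with ActorLogging {
      import QueryJobSketch._

      // Stand-in for the real request handler; the production code is asynchronous.
      private def fetchStatus(jobName: String): Try[String] = Try("RUNNING")

      override def receive: Receive = { case QueryJob(jobName) =>
        val replyTo = sender() // capture the caller up front
        fetchStatus(jobName) match {
          case Success(status) => replyTo ! JobStatusRetrieved(status)
          case Failure(e) =>
            log.error(e, s"Failed to query job status ($jobName)")
            replyTo ! ActionFailed(jobName, e)
        }
      }
    }
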
log.info(s"Cromwell requested to abort workflow $workflowId") - case other => log.error(s"Unexpected message from {} to ${this.getClass.getSimpleName}: {}", sender().path.name, other) + case other => + log.error(s"Unexpected message from {} to ${this.getClass.getSimpleName}: {}", sender().path.name, other) } override def receive = instrumentationReceive(loadMetricHandler _).orElse(normalReceive) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchFinalizationActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchFinalizationActor.scala index dd57edc8b4e..1749a229621 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchFinalizationActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchFinalizationActor.scala @@ -9,8 +9,7 @@ import cromwell.core.io.AsyncIoActorClient import cromwell.filesystems.gcs.batch.GcsBatchCommandBuilder import wom.graph.CommandCallNode -case class GcpBatchFinalizationActorParams -( +case class GcpBatchFinalizationActorParams( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, batchConfiguration: GcpBatchConfiguration, @@ -22,7 +21,9 @@ case class GcpBatchFinalizationActorParams override def configurationDescriptor: BackendConfigurationDescriptor = batchConfiguration.configurationDescriptor } -class GcpBatchFinalizationActor(val batchParams: GcpBatchFinalizationActorParams) extends StandardFinalizationActor(batchParams) with AsyncIoActorClient { +class GcpBatchFinalizationActor(val batchParams: GcpBatchFinalizationActorParams) + extends StandardFinalizationActor(batchParams) + with AsyncIoActorClient { lazy val batchConfiguration: GcpBatchConfiguration = batchParams.batchConfiguration @@ -30,6 +31,4 @@ class GcpBatchFinalizationActor(val batchParams: GcpBatchFinalizationActorParams override def ioActor: ActorRef = batchParams.ioActor } -object GcpBatchFinalizationActor { - -} \ No newline at end of file +object GcpBatchFinalizationActor {} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala index 466a4f20d7e..5456c28ccbe 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala @@ -16,10 +16,18 @@ import com.google.auth.http.HttpCredentialsAdapter import com.google.auth.oauth2.OAuth2Credentials import cromwell.backend.google.batch._ import cromwell.backend.google.batch.actors.GcpBatchInitializationActor._ -import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.{VirtualPrivateCloudConfiguration, VirtualPrivateCloudLabels, VirtualPrivateCloudLiterals} +import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.{ + VirtualPrivateCloudConfiguration, + VirtualPrivateCloudLabels, + VirtualPrivateCloudLiterals +} import cromwell.backend.google.batch.models._ import cromwell.backend.google.batch.runnable.WorkflowOptionKeys -import cromwell.backend.standard.{StandardInitializationActor, StandardInitializationActorParams, StandardValidatedRuntimeAttributesBuilder} +import cromwell.backend.standard.{ + StandardInitializationActor, + 
StandardInitializationActorParams, + StandardValidatedRuntimeAttributesBuilder +} import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor} import cromwell.cloudsupport.gcp.auth.GoogleAuthMode.{httpTransport, jsonFactory} import cromwell.cloudsupport.gcp.auth.{GoogleAuthMode, UserServiceAccountMode} @@ -36,8 +44,7 @@ import scala.concurrent.Future import scala.util.Try import scala.util.control.NonFatal -case class GcpBatchInitializationActorParams -( +case class GcpBatchInitializationActorParams( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], @@ -48,7 +55,9 @@ case class GcpBatchInitializationActorParams override val configurationDescriptor: BackendConfigurationDescriptor = batchConfiguration.configurationDescriptor } -class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams) extends StandardInitializationActor(batchParams) with AsyncIoActorClient { +class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams) + extends StandardInitializationActor(batchParams) + with AsyncIoActorClient { override lazy val ioActor: ActorRef = batchParams.ioActor protected val gcpBatchConfiguration: GcpBatchConfiguration = batchParams.batchConfiguration @@ -65,11 +74,12 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams // Credentials object for the Genomics API private lazy val genomicsCredentials: Future[Credentials] = gcpBatchConfiguration.batchAttributes.auths.genomics - .retryCredentials(workflowOptions, List( - CloudLifeSciencesScopes - .CLOUD_PLATFORM, - GenomicsScopes.GENOMICS - )) + .retryCredentials(workflowOptions, + List( + CloudLifeSciencesScopes.CLOUD_PLATFORM, + GenomicsScopes.GENOMICS + ) + ) val privateDockerEncryptionKeyName: Option[String] = { val optionsEncryptionKey = workflowOptions.get(GoogleAuthMode.DockerCredentialsEncryptionKeyNameKey).toOption @@ -89,11 +99,14 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams // That doesn't seem great but it's effectively what the existing code around user service accounts appears to be doing. val userServiceAccountAuth: Option[GoogleAuthMode] = for { _ <- workflowOptions.get(GoogleAuthMode.UserServiceAccountKey).toOption - usaAuth <- gcpBatchConfiguration.googleConfig.authsByName.values collectFirst { case u: UserServiceAccountMode => u } + usaAuth <- gcpBatchConfiguration.googleConfig.authsByName.values collectFirst { + case u: UserServiceAccountMode => u + } } yield usaAuth - def encryptionAuthFromConfig: Option[GoogleAuthMode] = gcpBatchConfiguration.dockerEncryptionAuthName.flatMap { name => - gcpBatchConfiguration.googleConfig.auth(name).toOption + def encryptionAuthFromConfig: Option[GoogleAuthMode] = gcpBatchConfiguration.dockerEncryptionAuthName.flatMap { + name => + gcpBatchConfiguration.googleConfig.auth(name).toOption } // If there's no user service account auth in the workflow options fall back to an auth specified in config. 
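(The next line implements that fallback with Option#orElse. A small sketch of the idiom follows, with illustrative values; note that orElse takes its alternative by name, so the config-side lookup only runs when the workflow-options auth is absent.)

    object AuthFallbackSketch extends App {
      def authFromWorkflowOptions: Option[String] = None              // no user service account supplied
      def authFromConfig: Option[String] = Some("config-level-auth")  // backend configuration default

      // Workflow-level auth wins; configuration is consulted only when it is missing.
      val effectiveAuth: Option[String] = authFromWorkflowOptions orElse authFromConfig

      println(effectiveAuth) // Some(config-level-auth)
    }
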
userServiceAccountAuth orElse encryptionAuthFromConfig @@ -103,12 +116,18 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams new String(Base64.decodeBase64(dockerToken)).split(':') match { case Array(username, password) => // unencrypted tokens are base64-encoded username:password - Option(JsObject( - Map( - "username" -> JsString(username), - "password" -> JsString(password) - )).compactPrint) - case _ => throw new RuntimeException(s"provided dockerhub token '$dockerToken' is not a base64-encoded username:password") + Option( + JsObject( + Map( + "username" -> JsString(username), + "password" -> JsString(password) + ) + ).compactPrint + ) + case _ => + throw new RuntimeException( + s"provided dockerhub token '$dockerToken' is not a base64-encoded username:password" + ) } } @@ -122,17 +141,25 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams } private def vpcNetworkAndSubnetworkProjectLabelsFuture(): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { - def googleProject(descriptor: BackendWorkflowDescriptor): String = { - descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleProject, batchParams.batchConfiguration.batchAttributes.project) - } + def googleProject(descriptor: BackendWorkflowDescriptor): String = + descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleProject, + batchParams.batchConfiguration.batchAttributes.project + ) - def projectMetadataRequest(vpcConfig: VirtualPrivateCloudLabels): Future[HttpRequest] = { + def projectMetadataRequest(vpcConfig: VirtualPrivateCloudLabels): Future[HttpRequest] = Future { - val credentials = vpcConfig.auth.credentials(workflowOptions.get(_).getOrElse(throw new RuntimeException("Unable to find the necessary workflow option for auth credentials")), List(CloudResourceManagerScopes.CLOUD_PLATFORM)) + val credentials = vpcConfig.auth.credentials( + workflowOptions + .get(_) + .getOrElse(throw new RuntimeException("Unable to find the necessary workflow option for auth credentials")), + List(CloudResourceManagerScopes.CLOUD_PLATFORM) + ) val httpCredentialsAdapter = new HttpCredentialsAdapter(credentials) - val cloudResourceManagerBuilder = new CloudResourceManager - .Builder(GoogleAuthMode.httpTransport, GoogleAuthMode.jsonFactory, httpCredentialsAdapter) + val cloudResourceManagerBuilder = new CloudResourceManager.Builder(GoogleAuthMode.httpTransport, + GoogleAuthMode.jsonFactory, + httpCredentialsAdapter + ) .setApplicationName(gcpBatchConfiguration.googleConfig.applicationName) .build() @@ -140,19 +167,23 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams project.buildHttpRequest() } - } def projectMetadataResponseToLabels(httpResponse: HttpResponse): Future[ProjectLabels] = { implicit val googleProjectMetadataLabelDecoder: Decoder[ProjectLabels] = deriveDecoder - Future.fromTry(decode[ProjectLabels](httpResponse.parseAsString()).toTry).recoverWith { - case NonFatal(e) => Future.failed(new RuntimeException(s"Failed to parse labels from project metadata response from Google Cloud Resource Manager API. " + - s"${ExceptionUtils.getMessage(e)}", e)) + Future.fromTry(decode[ProjectLabels](httpResponse.parseAsString()).toTry).recoverWith { case NonFatal(e) => + Future.failed( + new RuntimeException( + s"Failed to parse labels from project metadata response from Google Cloud Resource Manager API. 
" + + s"${ExceptionUtils.getMessage(e)}", + e + ) + ) } } def networkLabelsFromProjectLabels(vpcConfig: VirtualPrivateCloudLabels, - projectLabels: ProjectLabels, - ): Option[VpcAndSubnetworkProjectLabelValues] = { + projectLabels: ProjectLabels + ): Option[VpcAndSubnetworkProjectLabelValues] = projectLabels.labels.get(vpcConfig.network) map { vpcNetworkLabelValue => val subnetworkLabelOption = vpcConfig.subnetwork.flatMap { s => projectLabels.labels.collectFirst { @@ -162,22 +193,22 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams VpcAndSubnetworkProjectLabelValues(vpcNetworkLabelValue, subnetworkLabelOption) } - } - def fetchVpcLabelsFromProjectMetadata(vpcConfig: VirtualPrivateCloudLabels - ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { + def fetchVpcLabelsFromProjectMetadata( + vpcConfig: VirtualPrivateCloudLabels + ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = for { projectMetadataResponse <- projectMetadataRequest(vpcConfig).map(_.executeAsync().get()) projectLabels <- projectMetadataResponseToLabels(projectMetadataResponse) } yield networkLabelsFromProjectLabels(vpcConfig, projectLabels) - } /* First, try to fetch the network information from labels, where that fetch may still return None. Then, if we did not discover a network via labels for whatever reason try to look for literal values. */ - def fetchVpcLabels(vpcConfig: VirtualPrivateCloudConfiguration - ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { + def fetchVpcLabels( + vpcConfig: VirtualPrivateCloudConfiguration + ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { // Added explicit types to hopefully help future devs who stumble across this two-step code val fetchedFromLabels: Future[Option[VpcAndSubnetworkProjectLabelValues]] = vpcConfig.labelsOption match { case Some(labels: VirtualPrivateCloudLabels) => fetchVpcLabelsFromProjectMetadata(labels) @@ -201,9 +232,13 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams gcsCred <- gcsCredentials genomicsCred <- genomicsCredentials validatedPathBuilders <- pathBuilders - } yield new GcpBatchWorkflowPaths( - workflowDescriptor, gcsCred, genomicsCred, gcpBatchConfiguration, validatedPathBuilders, standardStreamNameToFileNameMetadataMapper)(ioEc) - + } yield new GcpBatchWorkflowPaths(workflowDescriptor, + gcsCred, + genomicsCred, + gcpBatchConfiguration, + validatedPathBuilders, + standardStreamNameToFileNameMetadataMapper + )(ioEc) override lazy val initializationData: Future[GcpBackendInitializationData] = for { batchWorkflowPaths <- workflowPaths @@ -221,13 +256,12 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams override def validateWorkflowOptions(): Try[Unit] = GcpLabel.fromWorkflowOptions(workflowOptions).map(_ => ()) - override def beforeAll(): Future[Option[BackendInitializationData]] = { + override def beforeAll(): Future[Option[BackendInitializationData]] = for { paths <- workflowPaths _ = publishWorkflowRoot(paths.workflowRoot.pathAsString) data <- initializationData } yield Option(data) - } def standardStreamNameToFileNameMetadataMapper(gcpBatchJobPaths: GcpBatchJobPaths, streamName: String): String = GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper(gcpBatchJobPaths, streamName) @@ -238,7 +272,9 @@ class GcpBatchInitializationActor(batchParams: GcpBatchInitializationActorParams object GcpBatchInitializationActor { // For metadata publishing purposes default to using the name of a standard stream as the 
stream's filename. - def defaultStandardStreamNameToFileNameMetadataMapper(gcpBatchJobPaths: GcpBatchJobPaths, streamName: String): String = streamName + def defaultStandardStreamNameToFileNameMetadataMapper(gcpBatchJobPaths: GcpBatchJobPaths, + streamName: String + ): String = streamName def encryptKms(keyName: String, credentials: OAuth2Credentials, plainText: String): String = { val httpCredentialsAdapter = new HttpCredentialsAdapter(credentials) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala index e86404bef1e..8f305778f29 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala @@ -14,22 +14,19 @@ import scala.language.postfixOps trait GcpBatchJobCachingActorHelper extends StandardCachingActorHelper { this: GcpBatchAsyncBackendJobExecutionActor with JobLogging => - lazy val initializationData: GcpBackendInitializationData = { + lazy val initializationData: GcpBackendInitializationData = backendInitializationDataAs[GcpBackendInitializationData] - } lazy val batchConfiguration: GcpBatchConfiguration = initializationData.gcpBatchConfiguration lazy val gcpBatchCallPaths: GcpBatchJobPaths = jobPaths.asInstanceOf[GcpBatchJobPaths] lazy val runtimeAttributes = GcpBatchRuntimeAttributes( validatedRuntimeAttributes, - batchConfiguration - .runtimeConfig + batchConfiguration.runtimeConfig ) lazy val maxPreemption: Int = runtimeAttributes.preemptible - lazy val workingDisk: GcpBatchAttachedDisk = runtimeAttributes.disks.find(_.name == GcpBatchWorkingDisk.Name).get lazy val callRootPath: Path = gcpBatchCallPaths.callExecutionRoot @@ -45,16 +42,18 @@ trait GcpBatchJobCachingActorHelper extends StandardCachingActorHelper { val workflow = jobDescriptor.workflowDescriptor val call = jobDescriptor.taskCall val subWorkflow = workflow.callable - val subWorkflowLabels = if (!subWorkflow.equals(workflow.rootWorkflow)) - Labels("cromwell-sub-workflow-name" -> subWorkflow.name) - else - Labels.empty + val subWorkflowLabels = + if (!subWorkflow.equals(workflow.rootWorkflow)) + Labels("cromwell-sub-workflow-name" -> subWorkflow.name) + else + Labels.empty val alias = call.localName - val aliasLabels = if (!alias.equals(call.callable.name)) - Labels("wdl-call-alias" -> alias) - else - Labels.empty + val aliasLabels = + if (!alias.equals(call.callable.name)) + Labels("wdl-call-alias" -> alias) + else + Labels.empty Labels( "cromwell-workflow-id" -> s"cromwell-${workflow.rootWorkflowId}", @@ -62,18 +61,16 @@ trait GcpBatchJobCachingActorHelper extends StandardCachingActorHelper { ) ++ subWorkflowLabels ++ aliasLabels } - lazy val originalLabels: Labels = defaultLabels lazy val backendLabels: Seq[GcpLabel] = GcpLabel.safeLabels(originalLabels.asTuple: _*) - lazy val originalLabelEvents: Map[String, String] = originalLabels.value map { l => s"${CallMetadataKeys.Labels}:${l.key}" -> l.value } toMap + lazy val originalLabelEvents: Map[String, String] = originalLabels.value map { l => + s"${CallMetadataKeys.Labels}:${l.key}" -> l.value + } toMap override protected def nonStandardMetadata: Map[String, Any] = { - val googleProject = initializationData - .workflowPaths - .workflowDescriptor - .workflowOptions + val 
googleProject = initializationData.workflowPaths.workflowDescriptor.workflowOptions .get(WorkflowOptionKeys.GoogleProject) .getOrElse(batchAttributes.project) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchApiRequestHandler.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchApiRequestHandler.scala index 1a877558942..b10b8361130 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchApiRequestHandler.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchApiRequestHandler.scala @@ -25,10 +25,9 @@ class GcpBatchApiRequestHandler { val headerProvider = FixedHeaderProvider.create(headers) val batchSettings = BatchServiceSettings.newBuilder.setHeaderProvider(headerProvider).build val client = BatchServiceClient.create(batchSettings) - try { + try f(client) - } finally { + finally client.close() - } } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala index 6976bd09867..3c69c3c8fb4 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala @@ -28,9 +28,9 @@ object GcpBatchRequestFactory { * Input parameters that are not strictly needed by the user's command but are Cromwell byproducts. */ case class DetritusInputParameters( - executionScriptInputParameter: GcpBatchFileInput, - monitoringScriptInputParameter: Option[GcpBatchFileInput] - ) { + executionScriptInputParameter: GcpBatchFileInput, + monitoringScriptInputParameter: Option[GcpBatchFileInput] + ) { def all: List[GcpBatchFileInput] = List(executionScriptInputParameter) ++ monitoringScriptInputParameter } @@ -38,11 +38,12 @@ object GcpBatchRequestFactory { * Output parameters that are not produced by the user's command but are Cromwell byproducts. */ case class DetritusOutputParameters( - monitoringScriptOutputParameter: Option[GcpBatchFileOutput], - rcFileOutputParameter: GcpBatchFileOutput, - memoryRetryRCFileOutputParameter: GcpBatchFileOutput - ) { - def all: List[GcpBatchFileOutput] = memoryRetryRCFileOutputParameter :: List(rcFileOutputParameter) ++ monitoringScriptOutputParameter + monitoringScriptOutputParameter: Option[GcpBatchFileOutput], + rcFileOutputParameter: GcpBatchFileOutput, + memoryRetryRCFileOutputParameter: GcpBatchFileOutput + ) { + def all: List[GcpBatchFileOutput] = + memoryRetryRCFileOutputParameter :: List(rcFileOutputParameter) ++ monitoringScriptOutputParameter } /** @@ -51,12 +52,12 @@ object GcpBatchRequestFactory { * to treat them differently. 
*/ case class InputOutputParameters( - detritusInputParameters: DetritusInputParameters, - jobInputParameters: List[GcpBatchInput], - jobOutputParameters: List[GcpBatchOutput], - detritusOutputParameters: DetritusOutputParameters, - literalInputParameters: List[GcpBatchLiteralInput] - ) { + detritusInputParameters: DetritusInputParameters, + jobInputParameters: List[GcpBatchInput], + jobOutputParameters: List[GcpBatchOutput], + detritusOutputParameters: DetritusOutputParameters, + literalInputParameters: List[GcpBatchLiteralInput] + ) { lazy val fileInputParameters: List[GcpBatchInput] = jobInputParameters ++ detritusInputParameters.all lazy val fileOutputParameters: List[GcpBatchOutput] = detritusOutputParameters.all ++ jobOutputParameters } @@ -89,7 +90,7 @@ object GcpBatchRequestFactory { enableSshAccess: Boolean, vpcNetworkAndSubnetworkProjectLabels: Option[VpcAndSubnetworkProjectLabelValues], dockerhubCredentials: (String, String) - ) { + ) { def literalInputs = inputOutputParameters.literalInputParameters def inputParameters = inputOutputParameters.fileInputParameters diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala index 9c482d852d1..a8f6519b7a5 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala @@ -2,7 +2,21 @@ package cromwell.backend.google.batch.api import com.google.cloud.batch.v1.AllocationPolicy._ import com.google.cloud.batch.v1.LogsPolicy.Destination -import com.google.cloud.batch.v1.{AllocationPolicy, ComputeResource, CreateJobRequest, DeleteJobRequest, GetJobRequest, Job, JobName, LogsPolicy, Runnable, ServiceAccount, TaskGroup, TaskSpec, Volume} +import com.google.cloud.batch.v1.{ + AllocationPolicy, + ComputeResource, + CreateJobRequest, + DeleteJobRequest, + GetJobRequest, + Job, + JobName, + LogsPolicy, + Runnable, + ServiceAccount, + TaskGroup, + TaskSpec, + Volume +} import com.google.protobuf.Duration import cromwell.backend.google.batch.io.GcpBatchAttachedDisk import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.GcsTransferConfiguration @@ -12,25 +26,27 @@ import cromwell.backend.google.batch.util.BatchUtilityConversions import scala.jdk.CollectionConverters._ -class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransferConfiguration) extends GcpBatchRequestFactory - with BatchUtilityConversions - with UserRunnable - with ContainerSetup - with Localization - with Delocalization - with MemoryRetryCheckRunnable - with MonitoringRunnable - with CheckpointingRunnable { +class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransferConfiguration) + extends GcpBatchRequestFactory + with BatchUtilityConversions + with UserRunnable + with ContainerSetup + with Localization + with Delocalization + with MemoryRetryCheckRunnable + with MonitoringRunnable + with CheckpointingRunnable { override def queryRequest(jobName: JobName): GetJobRequest = GetJobRequest.newBuilder.setName(jobName.toString).build - override def abortRequest(jobName: JobName): DeleteJobRequest = DeleteJobRequest.newBuilder.setName(jobName.toString).build() - + override def abortRequest(jobName: JobName): DeleteJobRequest = + 
DeleteJobRequest.newBuilder.setName(jobName.toString).build() - def createNetworkWithVPC(vpcAndSubnetworkProjectLabelValues: VpcAndSubnetworkProjectLabelValues, data: GcpBatchRequest): NetworkInterface.Builder = { + def createNetworkWithVPC(vpcAndSubnetworkProjectLabelValues: VpcAndSubnetworkProjectLabelValues, + data: GcpBatchRequest + ): NetworkInterface.Builder = { - val network = NetworkInterface - .newBuilder + val network = NetworkInterface.newBuilder .setNoExternalIpAddress(data.gcpBatchParameters.runtimeAttributes.noAddress) .setNetwork(vpcAndSubnetworkProjectLabelValues.networkName(data.gcpBatchParameters.projectId)) @@ -42,35 +58,35 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe } - def createNetwork(data: GcpBatchRequest): NetworkInterface.Builder = { + def createNetwork(data: GcpBatchRequest): NetworkInterface.Builder = data.createParameters.vpcNetworkAndSubnetworkProjectLabels match { case Some(vpcAndSubnetworkProjectLabelValues) => createNetworkWithVPC(vpcAndSubnetworkProjectLabelValues, data) case _ => NetworkInterface.newBuilder().setNoExternalIpAddress(data.createParameters.runtimeAttributes.noAddress) } - } - private def createComputeResource(cpu: Long, memory: Long, bootDiskSizeMb: Long) = { - ComputeResource - .newBuilder + private def createComputeResource(cpu: Long, memory: Long, bootDiskSizeMb: Long) = + ComputeResource.newBuilder .setCpuMilli(cpu) .setMemoryMib(memory) .setBootDiskMib(bootDiskSizeMb) .build - } - private def createInstancePolicy(cpuPlatform: String, spotModel: ProvisioningModel, accelerators: Option[Accelerator.Builder], attachedDisks: List[AttachedDisk]): InstancePolicy.Builder = { + private def createInstancePolicy(cpuPlatform: String, + spotModel: ProvisioningModel, + accelerators: Option[Accelerator.Builder], + attachedDisks: List[AttachedDisk] + ): InstancePolicy.Builder = { - //set GPU count to 0 if not included in workflow + // set GPU count to 0 if not included in workflow val gpuAccelerators = accelerators.getOrElse(Accelerator.newBuilder.setCount(0).setType("")) // TODO: Driver version - val instancePolicy = InstancePolicy - .newBuilder + val instancePolicy = InstancePolicy.newBuilder .setProvisioningModel(spotModel) .addAllDisks(attachedDisks.asJava) .setMinCpuPlatform(cpuPlatform) .buildPartial() - //add GPUs if GPU count is greater than 1 + // add GPUs if GPU count is greater than 1 if (gpuAccelerators.getCount >= 1) { val instancePolicyGpu = instancePolicy.toBuilder instancePolicyGpu.addAccelerators(gpuAccelerators).build @@ -81,65 +97,71 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe } - private def createNetworkPolicy(networkInterface: NetworkInterface): NetworkPolicy = { - NetworkPolicy - .newBuilder + private def createNetworkPolicy(networkInterface: NetworkInterface): NetworkPolicy = + NetworkPolicy.newBuilder .addNetworkInterfaces(0, networkInterface) .build - } - private def createTaskSpec(runnables: List[Runnable], computeResource: ComputeResource, retryCount: Int, durationInSeconds: Long, volumes: List[Volume]) = { - TaskSpec - .newBuilder + private def createTaskSpec(runnables: List[Runnable], + computeResource: ComputeResource, + retryCount: Int, + durationInSeconds: Long, + volumes: List[Volume] + ) = + TaskSpec.newBuilder .addAllRunnables(runnables.asJava) .setComputeResource(computeResource) .addAllVolumes(volumes.asJava) .setMaxRetryCount(retryCount) - .setMaxRunDuration(Duration - .newBuilder - .setSeconds(durationInSeconds) - .build) - } + 
.setMaxRunDuration( + Duration.newBuilder + .setSeconds(durationInSeconds) + .build + ) - private def createTaskGroup(taskCount: Long, task: TaskSpec.Builder): TaskGroup = { - TaskGroup - .newBuilder + private def createTaskGroup(taskCount: Long, task: TaskSpec.Builder): TaskGroup = + TaskGroup.newBuilder .setTaskCount(taskCount) .setTaskSpec(task) .build - } - - private def createAllocationPolicy(data: GcpBatchRequest, locationPolicy: LocationPolicy, instancePolicy: InstancePolicy, networkPolicy: NetworkPolicy, serviceAccount: ServiceAccount, accelerators: Option[Accelerator.Builder]) = { + private def createAllocationPolicy(data: GcpBatchRequest, + locationPolicy: LocationPolicy, + instancePolicy: InstancePolicy, + networkPolicy: NetworkPolicy, + serviceAccount: ServiceAccount, + accelerators: Option[Accelerator.Builder] + ) = { - val allocationPolicy = AllocationPolicy - .newBuilder + val allocationPolicy = AllocationPolicy.newBuilder .setLocation(locationPolicy) .setNetwork(networkPolicy) - .putLabels("cromwell-workflow-id", toLabel(data.workflowId.toString)) //label for workflow from WDL + .putLabels("cromwell-workflow-id", toLabel(data.workflowId.toString)) // label for workflow from WDL .putLabels("goog-batch-worker", "true") - .putAllLabels((data.createParameters.googleLabels.map(label => label.key -> label.value).toMap.asJava)) + .putAllLabels(data.createParameters.googleLabels.map(label => label.key -> label.value).toMap.asJava) .setServiceAccount(serviceAccount) .buildPartial() val gpuAccelerators = accelerators.getOrElse(Accelerator.newBuilder.setCount(0).setType("")) - //add GPUs if GPU count is greater than or equal to 1 + // add GPUs if GPU count is greater than or equal to 1 if (gpuAccelerators.getCount >= 1) { - allocationPolicy.toBuilder.addInstances(InstancePolicyOrTemplate.newBuilder.setPolicy(instancePolicy).setInstallGpuDrivers(true).build) + allocationPolicy.toBuilder.addInstances( + InstancePolicyOrTemplate.newBuilder.setPolicy(instancePolicy).setInstallGpuDrivers(true).build + ) } else { allocationPolicy.toBuilder.addInstances(InstancePolicyOrTemplate.newBuilder.setPolicy(instancePolicy).build) } } - override def submitRequest(data: GcpBatchRequest): CreateJobRequest = { val batchAttributes = data.gcpBatchParameters.batchAttributes val runtimeAttributes = data.gcpBatchParameters.runtimeAttributes val createParameters = data.createParameters val retryCount = data.gcpBatchParameters.runtimeAttributes.preemptible - val allDisksToBeMounted: Seq[GcpBatchAttachedDisk] = createParameters.adjustedSizeDisks ++ createParameters.referenceDisksForLocalizationOpt.getOrElse(List.empty) + val allDisksToBeMounted: Seq[GcpBatchAttachedDisk] = + createParameters.adjustedSizeDisks ++ createParameters.referenceDisksForLocalizationOpt.getOrElse(List.empty) val gcpBootDiskSizeMb = convertGbToMib(runtimeAttributes) // set parent for metadata storage of job information @@ -184,43 +206,44 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe val monitoringShutdown: List[Runnable] = monitoringShutdownRunnables(createParameters) val checkpointingStart: List[Runnable] = checkpointingSetupRunnables(createParameters, allVolumes) val checkpointingShutdown: List[Runnable] = checkpointingShutdownRunnables(createParameters, allVolumes) - val sshAccess: List[Runnable] = List.empty //sshAccessActions(createPipelineParameters, mounts) + val sshAccess: List[Runnable] = List.empty // sshAccessActions(createPipelineParameters, mounts) val sortedRunnables: List[Runnable] = 
RunnableUtils.sortRunnables( containerSetup = containerSetup, - localization = localization, - userRunnable = userRunnable, - memoryRetryRunnable = memoryRetryRunnable, - deLocalization = deLocalization, - monitoringSetup = monitoringSetup, - monitoringShutdown = monitoringShutdown, - checkpointingStart = checkpointingStart, - checkpointingShutdown = checkpointingShutdown, - sshAccess = sshAccess, - isBackground = _.getBackground, - ) + localization = localization, + userRunnable = userRunnable, + memoryRetryRunnable = memoryRetryRunnable, + deLocalization = deLocalization, + monitoringSetup = monitoringSetup, + monitoringShutdown = monitoringShutdown, + checkpointingStart = checkpointingStart, + checkpointingShutdown = checkpointingShutdown, + sshAccess = sshAccess, + isBackground = _.getBackground + ) val computeResource = createComputeResource(cpuCores, memory, gcpBootDiskSizeMb) val taskSpec = createTaskSpec(sortedRunnables, computeResource, retryCount, durationInSeconds, allVolumes) val taskGroup: TaskGroup = createTaskGroup(taskCount, taskSpec) val instancePolicy = createInstancePolicy(cpuPlatform, spotModel, accelerators, allDisks) val locationPolicy = LocationPolicy.newBuilder.addAllowedLocations(zones).build - val allocationPolicy = createAllocationPolicy(data, locationPolicy, instancePolicy.build, networkPolicy, gcpSa, accelerators) - val job = Job - .newBuilder + val allocationPolicy = + createAllocationPolicy(data, locationPolicy, instancePolicy.build, networkPolicy, gcpSa, accelerators) + val job = Job.newBuilder .addTaskGroups(taskGroup) .setAllocationPolicy(allocationPolicy.build()) - .putLabels("submitter", "cromwell") // label to signify job submitted by cromwell for larger tracking purposes within GCP batch + .putLabels("submitter", + "cromwell" + ) // label to signify job submitted by cromwell for larger tracking purposes within GCP batch .putLabels("goog-batch-worker", "true") - .putAllLabels((data.createParameters.googleLabels.map(label => label.key -> label.value).toMap.asJava)) - .setLogsPolicy(LogsPolicy - .newBuilder - .setDestination(Destination.CLOUD_LOGGING) - .build) - + .putAllLabels(data.createParameters.googleLabels.map(label => label.key -> label.value).toMap.asJava) + .setLogsPolicy( + LogsPolicy.newBuilder + .setDestination(Destination.CLOUD_LOGGING) + .build + ) - CreateJobRequest - .newBuilder + CreateJobRequest.newBuilder .setParent(parent) .setJob(job) .setJobId(data.jobName) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/authentication/GcpBatchVMAuthentication.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/authentication/GcpBatchVMAuthentication.scala index 4f14d1b92b1..7858429ebd4 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/authentication/GcpBatchVMAuthentication.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/authentication/GcpBatchVMAuthentication.scala @@ -28,7 +28,9 @@ object GcpBatchDockerCredentials { case None => ().validNel // fine case _ => for { - authName <- dockerCredentials.authName.toErrorOr("KMS Encryption key defined for private Docker but no auth specified") + authName <- dockerCredentials.authName.toErrorOr( + "KMS Encryption key defined for private Docker but no auth specified" + ) _ <- googleConfig.auth(authName) } yield () } @@ -37,10 +39,10 @@ object GcpBatchDockerCredentials { case Invalid(errors) => throw new RuntimeException(errors.toList.mkString(", ")) case 
Valid(_) => - new GcpBatchDockerCredentials( - token = dockerCredentials.token, - keyName = dockerCredentials.keyName, - authName = dockerCredentials.authName) + new GcpBatchDockerCredentials(token = dockerCredentials.token, + keyName = dockerCredentials.keyName, + authName = dockerCredentials.authName + ) } } } @@ -50,11 +52,12 @@ object GcpBatchDockerCredentials { */ case class GcpBatchDockerCredentials(override val token: String, override val keyName: Option[String], - override val authName: Option[String]) - extends DockerCredentials(token = token, keyName = keyName, authName = authName) with GcpBatchAuthObject { + override val authName: Option[String] +) extends DockerCredentials(token = token, keyName = keyName, authName = authName) + with GcpBatchAuthObject { override val context = "docker" override val map = Map( "token" -> JsString(token) ) -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendCacheHitCopyingActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendCacheHitCopyingActor.scala index a0fc2e96317..82fd47735f7 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendCacheHitCopyingActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendCacheHitCopyingActor.scala @@ -16,20 +16,24 @@ import wom.values.WomFile import scala.language.postfixOps import scala.util.Try -class BatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) extends StandardCacheHitCopyingActor(standardParams){ +class BatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) + extends StandardCacheHitCopyingActor(standardParams) { override protected val commandBuilder: GcsBatchCommandBuilder.type = GcsBatchCommandBuilder private val cachingStrategy: BatchCacheHitDuplicationStrategy = BackendInitializationData .as[GcpBackendInitializationData](standardParams.backendInitializationDataOption) - .gcpBatchConfiguration.batchAttributes.cacheHitDuplicationStrategy + .gcpBatchConfiguration + .batchAttributes + .cacheHitDuplicationStrategy override def processSimpletons(womValueSimpletons: Seq[WomValueSimpleton], - sourceCallRootPath: Path, - ): Try[(CallOutputs, Set[IoCommand[_]])] = + sourceCallRootPath: Path + ): Try[(CallOutputs, Set[IoCommand[_]])] = cachingStrategy match { case CopyCachedOutputs => super.processSimpletons(womValueSimpletons, sourceCallRootPath) case UseOriginalCachedOutputs => val touchCommands: Seq[Try[IoTouchCommand]] = womValueSimpletons collect { - case WomValueSimpleton(_, wdlFile: WomFile) => getPath(wdlFile.value) flatMap GcsBatchCommandBuilder.touchCommand + case WomValueSimpleton(_, wdlFile: WomFile) => + getPath(wdlFile.value) flatMap GcsBatchCommandBuilder.touchCommand } TryUtil.sequence(touchCommands) map { @@ -37,10 +41,13 @@ class BatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingAc } } - override def extractBlacklistPrefix(path: String): Option[String] = Option(path.stripPrefix("gs://").takeWhile(_ != '/')) + override def extractBlacklistPrefix(path: String): Option[String] = Option( + path.stripPrefix("gs://").takeWhile(_ != '/') + ) - override def processDetritus(sourceJobDetritusFiles: Map[String, String] - ): Try[(Map[String, Path], Set[IoCommand[_]])] = + override def processDetritus( + sourceJobDetritusFiles: Map[String, String] + 
): Try[(Map[String, Path], Set[IoCommand[_]])] = cachingStrategy match { case CopyCachedOutputs => super.processDetritus(sourceJobDetritusFiles) case UseOriginalCachedOutputs => @@ -63,7 +70,8 @@ class BatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingAc originalSimpletons: Seq[WomValueSimpleton], newOutputs: CallOutputs, originalDetritus: Map[String, String], - newDetritus: Map[String, Path]): Try[List[Set[IoCommand[_]]]] = Try { + newDetritus: Map[String, Path] + ): Try[List[Set[IoCommand[_]]]] = Try { cachingStrategy match { case UseOriginalCachedOutputs => val content = @@ -74,13 +82,17 @@ class BatchBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingAc """.stripMargin // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap - List(Set( - GcsBatchCommandBuilder.writeCommand( - path = jobPaths.forCallCacheCopyAttempts.callExecutionRoot / "call_caching_placeholder.txt", - content = content, - options = Seq(CloudStorageOptions.withMimeType("text/plain")), - ).get - )) + List( + Set( + GcsBatchCommandBuilder + .writeCommand( + path = jobPaths.forCallCacheCopyAttempts.callExecutionRoot / "call_caching_placeholder.txt", + content = content, + options = Seq(CloudStorageOptions.withMimeType("text/plain")) + ) + .get + ) + ) case CopyCachedOutputs => List.empty } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendFileHashingActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendFileHashingActor.scala index a3f000e7085..20f34876403 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendFileHashingActor.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchBackendFileHashingActor.scala @@ -3,6 +3,7 @@ package cromwell.backend.google.batch.callcaching import cromwell.backend.standard.callcaching.{StandardFileHashingActor, StandardFileHashingActorParams} import cromwell.filesystems.gcs.batch.GcsBatchCommandBuilder -class BatchBackendFileHashingActor(standardParams: StandardFileHashingActorParams) extends StandardFileHashingActor(standardParams) { +class BatchBackendFileHashingActor(standardParams: StandardFileHashingActorParams) + extends StandardFileHashingActor(standardParams) { override val ioCommandBuilder = GcsBatchCommandBuilder } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchCacheHitDuplicationStrategy.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchCacheHitDuplicationStrategy.scala index 778a0c2fefe..37467fc5e32 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchCacheHitDuplicationStrategy.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/callcaching/BatchCacheHitDuplicationStrategy.scala @@ -4,4 +4,3 @@ sealed trait BatchCacheHitDuplicationStrategy case object CopyCachedOutputs extends BatchCacheHitDuplicationStrategy case object UseOriginalCachedOutputs extends BatchCacheHitDuplicationStrategy - diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/errors/InvalidGcsPathsInManifestFileException.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/errors/InvalidGcsPathsInManifestFileException.scala index 
708906e8f17..abd0b447d25 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/errors/InvalidGcsPathsInManifestFileException.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/errors/InvalidGcsPathsInManifestFileException.scala @@ -3,5 +3,6 @@ package cromwell.backend.google.batch.errors import scala.util.control.NoStackTrace class InvalidGcsPathsInManifestFileException(paths: List[String]) extends Exception with NoStackTrace { - override def getMessage: String = s"Some of the paths in manifest file are not valid GCS paths: \n${paths.mkString("\n")}" -} \ No newline at end of file + override def getMessage: String = + s"Some of the paths in manifest file are not valid GCS paths: \n${paths.mkString("\n")}" +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDisk.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDisk.scala index 0e49985e24f..50a0704dd4e 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDisk.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDisk.scala @@ -21,11 +21,16 @@ object GcpBatchAttachedDisk { def diskTypeValidation(diskTypeString: String): ErrorOr[DiskType] = validateDiskType(diskTypeString) val validation: ErrorOr[GcpBatchAttachedDisk] = s match { - case WorkingDiskPattern(sizeGb, diskType) => (validateDiskType(diskType), sizeGbValidation(sizeGb)) mapN { - GcpBatchWorkingDisk.apply - } - case MountedDiskPattern(mountPoint, sizeGb, diskType) => (sizeGbValidation(sizeGb), diskTypeValidation(diskType)) mapN { (s, dt) => PipelinesApiEmptyMountedDisk(dt, s, DefaultPathBuilder.get(mountPoint)) } - case _ => s"Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: '$s'".invalidNel + case WorkingDiskPattern(sizeGb, diskType) => + (validateDiskType(diskType), sizeGbValidation(sizeGb)) mapN { + GcpBatchWorkingDisk.apply + } + case MountedDiskPattern(mountPoint, sizeGb, diskType) => + (sizeGbValidation(sizeGb), diskTypeValidation(diskType)) mapN { (s, dt) => + PipelinesApiEmptyMountedDisk(dt, s, DefaultPathBuilder.get(mountPoint)) + } + case _ => + s"Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: '$s'".invalidNel } Try(validation match { @@ -38,26 +43,25 @@ object GcpBatchAttachedDisk { }) } - private def validateDiskType(diskTypeName: String): ErrorOr[DiskType] = { + private def validateDiskType(diskTypeName: String): ErrorOr[DiskType] = DiskType.values().find(_.diskTypeName == diskTypeName) match { case Some(diskType) => diskType.validNel case None => val diskTypeNames = DiskType.values.map(_.diskTypeName).mkString(", ") s"Disk TYPE $diskTypeName should be one of $diskTypeNames".invalidNel } - } - private def validateLong(value: String): ErrorOr[Long] = { - try { + private def validateLong(value: String): ErrorOr[Long] = + try value.toLong.validNel - } catch { + catch { case _: IllegalArgumentException => s"$value not convertible to a Long".invalidNel } - } implicit class EnhancedDisks(val disks: Seq[GcpBatchAttachedDisk]) extends AnyVal { def adjustWorkingDiskWithNewMin(minimum: MemorySize, onAdjustment: => Unit): Seq[GcpBatchAttachedDisk] = disks map { - case disk: GcpBatchWorkingDisk if disk == GcpBatchWorkingDisk.Default && disk.sizeGb < minimum.to(MemoryUnit.GB).amount.toInt => + case 
disk: GcpBatchWorkingDisk + if disk == GcpBatchWorkingDisk.Default && disk.sizeGb < minimum.to(MemoryUnit.GB).amount.toInt => onAdjustment disk.copy(sizeGb = minimum.to(MemoryUnit.GB).amount.toInt) case other => other @@ -72,7 +76,8 @@ trait GcpBatchAttachedDisk { def mountPoint: Path } -case class PipelinesApiEmptyMountedDisk(diskType: DiskType, sizeGb: Int, mountPoint: Path) extends GcpBatchAttachedDisk { +case class PipelinesApiEmptyMountedDisk(diskType: DiskType, sizeGb: Int, mountPoint: Path) + extends GcpBatchAttachedDisk { val name = s"d-${mountPoint.pathAsString.md5Sum}" override def toString: String = s"$mountPoint $sizeGb ${diskType.diskTypeName}" @@ -95,4 +100,4 @@ case class GcpBatchReferenceFilesDisk(image: String, sizeGb: Int) extends GcpBat val mountPoint: Path = DefaultPathBuilder.get(s"/mnt/${image.md5Sum}") val name: String = s"d-${mountPoint.pathAsString.md5Sum}" val diskType: DiskType = DiskType.HDD -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/package.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/package.scala index 25489ff6869..50a2fbdcb31 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/package.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/package.scala @@ -1,4 +1,3 @@ - package cromwell.backend.google.batch import com.google.api.client.http.HttpResponseException @@ -8,24 +7,22 @@ import cromwell.core.path.Path package object io { implicit class PathEnhanced(val path: Path) extends AnyVal { - def writeAsJson(content: String): Path = { + def writeAsJson(content: String): Path = path.writeBytes(content.getBytes.iterator)(Seq(CloudStorageOptions.withMimeType("application/json"))) - } - def writeAsText(content: String): Path = { + def writeAsText(content: String): Path = path.writeBytes(content.getBytes.iterator)(Seq(CloudStorageOptions.withMimeType("text/plain"))) - } } - private [batch] def isFatalJesException(t: Throwable): Boolean = t match { + private[batch] def isFatalJesException(t: Throwable): Boolean = t match { case e: HttpResponseException if e.getStatusCode == 403 => true case e: HttpResponseException if e.getStatusCode == 400 && e.getContent.contains("INVALID_ARGUMENT") => true case _ => false } - private [batch] def isTransientJesException(t: Throwable): Boolean = t match { + private[batch] def isTransientJesException(t: Throwable): Boolean = t match { // Quota exceeded case e: HttpResponseException if e.getStatusCode == 429 => true case _ => false } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala index d6258169fa8..456f7115ae8 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala @@ -7,4 +7,4 @@ case class CreateGcpBatchParameters(jobDescriptor: BackendJobDescriptor, batchAttributes: GcpBatchConfigurationAttributes, projectId: String, region: String - ) +) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBackendInitializationData.scala 
b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBackendInitializationData.scala index 47169b30f0a..a64b78fb7d2 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBackendInitializationData.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBackendInitializationData.scala @@ -5,12 +5,11 @@ import cromwell.backend.google.batch.util.BatchExpressionFunctions import cromwell.backend.standard.{StandardInitializationData, StandardValidatedRuntimeAttributesBuilder} case class GcpBackendInitializationData( - override val workflowPaths: GcpBatchWorkflowPaths, - override val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, - gcpBatchConfiguration: GcpBatchConfiguration, - gcsCredentials: Credentials, - privateDockerEncryptionKeyName: Option[String], - privateDockerEncryptedToken: Option[String], - vpcNetworkAndSubnetworkProjectLabels: Option[VpcAndSubnetworkProjectLabelValues] - - ) extends StandardInitializationData(workflowPaths, runtimeAttributesBuilder, classOf[BatchExpressionFunctions] ) + override val workflowPaths: GcpBatchWorkflowPaths, + override val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, + gcpBatchConfiguration: GcpBatchConfiguration, + gcsCredentials: Credentials, + privateDockerEncryptionKeyName: Option[String], + privateDockerEncryptedToken: Option[String], + vpcNetworkAndSubnetworkProjectLabels: Option[VpcAndSubnetworkProjectLabelValues] +) extends StandardInitializationData(workflowPaths, runtimeAttributesBuilder, classOf[BatchExpressionFunctions]) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfiguration.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfiguration.scala index dc397c5755e..7e7db17ddf5 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfiguration.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfiguration.scala @@ -10,29 +10,27 @@ import spray.json._ import scala.concurrent.duration.FiniteDuration - class GcpBatchConfiguration(val configurationDescriptor: BackendConfigurationDescriptor, val googleConfig: GoogleConfiguration, val batchAttributes: GcpBatchConfigurationAttributes - ) extends DefaultJsonProtocol { +) extends DefaultJsonProtocol { val batchAuths: GcpBatchAuths = batchAttributes.auths val root: String = configurationDescriptor.backendConfig.getString("root") val batchTimeout: FiniteDuration = batchAttributes.batchTimeout val runtimeConfig: Option[Config] = configurationDescriptor.backendRuntimeAttributesConfig - - val dockerCredentials: Option[GcpBatchDockerCredentials] = { + val dockerCredentials: Option[GcpBatchDockerCredentials] = BackendDockerConfiguration.build(configurationDescriptor.backendConfig).dockerCredentials map { creds => GcpBatchDockerCredentials.apply(creds, googleConfig) - } - } + } val dockerEncryptionKeyName: Option[String] = dockerCredentials flatMap { _.keyName } val dockerEncryptionAuthName: Option[String] = dockerCredentials flatMap { _.authName } val dockerToken: Option[String] = dockerCredentials map { _.token } - val jobShell: String = configurationDescriptor.backendConfig.as[Option[String]]("job-shell").getOrElse( - configurationDescriptor.globalConfig.getOrElse("system.job-shell", "/bin/bash")) + val jobShell: String = 
configurationDescriptor.backendConfig + .as[Option[String]]("job-shell") + .getOrElse(configurationDescriptor.globalConfig.getOrElse("system.job-shell", "/bin/bash")) } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala index 0a57b0a2a5e..a212ad35f2b 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala @@ -11,10 +11,22 @@ import common.validation.Validation._ import cromwell.backend.CommonBackendConfigurationAttributes import cromwell.backend.google.batch._ import cromwell.backend.google.batch.authentication.GcpBatchAuths -import cromwell.backend.google.batch.callcaching.{BatchCacheHitDuplicationStrategy, CopyCachedOutputs, UseOriginalCachedOutputs} +import cromwell.backend.google.batch.callcaching.{ + BatchCacheHitDuplicationStrategy, + CopyCachedOutputs, + UseOriginalCachedOutputs +} import cromwell.backend.google.batch.io.GcpBatchReferenceFilesDisk -import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.{BatchRequestTimeoutConfiguration, GcsTransferConfiguration, VirtualPrivateCloudConfiguration} -import cromwell.backend.google.batch.util.{DockerImageCacheEntry, GcpBatchDockerCacheMappingOperations, GcpBatchReferenceFilesMappingOperations} +import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.{ + BatchRequestTimeoutConfiguration, + GcsTransferConfiguration, + VirtualPrivateCloudConfiguration +} +import cromwell.backend.google.batch.util.{ + DockerImageCacheEntry, + GcpBatchDockerCacheMappingOperations, + GcpBatchReferenceFilesMappingOperations +} import cromwell.cloudsupport.gcp.GoogleConfiguration import cromwell.cloudsupport.gcp.auth.GoogleAuthMode import cromwell.filesystems.gcs.GcsPathBuilder @@ -30,29 +42,33 @@ import scala.jdk.CollectionConverters._ import scala.util.matching.Regex import scala.util.{Failure, Success, Try} -case class GcpBatchConfigurationAttributes(project: String, - computeServiceAccount: String, - auths: GcpBatchAuths, - restrictMetadataAccess: Boolean, - dockerhubToken: String, - enableFuse: Boolean, - executionBucket: String, - location: String, - maxPollingInterval: Int, - qps: Int Refined Positive, - cacheHitDuplicationStrategy: BatchCacheHitDuplicationStrategy, - requestWorkers: Int Refined Positive, - batchTimeout: FiniteDuration, - logFlushPeriod: Option[FiniteDuration], - gcsTransferConfiguration: GcsTransferConfiguration, - virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, - batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, - referenceFileToDiskImageMappingOpt: Option[Map[String, GcpBatchReferenceFilesDisk]], - dockerImageToCacheDiskImageMappingOpt: Option[Map[String, DockerImageCacheEntry]], - checkpointingInterval: FiniteDuration - ) - -object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperations with GcpBatchReferenceFilesMappingOperations with StrictLogging { +case class GcpBatchConfigurationAttributes( + project: String, + computeServiceAccount: String, + auths: GcpBatchAuths, + restrictMetadataAccess: Boolean, + dockerhubToken: String, + enableFuse: Boolean, + executionBucket: String, + location: String, + maxPollingInterval: Int, + qps: 
Int Refined Positive, + cacheHitDuplicationStrategy: BatchCacheHitDuplicationStrategy, + requestWorkers: Int Refined Positive, + batchTimeout: FiniteDuration, + logFlushPeriod: Option[FiniteDuration], + gcsTransferConfiguration: GcsTransferConfiguration, + virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, + batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, + referenceFileToDiskImageMappingOpt: Option[Map[String, GcpBatchReferenceFilesDisk]], + dockerImageToCacheDiskImageMappingOpt: Option[Map[String, DockerImageCacheEntry]], + checkpointingInterval: FiniteDuration +) + +object GcpBatchConfigurationAttributes + extends GcpBatchDockerCacheMappingOperations + with GcpBatchReferenceFilesMappingOperations + with StrictLogging { /** * param transferAttempts This is the number of attempts, not retries, hence it is positive. @@ -64,11 +80,12 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati final case class VirtualPrivateCloudLiterals(network: String, subnetwork: Option[String]) final case class VirtualPrivateCloudConfiguration(labelsOption: Option[VirtualPrivateCloudLabels], - literalsOption: Option[VirtualPrivateCloudLiterals], - ) - - final case class BatchRequestTimeoutConfiguration(readTimeoutMillis: Option[Int Refined Positive], connectTimeoutMillis: Option[Int Refined Positive]) + literalsOption: Option[VirtualPrivateCloudLiterals] + ) + final case class BatchRequestTimeoutConfiguration(readTimeoutMillis: Option[Int Refined Positive], + connectTimeoutMillis: Option[Int Refined Positive] + ) lazy val Logger: Logger = LoggerFactory.getLogger("BatchConfiguration") @@ -119,45 +136,47 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati "genomics.default-zones" -> "default-runtime-attributes.zones" ) - def apply(googleConfig: GoogleConfiguration, backendConfig: Config, backendName: String): GcpBatchConfigurationAttributes = { - + def apply(googleConfig: GoogleConfiguration, + backendConfig: Config, + backendName: String + ): GcpBatchConfigurationAttributes = { - def vpcErrorMessage(missingKeys: List[String]) = s"Virtual Private Cloud configuration is invalid. Missing keys: `${missingKeys.mkString(",")}`.".invalidNel + def vpcErrorMessage(missingKeys: List[String]) = + s"Virtual Private Cloud configuration is invalid. Missing keys: `${missingKeys.mkString(",")}`.".invalidNel def validateVPCLabelsConfig(networkOption: Option[String], subnetworkOption: Option[String], - authOption: Option[String], - ): ErrorOr[Option[VirtualPrivateCloudLabels]] = { + authOption: Option[String] + ): ErrorOr[Option[VirtualPrivateCloudLabels]] = (networkOption, subnetworkOption, authOption) match { - case (Some(network), _, Some(auth)) => googleConfig.auth(auth) match { - case Valid(validAuth) => - Option(VirtualPrivateCloudLabels(network, subnetworkOption, validAuth)).validNel - case Invalid(error) => s"Auth $auth is not valid for Virtual Private Cloud configuration. Reason: $error".invalidNel - } + case (Some(network), _, Some(auth)) => + googleConfig.auth(auth) match { + case Valid(validAuth) => + Option(VirtualPrivateCloudLabels(network, subnetworkOption, validAuth)).validNel + case Invalid(error) => + s"Auth $auth is not valid for Virtual Private Cloud configuration. 
Reason: $error".invalidNel + } case (Some(_), _, None) => vpcErrorMessage(List("auth")) case (None, _, Some(_)) => vpcErrorMessage(List("network-label-key")) case (None, Some(_), None) => vpcErrorMessage(List("network-label-key", "auth")) case (None, None, None) => None.validNel } - } - def validateVPCLiteralsConfig(networkNameOption: Option[String], - subnetworkNameOption: Option[String], - ): ErrorOr[Option[VirtualPrivateCloudLiterals]] = { + subnetworkNameOption: Option[String] + ): ErrorOr[Option[VirtualPrivateCloudLiterals]] = (networkNameOption, subnetworkNameOption) match { case (None, Some(_)) => vpcErrorMessage(List("network-name")) case (Some(networkName), _) => Option(VirtualPrivateCloudLiterals(networkName, subnetworkNameOption)).valid case (None, None) => None.valid } - } def validateVPCConfig(networkNameOption: Option[String], subnetworkNameOption: Option[String], networkLabelOption: Option[String], subnetworkLabelOption: Option[String], - authOption: Option[String], - ): ErrorOr[VirtualPrivateCloudConfiguration] = { + authOption: Option[String] + ): ErrorOr[VirtualPrivateCloudConfiguration] = { val vpcLabelsValidation = validateVPCLabelsConfig(networkLabelOption, subnetworkLabelOption, authOption) val vpcLiteralsValidation = @@ -165,18 +184,20 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati (vpcLabelsValidation, vpcLiteralsValidation) mapN VirtualPrivateCloudConfiguration } - val configKeys = backendConfig.entrySet().asScala.toSet map { entry: java.util.Map.Entry[String, ConfigValue] => entry.getKey } + val configKeys = backendConfig.entrySet().asScala.toSet map { entry: java.util.Map.Entry[String, ConfigValue] => + entry.getKey + } warnNotRecognized(configKeys, batchKeys, backendName, Logger) - def warnDeprecated(keys: Set[String], deprecated: Map[String, String], logger: Logger): Unit = { val deprecatedKeys = keys.intersect(deprecated.keySet) - deprecatedKeys foreach { key => logger.warn(s"Found deprecated configuration key $key, replaced with ${deprecated.get(key)}") } + deprecatedKeys foreach { key => + logger.warn(s"Found deprecated configuration key $key, replaced with ${deprecated.get(key)}") + } } warnDeprecated(configKeys, deprecatedJesKeys, Logger) - val project: ErrorOr[String] = validate { backendConfig.as[String]("project") } @@ -187,7 +208,8 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati backendConfig.as[String]("genomics.location") } val maxPollingInterval: Int = backendConfig.as[Option[Int]]("maximum-polling-interval").getOrElse(600) - val computeServiceAccount: String = backendConfig.as[Option[String]]("genomics.compute-service-account").getOrElse("default") + val computeServiceAccount: String = + backendConfig.as[Option[String]]("genomics.compute-service-account").getOrElse("default") val genomicsAuthName: ErrorOr[String] = validate { backendConfig.as[String]("genomics.auth") } @@ -210,10 +232,14 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati backendConfig.as[Option[String]]("filesystems.gcs.caching.duplication-strategy").getOrElse("copy") match { case "copy" => CopyCachedOutputs case "reference" => UseOriginalCachedOutputs - case other => throw new IllegalArgumentException(s"Unrecognized caching duplication strategy: $other. Supported strategies are copy and reference. See reference.conf for more details.") + case other => + throw new IllegalArgumentException( + s"Unrecognized caching duplication strategy: $other. 
Supported strategies are copy and reference. See reference.conf for more details." + ) } } - val requestWorkers: ErrorOr[Int Refined Positive] = validatePositiveInt(backendConfig.as[Option[Int]]("request-workers").getOrElse(3), "request-workers") + val requestWorkers: ErrorOr[Int Refined Positive] = + validatePositiveInt(backendConfig.as[Option[Int]]("request-workers").getOrElse(3), "request-workers") val batchTimeout: FiniteDuration = backendConfig.getOrElse("batch-timeout", 7.days) @@ -225,9 +251,11 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati case None => Option(1.minute) } - val parallelCompositeUploadThreshold = validateGsutilMemorySpecification(backendConfig, "genomics.parallel-composite-upload-threshold") + val parallelCompositeUploadThreshold = + validateGsutilMemorySpecification(backendConfig, "genomics.parallel-composite-upload-threshold") - val localizationAttempts: ErrorOr[Int Refined Positive] = backendConfig.as[Option[Int]]("genomics.localization-attempts") + val localizationAttempts: ErrorOr[Int Refined Positive] = backendConfig + .as[Option[Int]]("genomics.localization-attempts") .map(attempts => validatePositiveInt(attempts, "genomics.localization-attempts")) .getOrElse(DefaultGcsTransferAttempts.validNel) @@ -250,88 +278,93 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati backendConfig.getAs[String]("virtual-private-cloud.auth") } - val virtualPrivateCloudConfiguration: ErrorOr[VirtualPrivateCloudConfiguration] = { + val virtualPrivateCloudConfiguration: ErrorOr[VirtualPrivateCloudConfiguration] = (vpcNetworkName, vpcSubnetworkName, vpcNetworkLabel, vpcSubnetworkLabel, vpcAuth) flatMapN validateVPCConfig - } - val batchRequestsReadTimeout = readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.read") - val batchRequestsConnectTimeout = readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.connect") + val batchRequestsReadTimeout = + readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.read") + val batchRequestsConnectTimeout = + readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.connect") - val batchRequestTimeoutConfigurationValidation = (batchRequestsReadTimeout, batchRequestsConnectTimeout) mapN { (read, connect) => - BatchRequestTimeoutConfiguration(readTimeoutMillis = read, connectTimeoutMillis = connect) + val batchRequestTimeoutConfigurationValidation = (batchRequestsReadTimeout, batchRequestsConnectTimeout) mapN { + (read, connect) => + BatchRequestTimeoutConfiguration(readTimeoutMillis = read, connectTimeoutMillis = connect) } - val referenceDiskLocalizationManifestFiles: ErrorOr[Option[List[ManifestFile]]] = validateReferenceDiskManifestConfigs(backendConfig, backendName) + val referenceDiskLocalizationManifestFiles: ErrorOr[Option[List[ManifestFile]]] = + validateReferenceDiskManifestConfigs(backendConfig, backendName) - val dockerImageCacheManifestFile: ErrorOr[Option[ValidFullGcsPath]] = validateGcsPathToDockerImageCacheManifestFile(backendConfig) + val dockerImageCacheManifestFile: ErrorOr[Option[ValidFullGcsPath]] = validateGcsPathToDockerImageCacheManifestFile( + backendConfig + ) val checkpointingInterval: FiniteDuration = backendConfig.getOrElse(checkpointingIntervalKey, 10.minutes) - def authGoogleConfigForBatchConfigurationAttributes(project: String, - bucket: String, - genomicsName: String, - location: String, - restrictMetadata: Boolean, - 
dockerhubToken: String, - enableFuse: Boolean, - gcsName: String, - qps: Int Refined Positive, - cacheHitDuplicationStrategy: BatchCacheHitDuplicationStrategy, - requestWorkers: Int Refined Positive, - gcsTransferConfiguration: GcsTransferConfiguration, - virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, - batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, - referenceDiskLocalizationManifestFilesOpt: Option[List[ManifestFile]], - dockerImageCacheManifestFileOpt: Option[ValidFullGcsPath]): ErrorOr[GcpBatchConfigurationAttributes] = - (googleConfig.auth(genomicsName), googleConfig.auth(gcsName)) mapN { - (genomicsAuth, gcsAuth) => - val generatedReferenceFilesMappingOpt = referenceDiskLocalizationManifestFilesOpt map { - generateReferenceFilesMapping(genomicsAuth, _) - } - val dockerImageToCacheDiskImageMappingOpt = dockerImageCacheManifestFileOpt map { - generateDockerImageToDiskImageMapping(genomicsAuth, _) - } - models.GcpBatchConfigurationAttributes( - project = project, - computeServiceAccount = computeServiceAccount, - auths = GcpBatchAuths(genomicsAuth, gcsAuth), - restrictMetadataAccess = restrictMetadata, - dockerhubToken = dockerhubToken, - enableFuse = enableFuse, - executionBucket = bucket, - location = location, - maxPollingInterval = maxPollingInterval, - qps = qps, - cacheHitDuplicationStrategy = cacheHitDuplicationStrategy, - requestWorkers = requestWorkers, - batchTimeout = batchTimeout, - logFlushPeriod = logFlushPeriod, - gcsTransferConfiguration = gcsTransferConfiguration, - virtualPrivateCloudConfiguration = virtualPrivateCloudConfiguration, - batchRequestTimeoutConfiguration = batchRequestTimeoutConfiguration, - referenceFileToDiskImageMappingOpt = generatedReferenceFilesMappingOpt, - dockerImageToCacheDiskImageMappingOpt = dockerImageToCacheDiskImageMappingOpt, - checkpointingInterval = checkpointingInterval - ) + def authGoogleConfigForBatchConfigurationAttributes( + project: String, + bucket: String, + genomicsName: String, + location: String, + restrictMetadata: Boolean, + dockerhubToken: String, + enableFuse: Boolean, + gcsName: String, + qps: Int Refined Positive, + cacheHitDuplicationStrategy: BatchCacheHitDuplicationStrategy, + requestWorkers: Int Refined Positive, + gcsTransferConfiguration: GcsTransferConfiguration, + virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, + batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, + referenceDiskLocalizationManifestFilesOpt: Option[List[ManifestFile]], + dockerImageCacheManifestFileOpt: Option[ValidFullGcsPath] + ): ErrorOr[GcpBatchConfigurationAttributes] = + (googleConfig.auth(genomicsName), googleConfig.auth(gcsName)) mapN { (genomicsAuth, gcsAuth) => + val generatedReferenceFilesMappingOpt = referenceDiskLocalizationManifestFilesOpt map { + generateReferenceFilesMapping(genomicsAuth, _) + } + val dockerImageToCacheDiskImageMappingOpt = dockerImageCacheManifestFileOpt map { + generateDockerImageToDiskImageMapping(genomicsAuth, _) + } + models.GcpBatchConfigurationAttributes( + project = project, + computeServiceAccount = computeServiceAccount, + auths = GcpBatchAuths(genomicsAuth, gcsAuth), + restrictMetadataAccess = restrictMetadata, + dockerhubToken = dockerhubToken, + enableFuse = enableFuse, + executionBucket = bucket, + location = location, + maxPollingInterval = maxPollingInterval, + qps = qps, + cacheHitDuplicationStrategy = cacheHitDuplicationStrategy, + requestWorkers = requestWorkers, + batchTimeout = batchTimeout, + logFlushPeriod = 
logFlushPeriod, + gcsTransferConfiguration = gcsTransferConfiguration, + virtualPrivateCloudConfiguration = virtualPrivateCloudConfiguration, + batchRequestTimeoutConfiguration = batchRequestTimeoutConfiguration, + referenceFileToDiskImageMappingOpt = generatedReferenceFilesMappingOpt, + dockerImageToCacheDiskImageMappingOpt = dockerImageToCacheDiskImageMappingOpt, + checkpointingInterval = checkpointingInterval + ) } - (project, - executionBucket, - genomicsAuthName, - location, - genomicsRestrictMetadataAccess, - dockerhubToken, - genomicsEnableFuse, - gcsFilesystemAuthName, - qpsValidation, - duplicationStrategy, - requestWorkers, - gcsTransferConfiguration, - virtualPrivateCloudConfiguration, - batchRequestTimeoutConfigurationValidation, - referenceDiskLocalizationManifestFiles, - dockerImageCacheManifestFile + executionBucket, + genomicsAuthName, + location, + genomicsRestrictMetadataAccess, + dockerhubToken, + genomicsEnableFuse, + gcsFilesystemAuthName, + qpsValidation, + duplicationStrategy, + requestWorkers, + gcsTransferConfiguration, + virtualPrivateCloudConfiguration, + batchRequestTimeoutConfigurationValidation, + referenceDiskLocalizationManifestFiles, + dockerImageCacheManifestFile ) flatMapN authGoogleConfigForBatchConfigurationAttributes match { case Valid(r) => r case Invalid(f) => @@ -342,25 +375,27 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati } } - private def validateSingleGcsPath(gcsPath: String): ErrorOr[ValidFullGcsPath] = { + private def validateSingleGcsPath(gcsPath: String): ErrorOr[ValidFullGcsPath] = GcsPathBuilder.validateGcsPath(gcsPath) match { case validPath: ValidFullGcsPath => validPath.validNel case invalidPath => s"Invalid GCS path: $invalidPath".invalidNel } - } - private[batch] def validateGcsPathToDockerImageCacheManifestFile(backendConfig: Config): ErrorOr[Option[ValidFullGcsPath]] = { + private[batch] def validateGcsPathToDockerImageCacheManifestFile( + backendConfig: Config + ): ErrorOr[Option[ValidFullGcsPath]] = backendConfig.getAs[String]("docker-image-cache-manifest-file") match { case Some(gcsPath) => validateSingleGcsPath(gcsPath).map(Option.apply) case None => None.validNel } - } /** * Validate that the entries corresponding to "reference-disk-localization-manifests" in the specified * backend are parseable as `ManifestFile`s. */ - private[batch] def validateReferenceDiskManifestConfigs(backendConfig: Config, backendName: String): ErrorOr[Option[List[ManifestFile]]] = { + private[batch] def validateReferenceDiskManifestConfigs(backendConfig: Config, + backendName: String + ): ErrorOr[Option[List[ManifestFile]]] = Try(backendConfig.getAs[List[Config]]("reference-disk-localization-manifests")) match { case Failure(e) => ("Error attempting to parse value for 'reference-disk-localization-manifests' as List[Config]: " + @@ -375,10 +410,13 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati // equivalent using clunkier syntax: configs traverse parser.decode[ManifestFile] match { case Right(manifests) => - logger.info(s"Reference disks feature for $backendName backend is configured with the following reference images: ${manifests.map(_.imageIdentifier).mkString(", ")}.") + logger.info( + s"Reference disks feature for $backendName backend is configured with the following reference images: ${manifests.map(_.imageIdentifier).mkString(", ")}." 
+ ) Option(manifests).validNel case Left(err) => - val message = s"Reference disks misconfigured for backend $backendName, could not parse as List[ManifestFile]" + val message = + s"Reference disks misconfigured for backend $backendName, could not parse as List[ManifestFile]" logger.error(message, err.getCause) s"$message: ${err.getMessage}".invalidNel } @@ -387,7 +425,6 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati None.validNel } } - } def validateQps(config: Config): ErrorOr[Int Refined Positive] = { import eu.timepit.refined._ @@ -396,7 +433,8 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati val qpsCandidate = qp100s / 100 refineV[Positive](qpsCandidate) match { - case Left(_) => s"Calculated QPS for Google Genomics API ($qpsCandidate/s) was not a positive integer (supplied value was $qp100s per 100s)".invalidNel + case Left(_) => + s"Calculated QPS for Google Genomics API ($qpsCandidate/s) was not a positive integer (supplied value was $qp100s per 100s)".invalidNel case Right(refined) => refined.validNel } } @@ -405,19 +443,21 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati val entry = config.as[Option[String]](configPath) entry match { case None => "0".validNel - case Some(v@GsutilHumanBytes(_, _)) => v.validNel - case Some(bad) => s"Invalid gsutil memory specification in Cromwell configuration at path '$configPath': '$bad'".invalidNel + case Some(v @ GsutilHumanBytes(_, _)) => v.validNel + case Some(bad) => + s"Invalid gsutil memory specification in Cromwell configuration at path '$configPath': '$bad'".invalidNel } } - def validatePositiveInt(n: Int, configPath: String): Validated[NonEmptyList[String], Refined[Int, Positive]] = { + def validatePositiveInt(n: Int, configPath: String): Validated[NonEmptyList[String], Refined[Int, Positive]] = refineV[Positive](n) match { case Left(_) => s"Value $n for $configPath is not strictly positive".invalidNel case Right(refined) => refined.validNel } - } - def readOptionalPositiveMillisecondsIntFromDuration(backendConfig: Config, configPath: String): ErrorOr[Option[Int Refined Positive]] = { + def readOptionalPositiveMillisecondsIntFromDuration(backendConfig: Config, + configPath: String + ): ErrorOr[Option[Int Refined Positive]] = { def validate(n: FiniteDuration) = { val result: ErrorOr[Int Refined Positive] = Try(n.toMillis.toInt).toErrorOr flatMap { millisInt => @@ -445,7 +485,7 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati List("GiB", "Gibit", "G"), List("TiB", "Tibit", "T"), List("PiB", "Pibit", "P"), - List("EiB", "Eibit", "E"), + List("EiB", "Eibit", "E") ) val suffixes = for { @@ -454,7 +494,7 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati } yield name // Differs from the Python original in a couple of ways: - //* The Python original uses named groups which are not supported in Scala regexes. + // * The Python original uses named groups which are not supported in Scala regexes. // (?P\d*\.\d+|\d+)\s*(?P%s)? // // * The Python original lowercases both the units and the human string before running the matcher. 
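For context on the hunk above: the `GsutilHumanBytes` regex accepts an integer or decimal number followed by an optional, case-insensitive unit suffix drawn from the (XiB, Xibit, X) triples listed earlier in this file. A minimal self-contained sketch of how that pattern behaves; the abbreviated `suffixes` list and the `GsutilHumanBytesDemo` name are assumptions for illustration, while the regex string itself is the one built in `validateGsutilMemorySpecification`:

object GsutilHumanBytesDemo extends App {
  // Abbreviated alternation for illustration; the patch builds the full
  // list from the (GiB, Gibit, G) style unit triples shown above.
  private val suffixes = List("KiB", "MiB", "GiB", "K", "M", "G").mkString("|")
  private val GsutilHumanBytes = "(?i)(\\d*\\.\\d+|\\d+)\\s*(%s)?".format(suffixes).r

  List("150M", "1.5GiB", "0", "not-a-size") foreach {
    // Group 1 captures the number, group 2 the optional unit
    // (null when the suffix is absent, hence the Option wrapper).
    case s @ GsutilHumanBytes(number, unit) =>
      println(s"'$s' parses: number=$number, unit=${Option(unit).getOrElse("<none>")}")
    case s =>
      println(s"'$s' would be rejected by validateGsutilMemorySpecification")
  }
}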
@@ -462,4 +502,4 @@ object GcpBatchConfigurationAttributes extends GcpBatchDockerCacheMappingOperati val orSuffixes = suffixes.mkString("|") "(?i)(\\d*\\.\\d+|\\d+)\\s*(%s)?".format(orSuffixes).r } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchCustomMachineType.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchCustomMachineType.scala index 84366f4e969..cf5db4df9e9 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchCustomMachineType.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchCustomMachineType.scala @@ -21,6 +21,7 @@ import scala.math.{log, pow} * - https://cloud.google.com/sdk/gcloud/reference/compute/instances/create#--custom-vm-type */ trait CustomMachineType { + /** * The vm prefix to create this custom machine type. */ @@ -54,10 +55,7 @@ trait CustomMachineType { /** * Generates a custom machine type based on the requested memory and cpu */ - def machineType(requestedMemory: MemorySize, - requestedCpu: Int Refined Positive, - jobLogger: Logger, - ): String = { + def machineType(requestedMemory: MemorySize, requestedCpu: Int Refined Positive, jobLogger: Logger): String = { val memory = requestedMemory |> validateMemory val cpu = requestedCpu |> validateCpu @@ -92,8 +90,8 @@ trait CustomMachineType { adjustedCpu: Int, originalMemory: MemorySize, adjustedMemory: MemorySize, - logger: Logger, - ): Unit = { + logger: Logger + ): Unit = { def memoryAdjustmentLog = s"memory was adjusted from ${originalMemory.toMBString} to ${adjustedMemory.toMBString}" def cpuAdjustmentLog = s"cpu was adjusted from $originalCpu to $adjustedCpu" @@ -101,7 +99,7 @@ trait CustomMachineType { val messageOption = ( originalCpu == adjustedCpu, - originalMemory.to(MemoryUnit.MB).amount == adjustedMemory.to(MemoryUnit.MB).amount, + originalMemory.to(MemoryUnit.MB).amount == adjustedMemory.to(MemoryUnit.MB).amount ) match { case (true, false) => Option(memoryAdjustmentLog) case (false, true) => Option(cpuAdjustmentLog) @@ -129,17 +127,15 @@ case object N1CustomMachineType extends CustomMachineType { override val maxMemoryPerCpu: MemorySize = MemorySize(6.5, MemoryUnit.GB) override val memoryFactor: MemorySize = MemorySize(256, MemoryUnit.MB) - override def validateCpu(cpu: Refined[Int, Positive]): Int = { + override def validateCpu(cpu: Refined[Int, Positive]): Int = // Either one cpu, or an even number of cpus cpu.value match { case 1 => 1 case cpu => cpu + (cpu % 2) } - } - override def validateMemory(memory: MemorySize): MemorySize = { + override def validateMemory(memory: MemorySize): MemorySize = memory.asMultipleOf(memoryFactor) - } } case object N2CustomMachineType extends CustomMachineType { @@ -148,18 +144,16 @@ case object N2CustomMachineType extends CustomMachineType { override val maxMemoryPerCpu: MemorySize = MemorySize(8.0, MemoryUnit.GB) override val memoryFactor: MemorySize = MemorySize(256, MemoryUnit.MB) - override def validateCpu(cpu: Refined[Int, Positive]): Int = { + override def validateCpu(cpu: Refined[Int, Positive]): Int = // cpus must be divisible by 2 up to 32, and higher numbers must be divisible by 4 cpu.value match { case cpu if cpu <= 32 => cpu + (cpu % 2) case cpu if cpu % 4 == 0 => cpu case cpu => cpu + (4 - (cpu % 4)) } - } - override def validateMemory(memory: MemorySize): MemorySize = { + override def validateMemory(memory: 
MemorySize): MemorySize = memory.asMultipleOf(memoryFactor) - } } case object N2DCustomMachineType extends CustomMachineType { @@ -167,18 +161,15 @@ case object N2DCustomMachineType extends CustomMachineType { override val minMemoryPerCpu: MemorySize = MemorySize(0.5, MemoryUnit.GB) override val maxMemoryPerCpu: MemorySize = MemorySize(8.0, MemoryUnit.GB) override val memoryFactor: MemorySize = MemorySize(256, MemoryUnit.MB) - - override def validateCpu(cpu: Refined[Int, Positive]): Int = { + + override def validateCpu(cpu: Refined[Int, Positive]): Int = cpu.value match { - case cpu if cpu <= 16 => 2 max pow(2, (log(cpu.toDouble)/log(2)).ceil).toInt + case cpu if cpu <= 16 => 2 max pow(2, (log(cpu.toDouble) / log(2)).ceil).toInt case cpu if cpu > 16 && cpu <= 96 && cpu % 16 == 0 => cpu case cpu if cpu > 16 && cpu <= 96 => cpu + 16 - (cpu % 16) case cpu if cpu > 96 => 96 } - } - override def validateMemory(memory: MemorySize): MemorySize = { + override def validateMemory(memory: MemorySize): MemorySize = memory.asMultipleOf(memoryFactor) - } } - diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala index 923e5dc16f5..494eb6b0faa 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala @@ -16,18 +16,16 @@ object GcpBatchJobPaths { val DrsLocalizationManifestName = "drs_manifest" } -case class GcpBatchJobPaths(override val workflowPaths: GcpBatchWorkflowPaths, jobKey: BackendJobDescriptorKey, override val isCallCacheCopyAttempt: Boolean = false) extends JobPaths { +case class GcpBatchJobPaths(override val workflowPaths: GcpBatchWorkflowPaths, + jobKey: BackendJobDescriptorKey, + override val isCallCacheCopyAttempt: Boolean = false +) extends JobPaths { def batchLogBasename = { - val index = jobKey - .index + val index = jobKey.index .map(s => s"-$s") .getOrElse("") - s"${ - jobKey - .node - .localName - }$index" + s"${jobKey.node.localName}$index" } val batchLogFilename: String = s"$batchLogBasename.log" diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchParameters.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchParameters.scala index c2c75ce5667..010ee9d2fdb 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchParameters.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchParameters.scala @@ -52,32 +52,33 @@ sealed trait GcpBatchOutput extends BatchParameter { def contentType: Option[ContentType] = None } -final case class GcpBatchFileInput(name: String, - cloudPath: Path, - relativeHostPath: Path, - mount: GcpBatchAttachedDisk) extends GcpBatchInput +final case class GcpBatchFileInput(name: String, cloudPath: Path, relativeHostPath: Path, mount: GcpBatchAttachedDisk) + extends GcpBatchInput final case class GcpBatchDirectoryInput(name: String, - cloudPath: Path, - relativeHostPath: Path, - mount: GcpBatchAttachedDisk) extends GcpBatchInput - -final case class GcpBatchFileOutput(name: String, cloudPath: Path, relativeHostPath: Path, - mount: GcpBatchAttachedDisk, - optional: Boolean, - secondary: Boolean, - uploadPeriod: Option[FiniteDuration] = 
None, - override val contentType: Option[ContentType] = None) extends GcpBatchOutput + mount: GcpBatchAttachedDisk +) extends GcpBatchInput + +final case class GcpBatchFileOutput(name: String, + cloudPath: Path, + relativeHostPath: Path, + mount: GcpBatchAttachedDisk, + optional: Boolean, + secondary: Boolean, + uploadPeriod: Option[FiniteDuration] = None, + override val contentType: Option[ContentType] = None +) extends GcpBatchOutput final case class GcpBatchDirectoryOutput(name: String, - cloudPath: Path, - relativeHostPath: Path, - mount: GcpBatchAttachedDisk, - optional: Boolean, - secondary: Boolean, - override val contentType: Option[ContentType] = None) extends GcpBatchOutput + cloudPath: Path, + relativeHostPath: Path, + mount: GcpBatchAttachedDisk, + optional: Boolean, + secondary: Boolean, + override val contentType: Option[ContentType] = None +) extends GcpBatchOutput // TODO: Remove when support for V1 is stopped, this is only used to pass the extra_param auth file -final case class GcpBatchLiteralInput(name: String, value: String) \ No newline at end of file +final case class GcpBatchLiteralInput(name: String, value: String) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRequest.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRequest.scala index 3c616c0ba4e..df0e8a9cede 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRequest.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRequest.scala @@ -6,4 +6,5 @@ import cromwell.core.WorkflowId case class GcpBatchRequest(workflowId: WorkflowId, createParameters: CreateBatchJobParameters, jobName: String, - gcpBatchParameters: CreateGcpBatchParameters) + gcpBatchParameters: CreateGcpBatchParameters +) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala index c9dc62b0cc7..cdf3826f7af 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributes.scala @@ -37,21 +37,21 @@ object GpuResource { final case class GpuResource(gpuType: GpuType, gpuCount: Int Refined Positive) -final case class GcpBatchRuntimeAttributes( - cpu: Int Refined Positive, - cpuPlatform: Option[String], - gpuResource: Option[GpuResource], - zones: Vector[String], - preemptible: Int, - bootDiskSize: Int, - memory: MemorySize, - disks: Seq[GcpBatchAttachedDisk], - dockerImage: String, - failOnStderr: Boolean, - continueOnReturnCode: ContinueOnReturnCode, - noAddress: Boolean, - useDockerImageCache: Option[Boolean], - checkpointFilename: Option[String]) +final case class GcpBatchRuntimeAttributes(cpu: Int Refined Positive, + cpuPlatform: Option[String], + gpuResource: Option[GpuResource], + zones: Vector[String], + preemptible: Int, + bootDiskSize: Int, + memory: MemorySize, + disks: Seq[GcpBatchAttachedDisk], + dockerImage: String, + failOnStderr: Boolean, + continueOnReturnCode: ContinueOnReturnCode, + noAddress: Boolean, + useDockerImageCache: Option[Boolean], + checkpointFilename: Option[String] +) object GcpBatchRuntimeAttributes { @@ -74,147 +74,188 @@ object GcpBatchRuntimeAttributes { 
private val DisksDefaultValue = WomString(s"${GcpBatchWorkingDisk.Name} 10 SSD") val CpuPlatformKey = "cpuPlatform" - private val cpuPlatformValidationInstance = new StringRuntimeAttributesValidation(CpuPlatformKey) - .optional + private val cpuPlatformValidationInstance = new StringRuntimeAttributesValidation(CpuPlatformKey).optional // via `gcloud compute zones describe us-central1-a` val CpuPlatformIntelCascadeLakeValue = "Intel Cascade Lake" val CpuPlatformAMDRomeValue = "AMD Rome" - private def cpuMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = CpuValidation - .instanceMin - .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) + private def cpuMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = + CpuValidation.instanceMin + .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) val UseDockerImageCacheKey = "useDockerImageCache" - private val useDockerImageCacheValidationInstance = new BooleanRuntimeAttributesValidation(UseDockerImageCacheKey) - .optional + private val useDockerImageCacheValidationInstance = new BooleanRuntimeAttributesValidation( + UseDockerImageCacheKey + ).optional val CheckpointFileKey = "checkpointFile" private val checkpointFileValidationInstance = new StringRuntimeAttributesValidation(CheckpointFileKey).optional private val MemoryDefaultValue = "2048 MB" - private def cpuValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = CpuValidation - .instance - .withDefault(CpuValidation - .configDefaultWomValue(runtimeConfig) getOrElse CpuValidation - .defaultMin) - private def cpuPlatformValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = cpuPlatformValidationInstance - private def gpuTypeValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[GpuType] = GpuTypeValidation.optional + private def cpuValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = + CpuValidation.instance + .withDefault( + CpuValidation + .configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin + ) + private def cpuPlatformValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = + cpuPlatformValidationInstance + private def gpuTypeValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[GpuType] = + GpuTypeValidation.optional val GpuDriverVersionKey = "nvidiaDriverVersion" - private def gpuDriverValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = new StringRuntimeAttributesValidation(GpuDriverVersionKey).optional + private def gpuDriverValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = + new StringRuntimeAttributesValidation(GpuDriverVersionKey).optional - private def gpuCountValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optional - private def gpuMinValidation(runtimeConfig: Option[Config]):OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optionalMin + private def gpuCountValidation( + runtimeConfig: Option[Config] + ): OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optional + private def gpuMinValidation( + runtimeConfig: Option[Config] + ): OptionalRuntimeAttributesValidation[Int Refined Positive] = 
GpuValidation.optionalMin private val dockerValidation: RuntimeAttributesValidation[String] = DockerValidation.instance private def failOnStderrValidation(runtimeConfig: Option[Config]) = FailOnStderrValidation.default(runtimeConfig) - private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = ContinueOnReturnCodeValidation.default(runtimeConfig) + private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = + ContinueOnReturnCodeValidation.default(runtimeConfig) - private def disksValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Seq[GcpBatchAttachedDisk]] = DisksValidation - .withDefault(DisksValidation.configDefaultWomValue(runtimeConfig) getOrElse DisksDefaultValue) + private def disksValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Seq[GcpBatchAttachedDisk]] = + DisksValidation + .withDefault(DisksValidation.configDefaultWomValue(runtimeConfig) getOrElse DisksDefaultValue) - private def zonesValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Vector[String]] = ZonesValidation - .withDefault(ZonesValidation - .configDefaultWomValue(runtimeConfig) getOrElse ZonesDefaultValue) + private def zonesValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Vector[String]] = + ZonesValidation + .withDefault( + ZonesValidation + .configDefaultWomValue(runtimeConfig) getOrElse ZonesDefaultValue + ) - private def preemptibleValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = preemptibleValidationInstance - .withDefault(preemptibleValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse PreemptibleDefaultValue) + private def preemptibleValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = + preemptibleValidationInstance + .withDefault(preemptibleValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse PreemptibleDefaultValue) - private def memoryValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = { + private def memoryValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = MemoryValidation.withDefaultMemory( RuntimeAttributesKeys.MemoryKey, - MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, runtimeConfig) getOrElse MemoryDefaultValue) - } + MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, runtimeConfig) getOrElse MemoryDefaultValue + ) - private def memoryMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = { + private def memoryMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = MemoryValidation.withDefaultMemory( RuntimeAttributesKeys.MemoryMinKey, MemoryValidation - .configDefaultString(RuntimeAttributesKeys.MemoryMinKey, runtimeConfig) getOrElse MemoryDefaultValue) - } - + .configDefaultString(RuntimeAttributesKeys.MemoryMinKey, runtimeConfig) getOrElse MemoryDefaultValue + ) - private def bootDiskSizeValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = bootDiskValidationInstance - .withDefault(bootDiskValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse BootDiskDefaultValue) + private def bootDiskSizeValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = + bootDiskValidationInstance + .withDefault(bootDiskValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse BootDiskDefaultValue) - private def noAddressValidation(runtimeConfig: Option[Config]): 
RuntimeAttributesValidation[Boolean] = noAddressValidationInstance - .withDefault(noAddressValidationInstance - .configDefaultWomValue(runtimeConfig) getOrElse NoAddressDefaultValue) + private def noAddressValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = + noAddressValidationInstance + .withDefault( + noAddressValidationInstance + .configDefaultWomValue(runtimeConfig) getOrElse NoAddressDefaultValue + ) - private def useDockerImageCacheValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = + private def useDockerImageCacheValidation( + runtimeConfig: Option[Config] + ): OptionalRuntimeAttributesValidation[Boolean] = useDockerImageCacheValidationInstance - private val outDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = { + private val outDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = InformationValidation.optional(RuntimeAttributesKeys.OutDirMinKey, MemoryUnit.MB, allowZero = true) - } - private val tmpDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = { + private val tmpDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = InformationValidation.optional(RuntimeAttributesKeys.TmpDirMinKey, MemoryUnit.MB, allowZero = true) - } - private val inputDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = { + private val inputDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = InformationValidation.optional(RuntimeAttributesKeys.DnaNexusInputDirMinKey, MemoryUnit.MB, allowZero = true) - } def runtimeAttributesBuilder(batchConfiguration: GcpBatchConfiguration): StandardValidatedRuntimeAttributesBuilder = { val runtimeConfig = batchConfiguration.runtimeConfig - StandardValidatedRuntimeAttributesBuilder.default(runtimeConfig).withValidation( - gpuCountValidation(runtimeConfig), - gpuTypeValidation(runtimeConfig), - gpuDriverValidation(runtimeConfig), - cpuValidation(runtimeConfig), - cpuPlatformValidation(runtimeConfig), - cpuMinValidation(runtimeConfig), - gpuMinValidation(runtimeConfig), - disksValidation(runtimeConfig), - noAddressValidation(runtimeConfig), - zonesValidation(runtimeConfig), - preemptibleValidation(runtimeConfig), - memoryValidation(runtimeConfig), - memoryMinValidation(runtimeConfig), - bootDiskSizeValidation(runtimeConfig), - useDockerImageCacheValidation(runtimeConfig), - checkpointFileValidationInstance, - dockerValidation, - outDirMinValidation, - tmpDirMinValidation, - inputDirMinValidation - ) + StandardValidatedRuntimeAttributesBuilder + .default(runtimeConfig) + .withValidation( + gpuCountValidation(runtimeConfig), + gpuTypeValidation(runtimeConfig), + gpuDriverValidation(runtimeConfig), + cpuValidation(runtimeConfig), + cpuPlatformValidation(runtimeConfig), + cpuMinValidation(runtimeConfig), + gpuMinValidation(runtimeConfig), + disksValidation(runtimeConfig), + noAddressValidation(runtimeConfig), + zonesValidation(runtimeConfig), + preemptibleValidation(runtimeConfig), + memoryValidation(runtimeConfig), + memoryMinValidation(runtimeConfig), + bootDiskSizeValidation(runtimeConfig), + useDockerImageCacheValidation(runtimeConfig), + checkpointFileValidationInstance, + dockerValidation, + outDirMinValidation, + tmpDirMinValidation, + inputDirMinValidation + ) } - def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, runtimeAttrsConfig: Option[Config]): GcpBatchRuntimeAttributes = { - val cpu: Int Refined Positive = RuntimeAttributesValidation.extract(cpuValidation(runtimeAttrsConfig), 
validatedRuntimeAttributes) - val cpuPlatform: Option[String] = RuntimeAttributesValidation.extractOption(cpuPlatformValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) - val checkpointFileName: Option[String] = RuntimeAttributesValidation.extractOption(checkpointFileValidationInstance.key, validatedRuntimeAttributes) + def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, + runtimeAttrsConfig: Option[Config] + ): GcpBatchRuntimeAttributes = { + val cpu: Int Refined Positive = + RuntimeAttributesValidation.extract(cpuValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val cpuPlatform: Option[String] = RuntimeAttributesValidation.extractOption( + cpuPlatformValidation(runtimeAttrsConfig).key, + validatedRuntimeAttributes + ) + val checkpointFileName: Option[String] = + RuntimeAttributesValidation.extractOption(checkpointFileValidationInstance.key, validatedRuntimeAttributes) - //GPU + // GPU lazy val gpuType: Option[GpuType] = RuntimeAttributesValidation .extractOption(gpuTypeValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) lazy val gpuCount: Option[Int Refined Positive] = RuntimeAttributesValidation .extractOption(gpuCountValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) - lazy val gpuDriver: Option[String] = RuntimeAttributesValidation.extractOption(gpuDriverValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) + lazy val gpuDriver: Option[String] = + RuntimeAttributesValidation.extractOption(gpuDriverValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) val gpuResource: Option[GpuResource] = if (gpuType.isDefined || gpuCount.isDefined || gpuDriver.isDefined) { - Option(GpuResource(gpuType.getOrElse(GpuType.DefaultGpuType), gpuCount - .getOrElse(GpuType.DefaultGpuCount))) + Option( + GpuResource(gpuType.getOrElse(GpuType.DefaultGpuType), + gpuCount + .getOrElse(GpuType.DefaultGpuCount) + ) + ) } else { None } - val docker: String = RuntimeAttributesValidation.extract(dockerValidation, validatedRuntimeAttributes) - val failOnStderr: Boolean = RuntimeAttributesValidation.extract(failOnStderrValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract(continueOnReturnCodeValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val noAddress: Boolean = RuntimeAttributesValidation.extract(noAddressValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val failOnStderr: Boolean = + RuntimeAttributesValidation.extract(failOnStderrValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract( + continueOnReturnCodeValidation(runtimeAttrsConfig), + validatedRuntimeAttributes + ) + val noAddress: Boolean = + RuntimeAttributesValidation.extract(noAddressValidation(runtimeAttrsConfig), validatedRuntimeAttributes) val zones: Vector[String] = RuntimeAttributesValidation.extract(ZonesValidation, validatedRuntimeAttributes) - val preemptible: Int = RuntimeAttributesValidation.extract(preemptibleValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val bootDiskSize: Int = RuntimeAttributesValidation.extract(bootDiskSizeValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val memory: MemorySize = RuntimeAttributesValidation.extract(memoryValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val disks: Seq[GcpBatchAttachedDisk] = RuntimeAttributesValidation.extract(disksValidation(runtimeAttrsConfig), 
validatedRuntimeAttributes) - val useDockerImageCache: Option[Boolean] = RuntimeAttributesValidation.extractOption(useDockerImageCacheValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) + val preemptible: Int = + RuntimeAttributesValidation.extract(preemptibleValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val bootDiskSize: Int = + RuntimeAttributesValidation.extract(bootDiskSizeValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val memory: MemorySize = + RuntimeAttributesValidation.extract(memoryValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val disks: Seq[GcpBatchAttachedDisk] = + RuntimeAttributesValidation.extract(disksValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val useDockerImageCache: Option[Boolean] = RuntimeAttributesValidation.extractOption( + useDockerImageCacheValidation(runtimeAttrsConfig).key, + validatedRuntimeAttributes + ) val outDirMin: Option[MemorySize] = RuntimeAttributesValidation .extractOption(outDirMinValidation.key, validatedRuntimeAttributes) @@ -223,9 +264,10 @@ object GcpBatchRuntimeAttributes { val inputDirMin: Option[MemorySize] = RuntimeAttributesValidation .extractOption(inputDirMinValidation.key, validatedRuntimeAttributes) - - val totalExecutionDiskSizeBytes = List(inputDirMin.map(_.bytes), outDirMin.map(_.bytes), tmpDirMin.map(_.bytes)) - .flatten.fold(MemorySize(0, MemoryUnit.Bytes).bytes)(_ + _) + val totalExecutionDiskSizeBytes = List(inputDirMin.map(_.bytes), + outDirMin.map(_.bytes), + tmpDirMin.map(_.bytes) + ).flatten.fold(MemorySize(0, MemoryUnit.Bytes).bytes)(_ + _) val totalExecutionDiskSize = MemorySize(totalExecutionDiskSizeBytes, MemoryUnit.Bytes) val adjustedDisks = disks.adjustWorkingDiskWithNewMin(totalExecutionDiskSize, ()) @@ -244,10 +286,10 @@ object GcpBatchRuntimeAttributes { continueOnReturnCode, noAddress, useDockerImageCache, - checkpointFileName) + checkpointFileName + ) } - } object ZonesValidation extends RuntimeAttributesValidation[Vector[String]] { @@ -277,24 +319,23 @@ object DisksValidation extends RuntimeAttributesValidation[Seq[GcpBatchAttachedD } private def validateLocalDisks(disks: Seq[String]): ErrorOr[Seq[GcpBatchAttachedDisk]] = { - val diskNels: ErrorOr[Seq[GcpBatchAttachedDisk]] = disks.toList.traverse[ErrorOr, GcpBatchAttachedDisk](validateLocalDisk) + val diskNels: ErrorOr[Seq[GcpBatchAttachedDisk]] = + disks.toList.traverse[ErrorOr, GcpBatchAttachedDisk](validateLocalDisk) val defaulted: ErrorOr[Seq[GcpBatchAttachedDisk]] = addDefault(diskNels) defaulted } - private def validateLocalDisk(disk: String): ErrorOr[GcpBatchAttachedDisk] = { + private def validateLocalDisk(disk: String): ErrorOr[GcpBatchAttachedDisk] = GcpBatchAttachedDisk.parse(disk) match { case scala.util.Success(attachedDisk) => attachedDisk.validNel case scala.util.Failure(ex) => ex.getMessage.invalidNel } - } - private def addDefault(disksNel: ErrorOr[Seq[GcpBatchAttachedDisk]]): ErrorOr[Seq[GcpBatchAttachedDisk]] = { + private def addDefault(disksNel: ErrorOr[Seq[GcpBatchAttachedDisk]]): ErrorOr[Seq[GcpBatchAttachedDisk]] = disksNel map { case disks if disks.exists(_.name == GcpBatchWorkingDisk.Name) => disks case disks => disks :+ GcpBatchWorkingDisk.Default } - } override protected def missingValueMessage: String = s"Expecting $key runtime attribute to be a comma separated String or Array[String]" diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchWorkflowPaths.scala 
b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchWorkflowPaths.scala index fcb80241b21..5fdadce45d0 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchWorkflowPaths.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchWorkflowPaths.scala @@ -21,10 +21,9 @@ object GcpBatchWorkflowPaths { private val AuthFilePathOptionKey = "auth_bucket" private val GcsPrefix = "gs://" - private def callCachePathPrefixFromExecutionRoot(executionRoot: String): String = { + private def callCachePathPrefixFromExecutionRoot(executionRoot: String): String = // If the root looks like gs://bucket/stuff-under-bucket this should return gs://bucket GcsPrefix + executionRoot.substring(GcsPrefix.length).takeWhile(_ != '/') - } } case class GcpBatchWorkflowPaths(workflowDescriptor: BackendWorkflowDescriptor, gcsCredentials: Credentials, @@ -35,20 +34,25 @@ case class GcpBatchWorkflowPaths(workflowDescriptor: BackendWorkflowDescriptor, // combined controller + job standard output and error files. PAPI v1 controls the periodic // delocalization of these files so the metadata Cromwell publishes for these files needs // to match the PAPI v1 names. - standardStreamNameToFileNameMetadataMapper: (GcpBatchJobPaths, String) => String)(implicit ec: ExecutionContext) extends WorkflowPaths { + standardStreamNameToFileNameMetadataMapper: (GcpBatchJobPaths, String) => String +)(implicit ec: ExecutionContext) + extends WorkflowPaths { - override lazy val executionRootString: String = workflowDescriptor.workflowOptions.getOrElse(GcpBatchWorkflowPaths.GcsRootOptionKey, gcpBatchConfiguration.root) - override lazy val callCacheRootPrefix: Option[String] = Option(callCachePathPrefixFromExecutionRoot(executionRootString)) + override lazy val executionRootString: String = + workflowDescriptor.workflowOptions.getOrElse(GcpBatchWorkflowPaths.GcsRootOptionKey, gcpBatchConfiguration.root) + override lazy val callCacheRootPrefix: Option[String] = Option( + callCachePathPrefixFromExecutionRoot(executionRootString) + ) private val workflowOptions: WorkflowOptions = workflowDescriptor.workflowOptions val gcsAuthFilePath: Path = { // The default auth file bucket is always at the root of the root workflow - val defaultBucket = executionRoot.resolve(workflowDescriptor.rootWorkflow.name) - .resolve(workflowDescriptor.rootWorkflowId.toString) + val defaultBucket = executionRoot + .resolve(workflowDescriptor.rootWorkflow.name) + .resolve(workflowDescriptor.rootWorkflowId.toString) val bucket = workflowDescriptor.workflowOptions - .get(GcpBatchWorkflowPaths.AuthFilePathOptionKey) getOrElse defaultBucket - .pathAsString + .get(GcpBatchWorkflowPaths.AuthFilePathOptionKey) getOrElse defaultBucket.pathAsString /* * This is an "exception". 
The filesystem used here is built from genomicsAuth @@ -63,24 +67,21 @@ case class GcpBatchWorkflowPaths(workflowDescriptor: BackendWorkflowDescriptor, Option(gcpBatchConfiguration.batchAttributes.project) ) - val authBucket = pathBuilderWithGenomicsAuth.build(bucket) recover { - case ex => throw new Exception(s"Invalid gcs auth_bucket path $bucket", ex) + val authBucket = pathBuilderWithGenomicsAuth.build(bucket) recover { case ex => + throw new Exception(s"Invalid gcs auth_bucket path $bucket", ex) } get authBucket.resolve(s"${workflowDescriptor.rootWorkflowId}_auth.json") } - val monitoringScriptPath: Option[Path] = workflowOptions.get(WorkflowOptionKeys.MonitoringScript) - .toOption map { path => - // Fail here if the path exists but can't be built - getPath(path).get + val monitoringScriptPath: Option[Path] = workflowOptions.get(WorkflowOptionKeys.MonitoringScript).toOption map { + path => + // Fail here if the path exists but can't be built + getPath(path).get } - override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): GcpBatchJobPaths = { + override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): GcpBatchJobPaths = new GcpBatchJobPaths(workflowPaths.asInstanceOf[GcpBatchWorkflowPaths], jobKey) - } - override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = this.copy(workflowDescriptor = workflowDescriptor) + override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = + this.copy(workflowDescriptor = workflowDescriptor) override def config: Config = gcpBatchConfiguration.configurationDescriptor.backendConfig } - - - diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpLabel.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpLabel.scala index 49e2941bf59..303b2ec85f2 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpLabel.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpLabel.scala @@ -23,18 +23,16 @@ object GcpLabel { // This function is used to coerce a string into one that meets the requirements for a label submission to Google Pipelines API. 
// See 'labels' in https://cloud.google.com/genomics/reference/rpc/google.genomics.v1alpha2#google.genomics.v1alpha2.RunPipelineArgs - def safeGoogleName(mainText: String, emptyAllowed: Boolean = false): String = { - + def safeGoogleName(mainText: String, emptyAllowed: Boolean = false): String = validateLabelRegex(mainText) match { case Valid(labelText) => labelText case invalid @ _ if mainText.equals("") && emptyAllowed => mainText case invalid @ _ => - def appendSafe(current: String, nextChar: Char): String = { + def appendSafe(current: String, nextChar: Char): String = nextChar match { case c if c.isLetterOrDigit || c == '-' => current + c.toLower case _ => current + '-' } - } val foldResult = mainText.toCharArray.foldLeft("")(appendSafe) @@ -50,51 +48,64 @@ object GcpLabel { if (tooLong) { val middleSeparator = "---" val subSectionLength = (MaxLabelLength - middleSeparator.length) / 2 - validStartAndEnd.substring(0, subSectionLength) + middleSeparator + validStartAndEnd.substring(length - subSectionLength, length) + validStartAndEnd.substring(0, subSectionLength) + middleSeparator + validStartAndEnd.substring( + length - subSectionLength, + length + ) } else { validStartAndEnd } } - } - def validateLabelRegex(s: String): ErrorOr[String] = { + def validateLabelRegex(s: String): ErrorOr[String] = (GoogleLabelRegex.pattern.matcher(s).matches, s.length <= MaxLabelLength) match { case (true, true) => s.validNel - case (false, false) => s"Invalid label field: `$s` did not match regex '$GoogleLabelRegexPattern' and it is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel + case (false, false) => + s"Invalid label field: `$s` did not match regex '$GoogleLabelRegexPattern' and it is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel case (false, _) => s"Invalid label field: `$s` did not match the regex '$GoogleLabelRegexPattern'".invalidNel - case (_, false) => s"Invalid label field: `$s` is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel + case (_, false) => + s"Invalid label field: `$s` is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel } - } - def safeLabels(values: (String, String)*): Seq[GcpLabel] = { - def safeGoogleLabel(kvp: (String, String)): GcpLabel = { + def safeGoogleLabel(kvp: (String, String)): GcpLabel = GcpLabel(safeGoogleName(kvp._1), safeGoogleName(kvp._2, emptyAllowed = true)) - } values.map(safeGoogleLabel) } - def validateLabel(key: String, value: String): ErrorOr[GcpLabel] = { - (validateLabelRegex(key), validateLabelRegex(value)).mapN { (validKey, validValue) => GcpLabel(validKey, validValue) } - } + def validateLabel(key: String, value: String): ErrorOr[GcpLabel] = + (validateLabelRegex(key), validateLabelRegex(value)).mapN { (validKey, validValue) => + GcpLabel(validKey, validValue) + } def fromWorkflowOptions(workflowOptions: WorkflowOptions): Try[Seq[GcpLabel]] = { def extractGoogleLabelsFromJsObject(jsObject: JsObject): Try[Seq[GcpLabel]] = { val asErrorOr = jsObject.fields.toList.traverse { case (key: String, value: JsString) => GcpLabel.validateLabel(key, value.value) - case (key, other) => s"Bad label value type for '$key'. Expected simple string but got $other".invalidNel : ErrorOr[GcpLabel] + case (key, other) => + s"Bad label value type for '$key'. 
Expected simple string but got $other".invalidNel: ErrorOr[GcpLabel] } asErrorOr match { case Valid(value) => Success(value) - case Invalid(errors) => Failure(new AggregatedMessageException("Invalid 'google_labels' in workflow options", errors.toList) with CromwellFatalExceptionMarker with NoStackTrace) + case Invalid(errors) => + Failure( + new AggregatedMessageException("Invalid 'google_labels' in workflow options", errors.toList) + with CromwellFatalExceptionMarker + with NoStackTrace + ) } } workflowOptions.toMap.get("google_labels") match { case Some(obj: JsObject) => extractGoogleLabelsFromJsObject(obj) - case Some(other) => Failure(new Exception(s"Invalid 'google_labels' in workflow options. Must be a simple JSON object mapping string keys to string values. Got $other") with NoStackTrace with CromwellFatalExceptionMarker) + case Some(other) => + Failure( + new Exception( + s"Invalid 'google_labels' in workflow options. Must be a simple JSON object mapping string keys to string values. Got $other" + ) with NoStackTrace with CromwellFatalExceptionMarker + ) case None => Success(Seq.empty) } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/PreviousRetryReasons.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/PreviousRetryReasons.scala index 521dec4ee57..0bd3c1e1535 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/PreviousRetryReasons.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/PreviousRetryReasons.scala @@ -14,9 +14,10 @@ object PreviousRetryReasons { def tryApply(prefetchedKvEntries: Map[String, KvResponse], attemptNumber: Int): ErrorOr[PreviousRetryReasons] = { val validatedPreemptionCount = validatedKvResponse(prefetchedKvEntries.get(preemptionCountKey), preemptionCountKey) - val validatedUnexpectedRetryCount = validatedKvResponse(prefetchedKvEntries.get(unexpectedRetryCountKey), unexpectedRetryCountKey) + val validatedUnexpectedRetryCount = + validatedKvResponse(prefetchedKvEntries.get(unexpectedRetryCountKey), unexpectedRetryCountKey) - (validatedPreemptionCount, validatedUnexpectedRetryCount) mapN { PreviousRetryReasons.apply } + (validatedPreemptionCount, validatedUnexpectedRetryCount) mapN PreviousRetryReasons.apply } def apply(knownPreemptedCount: Int, knownUnexpectedRetryCount: Int, attempt: Int): PreviousRetryReasons = { @@ -35,10 +36,9 @@ object PreviousRetryReasons { case None => s"Programmer Error: Engine made no effort to prefetch $fromKey".invalidNel } - private def validatedInt(s: String, fromKey: String): ErrorOr[Int] = { + private def validatedInt(s: String, fromKey: String): ErrorOr[Int] = Try(s.toInt) match { case Success(i) => i.validNel case Failure(_) => s"Unexpected value found in the KV store: $fromKey='$s'".invalidNel } - } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/Run.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/Run.scala index 418d8ee2a35..5b475196906 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/Run.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/Run.scala @@ -2,4 +2,4 @@ package cromwell.backend.google.batch.models import cromwell.backend.standard.StandardAsyncJob -case class Run(job: StandardAsyncJob) \ No newline at end of file +case class Run(job: StandardAsyncJob) diff 
--git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/RunStatus.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/RunStatus.scala index f5631ef6afa..334dca4b0f2 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/RunStatus.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/RunStatus.scala @@ -8,55 +8,54 @@ sealed trait RunStatus object RunStatus { - private val log: Logger = LoggerFactory.getLogger(RunStatus.toString) - - def fromJobStatus(status: JobStatus.State): RunStatus = status match { - case JobStatus.State.QUEUED => - log.info("job queued") - Running - case JobStatus.State.SCHEDULED => - log.info("job scheduled") - Running - case JobStatus.State.RUNNING => - log.info("job running") - Running - case JobStatus.State.SUCCEEDED => - log.info("job scheduled") - Succeeded(List(ExecutionEvent("complete in GCP Batch"))) //update to more specific - case JobStatus.State.FAILED => - log.info("job failed") - Failed(List.empty) - case JobStatus.State.DELETION_IN_PROGRESS => - log.info("deletion in progress") - DeletionInProgress - case JobStatus.State.STATE_UNSPECIFIED => - log.info("state unspecified") - StateUnspecified - case JobStatus.State.UNRECOGNIZED => - log.info("state unrecognized") - Unrecognized - case _ => - log.info(s"job status not matched: $status") - Running - } - - - sealed trait TerminalRunStatus extends RunStatus { - def eventList: Seq[ExecutionEvent] - } - - sealed trait UnsuccessfulRunStatus extends TerminalRunStatus - - case object Running extends RunStatus - case object DeletionInProgress extends RunStatus - case object StateUnspecified extends RunStatus - case object Unrecognized extends RunStatus - - case class Succeeded(override val eventList: Seq[ExecutionEvent]) extends TerminalRunStatus { - override def toString = "Succeeded" - } - - final case class Failed(override val eventList: Seq[ExecutionEvent]) extends UnsuccessfulRunStatus { - override def toString = "Failed" - } + private val log: Logger = LoggerFactory.getLogger(RunStatus.toString) + + def fromJobStatus(status: JobStatus.State): RunStatus = status match { + case JobStatus.State.QUEUED => + log.info("job queued") + Running + case JobStatus.State.SCHEDULED => + log.info("job scheduled") + Running + case JobStatus.State.RUNNING => + log.info("job running") + Running + case JobStatus.State.SUCCEEDED => + log.info("job succeeded") + Succeeded(List(ExecutionEvent("complete in GCP Batch"))) // update to more specific + case JobStatus.State.FAILED => + log.info("job failed") + Failed(List.empty) + case JobStatus.State.DELETION_IN_PROGRESS => + log.info("deletion in progress") + DeletionInProgress + case JobStatus.State.STATE_UNSPECIFIED => + log.info("state unspecified") + StateUnspecified + case JobStatus.State.UNRECOGNIZED => + log.info("state unrecognized") + Unrecognized + case _ => + log.info(s"job status not matched: $status") + Running + } + + sealed trait TerminalRunStatus extends RunStatus { + def eventList: Seq[ExecutionEvent] + } + + sealed trait UnsuccessfulRunStatus extends TerminalRunStatus + + case object Running extends RunStatus + case object DeletionInProgress extends RunStatus + case object StateUnspecified extends RunStatus + case object Unrecognized extends RunStatus + + case class Succeeded(override val eventList: Seq[ExecutionEvent]) extends TerminalRunStatus { + override def toString = "Succeeded" + } + + final case class 
Failed(override val eventList: Seq[ExecutionEvent]) extends UnsuccessfulRunStatus { + override def toString = "Failed" + } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/VpcAndSubnetworkProjectLabelValues.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/VpcAndSubnetworkProjectLabelValues.scala index a77bcd96d87..1dd4ce4bda7 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/VpcAndSubnetworkProjectLabelValues.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/VpcAndSubnetworkProjectLabelValues.scala @@ -3,6 +3,7 @@ package cromwell.backend.google.batch.models import cromwell.backend.google.batch.models.VpcAndSubnetworkProjectLabelValues._ final case class VpcAndSubnetworkProjectLabelValues(vpcName: String, subnetNameOpt: Option[String]) { + /** * Returns a qualified network name replacing the string `\${projectId}` in the network name if found. */ @@ -20,9 +21,8 @@ final case class VpcAndSubnetworkProjectLabelValues(vpcName: String, subnetNameO /** * Replaces the string `\${projectId}` in the subnet name if found. */ - def subnetNameOption(projectId: String): Option[String] = { + def subnetNameOption(projectId: String): Option[String] = subnetNameOpt map { _.replace(ProjectIdToken, projectId) } - } } object VpcAndSubnetworkProjectLabelValues { diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/CheckpointingConfiguration.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/CheckpointingConfiguration.scala index ab04a4a71a9..d706168d96e 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/CheckpointingConfiguration.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/CheckpointingConfiguration.scala @@ -10,18 +10,21 @@ final class CheckpointingConfiguration(jobDescriptor: BackendJobDescriptor, workflowPaths: WorkflowPaths, commandDirectory: Path, checkpointInterval: FiniteDuration - ) { - def checkpointFileCloud(checkpointFileName: String): String = { +) { + def checkpointFileCloud(checkpointFileName: String): String = // The checkpoint file for ANY attempt always goes in the "attempt 1" directory. That way we guarantee that // every attempt is able to recover from the single source of checkpointing truth. 
- workflowPaths.toJobPaths(jobDescriptor.key.copy(attempt = 1), jobDescriptor.workflowDescriptor) - .callExecutionRoot.resolve("__checkpointing").resolve(checkpointFileName).toAbsolutePath.pathAsString - } + workflowPaths + .toJobPaths(jobDescriptor.key.copy(attempt = 1), jobDescriptor.workflowDescriptor) + .callExecutionRoot + .resolve("__checkpointing") + .resolve(checkpointFileName) + .toAbsolutePath + .pathAsString def tmpCheckpointFileCloud(checkpointFileName: String): String = checkpointFileCloud(checkpointFileName) + "-tmp" - def checkpointFileLocal(checkpointFileName: String): String = { + def checkpointFileLocal(checkpointFileName: String): String = commandDirectory.resolve(checkpointFileName).toAbsolutePath.pathAsString - } def tmpCheckpointFileLocal(checkpointFileName: String): String = checkpointFileLocal(checkpointFileName) + "-tmp" def localizePreviousCheckpointCommand(checkpointFileName: String): String = { diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/Env.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/Env.scala index df0cb3b070c..396334d7de0 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/Env.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/Env.scala @@ -3,6 +3,7 @@ package cromwell.backend.google.batch.monitoring import cromwell.backend.BackendJobDescriptor object Env { + /** * Name of an environmental variable */ @@ -12,13 +13,12 @@ object Env { val TaskCallAttempt = "TASK_CALL_ATTEMPT" val DiskMounts = "DISK_MOUNTS" - def monitoringImageEnvironment(jobDescriptor: BackendJobDescriptor) - (mountPaths: List[String]): Map[String, String] = + def monitoringImageEnvironment(jobDescriptor: BackendJobDescriptor)(mountPaths: List[String]): Map[String, String] = Map( Env.WorkflowId -> jobDescriptor.workflowDescriptor.id.toString, Env.TaskCallName -> jobDescriptor.taskCall.localName, Env.TaskCallIndex -> jobDescriptor.key.index.map(_.toString).getOrElse("NA"), Env.TaskCallAttempt -> jobDescriptor.key.attempt.toString, - Env.DiskMounts -> mountPaths.mkString(" "), + Env.DiskMounts -> mountPaths.mkString(" ") ) } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/MonitoringImage.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/MonitoringImage.scala index 578081dc46f..81e17d4d135 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/MonitoringImage.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/monitoring/MonitoringImage.scala @@ -13,8 +13,8 @@ final class MonitoringImage(jobDescriptor: BackendJobDescriptor, workflowPaths: WorkflowPaths, commandDirectory: Path, workingDisk: GcpBatchAttachedDisk, - localMonitoringImageScriptPath: Path, - ) { + localMonitoringImageScriptPath: Path +) { val monitoringImageOption: Option[String] = workflowOptions.get(WorkflowOptionKeys.MonitoringImage).toOption @@ -24,22 +24,21 @@ final class MonitoringImage(jobDescriptor: BackendJobDescriptor, for { _ <- monitoringImageOption // Only use the monitoring_image_script when monitoring_image provided monitoringImageScript <- workflowOptions.get(WorkflowOptionKeys.MonitoringImageScript).toOption - } yield { - PathFactory.buildPath( - monitoringImageScript, - workflowPaths.pathBuilders, - ) - } + } yield PathFactory.buildPath( + 
monitoringImageScript, + workflowPaths.pathBuilders + ) val monitoringImageCommand: List[String] = monitoringImageScriptOption match { - case Some(_) => List( - "/bin/sh", - "-c", - s"cd '${commandDirectory.pathAsString}' && " + - s"chmod +x '${monitoringImageScriptContainerPath.pathAsString}' && " + - s"'${monitoringImageScriptContainerPath.pathAsString}'" - ) + case Some(_) => + List( + "/bin/sh", + "-c", + s"cd '${commandDirectory.pathAsString}' && " + + s"chmod +x '${monitoringImageScriptContainerPath.pathAsString}' && " + + s"'${monitoringImageScriptContainerPath.pathAsString}'" + ) case None => Nil } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/CheckpointingRunnable.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/CheckpointingRunnable.scala index 5ad8d2b21f9..081cbc1ee52 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/CheckpointingRunnable.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/CheckpointingRunnable.scala @@ -4,45 +4,65 @@ import com.google.cloud.batch.v1.{Runnable, Volume} import cromwell.backend.google.batch.api.GcpBatchRequestFactory.CreateBatchJobParameters trait CheckpointingRunnable { - def checkpointingSetupRunnables(createParameters: CreateBatchJobParameters, - volumes: List[Volume] - ): List[Runnable] = { - val result = createParameters.runtimeAttributes.checkpointFilename.map { checkpointFilename => - val checkpointingImage = RunnableUtils.CloudSdkImage - val checkpointingCommand = createParameters.checkpointingConfiguration.checkpointingCommand(checkpointFilename, RunnableCommands.multiLineBinBashCommand) - val checkpointingEnvironment = Map.empty[String, String] - - // Initial sync from cloud: - val initialCheckpointSyncRunnable = RunnableBuilder.cloudSdkShellRunnable( - createParameters.checkpointingConfiguration.localizePreviousCheckpointCommand(checkpointFilename) - )(volumes = volumes, flags = List.empty, labels = Map.empty) - val describeInitialCheckpointingSyncRunnable = RunnableBuilder.describeDocker("initial checkpointing sync", initialCheckpointSyncRunnable) - - // Background upload runnable: - val backgroundCheckpointingRunnable = RunnableBuilder.backgroundRunnable( - image = checkpointingImage, - command = checkpointingCommand, - environment = checkpointingEnvironment, - volumes = volumes - ) - val describeBackgroundCheckpointingRunnable = RunnableBuilder.describeDocker("begin checkpointing background runnable", backgroundCheckpointingRunnable) - - List(describeInitialCheckpointingSyncRunnable, initialCheckpointSyncRunnable, describeBackgroundCheckpointingRunnable, backgroundCheckpointingRunnable) - }.getOrElse(Nil) + def checkpointingSetupRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume]): List[Runnable] = { + val result = createParameters.runtimeAttributes.checkpointFilename + .map { checkpointFilename => + val checkpointingImage = RunnableUtils.CloudSdkImage + val checkpointingCommand = + createParameters.checkpointingConfiguration.checkpointingCommand(checkpointFilename, + RunnableCommands.multiLineBinBashCommand + ) + val checkpointingEnvironment = Map.empty[String, String] + + // Initial sync from cloud: + val initialCheckpointSyncRunnable = RunnableBuilder.cloudSdkShellRunnable( + createParameters.checkpointingConfiguration.localizePreviousCheckpointCommand(checkpointFilename) + )(volumes = volumes, flags = List.empty, labels 
= Map.empty) + val describeInitialCheckpointingSyncRunnable = + RunnableBuilder.describeDocker("initial checkpointing sync", initialCheckpointSyncRunnable) + + // Background upload runnable: + val backgroundCheckpointingRunnable = RunnableBuilder.backgroundRunnable( + image = checkpointingImage, + command = checkpointingCommand, + environment = checkpointingEnvironment, + volumes = volumes + ) + val describeBackgroundCheckpointingRunnable = + RunnableBuilder.describeDocker("begin checkpointing background runnable", backgroundCheckpointingRunnable) + + List(describeInitialCheckpointingSyncRunnable, + initialCheckpointSyncRunnable, + describeBackgroundCheckpointingRunnable, + backgroundCheckpointingRunnable + ) + } + .getOrElse(Nil) result.map(_.build) } - def checkpointingShutdownRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume]): List[Runnable] = { - val result = createParameters.runtimeAttributes.checkpointFilename.map { checkpointFilename => - val terminationRunnable = RunnableBuilder.terminateBackgroundRunnablesRunnable() - val describeTerminationRunnable = RunnableBuilder.describeDocker("terminate checkpointing runnable", terminationRunnable) + def checkpointingShutdownRunnables(createParameters: CreateBatchJobParameters, + volumes: List[Volume] + ): List[Runnable] = { + val result = createParameters.runtimeAttributes.checkpointFilename + .map { checkpointFilename => + val terminationRunnable = RunnableBuilder.terminateBackgroundRunnablesRunnable() + val describeTerminationRunnable = + RunnableBuilder.describeDocker("terminate checkpointing runnable", terminationRunnable) - val deleteCheckpointRunnable = RunnableBuilder.gcsFileDeletionRunnable(createParameters.checkpointingConfiguration.checkpointFileCloud(checkpointFilename), volumes) - val deleteTmpCheckpointRunnable = RunnableBuilder.gcsFileDeletionRunnable(createParameters.checkpointingConfiguration.tmpCheckpointFileCloud(checkpointFilename), volumes) + val deleteCheckpointRunnable = RunnableBuilder.gcsFileDeletionRunnable( + createParameters.checkpointingConfiguration.checkpointFileCloud(checkpointFilename), + volumes + ) + val deleteTmpCheckpointRunnable = RunnableBuilder.gcsFileDeletionRunnable( + createParameters.checkpointingConfiguration.tmpCheckpointFileCloud(checkpointFilename), + volumes + ) - List(describeTerminationRunnable, terminationRunnable, deleteCheckpointRunnable, deleteTmpCheckpointRunnable) - }.getOrElse(Nil) + List(describeTerminationRunnable, terminationRunnable, deleteCheckpointRunnable, deleteTmpCheckpointRunnable) + } + .getOrElse(Nil) result.map(_.build) } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Delocalization.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Delocalization.scala index f1b28598a28..4e1ab74104b 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Delocalization.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Delocalization.scala @@ -21,8 +21,9 @@ trait Delocalization { import RunnableUtils._ private def runtimeOutputExtractorRunnable(containerCallRoot: String, - outputFile: String, - womOutputRuntimeExtractor: WomOutputRuntimeExtractor): Runnable.Builder = { + outputFile: String, + womOutputRuntimeExtractor: WomOutputRuntimeExtractor + ): Runnable.Builder = { val commands = List( "-c", // Create the directory where the fofn will be written @@ -39,7 +40,9 @@ trait 
Delocalization { .withLabels(Map(Key.Tag -> Value.Delocalization)) } - private def delocalizeRuntimeOutputsScript(fofnPath: String, workflowRoot: Path, cloudCallRoot: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration) = { + private def delocalizeRuntimeOutputsScript(fofnPath: String, workflowRoot: Path, cloudCallRoot: Path)(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ) = { val gsutilCommand: String => String = { flag => s"""rm -f $$HOME/.config/gcloud/gce && gsutil -m $flag cp -r $$line "${cloudCallRoot.pathAsString.ensureSlashed}$$gcs_path"""" } @@ -70,13 +73,21 @@ trait Delocalization { |fi""".stripMargin } - private def delocalizeRuntimeOutputsRunnable(cloudCallRoot: Path, inputFile: String, workflowRoot: Path, volumes: List[Volume])(implicit gcsTransferConfiguration: GcsTransferConfiguration): Runnable.Builder = { + private def delocalizeRuntimeOutputsRunnable(cloudCallRoot: Path, + inputFile: String, + workflowRoot: Path, + volumes: List[Volume] + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): Runnable.Builder = { val command = multiLineCommand(delocalizeRuntimeOutputsScript(inputFile, workflowRoot, cloudCallRoot)) - RunnableBuilder.cloudSdkShellRunnable(command)(volumes = volumes, labels = Map(Key.Tag -> Value.Delocalization), flags = List.empty) + RunnableBuilder.cloudSdkShellRunnable(command)(volumes = volumes, + labels = Map(Key.Tag -> Value.Delocalization), + flags = List.empty + ) } - def deLocalizeRunnables(createParameters: CreateBatchJobParameters, - volumes: List[Volume])(implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Runnable] = { + def deLocalizeRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Runnable] = { val cloudCallRoot = createParameters.cloudCallRoot val callExecutionContainerRoot = createParameters.commandScriptContainerPath.parent @@ -84,25 +95,37 @@ trait Delocalization { * Ideally temporaryFofnForRuntimeOutputFiles should be somewhere else than the execution directory (we could mount another directory) * However because it runs after everything else there's no risk of polluting the task's results and the random ID ensures we don't overwrite anything */ - val temporaryFofnDirectoryForRuntimeOutputFiles = callExecutionContainerRoot.pathAsString.ensureSlashed + UUID.randomUUID().toString.split("-")(0) + val temporaryFofnDirectoryForRuntimeOutputFiles = + callExecutionContainerRoot.pathAsString.ensureSlashed + UUID.randomUUID().toString.split("-")(0) val temporaryFofnForRuntimeOutputFiles = temporaryFofnDirectoryForRuntimeOutputFiles + "/runtime_output_files.txt" val runtimeExtractionRunnables = createParameters.womOutputRuntimeExtractor.toList flatMap { extractor => - List ( - runtimeOutputExtractorRunnable(callExecutionContainerRoot.pathAsString, temporaryFofnForRuntimeOutputFiles, extractor), - delocalizeRuntimeOutputsRunnable(cloudCallRoot, temporaryFofnForRuntimeOutputFiles, createParameters.cloudWorkflowRoot, volumes) + List( + runtimeOutputExtractorRunnable(callExecutionContainerRoot.pathAsString, + temporaryFofnForRuntimeOutputFiles, + extractor + ), + delocalizeRuntimeOutputsRunnable(cloudCallRoot, + temporaryFofnForRuntimeOutputFiles, + createParameters.cloudWorkflowRoot, + volumes + ) ) } - val gcsDelocalizationContainerPath = createParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) + val gcsDelocalizationContainerPath = 
createParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) val delocalizationLabel = Map(Key.Tag -> Value.Delocalization) - val runGcsDelocalizationScript = cloudSdkShellRunnable( - s"/bin/bash $gcsDelocalizationContainerPath")(volumes = volumes, labels = delocalizationLabel, flags = List.empty) + val runGcsDelocalizationScript = cloudSdkShellRunnable(s"/bin/bash $gcsDelocalizationContainerPath")( + volumes = volumes, + labels = delocalizationLabel, + flags = List.empty + ) val annotatedRunnables: List[Runnable.Builder] = runGcsDelocalizationScript :: createParameters.outputParameters.flatMap(_.toRunnables(volumes)) ++ - runtimeExtractionRunnables + runtimeExtractionRunnables // NOTE: papiv2 delocalizes logs from /google but such logs are not available on batch // See: https://cloud.google.com/life-sciences/docs/reference/rpc/google.cloud.lifesciences.v2beta diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Localization.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Localization.scala index 07fe3e848c4..04ff33c16e8 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Localization.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Localization.scala @@ -17,47 +17,63 @@ trait Localization { import RunnableCommands._ import RunnableLabels._ - def localizeRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Runnable] = { + def localizeRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Runnable] = { val localizationLabel = Map(Key.Tag -> Value.Localization) val gcsTransferLibraryContainerPath = createParameters.commandScriptContainerPath.sibling(GcsTransferLibraryName) - val localizeGcsTransferLibrary = cloudSdkShellRunnable(localizeFile( - cloudPath = createParameters.cloudCallRoot / GcsTransferLibraryName, - containerPath = gcsTransferLibraryContainerPath))(volumes = volumes, labels = localizationLabel, flags = List.empty) + val localizeGcsTransferLibrary = cloudSdkShellRunnable( + localizeFile(cloudPath = createParameters.cloudCallRoot / GcsTransferLibraryName, + containerPath = gcsTransferLibraryContainerPath + ) + )(volumes = volumes, labels = localizationLabel, flags = List.empty) val gcsLocalizationContainerPath = createParameters.commandScriptContainerPath.sibling(GcsLocalizationScriptName) - val localizeGcsLocalizationScript = cloudSdkShellRunnable(localizeFile( - cloudPath = createParameters.cloudCallRoot / GcsLocalizationScriptName, - containerPath = gcsLocalizationContainerPath))(volumes = volumes, labels = localizationLabel, flags = List.empty) - - val gcsDelocalizationContainerPath = createParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) - val localizeGcsDelocalizationScript = cloudSdkShellRunnable(localizeFile( - cloudPath = createParameters.cloudCallRoot / GcsDelocalizationScriptName, - containerPath = gcsDelocalizationContainerPath))(volumes = volumes, labels = localizationLabel, flags = List.empty) - - val runGcsLocalizationScript = cloudSdkShellRunnable( - s"/bin/bash $gcsLocalizationContainerPath")(volumes = volumes, labels = localizationLabel, flags = List.empty) + val localizeGcsLocalizationScript = cloudSdkShellRunnable( + localizeFile(cloudPath = 
createParameters.cloudCallRoot / GcsLocalizationScriptName, + containerPath = gcsLocalizationContainerPath + ) + )(volumes = volumes, labels = localizationLabel, flags = List.empty) + + val gcsDelocalizationContainerPath = + createParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) + val localizeGcsDelocalizationScript = cloudSdkShellRunnable( + localizeFile(cloudPath = createParameters.cloudCallRoot / GcsDelocalizationScriptName, + containerPath = gcsDelocalizationContainerPath + ) + )(volumes = volumes, labels = localizationLabel, flags = List.empty) + + val runGcsLocalizationScript = cloudSdkShellRunnable(s"/bin/bash $gcsLocalizationContainerPath")( + volumes = volumes, + labels = localizationLabel, + flags = List.empty + ) val drsInputs: List[DrsPath] = createParameters.inputOutputParameters.fileInputParameters.collect { case GcpBatchFileInput(_, drsPath: DrsPath, _, _) => drsPath } val drsLocalizationRunnable = if (drsInputs.nonEmpty) { - val drsLocalizationManifestContainerPath = createParameters.commandScriptContainerPath.sibling(DrsLocalizationManifestName) - val localizeDrsLocalizationManifest = cloudSdkShellRunnable(localizeFile( - cloudPath = createParameters.cloudCallRoot / DrsLocalizationManifestName, - containerPath = drsLocalizationManifestContainerPath))(volumes = volumes, labels = localizationLabel, flags = List.empty) + val drsLocalizationManifestContainerPath = + createParameters.commandScriptContainerPath.sibling(DrsLocalizationManifestName) + val localizeDrsLocalizationManifest = cloudSdkShellRunnable( + localizeFile(cloudPath = createParameters.cloudCallRoot / DrsLocalizationManifestName, + containerPath = drsLocalizationManifestContainerPath + ) + )(volumes = volumes, labels = localizationLabel, flags = List.empty) // Requester pays project id is stored on each DrsPath, but will be the same for all DRS inputs to a // particular workflow because it's determined by the Google project set in workflow options. val requesterPaysProjectId: Option[String] = drsInputs.flatMap(_.requesterPaysProjectIdOption).headOption - val runDrsLocalization = Localization.drsRunnable(drsLocalizationManifestContainerPath, localizationLabel, requesterPaysProjectId) + val runDrsLocalization = + Localization.drsRunnable(drsLocalizationManifestContainerPath, localizationLabel, requesterPaysProjectId) List(localizeDrsLocalizationManifest, runDrsLocalization) } else List[Runnable.Builder]() // Any "classic" PAPI v2 one-at-a-time localizations for non-GCS inputs. 
- val singletonLocalizations = createParameters.inputOutputParameters.fileInputParameters.flatMap(_.toRunnables(volumes)) + val singletonLocalizations = + createParameters.inputOutputParameters.fileInputParameters.flatMap(_.toRunnables(volumes)) val localizations = localizeGcsTransferLibrary :: @@ -79,7 +95,7 @@ object Localization { def drsRunnable(manifestPath: Path, labels: Map[String, String], requesterPaysProjectId: Option[String] - ): Runnable.Builder = { + ): Runnable.Builder = { import RunnableBuilder.EnhancedRunnableBuilder val drsResolverConfig = config.getConfig("filesystems.drs.global.config.resolver") @@ -98,4 +114,4 @@ object Localization { .withEnvironment(drsResolverEnv) .withLabels(labels) } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MemoryRetryCheckRunnable.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MemoryRetryCheckRunnable.scala index 3c728b21775..3ab2a64fddc 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MemoryRetryCheckRunnable.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MemoryRetryCheckRunnable.scala @@ -5,10 +5,9 @@ import cromwell.backend.google.batch.api.GcpBatchRequestFactory.CreateBatchJobPa trait MemoryRetryCheckRunnable { - def checkForMemoryRetryRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume]): List[Runnable] = { + def checkForMemoryRetryRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume]): List[Runnable] = createParameters.retryWithMoreMemoryKeys match { case Some(keys) => List(RunnableBuilder.checkForMemoryRetryRunnable(keys, volumes)).map(_.build) case None => List.empty[Runnable] } - } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MonitoringRunnable.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MonitoringRunnable.scala index b48918610b2..d1fc27b4c89 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MonitoringRunnable.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/MonitoringRunnable.scala @@ -5,9 +5,9 @@ import cromwell.backend.google.batch.api.GcpBatchRequestFactory.CreateBatchJobPa import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.GcsTransferConfiguration trait MonitoringRunnable { - def monitoringSetupRunnables(createParameters: CreateBatchJobParameters, - volumes: List[Volume] - )(implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Runnable] = { + def monitoringSetupRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Runnable] = { val monitoringImageScriptRunnables = createParameters.monitoringImage.monitoringImageScriptOption match { @@ -21,7 +21,7 @@ trait MonitoringRunnable { val describeLocalizeScriptRunnable = RunnableBuilder.describeDocker( "localizing monitoring image script runnable", - localizeScriptRunnable, + localizeScriptRunnable ) List(describeLocalizeScriptRunnable, localizeScriptRunnable) case None => Nil @@ -30,7 +30,6 @@ trait MonitoringRunnable { val monitoringImageRunnables = createParameters.monitoringImage.monitoringImageOption match { case Some(image) => - val monitoringImage = image val monitoringImageCommand 
= createParameters.monitoringImage.monitoringImageCommand val monitoringImageEnvironment = createParameters.monitoringImage.monitoringImageEnvironment @@ -52,15 +51,15 @@ trait MonitoringRunnable { (monitoringImageScriptRunnables ++ monitoringImageRunnables).map(_.build) } - def monitoringShutdownRunnables(createParameters: CreateBatchJobParameters): List[Runnable] = { + def monitoringShutdownRunnables(createParameters: CreateBatchJobParameters): List[Runnable] = createParameters.monitoringImage.monitoringImageOption match { case Some(_) => val terminationRunnable = RunnableBuilder.terminateBackgroundRunnablesRunnable() - val describeTerminationRunnable = RunnableBuilder.describeDocker("terminate monitoring runnable", terminationRunnable) + val describeTerminationRunnable = + RunnableBuilder.describeDocker("terminate monitoring runnable", terminationRunnable) List(describeTerminationRunnable, terminationRunnable).map(_.build) case None => Nil } - } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala index 7f35528fad3..d69502295cd 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableBuilder.scala @@ -1,7 +1,7 @@ package cromwell.backend.google.batch.runnable import com.google.cloud.batch.v1.Runnable.Container -import com.google.cloud.batch.v1.{Environment,Runnable, Volume} +import com.google.cloud.batch.v1.{Environment, Runnable, Volume} import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.GcsTransferConfiguration import cromwell.backend.google.batch.models.{BatchParameter, GcpBatchInput, GcpBatchOutput} import cromwell.core.path.Path @@ -19,6 +19,7 @@ object RunnableBuilder { import RunnableUtils._ implicit class EnhancedRunnableBuilder(val builder: Runnable.Builder) extends AnyVal { + /** * Only for use with docker images KNOWN to not have entrypoints already set, * or used with accompanying call to setEntrypoint("non-empty-string"). 
@@ -31,31 +32,30 @@ object RunnableBuilder { builder.setContainer(container) } - def withEntrypointCommand(command: String*): Runnable.Builder = { + def withEntrypointCommand(command: String*): Runnable.Builder = builder .setContainer( builder.getContainerBuilder - .setEntrypoint(command.headOption.getOrElse("")) //set to blank string instead of null because batch does not support null + .setEntrypoint( + command.headOption.getOrElse("") + ) // set to blank string instead of null because batch does not support null .addAllCommands( command.drop(1).asJava ) ) - } - def withFlags(flags: List[RunnableFlag]): Runnable.Builder = { + def withFlags(flags: List[RunnableFlag]): Runnable.Builder = flags.foldLeft(builder) { case (acc, RunnableFlag.IgnoreExitStatus) => acc.setIgnoreExitStatus(true) case (acc, RunnableFlag.RunInBackground) => acc.setBackground(true) case (acc, RunnableFlag.AlwaysRun) => acc.setAlwaysRun(true) } - } def withEnvironment(environment: Map[String, String]): Runnable.Builder = { val env = Environment.newBuilder.putAllVariables(environment.asJava) builder.setEnvironment(env) } - def withVolumes(volumes: List[Volume]): Runnable.Builder = { val formattedVolumes = volumes.map { volume => val mountPath = volume.getMountPath @@ -98,17 +98,17 @@ object RunnableBuilder { } } - def withImage(image: String): Runnable.Builder = { - Runnable.newBuilder() + def withImage(image: String): Runnable.Builder = + Runnable + .newBuilder() .setContainer(Container.newBuilder.setImageUri(image)) - } - private def cloudSdkContainerBuilder: Container.Builder = { + private def cloudSdkContainerBuilder: Container.Builder = Container.newBuilder.setImageUri(CloudSdkImage) - } - def monitoringImageScriptRunnable(cloudPath: Path, containerPath: Path, volumes: List[Volume]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): Runnable.Builder = { + def monitoringImageScriptRunnable(cloudPath: Path, containerPath: Path, volumes: List[Volume])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): Runnable.Builder = { val command = RunnableCommands.localizeFile(cloudPath, containerPath) val labels = Map(Key.Tag -> Value.Localization) cloudSdkShellRunnable(command)(volumes = volumes, flags = List.empty, labels = labels) @@ -118,7 +118,7 @@ object RunnableBuilder { command: List[String], environment: Map[String, String], volumes: List[Volume] - ): Runnable.Builder = { + ): Runnable.Builder = withImage(image) .withEntrypointCommand(command: _*) .withRunInBackground(true) @@ -126,18 +126,15 @@ object RunnableBuilder { .withEnvironment(environment) .withFlags(List(RunnableFlag.RunInBackground, RunnableFlag.IgnoreExitStatus)) .withLabels(Map(Key.Tag -> Value.Monitoring)) - } - - def terminateBackgroundRunnablesRunnable(): Runnable.Builder = { + def terminateBackgroundRunnablesRunnable(): Runnable.Builder = cloudSdkShellRunnable(terminateAllBackgroundRunnablesCommand)( volumes = List.empty, flags = List(RunnableFlag.AlwaysRun), labels = Map(Key.Tag -> Value.Monitoring) ) - } - def gcsFileDeletionRunnable(cloudPath: String, volumes: List[Volume]): Runnable.Builder = { + def gcsFileDeletionRunnable(cloudPath: String, volumes: List[Volume]): Runnable.Builder = cloudSdkShellRunnable( s"""gsutil rm '$cloudPath'""" )( @@ -145,13 +142,13 @@ object RunnableBuilder { flags = List(RunnableFlag.IgnoreExitStatus), labels = Map(Key.Tag -> Value.Monitoring) ) - } def userRunnable(docker: String, scriptContainerPath: String, jobShell: String, volumes: List[Volume], - dockerhubCredentials: (String, 
String)): Runnable.Builder = { + dockerhubCredentials: (String, String) + ): Runnable.Builder = { val container = (dockerhubCredentials._1, dockerhubCredentials._2) match { case (username, password) if username.nonEmpty && password.nonEmpty => @@ -167,52 +164,57 @@ object RunnableBuilder { .setEntrypoint(jobShell) .addCommands(scriptContainerPath) } - Runnable.newBuilder() + Runnable + .newBuilder() .setContainer(container) .withVolumes(volumes) .putLabels(Key.Tag, Value.UserRunnable) } - def checkForMemoryRetryRunnable(retryLookupKeys: List[String], volumes: List[Volume]): Runnable.Builder = { + def checkForMemoryRetryRunnable(retryLookupKeys: List[String], volumes: List[Volume]): Runnable.Builder = cloudSdkShellRunnable(RunnableCommands.checkIfStderrContainsRetryKeys(retryLookupKeys))( volumes = volumes, flags = List(RunnableFlag.AlwaysRun), labels = Map(Key.Tag -> Value.RetryWithMoreMemory) ).withAlwaysRun(true) - } // Creates a Runnable that logs the docker command for the passed in runnable. - def describeDocker(description: String, runnable: Runnable.Builder): Runnable.Builder = { + def describeDocker(description: String, runnable: Runnable.Builder): Runnable.Builder = logTimestampedRunnable( s"Running $description: ${toDockerRun(runnable)}", List.empty, List.empty, runnable.scalaLabels ) - } private def timestampedMessage(message: String): String = s"""printf '%s %s\\n' "$$(date -u '+%Y/%m/%d %H:%M:%S')" ${shellEscaped(message)}""" - private def logTimestampedRunnable(message: String, volumes: List[Volume], flags: List[RunnableFlag], runnableLabels: Map[String, String]): Runnable.Builder = { + private def logTimestampedRunnable(message: String, + volumes: List[Volume], + flags: List[RunnableFlag], + runnableLabels: Map[String, String] + ): Runnable.Builder = // Uses the cloudSdk image as that image will be used for other operations as well. 
cloudSdkShellRunnable( timestampedMessage(message) - )(volumes, flags, labels = runnableLabels collect { - case (key, value) if key == Key.Tag => Key.Logging -> value - case (key, value) => key -> value - }).withTimeout(timeout = 300.seconds) - } + )(volumes, + flags, + labels = runnableLabels collect { + case (key, value) if key == Key.Tag => Key.Logging -> value + case (key, value) => key -> value + } + ).withTimeout(timeout = 300.seconds) def cloudSdkRunnable: Runnable.Builder = Runnable.newBuilder.setContainer(cloudSdkContainerBuilder) - def cloudSdkShellRunnable(shellCommand: String)( - volumes: List[Volume], - flags: List[RunnableFlag], - labels: Map[String, String], - timeout: Duration = Duration.Inf): Runnable.Builder = { - - Runnable.newBuilder.setContainer(cloudSdkContainerBuilder) + def cloudSdkShellRunnable(shellCommand: String)(volumes: List[Volume], + flags: List[RunnableFlag], + labels: Map[String, String], + timeout: Duration = Duration.Inf + ): Runnable.Builder = + Runnable.newBuilder + .setContainer(cloudSdkContainerBuilder) .withVolumes(volumes) .withLabels(labels) .withEntrypointCommand( @@ -222,19 +224,17 @@ object RunnableBuilder { ) .withFlags(flags) .withTimeout(timeout) - } - def annotateTimestampedRunnable(description: String, - loggingLabelValue: String, - isAlwaysRun: Boolean = false)( - volumes: List[Volume], - runnables: List[Runnable.Builder]): List[Runnable.Builder] = { - - val flags = if (isAlwaysRun) List(RunnableFlag.AlwaysRun) else List() - val labels = Map(Key.Logging -> loggingLabelValue) - val starting = logTimestampedRunnable(s"Starting $description.", volumes, flags, labels) - val done = logTimestampedRunnable(s"Done $description.", volumes, flags, labels) - List(starting) ++ runnables ++ List(done) + def annotateTimestampedRunnable(description: String, + loggingLabelValue: String, + isAlwaysRun: Boolean = false + )(volumes: List[Volume], runnables: List[Runnable.Builder]): List[Runnable.Builder] = { + + val flags = if (isAlwaysRun) List(RunnableFlag.AlwaysRun) else List() + val labels = Map(Key.Logging -> loggingLabelValue) + val starting = logTimestampedRunnable(s"Starting $description.", volumes, flags, labels) + val done = logTimestampedRunnable(s"Done $description.", volumes, flags, labels) + List(starting) ++ runnables ++ List(done) } /** @@ -243,7 +243,7 @@ object RunnableBuilder { * @param parameter Input or output parameter to label. * @return The labels. */ - def parameterLabels(parameter: BatchParameter): Map[String, String] = { + def parameterLabels(parameter: BatchParameter): Map[String, String] = parameter match { case _: GcpBatchInput => Map( @@ -256,33 +256,31 @@ object RunnableBuilder { Key.OutputName -> parameter.name ) } - } /** Creates a Runnable that describes the parameter localization or delocalization. 
*/ - def describeParameter(parameter: BatchParameter, volumes: List[Volume], labels: Map[String, String]): Runnable.Builder = { + def describeParameter(parameter: BatchParameter, + volumes: List[Volume], + labels: Map[String, String] + ): Runnable.Builder = parameter match { case _: GcpBatchInput => val message = "Localizing input %s -> %s".format( shellEscaped(parameter.cloudPath), - shellEscaped(parameter.containerPath), + shellEscaped(parameter.containerPath) ) logTimestampedRunnable(message, volumes, List.empty, labels) case _: GcpBatchOutput => val message = "Delocalizing output %s -> %s".format( shellEscaped(parameter.containerPath), - shellEscaped(parameter.cloudPath), + shellEscaped(parameter.cloudPath) ) logTimestampedRunnable(message, volumes, List(RunnableFlag.AlwaysRun), labels) } - } // Converts an Runnable to a `docker run ...` command runnable in the shell. private[runnable] def toDockerRun(runnable: Runnable.Builder): String = { - runnable.getContainer - .getCommandsList - .asScala - .toList - .map { cmd => shellEscaped(cmd) } + runnable.getContainer.getCommandsList.asScala.toList + .map(cmd => shellEscaped(cmd)) .mkString(" ") val commandArgs: String = Option(runnable.getContainerBuilder.getCommandsList) match { @@ -313,11 +311,6 @@ object RunnableBuilder { } mkString "" } - List("docker run", - mountArgs, - entrypointArg, - imageArg, - commandArgs, - ).mkString + List("docker run", mountArgs, entrypointArg, imageArg, commandArgs).mkString } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableCommands.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableCommands.scala index 0bbd1c50d53..8c7c024691e 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableCommands.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableCommands.scala @@ -32,19 +32,20 @@ object RunnableCommands { def escape: String = StringEscapeUtils.escapeXSI(path.pathAsString) } - private def makeContentTypeFlag(contentType: Option[ContentType]) = contentType.map(ct => s"""-h "Content-Type: $ct"""").getOrElse("") + private def makeContentTypeFlag(contentType: Option[ContentType]) = + contentType.map(ct => s"""-h "Content-Type: $ct"""").getOrElse("") def makeContainerDirectory(containerPath: Path) = s"mkdir -p ${containerPath.escape}" - def delocalizeDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def delocalizeDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"rm -f $$HOME/.config/gcloud/gce && " + s"gsutil $flag ${contentType |> makeContentTypeFlag} -m rsync -r ${containerPath.escape} ${cloudPath.escape}" } } - } /** * As per https://cloud.google.com/storage/docs/gsutil/addlhelp/HowSubdirectoriesWork, rule #2 @@ -58,29 +59,29 @@ object RunnableCommands { * By instead using the parent directory (and ensuring it ends with a slash), gsutil will treat that as a directory and put the file under it. 
* So the final gsutil command will look something like gsutil cp /local/file.txt gs://bucket/subdir/ */ - def delocalizeFile(containerPath: Path, cloudPath: Path, contentType: Option[ContentType]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def delocalizeFile(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"rm -f $$HOME/.config/gcloud/gce && " + s"gsutil $flag ${contentType |> makeContentTypeFlag} cp ${containerPath.escape} ${cloudPath.parent.escape.ensureSlashed}" } } - } /** * delocalizeFile necessarily copies the file to the same name. Use this if you want to specify a name different from the original * Make sure that there's no object named "yourfinalname_something" (see above) in the same cloud directory. */ - def delocalizeFileTo(containerPath: Path, cloudPath: Path, contentType: Option[ContentType]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def delocalizeFileTo(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"rm -f $$HOME/.config/gcloud/gce && " + s"gsutil $flag ${contentType |> makeContentTypeFlag} cp ${containerPath.escape} ${cloudPath.escape}" } } - } def ifExist(containerPath: Path)(f: => String) = s"if [ -e ${containerPath.escape} ]; then $f; fi" @@ -92,7 +93,7 @@ object RunnableCommands { | sleep ${duration.toSeconds} |done""".stripMargin - def retry(f: => String)(implicit gcsTransferConfiguration: GcsTransferConfiguration, wait: FiniteDuration): String = { + def retry(f: => String)(implicit gcsTransferConfiguration: GcsTransferConfiguration, wait: FiniteDuration): String = s"""for i in $$(seq ${gcsTransferConfiguration.transferAttempts}); do | ( | $f @@ -107,35 +108,34 @@ object RunnableCommands { | fi |done |exit "$$RC"""".stripMargin - } - def delocalizeFileOrDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def delocalizeFileOrDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = s"""if [ -d ${containerPath.escape} ]; then | ${delocalizeDirectory(containerPath, cloudPath, contentType)} |else | ${delocalizeFile(containerPath, cloudPath, contentType)} |fi""".stripMargin - } - def localizeDirectory(cloudPath: Path, containerPath: Path) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def localizeDirectory(cloudPath: Path, containerPath: Path)(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"${containerPath |> makeContainerDirectory} && " + s"rm -f $$HOME/.config/gcloud/gce && gsutil $flag -m rsync -r ${cloudPath.escape} ${containerPath.escape}" } } - } - def localizeFile(cloudPath: Path, containerPath: Path) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def localizeFile(cloudPath: Path, containerPath: Path)(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"rm -f $$HOME/.config/gcloud/gce && gsutil $flag cp ${cloudPath.escape} ${containerPath.escape}" } } - } def 
recoverRequesterPaysError(path: Path)(f: String => String): String = { val commandWithoutProject = f("") diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableLabels.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableLabels.scala index 7d9978c42c1..35624215443 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableLabels.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableLabels.scala @@ -2,6 +2,7 @@ package cromwell.backend.google.batch.runnable object RunnableLabels { object Key { + /** * Very short description of the runnable */ diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableUtils.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableUtils.scala index 7df2a4bff79..f60baed0608 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableUtils.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableUtils.scala @@ -6,6 +6,7 @@ import net.ceedubs.ficus.Ficus._ import org.apache.commons.text.StringEscapeUtils object RunnableUtils { + /** Image to use for ssh access. */ val sshImage = "gcr.io/cloud-genomics-pipelines/tools" @@ -32,7 +33,7 @@ object RunnableUtils { * When updating this value, also consider updating the CromwellImagesSizeRoundedUpInGB below. */ val CloudSdkImage: String = - //config.getOrElse("cloud-sdk-image-url", "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine") + // config.getOrElse("cloud-sdk-image-url", "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine") config.getOrElse("cloud-sdk-image-url", "gcr.io/google.com/cloudsdktool/cloud-sdk:434.0.0-alpine") /* * At the moment, cloud-sdk (584MB for 354.0.0-alpine) and stedolan/jq (182MB) decompressed ~= 0.8 GB @@ -55,27 +56,28 @@ object RunnableUtils { private val backgroundRunnableTerminationGraceTime = 10 - val terminateAllBackgroundRunnablesCommand: String = s"kill -TERM -1 && sleep $backgroundRunnableTerminationGraceTime || true" + val terminateAllBackgroundRunnablesCommand: String = + s"kill -TERM -1 && sleep $backgroundRunnableTerminationGraceTime || true" def timestampedMessage(message: String): String = s"""printf '%s %s\\n' "$$(date -u '+%Y/%m/%d %H:%M:%S')" ${shellEscaped(message)}""" /** Start background runnables first, leave the rest as is */ def sortRunnables(containerSetup: List[Runnable], - localization: List[Runnable], - userRunnable: List[Runnable], - memoryRetryRunnable: List[Runnable], - deLocalization: List[Runnable], - monitoringSetup: List[Runnable], - monitoringShutdown: List[Runnable], - checkpointingStart: List[Runnable], - checkpointingShutdown: List[Runnable], - sshAccess: List[Runnable], - isBackground: Runnable => Boolean, - ): List[Runnable] = { + localization: List[Runnable], + userRunnable: List[Runnable], + memoryRetryRunnable: List[Runnable], + deLocalization: List[Runnable], + monitoringSetup: List[Runnable], + monitoringShutdown: List[Runnable], + checkpointingStart: List[Runnable], + checkpointingShutdown: List[Runnable], + sshAccess: List[Runnable], + isBackground: Runnable => Boolean + ): List[Runnable] = { val toBeSortedRunnables = localization ++ userRunnable ++ memoryRetryRunnable ++ deLocalization - val sortedRunnables = toBeSortedRunnables.sortWith { - case (runnable, _) 
=> isBackground(runnable) + val sortedRunnables = toBeSortedRunnables.sortWith { case (runnable, _) => + isBackground(runnable) } sshAccess ++ containerSetup ++ monitoringSetup ++ checkpointingStart ++ sortedRunnables ++ checkpointingShutdown ++ monitoringShutdown diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala index d2d499b3127..cba665dbf9e 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/UserRunnable.scala @@ -3,7 +3,6 @@ package cromwell.backend.google.batch.runnable import com.google.cloud.batch.v1.{Runnable, Volume} import cromwell.backend.google.batch.api.GcpBatchRequestFactory.CreateBatchJobParameters - trait UserRunnable { def userRunnables(createParameters: CreateBatchJobParameters, volumes: List[Volume]): List[Runnable] = { diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchExpressionFunctions.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchExpressionFunctions.scala index 4ad6cc45b61..dcec7ec3799 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchExpressionFunctions.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchExpressionFunctions.scala @@ -10,26 +10,25 @@ import cromwell.filesystems.gcs.GcsPathBuilder import cromwell.filesystems.gcs.GcsPathBuilder.{InvalidGcsPath, PossiblyValidRelativeGcsPath, ValidFullGcsPath} import cromwell.filesystems.gcs.batch.GcsBatchCommandBuilder -class BatchPathFunctions(pathBuilders: PathBuilders, callContext: CallContext) extends CallCorePathFunctionSet(pathBuilders, callContext) { - override def relativeToHostCallRoot(path: String) = { +class BatchPathFunctions(pathBuilders: PathBuilders, callContext: CallContext) + extends CallCorePathFunctionSet(pathBuilders, callContext) { + override def relativeToHostCallRoot(path: String) = GcsPathBuilder.validateGcsPath(path) match { case _: ValidFullGcsPath => path case _ => callContext.root.resolve(path.stripPrefix("file://").stripPrefix("/")).pathAsString } - } } class BatchExpressionFunctions(standardParams: StandardExpressionFunctionsParams) - extends StandardExpressionFunctions(standardParams) { + extends StandardExpressionFunctions(standardParams) { override lazy val ioCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder - override def preMapping(str: String) = { + override def preMapping(str: String) = GcsPathBuilder.validateGcsPath(str) match { case _: ValidFullGcsPath => str case PossiblyValidRelativeGcsPath => callContext.root.resolve(str.stripPrefix("/")).pathAsString case _: InvalidGcsPath => str } - } override lazy val pathFunctions = new BatchPathFunctions(pathBuilders, callContext) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchParameterConversions.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchParameterConversions.scala index ad7c942b4f9..2a230e58d6b 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchParameterConversions.scala +++ 
b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchParameterConversions.scala @@ -12,7 +12,9 @@ import cromwell.filesystems.http.HttpPath import simulacrum.typeclass @typeclass trait ToParameter[A <: BatchParameter] { - def toRunnables(p: A, volumes: List[Volume])(implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Runnable.Builder] + def toRunnables(p: A, volumes: List[Volume])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Runnable.Builder] } trait GcpBatchParameterConversions { @@ -20,16 +22,17 @@ trait GcpBatchParameterConversions { import RunnableCommands._ import RunnableLabels._ - implicit val fileInputToParameter: ToParameter[GcpBatchFileInput] = new ToParameter[GcpBatchFileInput] { - override def toRunnables(fileInput: GcpBatchFileInput, volumes: List[Volume]) - (implicit retryPolicy: GcsTransferConfiguration): List[Runnable.Builder] = { + override def toRunnables(fileInput: GcpBatchFileInput, volumes: List[Volume])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Runnable.Builder] = { val labels = RunnableBuilder.parameterLabels(fileInput) fileInput.cloudPath match { case _: HttpPath => val command = s"curl --silent --create-dirs --output ${fileInput.containerPath} ${fileInput.cloudPath}" - val localizationRunnables = RunnableBuilder.cloudSdkShellRunnable(command)(volumes = volumes, labels = labels, flags = List.empty) + val localizationRunnables = + RunnableBuilder.cloudSdkShellRunnable(command)(volumes = volumes, labels = labels, flags = List.empty) List(RunnableBuilder.describeParameter(fileInput, volumes, labels), localizationRunnables) case _: GcsPath => @@ -44,8 +47,9 @@ trait GcpBatchParameterConversions { implicit val directoryInputToParameter: ToParameter[GcpBatchDirectoryInput] = new ToParameter[GcpBatchDirectoryInput] { - override def toRunnables(directoryInput: GcpBatchDirectoryInput, volumes: List[Volume]) - (implicit retryPolicy: GcsTransferConfiguration): List[Runnable.Builder] = { + override def toRunnables(directoryInput: GcpBatchDirectoryInput, volumes: List[Volume])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Runnable.Builder] = directoryInput.cloudPath match { case _: GcsPath => Nil // GCS paths will be localized with a separate localization script. case _ => @@ -56,19 +60,23 @@ trait GcpBatchParameterConversions { )(volumes = volumes, labels = labels, flags = List.empty) List(describeRunnables, localizationRunnables) } - } } implicit val fileOutputToParameter: ToParameter[GcpBatchFileOutput] = new ToParameter[GcpBatchFileOutput] { - override def toRunnables(fileOutput: GcpBatchFileOutput, volumes: List[Volume]) - (implicit retryPolicy: GcsTransferConfiguration): List[Runnable.Builder] = { + override def toRunnables(fileOutput: GcpBatchFileOutput, volumes: List[Volume])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Runnable.Builder] = { // If the output is a "secondary file", it actually could be a directory but we won't know before runtime. 
// The fileOrDirectory method will generate a command that can cover both cases - lazy val copy = if (fileOutput.secondary) - RunnableCommands.delocalizeFileOrDirectory(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) - else - RunnableCommands.delocalizeFile(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) + lazy val copy = + if (fileOutput.secondary) + RunnableCommands.delocalizeFileOrDirectory(fileOutput.containerPath, + fileOutput.cloudPath, + fileOutput.contentType + ) + else + RunnableCommands.delocalizeFile(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) lazy val copyOnlyIfExists = RunnableCommands.ifExist(fileOutput.containerPath) { copy @@ -97,11 +105,13 @@ trait GcpBatchParameterConversions { case (key, _) if key == Key.Tag => key -> Value.Background case (key, value) => key -> value } - val periodic = RunnableBuilder.cloudSdkShellRunnable( - every(period) { - copyCommand - } - )(volumes = volumes, labels = periodicLabels, flags = List.empty).withRunInBackground(true) + val periodic = RunnableBuilder + .cloudSdkShellRunnable( + every(period) { + copyCommand + } + )(volumes = volumes, labels = periodicLabels, flags = List.empty) + .withRunInBackground(true) finalDelocalizationRunnables :+ periodic @@ -112,34 +122,37 @@ trait GcpBatchParameterConversions { implicit val directoryOutputToParameter: ToParameter[GcpBatchDirectoryOutput] = new ToParameter[GcpBatchDirectoryOutput] { - override def toRunnables(directoryOutput: GcpBatchDirectoryOutput, volumes: List[Volume]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Runnable.Builder] = { + override def toRunnables(directoryOutput: GcpBatchDirectoryOutput, volumes: List[Volume])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Runnable.Builder] = directoryOutput.cloudPath match { case _: GcsPath => Nil // GCS paths will be delocalized with a separate delocalization script. 
case _ =>
           val labels = RunnableBuilder.parameterLabels(directoryOutput)
           val describeRunnable = RunnableBuilder.describeParameter(directoryOutput, volumes, labels)
-          val delocalizationRunnable = RunnableBuilder.cloudSdkShellRunnable(
-            delocalizeDirectory(directoryOutput.containerPath, directoryOutput.cloudPath, None)
-          )(volumes = volumes, labels = labels, flags = List.empty)
+          val delocalizationRunnable = RunnableBuilder
+            .cloudSdkShellRunnable(
+              delocalizeDirectory(directoryOutput.containerPath, directoryOutput.cloudPath, None)
+            )(volumes = volumes, labels = labels, flags = List.empty)
             .withAlwaysRun(true)
 
           List(describeRunnable, delocalizationRunnable)
       }
-    }
   }
 
   implicit val inputToParameter: ToParameter[GcpBatchInput] = new ToParameter[GcpBatchInput] {
-    override def toRunnables(p: GcpBatchInput, volumes: List[Volume])
-                            (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Runnable.Builder] = p match {
+    override def toRunnables(p: GcpBatchInput, volumes: List[Volume])(implicit
+      gcsTransferConfiguration: GcsTransferConfiguration
+    ): List[Runnable.Builder] = p match {
       case fileInput: GcpBatchFileInput => fileInputToParameter.toRunnables(fileInput, volumes)
       case directoryInput: GcpBatchDirectoryInput => directoryInputToParameter.toRunnables(directoryInput, volumes)
     }
   }
 
   implicit val outputToParameter: ToParameter[GcpBatchOutput] = new ToParameter[GcpBatchOutput] {
-    override def toRunnables(p: GcpBatchOutput, volumes: List[Volume])
-                            (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Runnable.Builder] = p match {
+    override def toRunnables(p: GcpBatchOutput, volumes: List[Volume])(implicit
+      gcsTransferConfiguration: GcsTransferConfiguration
+    ): List[Runnable.Builder] = p match {
       case fileOutput: GcpBatchFileOutput => fileOutputToParameter.toRunnables(fileOutput, volumes)
       case directoryOutput: GcpBatchDirectoryOutput => directoryOutputToParameter.toRunnables(directoryOutput, volumes)
     }
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala
index 612c95651e9..d27344c42b9 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/BatchUtilityConversions.scala
@@ -9,14 +9,12 @@ import wom.format.MemorySize
 trait BatchUtilityConversions {
 
   // construct zones string
-  def toZonesPath(zones: Vector[String]): String = {
+  def toZonesPath(zones: Vector[String]): String =
     zones.map(zone => "zones/" + zone).mkString(" ")
-  }
 
   // lowercase text to match gcp label requirements
-  def toLabel(text: String): String = {
+  def toLabel(text: String): String =
     text.toLowerCase
-  }
 
   // creates the Batch run location from the zones entered in runtime attributes. This is needed to place the job on the right network when the user has not defined one. 
def toBatchRunLocation(zones: Vector[String]): String = { @@ -25,14 +23,12 @@ trait BatchUtilityConversions { } // convert cpu cores to millicores that Batch expects - def toCpuCores(cpu: Long): Long = { + def toCpuCores(cpu: Long): Long = cpu * 1000 - } // convert memory to MiB that Batch expects - def toMemMib(memory: MemorySize): Long = { + def toMemMib(memory: MemorySize): Long = (memory.amount * 1024).toLong - } // set Standard or Spot instances def toProvisioningModel(preemption: Int): ProvisioningModel = preemption compare 0 match { @@ -45,39 +41,33 @@ trait BatchUtilityConversions { def toVolumes(disks: Seq[GcpBatchAttachedDisk]): List[Volume] = disks.map(toVolume).toList def toVolume(disk: GcpBatchAttachedDisk): Volume = { - val volume = Volume - .newBuilder + val volume = Volume.newBuilder .setDeviceName(disk.name) .setMountPath(disk.mountPoint.pathAsString) - disk match { case _: GcpBatchReferenceFilesDisk => volume .addMountOptions("async, rw") .build case _ => - volume - .build + volume.build } } private def toDisk(disk: GcpBatchAttachedDisk): AttachedDisk = { - val googleDisk = Disk - .newBuilder + val googleDisk = Disk.newBuilder .setSizeGb(disk.sizeGb.toLong) .setType(toBatchDiskType(disk.diskType)) disk match { case refDisk: GcpBatchReferenceFilesDisk => - googleDisk.setImage(refDisk.image) - .build + googleDisk.setImage(refDisk.image).build case _ => googleDisk.build } - val googleAttachedDisk = AttachedDisk - .newBuilder + val googleAttachedDisk = AttachedDisk.newBuilder .setDeviceName(disk.name) .setNewDisk(googleDisk) .build @@ -91,11 +81,11 @@ trait BatchUtilityConversions { case DiskType.LOCAL => "local-ssd" } - def convertGbToMib(runtimeAttributes: GcpBatchRuntimeAttributes): Long = { + def convertGbToMib(runtimeAttributes: GcpBatchRuntimeAttributes): Long = (runtimeAttributes.bootDiskSize * 953.7).toLong - } // Create accelerators for GPUs - def toAccelerator(gpuResource: GpuResource): Accelerator.Builder = Accelerator.newBuilder.setCount(gpuResource.gpuCount.value.toLong).setType(gpuResource.gpuType.toString) + def toAccelerator(gpuResource: GpuResource): Accelerator.Builder = + Accelerator.newBuilder.setCount(gpuResource.gpuCount.value.toLong).setType(gpuResource.gpuType.toString) } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchDockerCacheMappingOperations.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchDockerCacheMappingOperations.scala index e7ca7a0e6a8..233413b6d4b 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchDockerCacheMappingOperations.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchDockerCacheMappingOperations.scala @@ -20,7 +20,8 @@ trait GcpBatchDockerCacheMappingOperations { private class DockerImageManifestVersionError(message: String) extends RuntimeException(message) with NoStackTrace def generateDockerImageToDiskImageMapping(auth: GoogleAuthMode, - dockerImageCacheManifestFile: ValidFullGcsPath): Map[String, DockerImageCacheEntry] = { + dockerImageCacheManifestFile: ValidFullGcsPath + ): Map[String, DockerImageCacheEntry] = { val gcsClient = StorageOptions .newBuilder() @@ -34,35 +35,42 @@ trait GcpBatchDockerCacheMappingOperations { def getDockerCacheDiskImageForAJob(dockerImageToCacheDiskImageMappingOpt: Option[Map[String, DockerImageCacheEntry]], dockerImageAsSpecifiedByUser: String, dockerImageWithDigest: String, - jobLogger: 
JobLogger): Option[String] = { + jobLogger: JobLogger + ): Option[String] = dockerImageToCacheDiskImageMappingOpt .flatMap(_.get(dockerImageAsSpecifiedByUser)) .filter { cachedDockerImageDigestAndDiskName => val hashStartingPositionInActualDockerImage = dockerImageWithDigest.indexOf('@') if (hashStartingPositionInActualDockerImage != -1) { - val actualDigestOfDesiredDockerImage = dockerImageWithDigest.substring(hashStartingPositionInActualDockerImage + 1) + val actualDigestOfDesiredDockerImage = + dockerImageWithDigest.substring(hashStartingPositionInActualDockerImage + 1) if (cachedDockerImageDigestAndDiskName.dockerImageDigest == actualDigestOfDesiredDockerImage) { true } else { - jobLogger.info(s"Cached Docker image digest mismatch. Requested docker image $dockerImageAsSpecifiedByUser has different digest than " + - s"corresponding cached image located at the ${cachedDockerImageDigestAndDiskName.diskImageName} disk image. " + - s"Digest of requested image is $actualDigestOfDesiredDockerImage, but digest of cached image is ${cachedDockerImageDigestAndDiskName.dockerImageDigest}. " + - s"Docker image cache feature will not be used for this task.") + jobLogger.info( + s"Cached Docker image digest mismatch. Requested docker image $dockerImageAsSpecifiedByUser has different digest than " + + s"corresponding cached image located at the ${cachedDockerImageDigestAndDiskName.diskImageName} disk image. " + + s"Digest of requested image is $actualDigestOfDesiredDockerImage, but digest of cached image is ${cachedDockerImageDigestAndDiskName.dockerImageDigest}. " + + s"Docker image cache feature will not be used for this task." + ) false } } else { - jobLogger.error(s"Programmer error ! Odd docker image name where supposed to be name with digest: $dockerImageWithDigest") + jobLogger.error( + s"Programmer error ! 
Odd docker image name where supposed to be name with digest: $dockerImageWithDigest" + ) false } } .map(_.diskImageName) - } - private[batch] def readDockerImageCacheManifestFileFromGCS(gcsClient: Storage, gcsPath: ValidFullGcsPath): IO[DockerImageCacheManifest] = { - val manifestFileBlobIo = IO { gcsClient.get(BlobId.of(gcsPath.bucket, gcsPath.path.substring(1))) } + private[batch] def readDockerImageCacheManifestFileFromGCS(gcsClient: Storage, + gcsPath: ValidFullGcsPath + ): IO[DockerImageCacheManifest] = { + val manifestFileBlobIo = IO(gcsClient.get(BlobId.of(gcsPath.bucket, gcsPath.path.substring(1)))) manifestFileBlobIo flatMap { manifestFileBlob => - val jsonStringIo = IO { manifestFileBlob.getContent().map(_.toChar).mkString } + val jsonStringIo = IO(manifestFileBlob.getContent().map(_.toChar).mkString) jsonStringIo.flatMap { jsonStr => decode[DockerImageCacheManifest](jsonStr) match { case Left(error) => IO.raiseError(error) @@ -70,12 +78,16 @@ trait GcpBatchDockerCacheMappingOperations { if (parsedManifest.manifestFormatVersion == CURRENT_SUPPORTED_MANIFEST_FORMAT_VERSION) { IO.pure(parsedManifest) } else { - IO.raiseError(new DockerImageManifestVersionError(s"Current supported docker image cache manifest format version " + - s"is $CURRENT_SUPPORTED_MANIFEST_FORMAT_VERSION, but got ${parsedManifest.manifestFormatVersion}")) + IO.raiseError( + new DockerImageManifestVersionError( + s"Current supported docker image cache manifest format version " + + s"is $CURRENT_SUPPORTED_MANIFEST_FORMAT_VERSION, but got ${parsedManifest.manifestFormatVersion}" + ) + ) } } } } } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchExpressionFunctions.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchExpressionFunctions.scala index 2cb2b86b34c..bdf358f1914 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchExpressionFunctions.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchExpressionFunctions.scala @@ -10,26 +10,25 @@ import cromwell.filesystems.gcs.GcsPathBuilder import cromwell.filesystems.gcs.GcsPathBuilder.{InvalidGcsPath, PossiblyValidRelativeGcsPath, ValidFullGcsPath} import cromwell.filesystems.gcs.batch.GcsBatchCommandBuilder -class GcpBatchPathFunctions(pathBuilders: PathBuilders, callContext: CallContext) extends CallCorePathFunctionSet(pathBuilders, callContext) { - override def relativeToHostCallRoot(path: String) = { +class GcpBatchPathFunctions(pathBuilders: PathBuilders, callContext: CallContext) + extends CallCorePathFunctionSet(pathBuilders, callContext) { + override def relativeToHostCallRoot(path: String) = GcsPathBuilder.validateGcsPath(path) match { case _: ValidFullGcsPath => path case _ => callContext.root.resolve(path.stripPrefix("file://").stripPrefix("/")).pathAsString } - } } class GcpBatchExpressionFunctions(standardParams: StandardExpressionFunctionsParams) - extends StandardExpressionFunctions(standardParams) { + extends StandardExpressionFunctions(standardParams) { override lazy val ioCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder - override def preMapping(str: String) = { + override def preMapping(str: String) = GcsPathBuilder.validateGcsPath(str) match { case _: ValidFullGcsPath => str case PossiblyValidRelativeGcsPath => callContext.root.resolve(str.stripPrefix("/")).pathAsString case _: InvalidGcsPath => str } - } override 
lazy val pathFunctions = new GcpBatchPathFunctions(pathBuilders, callContext) diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraints.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraints.scala index 83b4796e039..2bbf835eeef 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraints.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraints.scala @@ -1,6 +1,11 @@ package cromwell.backend.google.batch.util -import cromwell.backend.google.batch.models.{GcpBatchRuntimeAttributes, N1CustomMachineType, N2CustomMachineType, N2DCustomMachineType} +import cromwell.backend.google.batch.models.{ + GcpBatchRuntimeAttributes, + N1CustomMachineType, + N2CustomMachineType, + N2DCustomMachineType +} import eu.timepit.refined.api.Refined import eu.timepit.refined.numeric.Positive import org.slf4j.Logger @@ -12,20 +17,19 @@ object GcpBatchMachineConstraints { cpu: Int Refined Positive, cpuPlatformOption: Option[String], googleLegacyMachineSelection: Boolean, - jobLogger: Logger, - ): String = { + jobLogger: Logger + ): String = if (googleLegacyMachineSelection) { s"predefined-$cpu-${memory.to(MemoryUnit.MB).amount.intValue()}" } else { // If someone requests Intel Cascade Lake as their CPU platform then switch the machine type to n2. - // Similarly, CPU platform of AMD Rome corresponds to the machine type n2d. + // Similarly, CPU platform of AMD Rome corresponds to the machine type n2d. val customMachineType = cpuPlatformOption match { case Some(GcpBatchRuntimeAttributes.CpuPlatformIntelCascadeLakeValue) => N2CustomMachineType - case Some(GcpBatchRuntimeAttributes.CpuPlatformAMDRomeValue) => N2DCustomMachineType + case Some(GcpBatchRuntimeAttributes.CpuPlatformAMDRomeValue) => N2DCustomMachineType case _ => N1CustomMachineType } customMachineType.machineType(memory, cpu, jobLogger) } - } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchReferenceFilesMappingOperations.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchReferenceFilesMappingOperations.scala index f472263f51c..b03347b2e5b 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchReferenceFilesMappingOperations.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GcpBatchReferenceFilesMappingOperations.scala @@ -25,7 +25,8 @@ trait GcpBatchReferenceFilesMappingOperations { * may take a significant amount of time. 
*/ def generateReferenceFilesMapping(auth: GoogleAuthMode, - referenceDiskLocalizationManifests: List[ManifestFile]): Map[String, GcpBatchReferenceFilesDisk] = { + referenceDiskLocalizationManifests: List[ManifestFile] + ): Map[String, GcpBatchReferenceFilesDisk] = { val gcsClient = StorageOptions .newBuilder() .setCredentials(auth.credentials(Set(StorageScopes.DEVSTORAGE_READ_ONLY))) @@ -40,26 +41,31 @@ trait GcpBatchReferenceFilesMappingOperations { } def getReferenceInputsToMountedPathMappings(referenceFileToDiskImageMapping: Map[String, GcpBatchReferenceFilesDisk], - inputFiles: List[GcpBatchInput]): Map[GcpBatchInput, String] = { - val gcsPathsToInputs = inputFiles.collect { case i if i.cloudPath.isInstanceOf[GcsPath] => (i.cloudPath.asInstanceOf[GcsPath].pathAsString, i) }.toMap + inputFiles: List[GcpBatchInput] + ): Map[GcpBatchInput, String] = { + val gcsPathsToInputs = inputFiles.collect { + case i if i.cloudPath.isInstanceOf[GcsPath] => (i.cloudPath.asInstanceOf[GcsPath].pathAsString, i) + }.toMap referenceFileToDiskImageMapping.collect { - case (path, disk) if gcsPathsToInputs.keySet.contains(s"gs://$path") => + case (path, disk) if gcsPathsToInputs.keySet.contains(s"gs://$path") => (gcsPathsToInputs(s"gs://$path"), s"${disk.mountPoint.pathAsString}/$path") } } def getReferenceDisksToMount(referenceFileToDiskImageMapping: Map[String, GcpBatchReferenceFilesDisk], - inputFilePaths: Set[String]): List[GcpBatchReferenceFilesDisk] = { + inputFilePaths: Set[String] + ): List[GcpBatchReferenceFilesDisk] = referenceFileToDiskImageMapping.view.filterKeys(key => inputFilePaths.contains(s"gs://$key")).values.toList.distinct - } - private def getReferenceFileToValidatedGcsPathMap(referenceFiles: Set[ReferenceFile]): IO[Map[ReferenceFile, ValidFullGcsPath]] = { - val filesAndValidatedPaths = referenceFiles.map { - referenceFile => (referenceFile, GcsPathBuilder.validateGcsPath(s"gs://${referenceFile.path}")) + private def getReferenceFileToValidatedGcsPathMap( + referenceFiles: Set[ReferenceFile] + ): IO[Map[ReferenceFile, ValidFullGcsPath]] = { + val filesAndValidatedPaths = referenceFiles.map { referenceFile => + (referenceFile, GcsPathBuilder.validateGcsPath(s"gs://${referenceFile.path}")) }.toMap - val filesWithValidPaths = filesAndValidatedPaths.collect { - case (referenceFile, validPath: ValidFullGcsPath) => (referenceFile, validPath) + val filesWithValidPaths = filesAndValidatedPaths.collect { case (referenceFile, validPath: ValidFullGcsPath) => + (referenceFile, validPath) } val filesWithInvalidPaths = filesAndValidatedPaths.collect { case (referenceFile, invalidPath: InvalidFullGcsPath) => (referenceFile, invalidPath) @@ -73,30 +79,30 @@ trait GcpBatchReferenceFilesMappingOperations { } protected def bulkValidateCrc32cs(gcsClient: Storage, - filesWithValidPaths: Map[ReferenceFile, ValidFullGcsPath]): IO[Map[ReferenceFile, Boolean]] = { + filesWithValidPaths: Map[ReferenceFile, ValidFullGcsPath] + ): IO[Map[ReferenceFile, Boolean]] = IO { val gcsBatch = gcsClient.batch() - val filesAndBlobResults = filesWithValidPaths map { - case (referenceFile, ValidFullGcsPath(bucket, path)) => - val blobGetResult = gcsBatch.get(BlobId.of(bucket, path.substring(1)), BlobGetOption.fields(BlobField.CRC32C)) - (referenceFile, blobGetResult) + val filesAndBlobResults = filesWithValidPaths map { case (referenceFile, ValidFullGcsPath(bucket, path)) => + val blobGetResult = gcsBatch.get(BlobId.of(bucket, path.substring(1)), BlobGetOption.fields(BlobField.CRC32C)) + (referenceFile, blobGetResult) } 
gcsBatch.submit() - filesAndBlobResults map { - case (referenceFile, blobGetResult) => - val crc32cFromManifest = BaseEncoding.base64.encode( - // drop 4 leading bytes from Long crc32c value - // https://stackoverflow.com/a/25111119/1794750 - util.Arrays.copyOfRange(Longs.toByteArray(referenceFile.crc32c), 4, 8) - ) + filesAndBlobResults map { case (referenceFile, blobGetResult) => + val crc32cFromManifest = BaseEncoding.base64.encode( + // drop 4 leading bytes from Long crc32c value + // https://stackoverflow.com/a/25111119/1794750 + util.Arrays.copyOfRange(Longs.toByteArray(referenceFile.crc32c), 4, 8) + ) - (referenceFile, crc32cFromManifest === blobGetResult.get().getCrc32c) + (referenceFile, crc32cFromManifest === blobGetResult.get().getCrc32c) } } - } - private def getMapOfValidReferenceFilePathsToDisks(gcsClient: Storage, manifestFile: ManifestFile): IO[Map[String, GcpBatchReferenceFilesDisk]] = { + private def getMapOfValidReferenceFilePathsToDisks(gcsClient: Storage, + manifestFile: ManifestFile + ): IO[Map[String, GcpBatchReferenceFilesDisk]] = { val refDisk = GcpBatchReferenceFilesDisk(manifestFile.imageIdentifier, manifestFile.diskSizeGb) val allReferenceFilesFromManifestMap = manifestFile.files.map(refFile => (refFile, refDisk)).toMap @@ -109,10 +115,13 @@ trait GcpBatchReferenceFilesMappingOperations { validReferenceFilesFromManifestMapIo map { validReferenceFilesFromManifestMap => val invalidReferenceFiles = allReferenceFilesFromManifestMap.keySet -- validReferenceFilesFromManifestMap.keySet if (invalidReferenceFiles.nonEmpty) { - logger.warn(s"The following files listed in references manifest have checksum mismatch with actual files in GCS: ${invalidReferenceFiles.mkString(",")}") + logger.warn( + s"The following files listed in references manifest have checksum mismatch with actual files in GCS: ${invalidReferenceFiles + .mkString(",")}" + ) } - validReferenceFilesFromManifestMap.map { - case (refFile, disk) => (refFile.path, disk) + validReferenceFilesFromManifestMap.map { case (refFile, disk) => + (refFile.path, disk) }.toMap } } diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuTypeValidation.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuTypeValidation.scala index e7ade1bc46a..de53b2a0206 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuTypeValidation.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuTypeValidation.scala @@ -8,7 +8,6 @@ import wom.RuntimeAttributesKeys import wom.types.{WomStringType, WomType} import wom.values.{WomString, WomValue} - object GpuTypeValidation { lazy val instance: RuntimeAttributesValidation[GpuType] = new GpuTypeValidation lazy val optional: OptionalRuntimeAttributesValidation[GpuType] = instance.optional @@ -20,6 +19,7 @@ class GpuTypeValidation extends RuntimeAttributesValidation[GpuType] { override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[GpuType]] = { case WomString(s) => GpuType(s).validNel - case other => s"Invalid '$key': String value required but got ${other.womType.friendlyName}. See ${GpuType.MoreDetailsURL} for a list of options".invalidNel + case other => + s"Invalid '$key': String value required but got ${other.womType.friendlyName}. 
See ${GpuType.MoreDetailsURL} for a list of options".invalidNel } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuValidation.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuValidation.scala index 4b24c9aeacf..51c19813229 100644 --- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuValidation.scala +++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/util/GpuValidation.scala @@ -5,7 +5,11 @@ import cats.syntax.either._ import cats.syntax.validated._ import com.typesafe.config.Config import common.validation.ErrorOr.ErrorOr -import cromwell.backend.validation.{OptionalRuntimeAttributesValidation, PositiveIntRuntimeAttributesValidation, RuntimeAttributesValidation} +import cromwell.backend.validation.{ + OptionalRuntimeAttributesValidation, + PositiveIntRuntimeAttributesValidation, + RuntimeAttributesValidation +} import eu.timepit.refined.api.Refined import eu.timepit.refined.numeric.Positive import eu.timepit.refined.refineV @@ -37,4 +41,4 @@ class GpuValidation(attributeName: String) extends PositiveIntRuntimeAttributesV case other => s"Invalid gpu count. Expected positive Int but got ${other.womType.friendlyName} ${other.toWomString}".invalidNel } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/GcpBatchIoSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/GcpBatchIoSpec.scala index 186152c0768..9a42728510d 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/GcpBatchIoSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/GcpBatchIoSpec.scala @@ -1,7 +1,12 @@ package cromwell.backend.google.batch import com.google.api.client.http.HttpResponseException -import com.google.api.client.testing.http.{HttpTesting, MockHttpTransport, MockLowLevelHttpRequest, MockLowLevelHttpResponse} +import com.google.api.client.testing.http.{ + HttpTesting, + MockHttpTransport, + MockLowLevelHttpRequest, + MockLowLevelHttpResponse +} import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -25,10 +30,9 @@ class GcpBatchIoSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } private def mockTransport(statusCode: Int) = new MockHttpTransport() { - override def buildRequest(method: String, url: String) = { + override def buildRequest(method: String, url: String) = new MockLowLevelHttpRequest() { override def execute() = new MockLowLevelHttpResponse().setStatusCode(statusCode) } - } } } diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala index 711f59d6845..4253b1877be 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala @@ -18,7 +18,12 @@ import cromwell.backend.google.batch.io.{DiskType, GcpBatchWorkingDisk} import cromwell.backend.google.batch.models._ import 
cromwell.backend.google.batch.util.BatchExpressionFunctions import cromwell.backend.io.JobPathsSpecHelper._ -import cromwell.backend.standard.{DefaultStandardAsyncExecutionActorParams, StandardAsyncExecutionActorParams, StandardAsyncJob, StandardExpressionFunctionsParams} +import cromwell.backend.standard.{ + DefaultStandardAsyncExecutionActorParams, + StandardAsyncExecutionActorParams, + StandardAsyncJob, + StandardExpressionFunctionsParams +} import cromwell.core._ import cromwell.core.callcaching.NoDocker import cromwell.core.labels.Labels @@ -53,14 +58,15 @@ import scala.language.postfixOps import common.mock.MockSugar import org.mockito.Mockito._ -class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite - with AnyFlatSpecLike - with Matchers - with ImplicitSender - with BackendSpec - with BeforeAndAfter - with MockSugar - with DefaultJsonProtocol { +class GcpBatchAsyncBackendJobExecutionActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with ImplicitSender + with BackendSpec + with BeforeAndAfter + with MockSugar + with DefaultJsonProtocol { val mockPathBuilder: GcsPathBuilder = MockGcsPathBuilder.instance import MockGcsPathBuilder._ @@ -68,7 +74,7 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite private def gcsPath(str: String) = mockPathBuilder.build(str).getOrElse(fail(s"Invalid gcs path: $str")) - //import GcpBatchTestConfig._ + // import GcpBatchTestConfig._ import cromwell.backend.google.batch.models.GcpBatchTestConfig._ implicit val Timeout: FiniteDuration = 25.seconds.dilated @@ -98,31 +104,39 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite private val NoOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])) - private lazy val TestableCallContext = CallContext(mockPathBuilder.build("gs://root").get, DummyStandardPaths, isDocker = false) + private lazy val TestableCallContext = + CallContext(mockPathBuilder.build("gs://root").get, DummyStandardPaths, isDocker = false) - private lazy val TestableStandardExpressionFunctionsParams: StandardExpressionFunctionsParams - = new StandardExpressionFunctionsParams { - override lazy val pathBuilders: List[PathBuilder] = List(mockPathBuilder) - override lazy val callContext: CallContext = TestableCallContext - override val ioActorProxy: ActorRef = simpleIoActor - override val executionContext: ExecutionContext = system.dispatcher - } + private lazy val TestableStandardExpressionFunctionsParams: StandardExpressionFunctionsParams = + new StandardExpressionFunctionsParams { + override lazy val pathBuilders: List[PathBuilder] = List(mockPathBuilder) + override lazy val callContext: CallContext = TestableCallContext + override val ioActorProxy: ActorRef = simpleIoActor + override val executionContext: ExecutionContext = system.dispatcher + } - lazy val TestableGcpBatchExpressionFunctions: BatchExpressionFunctions = { + lazy val TestableGcpBatchExpressionFunctions: BatchExpressionFunctions = new BatchExpressionFunctions(TestableStandardExpressionFunctionsParams) - } private def buildInitializationData(jobDescriptor: BackendJobDescriptor, configuration: GcpBatchConfiguration) = { - val pathBuilders = Await.result(configuration.configurationDescriptor.pathBuilders(WorkflowOptions.empty), 5.seconds) + val pathBuilders = + Await.result(configuration.configurationDescriptor.pathBuilders(WorkflowOptions.empty), 5.seconds) val workflowPaths = GcpBatchWorkflowPaths( - jobDescriptor.workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), 
configuration, pathBuilders, GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + jobDescriptor.workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + configuration, + pathBuilders, + GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val runtimeAttributesBuilder = GcpBatchRuntimeAttributes.runtimeAttributesBuilder(configuration) GcpBackendInitializationData(workflowPaths, runtimeAttributesBuilder, configuration, null, None, None, None) } - class TestableGcpBatchJobExecutionActor(params: StandardAsyncExecutionActorParams, functions: BatchExpressionFunctions) - extends GcpBatchAsyncBackendJobExecutionActor(params) { + class TestableGcpBatchJobExecutionActor(params: StandardAsyncExecutionActorParams, + functions: BatchExpressionFunctions + ) extends GcpBatchAsyncBackendJobExecutionActor(params) { def this(jobDescriptor: BackendJobDescriptor, promise: Promise[BackendJobExecutionResponse], @@ -130,8 +144,8 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite functions: BatchExpressionFunctions = TestableGcpBatchExpressionFunctions, batchSingletonActor: ActorRef = emptyActor, ioActor: ActorRef = mockIoActor, - serviceRegistryActor: ActorRef = kvService) = { - + serviceRegistryActor: ActorRef = kvService + ) = this( DefaultStandardAsyncExecutionActorParams( jobIdKey = GcpBatchAsyncBackendJobExecutionActor.GcpBatchOperationIdKey, @@ -146,7 +160,6 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite ), functions ) - } override lazy val jobLogger: JobLogger = new JobLogger( loggerName = "TestLogger", @@ -162,7 +175,6 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite override lazy val backendEngineFunctions: BatchExpressionFunctions = functions } - private val runtimeAttributesBuilder = GcpBatchRuntimeAttributes.runtimeAttributesBuilder(gcpBatchConfiguration) private val workingDisk = GcpBatchWorkingDisk(DiskType.SSD, 200) @@ -174,11 +186,18 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite |} """.stripMargin - private def buildPreemptibleJobDescriptor(preemptible: Int, previousPreemptions: Int, previousUnexpectedRetries: Int, failedRetriesCountOpt: Option[Int] = None): BackendJobDescriptor = { + private def buildPreemptibleJobDescriptor(preemptible: Int, + previousPreemptions: Int, + previousUnexpectedRetries: Int, + failedRetriesCountOpt: Option[Int] = None + ): BackendJobDescriptor = { val attempt = previousPreemptions + previousUnexpectedRetries + 1 - val wdlNamespace = WdlNamespaceWithWorkflow.load(YoSup.replace("[PREEMPTIBLE]", s"preemptible: $preemptible"), - Seq.empty[Draft2ImportResolver]).get - val womDefinition = wdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + val wdlNamespace = WdlNamespaceWithWorkflow + .load(YoSup.replace("[PREEMPTIBLE]", s"preemptible: $preemptible"), Seq.empty[Draft2ImportResolver]) + .get + val womDefinition = wdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) wdlNamespace.toWomExecutable(Option(Inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { case Right(womExecutable) => @@ -203,21 +222,42 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val key = BackendJobDescriptorKey(job, None, attempt) val runtimeAttributes = makeRuntimeAttributes(job) val prefetchedKvEntries = Map( - 
GcpBatchBackendLifecycleActorFactory.preemptionCountKey -> KvPair(ScopedKey(workflowDescriptor.id, KvJobKey(key), GcpBatchBackendLifecycleActorFactory.preemptionCountKey), previousPreemptions.toString), - GcpBatchBackendLifecycleActorFactory.unexpectedRetryCountKey -> KvPair(ScopedKey(workflowDescriptor.id, KvJobKey(key), GcpBatchBackendLifecycleActorFactory.unexpectedRetryCountKey), previousUnexpectedRetries.toString)) - val prefetchedKvEntriesUpd = if(failedRetriesCountOpt.isEmpty) { + GcpBatchBackendLifecycleActorFactory.preemptionCountKey -> KvPair( + ScopedKey(workflowDescriptor.id, KvJobKey(key), GcpBatchBackendLifecycleActorFactory.preemptionCountKey), + previousPreemptions.toString + ), + GcpBatchBackendLifecycleActorFactory.unexpectedRetryCountKey -> KvPair( + ScopedKey(workflowDescriptor.id, + KvJobKey(key), + GcpBatchBackendLifecycleActorFactory.unexpectedRetryCountKey + ), + previousUnexpectedRetries.toString + ) + ) + val prefetchedKvEntriesUpd = if (failedRetriesCountOpt.isEmpty) { prefetchedKvEntries } else { - prefetchedKvEntries + (BackendLifecycleActorFactory.FailedRetryCountKey -> KvPair(ScopedKey(workflowDescriptor.id, KvJobKey(key), BackendLifecycleActorFactory.FailedRetryCountKey), failedRetriesCountOpt.get.toString )) + prefetchedKvEntries + (BackendLifecycleActorFactory.FailedRetryCountKey -> KvPair( + ScopedKey(workflowDescriptor.id, KvJobKey(key), BackendLifecycleActorFactory.FailedRetryCountKey), + failedRetriesCountOpt.get.toString + )) } - BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(Inputs), NoDocker, None, prefetchedKvEntriesUpd) + BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(Inputs), + NoDocker, + None, + prefetchedKvEntriesUpd + ) case Left(badtimes) => fail(badtimes.toList.mkString(", ")) } } private case class DockerImageCacheTestingParameters(dockerImageCacheDiskOpt: Option[String], dockerImageAsSpecifiedByUser: String, - isDockerImageCacheUsageRequested: Boolean) + isDockerImageCacheUsageRequested: Boolean + ) private def executionActor(jobDescriptor: BackendJobDescriptor, promise: Promise[BackendJobExecutionResponse], @@ -226,13 +266,19 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite serviceRegistryActor: ActorRef = kvService, referenceInputFilesOpt: Option[Set[GcpBatchInput]] = None, dockerImageCacheTestingParamsOpt: Option[DockerImageCacheTestingParameters] = None - ): ActorRef = { + ): ActorRef = { val job = generateStandardAsyncJob val run = Run(job) val handle = new GcpBatchPendingExecutionHandle(jobDescriptor, run.job, Option(run), None) - class ExecuteOrRecoverActor extends TestableGcpBatchJobExecutionActor(jobDescriptor, promise, gcpBatchConfiguration, batchSingletonActor = batchSingletonActor, serviceRegistryActor = serviceRegistryActor) { + class ExecuteOrRecoverActor + extends TestableGcpBatchJobExecutionActor(jobDescriptor, + promise, + gcpBatchConfiguration, + batchSingletonActor = batchSingletonActor, + serviceRegistryActor = serviceRegistryActor + ) { override def executeOrRecover(mode: ExecutionMode)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { sendIncrementMetricsForReferenceFiles(referenceInputFilesOpt) dockerImageCacheTestingParamsOpt.foreach { dockerImageCacheTestingParams => @@ -251,17 +297,23 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite system.actorOf(Props(new ExecuteOrRecoverActor), "ExecuteOrRecoverActor-" + UUID.randomUUID) } - def runAndFail(previousPreemptions: 
Int, previousUnexpectedRetries: Int, preemptible: Int, errorCode: Status, innerErrorMessage: String, expectPreemptible: Boolean): BackendJobExecutionResponse = { + def runAndFail(previousPreemptions: Int, + previousUnexpectedRetries: Int, + preemptible: Int, + errorCode: Status, + innerErrorMessage: String, + expectPreemptible: Boolean + ): BackendJobExecutionResponse = { val runStatus: RunStatus = RunStatus.Failed(List.empty) // val runStatus = UnsuccessfulRunStatus(errorCode, Option(innerErrorMessage), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), expectPreemptible) val statusPoller = TestProbe("statusPoller") val promise = Promise[BackendJobExecutionResponse]() - val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, previousPreemptions, previousUnexpectedRetries) + val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, previousPreemptions, previousUnexpectedRetries) // TODO: Use this to check the new KV entries are there! From PAPI - //val kvProbe = TestProbe("kvProbe") + // val kvProbe = TestProbe("kvProbe") val backend = executionActor(jobDescriptor, promise, statusPoller.ref, expectPreemptible) backend ! Execute @@ -277,20 +329,35 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite case RunStatus.Unrecognized => com.google.cloud.batch.v1.JobStatus.State.UNRECOGNIZED } - backend ! GcpBatchBackendSingletonActor.Event.JobStatusRetrieved(Job.newBuilder.setStatus(com.google.cloud.batch.v1.JobStatus.newBuilder.setState(internalStatus).build()).build()) + backend ! GcpBatchBackendSingletonActor.Event.JobStatusRetrieved( + Job.newBuilder + .setStatus(com.google.cloud.batch.v1.JobStatus.newBuilder.setState(internalStatus).build()) + .build() + ) } Await.result(promise.future, Timeout) } - def buildPreemptibleTestActorRef(attempt: Int, preemptible: Int, failedRetriesCountOpt: Option[Int] = None): TestActorRef[TestableGcpBatchJobExecutionActor] = { + def buildPreemptibleTestActorRef(attempt: Int, + preemptible: Int, + failedRetriesCountOpt: Option[Int] = None + ): TestActorRef[TestableGcpBatchJobExecutionActor] = { // For this test we say that all previous attempts were preempted: - val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, attempt - 1, previousUnexpectedRetries = 0, failedRetriesCountOpt = failedRetriesCountOpt) - val props = Props(new TestableGcpBatchJobExecutionActor(jobDescriptor, Promise(), - gcpBatchConfiguration, - TestableGcpBatchExpressionFunctions, - emptyActor, - failIoActor)) + val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, + attempt - 1, + previousUnexpectedRetries = 0, + failedRetriesCountOpt = failedRetriesCountOpt + ) + val props = Props( + new TestableGcpBatchJobExecutionActor(jobDescriptor, + Promise(), + gcpBatchConfiguration, + TestableGcpBatchExpressionFunctions, + emptyActor, + failIoActor + ) + ) TestActorRef(props, s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") } @@ -331,12 +398,30 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "send proper value for \"number of reference files used gauge\" metric, or don't send anything if reference disks feature is disabled" in { - val expectedInput1 = GcpBatchFileInput(name = "testfile1", relativeHostPath = DefaultPathBuilder.build(Paths.get(s"test/reference/path/file1")), mount = null, cloudPath = null) - val expectedInput2 = GcpBatchFileInput(name = "testfile2", relativeHostPath = DefaultPathBuilder.build(Paths.get(s"test/reference/path/file2")), mount = null, 
cloudPath = null) + val expectedInput1 = GcpBatchFileInput(name = "testfile1", + relativeHostPath = + DefaultPathBuilder.build(Paths.get(s"test/reference/path/file1")), + mount = null, + cloudPath = null + ) + val expectedInput2 = GcpBatchFileInput(name = "testfile2", + relativeHostPath = + DefaultPathBuilder.build(Paths.get(s"test/reference/path/file2")), + mount = null, + cloudPath = null + ) val expectedReferenceInputFiles = Set[GcpBatchInput](expectedInput1, expectedInput2) - val expectedMsg1 = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput1.relativeHostPath.pathAsString)))) - val expectedMsg2 = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput2.relativeHostPath.pathAsString)))) + val expectedMsg1 = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput1.relativeHostPath.pathAsString)) + ) + ) + val expectedMsg2 = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput2.relativeHostPath.pathAsString)) + ) + ) val jobDescriptor = buildPreemptibleJobDescriptor(0, 0, 0) val serviceRegistryProbe = TestProbe() @@ -370,59 +455,74 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val serviceRegistryProbe = TestProbe() val madeUpDockerImageName = "test_madeup_docker_image_name" - val expectedMessageWhenRequestedNotFound = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList("docker", List("image", "cache", "image_not_in_cache", madeUpDockerImageName))))) + val expectedMessageWhenRequestedNotFound = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, + NonEmptyList("docker", List("image", "cache", "image_not_in_cache", madeUpDockerImageName)) + ) + ) + ) val backendDockerCacheRequestedButNotFound = executionActor( jobDescriptor, Promise[BackendJobExecutionResponse](), TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - None, - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = true - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + None, + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = true ) + ) ) backendDockerCacheRequestedButNotFound ! 
Execute serviceRegistryProbe.expectMsg(expectedMessageWhenRequestedNotFound) - val expectedMessageWhenRequestedAndFound = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList("docker", List("image", "cache", "used_image_from_cache", madeUpDockerImageName))))) + val expectedMessageWhenRequestedAndFound = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, + NonEmptyList("docker", List("image", "cache", "used_image_from_cache", madeUpDockerImageName)) + ) + ) + ) val backendDockerCacheRequestedAndFound = executionActor( jobDescriptor, Promise[BackendJobExecutionResponse](), TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - Option("test_madeup_disk_image_name"), - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = true - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + Option("test_madeup_disk_image_name"), + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = true ) + ) ) backendDockerCacheRequestedAndFound ! Execute serviceRegistryProbe.expectMsg(expectedMessageWhenRequestedAndFound) - val expectedMessageWhenNotRequestedButFound = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList("docker", List("image", "cache", "cached_image_not_used", madeUpDockerImageName))))) + val expectedMessageWhenNotRequestedButFound = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, + NonEmptyList("docker", List("image", "cache", "cached_image_not_used", madeUpDockerImageName)) + ) + ) + ) val backendDockerCacheNotRequestedButFound = executionActor( jobDescriptor, Promise[BackendJobExecutionResponse](), TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - Option("test_madeup_disk_image_name"), - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = false - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + Option("test_madeup_disk_image_name"), + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = false ) + ) ) backendDockerCacheNotRequestedButFound ! Execute serviceRegistryProbe.expectMsg(expectedMessageWhenNotRequestedButFound) @@ -433,14 +533,13 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - None, - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = false - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + None, + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = false ) + ) ) backendDockerCacheNotRequestedNotFound ! 
Execute serviceRegistryProbe.expectNoMessage(timeout) @@ -475,8 +574,10 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite checkFailedResult(Status.ABORTED, Option("15: Other type of error.")) .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true - checkFailedResult(Status.OUT_OF_RANGE, Option("14: Wrong errorCode.")).isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true - checkFailedResult(Status.ABORTED, Option("Weird error message.")).isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true + checkFailedResult(Status.OUT_OF_RANGE, Option("14: Wrong errorCode.")) + .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true + checkFailedResult(Status.ABORTED, Option("Weird error message.")) + .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true checkFailedResult(Status.ABORTED, Option("UnparsableInt: Even weirder error message.")) .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true checkFailedResult(Status.ABORTED, None).isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true @@ -511,14 +612,20 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite gcsFileKey -> gcsFileVal ) - val wdlNamespace = WdlNamespaceWithWorkflow.load( - wdlString, - Seq.empty[Draft2ImportResolver], - ).get - val womWorkflow = wdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + val wdlNamespace = WdlNamespaceWithWorkflow + .load( + wdlString, + Seq.empty[Draft2ImportResolver] + ) + .get + val womWorkflow = wdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) wdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), @@ -531,19 +638,28 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite None ) - val call: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t }).get + val call: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => + t + }.get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestableGcpBatchJobExecutionActor(jobDescriptor, Promise(), gcpBatchConfiguration)) val testActorRef = TestActorRef[TestableGcpBatchJobExecutionActor]( - props, s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") - + props, + s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) - def gcsPathToLocal(womValue: WomValue): WomValue = { + def gcsPathToLocal(womValue: WomValue): WomValue = WomFileMapper.mapWomFiles(testActorRef.underlyingActor.mapCommandLineWomFile)(womValue).get - } val mappedInputs = jobDescriptor.localInputs 
safeMapValues gcsPathToLocal @@ -569,8 +685,14 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), - WdlNamespaceWithWorkflow.load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, WorkflowOptions.fromJsonString("""{"monitoring_script": "gs://path/to/script"}""").get, Labels.empty, @@ -582,22 +704,37 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestableGcpBatchJobExecutionActor(jobDescriptor, Promise(), gcpBatchConfiguration)) val testActorRef = TestActorRef[TestableGcpBatchJobExecutionActor]( - props, s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) testActorRef.underlyingActor.monitoringScript shouldBe - Option(GcpBatchFileInput("monitoring-in", gcsPath("gs://path/to/script"), DefaultPathBuilder.get("monitoring.sh"), workingDisk)) + Option( + GcpBatchFileInput("monitoring-in", + gcsPath("gs://path/to/script"), + DefaultPathBuilder.get("monitoring.sh"), + workingDisk + ) + ) } it should "not create a GcpBatchFileInput for the monitoring script, when not specified" in { val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), - WdlNamespaceWithWorkflow.load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, NoOptions, Labels.empty, @@ -606,14 +743,17 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite None ) - val job: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst({case t: CommandCallNode => t}).get + val job: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => t }.get val key = BackendJobDescriptorKey(job, None, 1) val runtimeAttributes = makeRuntimeAttributes(job) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = 
Props(new TestableGcpBatchJobExecutionActor(jobDescriptor, Promise(), gcpBatchConfiguration)) val testActorRef = TestActorRef[TestableGcpBatchJobExecutionActor]( - props, s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) testActorRef.underlyingActor.monitoringScript shouldBe None } @@ -622,11 +762,18 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId(UUID.fromString("e6236763-c518-41d0-9688-432549a8bf7c")), - WdlNamespaceWithWorkflow.load( - SampleWdl.HelloWorld.asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.HelloWorld.asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, - WorkflowOptions.fromJsonString(s""" {"${GcpBatchWorkflowPaths.GcsRootOptionKey}": "gs://path/to/gcs_root"} """).get, + WorkflowOptions + .fromJsonString(s""" {"${GcpBatchWorkflowPaths.GcsRootOptionKey}": "gs://path/to/gcs_root"} """) + .get, Labels.empty, HogGroup("foo"), List.empty, @@ -636,11 +783,14 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "hello").get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestableGcpBatchJobExecutionActor(jobDescriptor, Promise(), gcpBatchConfiguration)) val testActorRef = TestActorRef[TestableGcpBatchJobExecutionActor]( - props, s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val batchBackend = testActorRef.underlyingActor @@ -659,11 +809,19 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId(UUID.fromString("e6236763-c518-41d0-9688-432549a8bf7d")), - WdlNamespaceWithWorkflow.load( - new SampleWdl.ScatterWdl().asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load( + new SampleWdl.ScatterWdl().asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, - WorkflowOptions.fromJsonString(s""" {"${GcpBatchWorkflowPaths.GcsRootOptionKey}": "gs://path/to/gcs_root"} """).get, + WorkflowOptions + .fromJsonString(s""" {"${GcpBatchWorkflowPaths.GcsRootOptionKey}": "gs://path/to/gcs_root"} """) + 
.get, Labels.empty, HogGroup("foo"), List.empty, @@ -673,11 +831,14 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "B").get val key = BackendJobDescriptorKey(call, Option(2), 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestableGcpBatchJobExecutionActor(jobDescriptor, Promise(), gcpBatchConfiguration)) val testActorRef = TestActorRef[TestableGcpBatchJobExecutionActor]( - props, s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val batchBackend = testActorRef.underlyingActor @@ -694,9 +855,8 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "return preemptible = true only in the correct cases" in { - def attempt(max: Int, attempt: Int): GcpBatchAsyncBackendJobExecutionActor = { + def attempt(max: Int, attempt: Int): GcpBatchAsyncBackendJobExecutionActor = buildPreemptibleTestActorRef(attempt, max).underlyingActor - } def attempt1(max: Int) = attempt(max, 1) def attempt2(max: Int) = attempt(max, 2) @@ -733,13 +893,15 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite .toWomWorkflowDefinition(isASubworkflow = false) .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, - WorkflowOptions.fromJsonString( - s"""|{ - | "google_project": "$googleProject", - | "${GcpBatchWorkflowPaths.GcsRootOptionKey}": "$batchGcsRoot" - |} - |""".stripMargin - ).get, + WorkflowOptions + .fromJsonString( + s"""|{ + | "google_project": "$googleProject", + | "${GcpBatchWorkflowPaths.GcsRootOptionKey}": "$batchGcsRoot" + |} + |""".stripMargin + ) + .get, Labels.empty, HogGroup("foo"), List.empty, @@ -749,11 +911,14 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "goodbye").get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestableGcpBatchJobExecutionActor(jobDescriptor, Promise(), gcpBatchConfiguration)) val testActorRef = TestActorRef[TestableGcpBatchJobExecutionActor]( - props, s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableGcpBatchJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val batchBackend = testActorRef.underlyingActor @@ -789,12 +954,15 @@ class GcpBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite } private def makeRuntimeAttributes(job: CommandCallNode) = { - val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(job.callable.runtimeAttributes, null, Map.empty) - RuntimeAttributeDefinition.addDefaultsToAttributes( - runtimeAttributesBuilder.definitions.toSet, NoOptions)(evaluatedAttributes.getOrElse(fail("Failed to evaluate runtime attributes"))) + val evaluatedAttributes = + 
RuntimeAttributeDefinition.evaluateRuntimeAttributes(job.callable.runtimeAttributes, null, Map.empty) + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributesBuilder.definitions.toSet, NoOptions)( + evaluatedAttributes.getOrElse(fail("Failed to evaluate runtime attributes")) + ) } - private def generateStandardAsyncJob = { - StandardAsyncJob(JobName.newBuilder().setJob(UUID.randomUUID().toString).setProject("test").setLocation("local").build().toString) - } -} \ No newline at end of file + private def generateStandardAsyncJob = + StandardAsyncJob( + JobName.newBuilder().setJob(UUID.randomUUID().toString).setProject("test").setLocation("local").build().toString + ) +} diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchBackendLifecycleActorFactorySpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchBackendLifecycleActorFactorySpec.scala index d3bf3a7a3d1..a872ae7090a 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchBackendLifecycleActorFactorySpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchBackendLifecycleActorFactorySpec.scala @@ -11,7 +11,6 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.concurrent.duration._ import scala.language.postfixOps - class GcpBatchBackendLifecycleActorFactorySpec extends AnyFlatSpecLike with Matchers with TableDrivenPropertyChecks { "GcpBatchBackendLifecycleActorFactory" should "robustly build configuration attributes" in { @@ -36,7 +35,8 @@ class GcpBatchBackendLifecycleActorFactorySpec extends AnyFlatSpecLike with Matc batchRequestTimeoutConfiguration = null, referenceFileToDiskImageMappingOpt = None, dockerImageToCacheDiskImageMappingOpt = None, - checkpointingInterval = 1 second) + checkpointingInterval = 1 second + ) GcpBatchBackendLifecycleActorFactory.robustBuildAttributes(() => attributes) shouldBe attributes } @@ -54,7 +54,10 @@ class GcpBatchBackendLifecycleActorFactorySpec extends AnyFlatSpecLike with Matc forAll(fails) { (attempts, description, function) => it should s"$description: make $attempts attribute creation attempts before giving up" in { val e = the[RuntimeException] thrownBy { - GcpBatchBackendLifecycleActorFactory.robustBuildAttributes(function, initialIntervalMillis = 1, maxIntervalMillis = 5) + GcpBatchBackendLifecycleActorFactory.robustBuildAttributes(function, + initialIntervalMillis = 1, + maxIntervalMillis = 5 + ) } e.getMessage should startWith(s"Failed to build GcpBatchConfigurationAttributes on attempt $attempts of 3") } diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActorSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActorSpec.scala index c57902531c8..cdfaa931755 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActorSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActorSpec.scala @@ -8,7 +8,7 @@ import cromwell.backend.BackendWorkflowInitializationActor.{InitializationFailed import cromwell.backend.async.RuntimeAttributeValidationFailures import cromwell.backend.google.batch.models.GcpBatchConfiguration import cromwell.backend.google.batch.actors.GcpBatchInitializationActorSpec._ -import 
cromwell.backend.google.batch.models.GcpBatchTestConfig.{BatchGlobalConfig, googleConfiguration, batchAttributes} +import cromwell.backend.google.batch.models.GcpBatchTestConfig.{batchAttributes, googleConfiguration, BatchGlobalConfig} import cromwell.backend.{BackendConfigurationDescriptor, BackendSpec, BackendWorkflowDescriptor} import cromwell.core.Dispatcher.BackendDispatcher import cromwell.core.TestKitSuite @@ -20,8 +20,7 @@ import wom.graph.CommandCallNode import scala.concurrent.duration._ -class GcpBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers - with ImplicitSender { +class GcpBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender { val Timeout: FiniteDuration = 30.second.dilated import BackendSpec._ @@ -47,14 +46,27 @@ class GcpBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike private def getJesBackendProps(workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], - jesConfiguration: GcpBatchConfiguration): Props = { + jesConfiguration: GcpBatchConfiguration + ): Props = { val ioActor = mockIoActor - val params = GcpBatchInitializationActorParams(workflowDescriptor, ioActor, calls, jesConfiguration, emptyActor, restarting = false) + val params = GcpBatchInitializationActorParams(workflowDescriptor, + ioActor, + calls, + jesConfiguration, + emptyActor, + restarting = false + ) Props(new GcpBatchInitializationActor(params)).withDispatcher(BackendDispatcher) } - private def getJesBackend(workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], conf: BackendConfigurationDescriptor) = { - val props = getJesBackendProps(workflowDescriptor, calls, new GcpBatchConfiguration(conf, googleConfiguration, batchAttributes)) + private def getJesBackend(workflowDescriptor: BackendWorkflowDescriptor, + calls: Set[CommandCallNode], + conf: BackendConfigurationDescriptor + ) = { + val props = getJesBackendProps(workflowDescriptor, + calls, + new GcpBatchConfiguration(conf, googleConfiguration, batchAttributes) + ) system.actorOf(props, "TestableJesInitializationActor-" + UUID.randomUUID) } @@ -63,17 +75,16 @@ class GcpBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike it should "log a warning message when there are unsupported runtime attributes" in { within(Timeout) { - val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, - runtime = """runtime { docker: "ubuntu/latest" test: true }""") - val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, - defaultBackendConfig) + val workflowDescriptor = + buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { docker: "ubuntu/latest" test: true }""") + val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, defaultBackendConfig) val eventPattern = "Key/s [test] is/are not supported by backend. Unsupported attributes will not be part of job executions." EventFilter.warning(pattern = escapePattern(eventPattern), occurrences = 1) intercept { backend ! Initialize } expectMsgPF() { - case InitializationSuccess(_) => //Docker entry is present. + case InitializationSuccess(_) => // Docker entry is present. 
case InitializationFailed(failure) => fail(s"InitializationSuccess was expected but got $failure") } } @@ -82,36 +93,39 @@ class GcpBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike it should "return InitializationFailed when docker runtime attribute key is not present" in { within(Timeout) { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { }""") - val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, - defaultBackendConfig) + val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, defaultBackendConfig) backend ! Initialize - expectMsgPF() { - case InitializationFailed(failure) => - failure match { - case exception: RuntimeAttributeValidationFailures => - if (!exception.getMessage.equals("Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!")) - fail("Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'.") - } + expectMsgPF() { case InitializationFailed(failure) => + failure match { + case exception: RuntimeAttributeValidationFailures => + if ( + !exception.getMessage.equals( + "Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!" + ) + ) + fail( + "Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'." + ) + } } } } } object GcpBatchInitializationActorSpec { - val globalConfig: Config = ConfigFactory.parseString( - """ - |google { - | - | application-name = "cromwell" - | - | auths = [ - | { - | name = "application-default" - | scheme = "mock" - | } - | ] - |} - |""".stripMargin) + val globalConfig: Config = ConfigFactory.parseString(""" + |google { + | + | application-name = "cromwell" + | + | auths = [ + | { + | name = "application-default" + | scheme = "mock" + | } + | ] + |} + |""".stripMargin) val backendConfigTemplate: String = """ @@ -158,28 +172,40 @@ object GcpBatchInitializationActorSpec { |[DOCKERHUBCONFIG] |""".stripMargin - val backendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[VPCCONFIG]", "").replace("[DOCKERHUBCONFIG]", "")) - - val dockerBackendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[VPCCONFIG]", "").replace("[DOCKERHUBCONFIG]", - """ - |dockerhub { - | account = "my@docker.account" - | # no secrets here guys this is just `echo -n username:password | base64` - | token = "dXNlcm5hbWU6cGFzc3dvcmQ=" - |} - | """.stripMargin)) - - val vpcBackendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[DOCKERHUBCONFIG]", "").replace("[VPCCONFIG]", - """ - |virtual-private-cloud { - | network-label-key = "cromwell-ci-network" - | subnetwork-label-key = "cromwell-ci-subnetwork" - | auth = "service_account" - |} - | """.stripMargin)) + val backendConfig: Config = + ConfigFactory.parseString(backendConfigTemplate.replace("[VPCCONFIG]", "").replace("[DOCKERHUBCONFIG]", "")) + + val dockerBackendConfig: Config = ConfigFactory.parseString( + backendConfigTemplate + .replace("[VPCCONFIG]", "") + .replace( + "[DOCKERHUBCONFIG]", + """ + |dockerhub { + | account = "my@docker.account" + | # no secrets here guys this is just `echo -n username:password | base64` + | token = "dXNlcm5hbWU6cGFzc3dvcmQ=" + |} + | """.stripMargin + ) + ) + + val vpcBackendConfig: Config = ConfigFactory.parseString( + backendConfigTemplate + 
.replace("[DOCKERHUBCONFIG]", "") + .replace( + "[VPCCONFIG]", + """ + |virtual-private-cloud { + | network-label-key = "cromwell-ci-network" + | subnetwork-label-key = "cromwell-ci-subnetwork" + | auth = "service_account" + |} + | """.stripMargin + ) + ) private val defaultBackendConfig = new BackendConfigurationDescriptor(backendConfig, globalConfig) { override private[backend] lazy val cromwellFileSystems = new CromwellFileSystems(BatchGlobalConfig) } } - diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDiskSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDiskSpec.scala index a168743db8f..7e0f03cc422 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDiskSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDiskSpec.scala @@ -40,7 +40,7 @@ class GcpBatchAttachedDiskSpec extends AnyFlatSpec with CromwellTimeoutSpec with ) it should "reject malformed disk mounts" in { - forAll(invalidTable) { (unparsed) => + forAll(invalidTable) { unparsed => GcpBatchAttachedDisk.parse(unparsed) should be(a[Failure[_]]) } } diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchAttributeSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchAttributeSpec.scala index 6743962a4e0..fc012ec5ff8 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchAttributeSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchAttributeSpec.scala @@ -6,28 +6,22 @@ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import wom.values.{WomFloat, WomInteger, WomSingleFile, WomString, WomValue} -class GcpBatchGpuAttributesSpec - extends AnyWordSpecLike - with Matchers - with GcpBatchRuntimeAttributesSpecsMixin { +class GcpBatchGpuAttributesSpec extends AnyWordSpecLike with Matchers with GcpBatchRuntimeAttributesSpecsMixin { val validGpuTypes = List( (Option(WomString("nvidia-tesla-k80")), Option(GpuType.NVIDIATeslaK80)), - (Option(WomString("nvidia-tesla-p100")), Option( GpuType.NVIDIATeslaP100)), - (Option(WomString("custom-gpu-24601")), Option( GpuType("custom-gpu-24601"))), - (None, None)) - val invalidGpuTypes = List( - WomSingleFile("nvidia-tesla-k80"), - WomInteger(100)) + (Option(WomString("nvidia-tesla-p100")), Option(GpuType.NVIDIATeslaP100)), + (Option(WomString("custom-gpu-24601")), Option(GpuType("custom-gpu-24601"))), + (None, None) + ) + val invalidGpuTypes = List(WomSingleFile("nvidia-tesla-k80"), WomInteger(100)) val validGpuCounts = List( (Option(WomInteger(1)), Option(1)), (Option(WomInteger(100)), Option(100)), (None, None) ) - val invalidGpuCounts = List( - WomString("ten"), - WomFloat(1.0)) + val invalidGpuCounts = List(WomString("ten"), WomFloat(1.0)) validGpuTypes foreach { case (validGpuType, expectedGpuTypeValue) => validGpuCounts foreach { case (validGpuCount, expectedGpuCountValue) => @@ -36,7 +30,8 @@ class GcpBatchGpuAttributesSpec "docker" -> WomString("ubuntu:latest") ) ++ validGpuType.map(t => "gpuType" -> t) ++ validGpuCount.map(c => "gpuCount" -> c) - val actualRuntimeAttributes = toBatchRuntimeAttributes(runtimeAttributes, emptyWorkflowOptions, gcpBatchConfiguration) + val actualRuntimeAttributes = + 
toBatchRuntimeAttributes(runtimeAttributes, emptyWorkflowOptions, gcpBatchConfiguration) expectedGpuTypeValue match { case Some(v) => actualRuntimeAttributes.gpuResource.exists(_.gpuType == v) @@ -57,9 +52,9 @@ class GcpBatchGpuAttributesSpec "docker" -> WomString("ubuntu:latest") ) ++ validGpuType.map(t => "gpuType" -> t) + ("gpuCount" -> invalidGpuCount) - assertBatchRuntimeAttributesFailedCreation( - runtimeAttributes, - s"Invalid gpu count. Expected positive Int but got") + assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, + s"Invalid gpu count. Expected positive Int but got" + ) } } } @@ -71,10 +66,10 @@ class GcpBatchGpuAttributesSpec "docker" -> WomString("ubuntu:latest") ) + ("gpuType" -> invalidGpuType) + ("gpuCount" -> invalidGpuCount) - assertBatchRuntimeAttributesFailedCreation( - runtimeAttributes, - s"Invalid gpu count. Expected positive Int but got") + assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, + s"Invalid gpu count. Expected positive Int but got" + ) } } } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala index d914e9f2978..47044de002d 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala @@ -17,8 +17,11 @@ import org.scalatest.prop.TableDrivenPropertyChecks //import java.net.URL import scala.concurrent.duration._ -class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers - with TableDrivenPropertyChecks { +class GcpBatchConfigurationAttributesSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { behavior of "GcpBatchAttributes" @@ -50,7 +53,7 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo gcpBatchAttributes.maxPollingInterval should be(600) } - it should "parse batch-requests.timeouts values correctly" in { + it should "parse batch-requests.timeouts values correctly" in { val customContent = """ @@ -66,7 +69,9 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo val gcpBatchAttributes = GcpBatchConfigurationAttributes(googleConfig, backendConfig, "batch") gcpBatchAttributes.batchRequestTimeoutConfiguration.readTimeoutMillis.get.value should be(100.hours.toMillis.toInt) - gcpBatchAttributes.batchRequestTimeoutConfiguration.connectTimeoutMillis.get.value should be(10.seconds.toMillis.toInt) + gcpBatchAttributes.batchRequestTimeoutConfiguration.connectTimeoutMillis.get.value should be( + 10.seconds.toMillis.toInt + ) } it should "parse an empty batch-requests.timeouts section correctly" in { @@ -143,8 +148,8 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo |""".stripMargin, VirtualPrivateCloudConfiguration( Option(VirtualPrivateCloudLabels("my-network", Option("my-subnetwork"), mockAuth)), - None, - ), + None + ) ), ( "labels config without subnetwork key", @@ -155,8 +160,8 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo |""".stripMargin, VirtualPrivateCloudConfiguration( Option(VirtualPrivateCloudLabels("my-network", None, mockAuth)), - None, - ), + None + ) ), 
( "literal config", @@ -167,8 +172,8 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo |""".stripMargin, VirtualPrivateCloudConfiguration( None, - Option(VirtualPrivateCloudLiterals("my-network", Option("my-subnetwork"))), - ), + Option(VirtualPrivateCloudLiterals("my-network", Option("my-subnetwork"))) + ) ), ( "literal config without subnetwork name", @@ -178,9 +183,9 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo |""".stripMargin, VirtualPrivateCloudConfiguration( None, - Option(VirtualPrivateCloudLiterals("my-network", None)), - ), - ), + Option(VirtualPrivateCloudLiterals("my-network", None)) + ) + ) ) private val invalidVPCConfigTests = Table( @@ -191,7 +196,7 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo | network-label-key = my-network |} |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `auth`."), + List("Virtual Private Cloud configuration is invalid. Missing keys: `auth`.") ), ( "without network label-key", @@ -199,7 +204,7 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo | auth = mock |} |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`."), + List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`.") ), ( "with just a subnetwork label key", @@ -207,7 +212,7 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo | subnetwork-label-key = my-subnetwork |} |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key,auth`."), + List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key,auth`.") ), ( "with subnetwork label network key and auth", @@ -216,8 +221,8 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo | auth = mock | } |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`."), - ), + List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`.") + ) ) forAll(validVpcConfigTests) { (description, customConfig, vpcConfig) => @@ -243,13 +248,12 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo it should "not parse invalid config" in { val nakedConfig = - ConfigFactory.parseString( - """ - |{ - | genomics { - | - | } - |} + ConfigFactory.parseString(""" + |{ + | genomics { + | + | } + |} """.stripMargin) val exception = intercept[IllegalArgumentException with MessageAggregation] { @@ -302,7 +306,8 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo val invalids = List("-1", "150MB", "14PB") invalids foreach { - case invalid@GcpBatchConfigurationAttributes.GsutilHumanBytes(_, _) => fail(s"Memory specification $invalid not expected to be accepted") + case invalid @ GcpBatchConfigurationAttributes.GsutilHumanBytes(_, _) => + fail(s"Memory specification $invalid not expected to be accepted") case _ => } } @@ -332,32 +337,32 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo // Highly abridged versions of hg19 and hg38 manifests just to test for correctness // of parsing. 
val manifestConfig = - """ - |reference-disk-localization-manifests = [ - |{ - | "imageIdentifier" : "hg19-public-2020-10-26", - | "diskSizeGb" : 10, - | "files" : [ { - | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.fasta.fai", - | "crc32c" : 159565724 - | }, { - | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.dict", - | "crc32c" : 1679459712 - | }] - |}, - |{ - | "imageIdentifier" : "hg38-public-2020-10-26", - | "diskSizeGb" : 20, - | "files" : [ { - | "path" : "gcp-public-data--broad-references/hg38/v0/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz", - | "crc32c" : 930173616 - | }, { - | "path" : "gcp-public-data--broad-references/hg38/v0/exome_evaluation_regions.v1.interval_list", - | "crc32c" : 289077232 - | }] - |} - |] - |""".stripMargin + """ + |reference-disk-localization-manifests = [ + |{ + | "imageIdentifier" : "hg19-public-2020-10-26", + | "diskSizeGb" : 10, + | "files" : [ { + | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.fasta.fai", + | "crc32c" : 159565724 + | }, { + | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.dict", + | "crc32c" : 1679459712 + | }] + |}, + |{ + | "imageIdentifier" : "hg38-public-2020-10-26", + | "diskSizeGb" : 20, + | "files" : [ { + | "path" : "gcp-public-data--broad-references/hg38/v0/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz", + | "crc32c" : 930173616 + | }, { + | "path" : "gcp-public-data--broad-references/hg38/v0/exome_evaluation_regions.v1.interval_list", + | "crc32c" : 289077232 + | }] + |} + |] + |""".stripMargin val backendConfig = ConfigFactory.parseString(configString(manifestConfig)) val validation = GcpBatchConfigurationAttributes.validateReferenceDiskManifestConfigs(backendConfig, "batch") val manifests: List[ManifestFile] = validation.toEither.toOption.get.get @@ -422,7 +427,7 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo | "imageIdentifier" : "hg19-public-2020-10-26", | "diskSizeGb" : 10, | # missing files - |}]""", + |}]""" ) badValues foreach { badValue => @@ -433,19 +438,21 @@ class GcpBatchConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeo } } - it should "parse correct existing docker-image-cache-manifest-file config" in { val dockerImageCacheManifest1Path = "gs://bucket/manifest1.json" val dockerImageCacheManifestConfigStr = s"""docker-image-cache-manifest-file = "$dockerImageCacheManifest1Path"""" val backendConfig = ConfigFactory.parseString(configString(dockerImageCacheManifestConfigStr)) - val validatedGcsPathToDockerImageCacheManifestFileErrorOr = GcpBatchConfigurationAttributes.validateGcsPathToDockerImageCacheManifestFile(backendConfig) + val validatedGcsPathToDockerImageCacheManifestFileErrorOr = + GcpBatchConfigurationAttributes.validateGcsPathToDockerImageCacheManifestFile(backendConfig) validatedGcsPathToDockerImageCacheManifestFileErrorOr match { case Valid(validatedGcsPathToDockerImageCacheManifestFileOpt) => validatedGcsPathToDockerImageCacheManifestFileOpt match { case Some(validatedGcsPathToDockerCacheManifestFile) => - validatedGcsPathToDockerCacheManifestFile shouldBe GcsPathBuilder.validateGcsPath(dockerImageCacheManifest1Path) + validatedGcsPathToDockerCacheManifestFile shouldBe GcsPathBuilder.validateGcsPath( + dockerImageCacheManifest1Path + ) case None => fail("GCS paths to docker image cache manifest files, parsed from config, should not be empty") } @@ -458,7 +465,8 @@ class GcpBatchConfigurationAttributesSpec extends 
AnyFlatSpec with CromwellTimeo val backendConfig = ConfigFactory.parseString(configString()) - val validatedGcsPathsToDockerImageCacheManifestFilesErrorOr = GcpBatchConfigurationAttributes.validateReferenceDiskManifestConfigs(backendConfig, "unit-test-backend") + val validatedGcsPathsToDockerImageCacheManifestFilesErrorOr = + GcpBatchConfigurationAttributes.validateReferenceDiskManifestConfigs(backendConfig, "unit-test-backend") validatedGcsPathsToDockerImageCacheManifestFilesErrorOr match { case Valid(validatedGcsPathsToDockerImageCacheManifestFilesOpt) => validatedGcsPathsToDockerImageCacheManifestFilesOpt shouldBe None diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationSpec.scala index 46b7c147b34..42ae487b5fb 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationSpec.scala @@ -11,7 +11,12 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -class GcpBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks with BeforeAndAfterAll { +class GcpBatchConfigurationSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks + with BeforeAndAfterAll { behavior of "GcpBatchConfigurationSpec" @@ -22,26 +27,25 @@ class GcpBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec wit () } - val globalConfig = ConfigFactory.parseString( - s""" - |google { - | - | application-name = "cromwell" - | - | auths = [ - | { - | name = "application-default" - | scheme = "application_default" - | }, - | { - | name = "service-account" - | scheme = "service_account" - | service-account-id = "my-service-account" - | pem-file = "${mockFile.pathAsString}" - | } - | ] - |} - | + val globalConfig = ConfigFactory.parseString(s""" + |google { + | + | application-name = "cromwell" + | + | auths = [ + | { + | name = "application-default" + | scheme = "application_default" + | }, + | { + | name = "service-account" + | scheme = "service_account" + | service-account-id = "my-service-account" + | pem-file = "${mockFile.pathAsString}" + | } + | ] + |} + | """.stripMargin) val backendConfig = ConfigFactory.parseString( @@ -87,7 +91,8 @@ class GcpBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec wit | } | } | - """.stripMargin) + """.stripMargin + ) it should "fail to instantiate if any required configuration is missing" in { @@ -113,12 +118,15 @@ class GcpBatchConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec wit } it should "have correct root" in { - new GcpBatchConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig), googleConfiguration, batchAttributes).root shouldBe "gs://my-cromwell-workflows-bucket" + new GcpBatchConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig), + googleConfiguration, + batchAttributes + ).root shouldBe "gs://my-cromwell-workflows-bucket" } - //it should "have correct docker" in { + // it should "have correct docker" in { // val dockerConf = new GcpBatchConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig), googleConfiguration, 
batchAttributes).dockerCredentials // dockerConf shouldBe defined // dockerConf.get.token shouldBe "dockerToken" - //} + // } } diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala index ed2baf4fd06..933d2958689 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala @@ -27,7 +27,14 @@ class GcpBatchJobPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matche ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = GcpBatchWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), gcpBatchConfiguration, pathBuilders(), GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = GcpBatchWorkflowPaths( + workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + gcpBatchConfiguration, + pathBuilders(), + GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val callPaths = GcpBatchJobPaths(workflowPaths, jobDescriptorKey) @@ -44,7 +51,14 @@ class GcpBatchJobPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matche ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = GcpBatchWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), gcpBatchConfiguration, pathBuilders(), GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = GcpBatchWorkflowPaths( + workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + gcpBatchConfiguration, + pathBuilders(), + GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val callPaths = GcpBatchJobPaths(workflowPaths, jobDescriptorKey) @@ -66,7 +80,14 @@ class GcpBatchJobPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matche ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = GcpBatchWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), gcpBatchConfiguration, pathBuilders(), GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = GcpBatchWorkflowPaths( + workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + gcpBatchConfiguration, + pathBuilders(), + GcpBatchInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val callPaths = GcpBatchJobPaths(workflowPaths, jobDescriptorKey) diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributesSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributesSpec.scala index 569a2af58eb..beb9b865c1b 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributesSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchRuntimeAttributesSpec.scala @@ -21,7 +21,7 @@ import wom.values._ import scala.util.{Failure, Success, Try} final class GcpBatchRuntimeAttributesSpec - extends AnyWordSpecLike + extends AnyWordSpecLike with Matchers with 
GcpBatchRuntimeAttributesSpecsMixin { @@ -35,7 +35,10 @@ final class GcpBatchRuntimeAttributesSpec "use hardcoded defaults if not declared in task, workflow options, or config (except for docker)" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest")) val expectedRuntimeAttributes = expectedDefaults - assertBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, batchConfiguration = noDefaultsBatchConfiguration) + assertBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, + expectedRuntimeAttributes, + batchConfiguration = noDefaultsBatchConfiguration + ) } "validate a valid Docker entry" in { @@ -58,12 +61,18 @@ final class GcpBatchRuntimeAttributesSpec "fail to validate an invalid failOnStderr entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "failOnStderr" -> WomString("yes")) - assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'") + assertBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'" + ) } "fail to validate an invalid continueOnReturnCode entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomString("value")) - assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") + assertBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" + ) } "validate a valid cpu entry" in { @@ -91,18 +100,30 @@ final class GcpBatchRuntimeAttributesSpec "fail to validate an invalid zones entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomInteger(1)) - assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") + assertBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]" + ) } "validate a valid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomStringType), List(WomString("us-central1-y"), WomString("us-central1-z")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "zones" -> WomArray(WomArrayType(WomStringType), + List(WomString("us-central1-y"), WomString("us-central1-z")) + ) + ) val expectedRuntimeAttributes = expectedDefaults.copy(zones = Vector("us-central1-y", "us-central1-z")) assertBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) - assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))) + ) + assertBatchRuntimeAttributesFailedCreation( 
+ runtimeAttributes, + "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]" + ) } "validate a valid preemptible entry" in { @@ -114,7 +135,8 @@ final class GcpBatchRuntimeAttributesSpec "fail to validate an invalid preemptible entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "preemptible" -> WomString("value")) assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, - "Expecting preemptible runtime attribute to be an Integer") + "Expecting preemptible runtime attribute to be an Integer" + ) } "validate a valid bootDiskSizeGb entry" in { @@ -125,7 +147,9 @@ final class GcpBatchRuntimeAttributesSpec "fail to validate an invalid bootDiskSizeGb entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "bootDiskSizeGb" -> WomString("4GB")) - assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting bootDiskSizeGb runtime attribute to be an Integer") + assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, + "Expecting bootDiskSizeGb runtime attribute to be an Integer" + ) } // "validate a valid disks entry" in { @@ -134,15 +158,15 @@ final class GcpBatchRuntimeAttributesSpec // assertBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) // } - //"fail to validate an invalid disks entry" in { + // "fail to validate an invalid disks entry" in { // val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomInteger(10)) // assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting disks runtime attribute to be a comma separated String or Array[String]") - //} + // } - //"fail to validate a valid disks array entry" in { + // "fail to validate a valid disks array entry" in { // val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("blah"), WomString("blah blah")))) // assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE'") - //} + // } "validate a valid memory entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "memory" -> WomString("1 GB")) @@ -152,7 +176,10 @@ final class GcpBatchRuntimeAttributesSpec "fail to validate an invalid memory entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "memory" -> WomString("blah")) - assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting memory runtime attribute to be an Integer or String with format '8 GB'") + assertBatchRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting memory runtime attribute to be an Integer or String with format '8 GB'" + ) } "validate a valid noAddress entry" in { @@ -164,7 +191,8 @@ final class GcpBatchRuntimeAttributesSpec "fail to validate an invalid noAddress entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "noAddress" -> WomInteger(1)) assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, - "Expecting noAddress runtime attribute to be a Boolean") + "Expecting noAddress runtime attribute to be a Boolean" + ) } "override config default attributes with default attributes declared in workflow options" in { @@ -226,11 +254,14 @@ final class GcpBatchRuntimeAttributesSpec trait GcpBatchRuntimeAttributesSpecsMixin { this: TestSuite => - def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]): WorkflowOptions = { - 
WorkflowOptions(JsObject(Map( - "default_runtime_attributes" -> JsObject(defaults) - ))) - } + def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]): WorkflowOptions = + WorkflowOptions( + JsObject( + Map( + "default_runtime_attributes" -> JsObject(defaults) + ) + ) + ) val expectedDefaults = new GcpBatchRuntimeAttributes( cpu = refineMV(1), @@ -253,7 +284,8 @@ trait GcpBatchRuntimeAttributesSpecsMixin { expectedRuntimeAttributes: GcpBatchRuntimeAttributes, workflowOptions: WorkflowOptions = emptyWorkflowOptions, defaultZones: NonEmptyList[String] = defaultZones, - batchConfiguration: GcpBatchConfiguration = gcpBatchConfiguration): Unit = { + batchConfiguration: GcpBatchConfiguration = gcpBatchConfiguration + ): Unit = { try { val actualRuntimeAttributes = toBatchRuntimeAttributes(runtimeAttributes, workflowOptions, batchConfiguration) assert(actualRuntimeAttributes == expectedRuntimeAttributes) @@ -265,10 +297,13 @@ trait GcpBatchRuntimeAttributesSpecsMixin { def assertBatchRuntimeAttributesFailedCreation(runtimeAttributes: Map[String, WomValue], exMsgs: List[String], - workflowOptions: WorkflowOptions): Unit = { + workflowOptions: WorkflowOptions + ): Unit = { Try(toBatchRuntimeAttributes(runtimeAttributes, workflowOptions, gcpBatchConfiguration)) match { case Success(oops) => - fail(s"Expected error containing strings: ${exMsgs.map(s => s"'$s'").mkString(", ")} but instead got Success($oops)") + fail( + s"Expected error containing strings: ${exMsgs.map(s => s"'$s'").mkString(", ")} but instead got Success($oops)" + ) case Failure(ex) => exMsgs foreach { exMsg => assert(ex.getMessage.contains(exMsg)) } } () @@ -276,23 +311,29 @@ trait GcpBatchRuntimeAttributesSpecsMixin { def assertBatchRuntimeAttributesFailedCreation(runtimeAttributes: Map[String, WomValue], exMsg: String, - workflowOptions: WorkflowOptions = emptyWorkflowOptions): Unit = { + workflowOptions: WorkflowOptions = emptyWorkflowOptions + ): Unit = assertBatchRuntimeAttributesFailedCreation(runtimeAttributes, List(exMsg), workflowOptions) - } def toBatchRuntimeAttributes(runtimeAttributes: Map[String, WomValue], workflowOptions: WorkflowOptions, - batchConfiguration: GcpBatchConfiguration): GcpBatchRuntimeAttributes = { + batchConfiguration: GcpBatchConfiguration + ): GcpBatchRuntimeAttributes = { val runtimeAttributesBuilder = GcpBatchRuntimeAttributes.runtimeAttributesBuilder(batchConfiguration) - val defaultedAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes( - staticRuntimeAttributeDefinitions, workflowOptions)(runtimeAttributes) + val defaultedAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(staticRuntimeAttributeDefinitions, workflowOptions)( + runtimeAttributes + ) val validatedRuntimeAttributes = runtimeAttributesBuilder.build(defaultedAttributes, NOPLogger.NOP_LOGGER) GcpBatchRuntimeAttributes(validatedRuntimeAttributes, batchConfiguration.runtimeConfig) } val emptyWorkflowOptions: WorkflowOptions = WorkflowOptions.fromMap(Map.empty).get val defaultZones: NonEmptyList[String] = NonEmptyList.of("us-central1-b", "us-central1-a") - val noDefaultsBatchConfiguration = new GcpBatchConfiguration(GcpBatchTestConfig.NoDefaultsConfigurationDescriptor, googleConfiguration, batchAttributes) + val noDefaultsBatchConfiguration = new GcpBatchConfiguration(GcpBatchTestConfig.NoDefaultsConfigurationDescriptor, + googleConfiguration, + batchAttributes + ) val staticRuntimeAttributeDefinitions: Set[RuntimeAttributeDefinition] = 
GcpBatchRuntimeAttributes.runtimeAttributesBuilder(GcpBatchTestConfig.gcpBatchConfiguration).definitions.toSet } diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchTestConfig.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchTestConfig.scala index fc8b1333170..4d41f5436ea 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchTestConfig.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchTestConfig.scala @@ -98,11 +98,10 @@ object GcpBatchTestConfig { val BatchBackendConfig: Config = ConfigFactory.parseString(BatchBackendConfigString) val BatchGlobalConfig: Config = ConfigFactory.parseString(BatchGlobalConfigString) val BatchBackendNoDefaultConfig: Config = ConfigFactory.parseString(NoDefaultsConfigString) - val BatchBackendConfigurationDescriptor: BackendConfigurationDescriptor = { + val BatchBackendConfigurationDescriptor: BackendConfigurationDescriptor = new BackendConfigurationDescriptor(BatchBackendConfig, BatchGlobalConfig) { override private[backend] lazy val cromwellFileSystems = new CromwellFileSystems(BatchGlobalConfig) } - } val NoDefaultsConfigurationDescriptor: BackendConfigurationDescriptor = BackendConfigurationDescriptor(BatchBackendNoDefaultConfig, BatchGlobalConfig) def pathBuilders()(implicit as: ActorSystem): List[PathBuilder] = @@ -110,5 +109,6 @@ object GcpBatchTestConfig { val googleConfiguration: GoogleConfiguration = GoogleConfiguration(BatchGlobalConfig) val batchAttributes: GcpBatchConfigurationAttributes = GcpBatchConfigurationAttributes(googleConfiguration, BatchBackendConfig, "batch") - val gcpBatchConfiguration = new GcpBatchConfiguration(BatchBackendConfigurationDescriptor, googleConfiguration,batchAttributes) -} \ No newline at end of file + val gcpBatchConfiguration = + new GcpBatchConfiguration(BatchBackendConfigurationDescriptor, googleConfiguration, batchAttributes) +} diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchVpcAndSubnetworkProjectLabelValuesSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchVpcAndSubnetworkProjectLabelValuesSpec.scala index e24b3fc809e..e28b912a1fb 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchVpcAndSubnetworkProjectLabelValuesSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchVpcAndSubnetworkProjectLabelValuesSpec.scala @@ -21,15 +21,15 @@ class GcpBatchVpcAndSubnetworkProjectLabelValuesSpec extends AnyFlatSpec with Ma s"slashed/$${projectId}/net", None, "slashed/my-project/net", - None, + None ), ( "a subnet with a project token", "slashed/net", Option(s"slashed/$${projectId}/sub"), "slashed/net", - Option("slashed/my-project/sub"), - ), + Option("slashed/my-project/sub") + ) ) forAll(labelsTests) { (description, network, subnetOption, networkName, subnetNameOption) => diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/LocalizationSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/LocalizationSpec.scala index 12be65744e0..3e7beb90d0a 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/LocalizationSpec.scala +++ 
b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/LocalizationSpec.scala @@ -24,14 +24,21 @@ class LocalizationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher fields.keySet.map(_.getName) should contain theSameElementsAs Set("commands", "image_uri") // Set("commands", "environment", "imageUri", "labels", "mounts") - fields.find(_._1.getName == "commands").value.asInstanceOf[(_, java.util.List[_])]._2 should contain theSameElementsAs List( - "-m", manifestPathString + fields + .find(_._1.getName == "commands") + .value + .asInstanceOf[(_, java.util.List[_])] + ._2 should contain theSameElementsAs List( + "-m", + manifestPathString ) // runnable.get("mounts") should be(a[java.util.List[_]]) // runnable.get("mounts").asInstanceOf[java.util.List[_]] should be (empty) // - fields.find(_._1.getName == "image_uri").value.asInstanceOf[(_, String)]._2 should be("somerepo/drs-downloader:tagged") + fields.find(_._1.getName == "image_uri").value.asInstanceOf[(_, String)]._2 should be( + "somerepo/drs-downloader:tagged" + ) // // val actionLabels = runnable.get("labels").asInstanceOf[java.util.Map[_, _]] // actionLabels.keySet.asScala should contain theSameElementsAs List("tag") @@ -46,19 +53,29 @@ class LocalizationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val tagLabel = "myLabel" val requesterPaysProjectId = "123" - val container = Localization.drsRunnable(manifestPath, Map(tagKey -> tagLabel), Option(requesterPaysProjectId)).getContainer + val container = + Localization.drsRunnable(manifestPath, Map(tagKey -> tagLabel), Option(requesterPaysProjectId)).getContainer val fields = container.getAllFields.asScala fields.keySet.map(_.getName) should contain theSameElementsAs Set("commands", "image_uri") // Set("commands", "environment", "imageUri", "labels", "mounts") - fields.find(_._1.getName == "commands").value.asInstanceOf[(_, java.util.List[_])]._2 should contain theSameElementsAs List( - "-m", manifestPathString, "-r", requesterPaysProjectId + fields + .find(_._1.getName == "commands") + .value + .asInstanceOf[(_, java.util.List[_])] + ._2 should contain theSameElementsAs List( + "-m", + manifestPathString, + "-r", + requesterPaysProjectId ) // runnable.get("mounts") should be(a[java.util.List[_]]) // runnable.get("mounts").asInstanceOf[java.util.List[_]] should be (empty) - fields.find(_._1.getName == "image_uri").value.asInstanceOf[(_, String)]._2 should be("somerepo/drs-downloader:tagged") + fields.find(_._1.getName == "image_uri").value.asInstanceOf[(_, String)]._2 should be( + "somerepo/drs-downloader:tagged" + ) // val actionLabels = runnable.get("labels").asInstanceOf[java.util.Map[_, _]] // actionLabels.keySet.asScala should contain theSameElementsAs List("tag") diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala index 04d2c7d43f5..ef657ad11ca 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableBuilderSpec.scala @@ -18,31 +18,42 @@ class RunnableBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ("description", "runnable", "command"), ("a cloud sdk runnable", RunnableBuilder.cloudSdkRunnable, s"docker run ${RunnableUtils.CloudSdkImage}"), ("a cloud sdk 
runnable with args", - RunnableBuilder.cloudSdkRunnable.withCommand("bash", "-c", "echo hello"), - s"docker run ${RunnableUtils.CloudSdkImage} bash -c echo\\ hello" + RunnableBuilder.cloudSdkRunnable.withCommand("bash", "-c", "echo hello"), + s"docker run ${RunnableUtils.CloudSdkImage} bash -c echo\\ hello" ), ("a cloud sdk runnable with quotes in the args", - RunnableBuilder.cloudSdkRunnable.withCommand("bash", "-c", "echo hello m'lord"), - s"docker run ${RunnableUtils.CloudSdkImage} bash -c echo\\ hello\\ m\\'lord" + RunnableBuilder.cloudSdkRunnable.withCommand("bash", "-c", "echo hello m'lord"), + s"docker run ${RunnableUtils.CloudSdkImage} bash -c echo\\ hello\\ m\\'lord" ), ("a cloud sdk runnable with a newline in the args", - RunnableBuilder.cloudSdkRunnable.withCommand("bash", "-c", "echo hello\\\nworld"), - s"docker run ${RunnableUtils.CloudSdkImage} bash -c echo\\ hello\\\\world" + RunnableBuilder.cloudSdkRunnable.withCommand("bash", "-c", "echo hello\\\nworld"), + s"docker run ${RunnableUtils.CloudSdkImage} bash -c echo\\ hello\\\\world" ), ("an runnable with multiple args", - Runnable.newBuilder() - .setContainer(Runnable.Container.newBuilder.setImageUri("ubuntu")) - .withEntrypointCommand("") - .withCommand("bash", "-c", "echo hello") - .withAlwaysRun(true) - .withVolumes(List( - Volume.newBuilder().setDeviceName("read-only-disk").setMountPath("/mnt/read/only/container").addMountOptions("ro"), - Volume.newBuilder().setDeviceName("read-write-disk").setMountPath("/mnt/read/write/container").addMountOptions("rw"), - ).map(_.build())), - "docker run" + - " -v /mnt/read/only/container:/mnt/read/only/container -v /mnt/read/write/container:/mnt/read/write/container" + - " ubuntu bash -c echo\\ hello" - ), + Runnable + .newBuilder() + .setContainer(Runnable.Container.newBuilder.setImageUri("ubuntu")) + .withEntrypointCommand("") + .withCommand("bash", "-c", "echo hello") + .withAlwaysRun(true) + .withVolumes( + List( + Volume + .newBuilder() + .setDeviceName("read-only-disk") + .setMountPath("/mnt/read/only/container") + .addMountOptions("ro"), + Volume + .newBuilder() + .setDeviceName("read-write-disk") + .setMountPath("/mnt/read/write/container") + .addMountOptions("rw") + ).map(_.build()) + ), + "docker run" + + " -v /mnt/read/only/container:/mnt/read/only/container -v /mnt/read/write/container:/mnt/read/write/container" + + " ubuntu bash -c echo\\ hello" + ) ) forAll(dockerRunRunnables) { (description, runnable, command) => @@ -53,15 +64,17 @@ class RunnableBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc private val memoryRetryExpectedEntrypoint = "/bin/sh" - def memoryRetryExpectedCommand(lookupString: String): List[String] = { + def memoryRetryExpectedCommand(lookupString: String): List[String] = List( "-c", s"grep -E -q '$lookupString' /cromwell_root/stderr ; echo $$? 
> /cromwell_root/memory_retry_rc" ) - } val volumes = List( - Volume.newBuilder().setDeviceName("read-only-disk").setMountPath("/mnt/read/only/container")/*.addMountOptions("ro")*/ + Volume + .newBuilder() + .setDeviceName("read-only-disk") + .setMountPath("/mnt/read/only/container") /*.addMountOptions("ro")*/ ).map(_.build()) private val memoryRetryRunnableExpectedLabels = Map(Key.Tag -> Value.RetryWithMoreMemory).asJava @@ -76,7 +89,9 @@ class RunnableBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc runnable.getContainer.getCommandsList.asScala shouldBe expectedCommand runnable.getAlwaysRun shouldBe true runnable.getLabelsMap shouldBe memoryRetryRunnableExpectedLabels - runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => s"${v.getMountPath}:${v.getMountPath}") + runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => + s"${v.getMountPath}:${v.getMountPath}" + ) } it should "return cloud sdk runnable for multiple keys in retry-with-double-memory" in { @@ -89,6 +104,8 @@ class RunnableBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc runnable.getContainer.getCommandsList.asScala shouldBe expectedCommand runnable.getAlwaysRun shouldBe true runnable.getLabelsMap shouldBe memoryRetryRunnableExpectedLabels - runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => s"${v.getMountPath}:${v.getMountPath}") + runnable.getContainer.getVolumesList.asScala.toList shouldBe volumes.map(v => + s"${v.getMountPath}:${v.getMountPath}" + ) } -} \ No newline at end of file +} diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableCommandSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableCommandSpec.scala index 6e6c9f2d0af..546f8c9997e 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableCommandSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/runnable/RunnableCommandSpec.scala @@ -1,6 +1,5 @@ package cromwell.backend.google.batch.runnable - import java.nio.file.Path import common.assertion.CromwellTimeoutSpec import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.GcsTransferConfiguration @@ -19,7 +18,7 @@ class RunnableCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat mock[Path], mock[com.google.api.services.storage.Storage], mock[com.google.cloud.storage.Storage], - "my-project", + "my-project" ) val recovered = recoverRequesterPaysError(path) { flag => s"flag is $flag" @@ -46,22 +45,23 @@ class RunnableCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat } it should "use GcsTransferConfiguration to set the number of localization retries" in { - implicit val gcsTransferConfiguration: GcsTransferConfiguration = GcsTransferConfiguration( - transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") - retry("I'm very flaky") shouldBe """for i in $(seq 31380); do - | ( - | I'm very flaky - | ) - | RC=$? 
- | if [ "$RC" = "0" ]; then - | break - | fi - | if [ $i -lt 31380 ]; then - | printf '%s %s\n' "$(date -u '+%Y/%m/%d %H:%M:%S')" Waiting\ 5\ seconds\ and\ retrying - | sleep 5 - | fi - |done - |exit "$RC"""".stripMargin + implicit val gcsTransferConfiguration: GcsTransferConfiguration = + GcsTransferConfiguration(transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") + retry( + "I'm very flaky" + ) shouldBe """for i in $(seq 31380); do + | ( + | I'm very flaky + | ) + | RC=$? + | if [ "$RC" = "0" ]; then + | break + | fi + | if [ $i -lt 31380 ]; then + | printf '%s %s\n' "$(date -u '+%Y/%m/%d %H:%M:%S')" Waiting\ 5\ seconds\ and\ retrying + | sleep 5 + | fi + |done + |exit "$RC"""".stripMargin } } - diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraintsSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraintsSpec.scala index 968ab545cbf..417b1e829d1 100644 --- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraintsSpec.scala +++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/util/GcpBatchMachineConstraintsSpec.scala @@ -65,9 +65,9 @@ class GcpBatchMachineConstraintsSpec extends AnyFlatSpec with CromwellTimeoutSpe // Same tests but with AMD Rome (n2d) #cpu > 16 are in increments of 16 (MemorySize(1024, MemoryUnit.MB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1024"), - (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), n2dOption, false, "n2d-custom-4-4096"), + (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), n2dOption, false, "n2d-custom-4-4096"), (MemorySize(1, MemoryUnit.GB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1024"), - (MemorySize(1 , MemoryUnit.GB), refineMV[Positive](4), n2dOption, false, "n2d-custom-4-2048"), + (MemorySize(1, MemoryUnit.GB), refineMV[Positive](4), n2dOption, false, "n2d-custom-4-2048"), (MemorySize(14, MemoryUnit.GB), refineMV[Positive](16), n2dOption, false, "n2d-custom-16-14336"), (MemorySize(13.65, MemoryUnit.GB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-14080"), (MemorySize(1520.96, MemoryUnit.MB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1536"), @@ -83,7 +83,7 @@ class GcpBatchMachineConstraintsSpec extends AnyFlatSpec with CromwellTimeoutSpe cpu = cpu, cpuPlatformOption = cpuPlatformOption, googleLegacyMachineSelection = googleLegacyMachineSelection, - jobLogger = NOPLogger.NOP_LOGGER, + jobLogger = NOPLogger.NOP_LOGGER ) shouldBe expected } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/CustomMachineType.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/CustomMachineType.scala index 33cbd346abf..d7855890641 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/CustomMachineType.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/CustomMachineType.scala @@ -20,6 +20,7 @@ import math.{log, pow} * - https://cloud.google.com/sdk/gcloud/reference/compute/instances/create#--custom-vm-type */ trait CustomMachineType { + /** * The vm prefix to create this custom machine type. 
*/ @@ -53,10 +54,7 @@ trait CustomMachineType { /** * Generates a custom machine type based on the requested memory and cpu */ - def machineType(requestedMemory: MemorySize, - requestedCpu: Int Refined Positive, - jobLogger: Logger, - ): String = { + def machineType(requestedMemory: MemorySize, requestedCpu: Int Refined Positive, jobLogger: Logger): String = { val memory = requestedMemory |> validateMemory val cpu = requestedCpu |> validateCpu @@ -91,8 +89,8 @@ trait CustomMachineType { adjustedCpu: Int, originalMemory: MemorySize, adjustedMemory: MemorySize, - logger: Logger, - ): Unit = { + logger: Logger + ): Unit = { def memoryAdjustmentLog = s"memory was adjusted from ${originalMemory.toMBString} to ${adjustedMemory.toMBString}" def cpuAdjustmentLog = s"cpu was adjusted from $originalCpu to $adjustedCpu" @@ -100,7 +98,7 @@ trait CustomMachineType { val messageOption = ( originalCpu == adjustedCpu, - originalMemory.to(MemoryUnit.MB).amount == adjustedMemory.to(MemoryUnit.MB).amount, + originalMemory.to(MemoryUnit.MB).amount == adjustedMemory.to(MemoryUnit.MB).amount ) match { case (true, false) => Option(memoryAdjustmentLog) case (false, true) => Option(cpuAdjustmentLog) @@ -128,17 +126,15 @@ case object N1CustomMachineType extends CustomMachineType { override val maxMemoryPerCpu: MemorySize = MemorySize(6.5, MemoryUnit.GB) override val memoryFactor: MemorySize = MemorySize(256, MemoryUnit.MB) - override def validateCpu(cpu: Refined[Int, Positive]): Int = { + override def validateCpu(cpu: Refined[Int, Positive]): Int = // Either one cpu, or an even number of cpus cpu.value match { case 1 => 1 case cpu => cpu + (cpu % 2) } - } - override def validateMemory(memory: MemorySize): MemorySize = { + override def validateMemory(memory: MemorySize): MemorySize = memory.asMultipleOf(memoryFactor) - } } case object N2CustomMachineType extends CustomMachineType { @@ -147,18 +143,16 @@ case object N2CustomMachineType extends CustomMachineType { override val maxMemoryPerCpu: MemorySize = MemorySize(8.0, MemoryUnit.GB) override val memoryFactor: MemorySize = MemorySize(256, MemoryUnit.MB) - override def validateCpu(cpu: Refined[Int, Positive]): Int = { + override def validateCpu(cpu: Refined[Int, Positive]): Int = // cpus must be divisible by 2 up to 32, and higher numbers must be divisible by 4 cpu.value match { case cpu if cpu <= 32 => cpu + (cpu % 2) case cpu if cpu % 4 == 0 => cpu case cpu => cpu + (4 - (cpu % 4)) } - } - override def validateMemory(memory: MemorySize): MemorySize = { + override def validateMemory(memory: MemorySize): MemorySize = memory.asMultipleOf(memoryFactor) - } } case object N2DCustomMachineType extends CustomMachineType { @@ -166,18 +160,15 @@ case object N2DCustomMachineType extends CustomMachineType { override val minMemoryPerCpu: MemorySize = MemorySize(0.5, MemoryUnit.GB) override val maxMemoryPerCpu: MemorySize = MemorySize(8.0, MemoryUnit.GB) override val memoryFactor: MemorySize = MemorySize(256, MemoryUnit.MB) - - override def validateCpu(cpu: Refined[Int, Positive]): Int = { + + override def validateCpu(cpu: Refined[Int, Positive]): Int = cpu.value match { - case cpu if cpu <= 16 => 2 max pow(2, (log(cpu.toDouble)/log(2)).ceil).toInt + case cpu if cpu <= 16 => 2 max pow(2, (log(cpu.toDouble) / log(2)).ceil).toInt case cpu if cpu > 16 && cpu <= 96 && cpu % 16 == 0 => cpu case cpu if cpu > 16 && cpu <= 96 => cpu + 16 - (cpu % 16) case cpu if cpu > 96 => 96 } - } - override def validateMemory(memory: MemorySize): MemorySize = { + override def validateMemory(memory: 
MemorySize): MemorySize = memory.asMultipleOf(memoryFactor) - } } - diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleCloudScopes.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleCloudScopes.scala index 5adc03ba1a8..02c40b6f11a 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleCloudScopes.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleCloudScopes.scala @@ -4,6 +4,7 @@ package cromwell.backend.google.pipelines.common * Google cloud scopes that don't have constants defined elsewhere in Google Cloud Java API. */ object GoogleCloudScopes { + /** * More restricted version of com.google.api.services.cloudkms.v1.CloudKMSScopes.CLOUD_PLATFORM * Could use that scope to keep things simple, but docs say to use a more restricted scope: diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleLabels.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleLabels.scala index 3eacb82393b..0599594fdf8 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleLabels.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GoogleLabels.scala @@ -23,18 +23,16 @@ object GoogleLabels { // This function is used to coerce a string into one that meets the requirements for a label submission to Google Pipelines API. // See 'labels' in https://cloud.google.com/genomics/reference/rpc/google.genomics.v1alpha2#google.genomics.v1alpha2.RunPipelineArgs - def safeGoogleName(mainText: String, emptyAllowed: Boolean = false): String = { - + def safeGoogleName(mainText: String, emptyAllowed: Boolean = false): String = validateLabelRegex(mainText) match { case Valid(labelText) => labelText case invalid @ _ if mainText.equals("") && emptyAllowed => mainText case invalid @ _ => - def appendSafe(current: String, nextChar: Char): String = { + def appendSafe(current: String, nextChar: Char): String = nextChar match { case c if c.isLetterOrDigit || c == '-' => current + c.toLower case _ => current + '-' } - } val foldResult = mainText.toCharArray.foldLeft("")(appendSafe) @@ -50,51 +48,64 @@ object GoogleLabels { if (tooLong) { val middleSeparator = "---" val subSectionLength = (MaxLabelLength - middleSeparator.length) / 2 - validStartAndEnd.substring(0, subSectionLength) + middleSeparator + validStartAndEnd.substring(length - subSectionLength, length) + validStartAndEnd.substring(0, subSectionLength) + middleSeparator + validStartAndEnd.substring( + length - subSectionLength, + length + ) } else { validStartAndEnd } } - } - def validateLabelRegex(s: String): ErrorOr[String] = { + def validateLabelRegex(s: String): ErrorOr[String] = (GoogleLabelRegex.pattern.matcher(s).matches, s.length <= MaxLabelLength) match { case (true, true) => s.validNel - case (false, false) => s"Invalid label field: `$s` did not match regex '$GoogleLabelRegexPattern' and it is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel + case (false, false) => + s"Invalid label field: `$s` did not match regex '$GoogleLabelRegexPattern' and it is ${s.length} characters. 
The maximum is $MaxLabelLength.".invalidNel case (false, _) => s"Invalid label field: `$s` did not match the regex '$GoogleLabelRegexPattern'".invalidNel - case (_, false) => s"Invalid label field: `$s` is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel + case (_, false) => + s"Invalid label field: `$s` is ${s.length} characters. The maximum is $MaxLabelLength.".invalidNel } - } - def safeLabels(values: (String, String)*): Seq[GoogleLabel] = { - def safeGoogleLabel(kvp: (String, String)): GoogleLabel = { + def safeGoogleLabel(kvp: (String, String)): GoogleLabel = GoogleLabel(safeGoogleName(kvp._1), safeGoogleName(kvp._2, emptyAllowed = true)) - } values.map(safeGoogleLabel) } - def validateLabel(key: String, value: String): ErrorOr[GoogleLabel] = { - (validateLabelRegex(key), validateLabelRegex(value)).mapN { (validKey, validValue) => GoogleLabel(validKey, validValue) } - } + def validateLabel(key: String, value: String): ErrorOr[GoogleLabel] = + (validateLabelRegex(key), validateLabelRegex(value)).mapN { (validKey, validValue) => + GoogleLabel(validKey, validValue) + } def fromWorkflowOptions(workflowOptions: WorkflowOptions): Try[Seq[GoogleLabel]] = { def extractGoogleLabelsFromJsObject(jsObject: JsObject): Try[Seq[GoogleLabel]] = { val asErrorOr = jsObject.fields.toList.traverse { case (key: String, value: JsString) => GoogleLabels.validateLabel(key, value.value) - case (key, other) => s"Bad label value type for '$key'. Expected simple string but got $other".invalidNel : ErrorOr[GoogleLabel] + case (key, other) => + s"Bad label value type for '$key'. Expected simple string but got $other".invalidNel: ErrorOr[GoogleLabel] } asErrorOr match { case Valid(value) => Success(value) - case Invalid(errors) => Failure(new AggregatedMessageException("Invalid 'google_labels' in workflow options", errors.toList) with CromwellFatalExceptionMarker with NoStackTrace) + case Invalid(errors) => + Failure( + new AggregatedMessageException("Invalid 'google_labels' in workflow options", errors.toList) + with CromwellFatalExceptionMarker + with NoStackTrace + ) } } workflowOptions.toMap.get("google_labels") match { case Some(obj: JsObject) => extractGoogleLabelsFromJsObject(obj) - case Some(other) => Failure(new Exception(s"Invalid 'google_labels' in workflow options. Must be a simple JSON object mapping string keys to string values. Got $other") with NoStackTrace with CromwellFatalExceptionMarker) + case Some(other) => + Failure( + new Exception( + s"Invalid 'google_labels' in workflow options. Must be a simple JSON object mapping string keys to string values. 
Got $other" + ) with NoStackTrace with CromwellFatalExceptionMarker + ) case None => Success(Seq.empty) } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala index 540888f9180..7ef5dfb0232 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala @@ -19,6 +19,7 @@ class GpuTypeValidation extends RuntimeAttributesValidation[GpuType] { override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[GpuType]] = { case WomString(s) => GpuType(s).validNel - case other => s"Invalid '$key': String value required but got ${other.womType.friendlyName}. See ${GpuType.MoreDetailsURL} for a list of options".invalidNel + case other => + s"Invalid '$key': String value required but got ${other.womType.friendlyName}. See ${GpuType.MoreDetailsURL} for a list of options".invalidNel } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuValidation.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuValidation.scala index adbb44565a9..916410df293 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuValidation.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuValidation.scala @@ -5,7 +5,11 @@ import cats.syntax.either._ import cats.syntax.validated._ import com.typesafe.config.Config import common.validation.ErrorOr.ErrorOr -import cromwell.backend.validation.{OptionalRuntimeAttributesValidation, PositiveIntRuntimeAttributesValidation, RuntimeAttributesValidation} +import cromwell.backend.validation.{ + OptionalRuntimeAttributesValidation, + PositiveIntRuntimeAttributesValidation, + RuntimeAttributesValidation +} import eu.timepit.refined.api.Refined import eu.timepit.refined.numeric.Positive import eu.timepit.refined.refineV diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala index 7e707f959e1..b92e9ceeeb7 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/MachineConstraints.scala @@ -11,8 +11,8 @@ object MachineConstraints { cpu: Int Refined Positive, cpuPlatformOption: Option[String], googleLegacyMachineSelection: Boolean, - jobLogger: Logger, - ): String = { + jobLogger: Logger + ): String = if (googleLegacyMachineSelection) { s"predefined-$cpu-${memory.to(MemoryUnit.MB).amount.intValue()}" } else { @@ -26,10 +26,9 @@ object MachineConstraints { cpuPlatformOption match { case Some(PipelinesApiRuntimeAttributes.CpuPlatformIntelIceLakeValue) => N2CustomMachineType case Some(PipelinesApiRuntimeAttributes.CpuPlatformIntelCascadeLakeValue) => N2CustomMachineType - case 
Some(PipelinesApiRuntimeAttributes.CpuPlatformAMDRomeValue) => N2DCustomMachineType + case Some(PipelinesApiRuntimeAttributes.CpuPlatformAMDRomeValue) => N2DCustomMachineType case _ => N1CustomMachineType } customMachineType.machineType(memory, cpu, jobLogger) } - } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PapiInstrumentation.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PapiInstrumentation.scala index 656597cf29f..e0f40fe2670 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PapiInstrumentation.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PapiInstrumentation.scala @@ -23,9 +23,8 @@ object PapiInstrumentation { private val PapiAbortRetriedKey = PapiAbortKey.concatNel(RetryKey) implicit class StatsDPathGoogleEnhanced(val statsDPath: InstrumentationPath) extends AnyVal { - def withGoogleThrowable(failure: Throwable) = { + def withGoogleThrowable(failure: Throwable) = statsDPath.withThrowable(failure, GoogleUtil.extractStatusCode) - } } } @@ -35,15 +34,21 @@ trait PapiInstrumentation extends CromwellInstrumentation { def abortSuccess() = increment(PapiAbortKey.concatNel(SuccessKey), BackendPrefix) def failedQuery(failedQuery: PAPIApiRequestFailed) = failedQuery.query match { - case _: PAPIStatusPollRequest => increment(PapiPollFailedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) - case _: PAPIRunCreationRequest => increment(PapiRunFailedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) - case _: PAPIAbortRequest => increment(PapiAbortFailedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) + case _: PAPIStatusPollRequest => + increment(PapiPollFailedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) + case _: PAPIRunCreationRequest => + increment(PapiRunFailedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) + case _: PAPIAbortRequest => + increment(PapiAbortFailedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) } def retriedQuery(failedQuery: PAPIApiRequestFailed) = failedQuery.query match { - case _: PAPIStatusPollRequest => increment(PapiPollRetriedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) - case _: PAPIRunCreationRequest => increment(PapiRunRetriedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) - case _: PAPIAbortRequest => increment(PapiAbortRetriedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) + case _: PAPIStatusPollRequest => + increment(PapiPollRetriedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) + case _: PAPIRunCreationRequest => + increment(PapiRunRetriedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) + case _: PAPIAbortRequest => + increment(PapiAbortRetriedKey.withGoogleThrowable(failedQuery.cause.cause), BackendPrefix) } def updateQueueSize(size: Int) = sendGauge(PapiKey.concatNel("queue_size"), size.toLong, BackendPrefix) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala index fee63573ff1..fa079ea86b2 100644 --- 
a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActor.scala @@ -13,20 +13,36 @@ import common.util.StringUtil._ import common.validation.ErrorOr._ import common.validation.Validation._ import cromwell.backend._ -import cromwell.backend.async.{AbortedExecutionHandle, ExecutionHandle, FailedNonRetryableExecutionHandle, FailedRetryableExecutionHandle, PendingExecutionHandle} +import cromwell.backend.async.{ + AbortedExecutionHandle, + ExecutionHandle, + FailedNonRetryableExecutionHandle, + FailedRetryableExecutionHandle, + PendingExecutionHandle +} import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes.GcsTransferConfiguration import cromwell.backend.google.pipelines.common.PipelinesApiJobPaths.GcsTransferLibraryName import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory._ import cromwell.backend.google.pipelines.common.api.RunStatus.TerminalRunStatus import cromwell.backend.google.pipelines.common.api._ import cromwell.backend.google.pipelines.common.api.clients.PipelinesApiRunCreationClient.JobAbortedException -import cromwell.backend.google.pipelines.common.api.clients.{PipelinesApiAbortClient, PipelinesApiRunCreationClient, PipelinesApiStatusRequestClient} +import cromwell.backend.google.pipelines.common.api.clients.{ + PipelinesApiAbortClient, + PipelinesApiRunCreationClient, + PipelinesApiStatusRequestClient +} import cromwell.backend.google.pipelines.common.authentication.PipelinesApiDockerCredentials import cromwell.backend.google.pipelines.common.errors.FailedToDelocalizeFailure import cromwell.backend.google.pipelines.common.io._ import cromwell.backend.google.pipelines.common.monitoring.{CheckpointingConfiguration, MonitoringImage} import cromwell.backend.io.DirectoryFunctions -import cromwell.backend.standard.{ScriptPreambleData, StandardAdHocValue, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} +import cromwell.backend.standard.{ + ScriptPreambleData, + StandardAdHocValue, + StandardAsyncExecutionActor, + StandardAsyncExecutionActorParams, + StandardAsyncJob +} import cromwell.core._ import cromwell.core.io.IoCommandBuilder import cromwell.core.path.{DefaultPathBuilder, Path} @@ -80,7 +96,8 @@ object PipelinesApiAsyncBackendJobExecutionActor { message: String, jobTag: String, returnCodeOption: Option[Int], - stderrPath: Path): Exception = { + stderrPath: Path + ): Exception = { val returnCodeMessage = returnCodeOption match { case Some(returnCode) if returnCode == 0 => "Job exited without an error, exit code 0." case Some(returnCode) => s"Job exit code $returnCode. Check $stderrPath for more information." 
@@ -92,7 +109,7 @@ object PipelinesApiAsyncBackendJobExecutionActor { } class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends BackendJobLifecycleActor + extends BackendJobLifecycleActor with StandardAsyncExecutionActor with PipelinesApiJobCachingActorHelper with PipelinesApiStatusRequestClient @@ -111,7 +128,8 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta val jesBackendSingletonActor: ActorRef = standardParams.backendSingletonActorOption.getOrElse( - throw new RuntimeException("JES Backend actor cannot exist without the JES backend singleton actor")) + throw new RuntimeException("JES Backend actor cannot exist without the JES backend singleton actor") + ) override type StandardAsyncRunInfo = Run @@ -122,23 +140,35 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta override val papiApiActor: ActorRef = jesBackendSingletonActor override lazy val pollBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff( - initialInterval = 30 seconds, maxInterval = jesAttributes.maxPollingInterval seconds, multiplier = 1.1) + initialInterval = 30 seconds, + maxInterval = jesAttributes.maxPollingInterval seconds, + multiplier = 1.1 + ) - override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff( - initialInterval = 3 seconds, maxInterval = 20 seconds, multiplier = 1.1) + override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff = + SimpleExponentialBackoff(initialInterval = 3 seconds, maxInterval = 20 seconds, multiplier = 1.1) - override lazy val runtimeEnvironment: RuntimeEnvironment = { - RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes, PipelinesApiWorkingDisk.MountPoint, PipelinesApiWorkingDisk.MountPoint)(standardParams.minimumRuntimeSettings) - } + override lazy val runtimeEnvironment: RuntimeEnvironment = + RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes, + PipelinesApiWorkingDisk.MountPoint, + PipelinesApiWorkingDisk.MountPoint + )(standardParams.minimumRuntimeSettings) protected lazy val cmdInput: PipelinesApiFileInput = - PipelinesApiFileInput(PipelinesApiJobPaths.JesExecParamName, pipelinesApiCallPaths.script, DefaultPathBuilder.get(pipelinesApiCallPaths.scriptFilename), workingDisk) + PipelinesApiFileInput(PipelinesApiJobPaths.JesExecParamName, + pipelinesApiCallPaths.script, + DefaultPathBuilder.get(pipelinesApiCallPaths.scriptFilename), + workingDisk + ) - protected lazy val dockerConfiguration: Option[PipelinesApiDockerCredentials] = pipelinesConfiguration.dockerCredentials + protected lazy val dockerConfiguration: Option[PipelinesApiDockerCredentials] = + pipelinesConfiguration.dockerCredentials - protected val previousRetryReasons: ErrorOr[PreviousRetryReasons] = PreviousRetryReasons.tryApply(jobDescriptor.prefetchedKvStoreEntries, jobDescriptor.key.attempt) + protected val previousRetryReasons: ErrorOr[PreviousRetryReasons] = + PreviousRetryReasons.tryApply(jobDescriptor.prefetchedKvStoreEntries, jobDescriptor.key.attempt) - protected lazy val jobDockerImage: String = jobDescriptor.maybeCallCachingEligible.dockerHash.getOrElse(runtimeAttributes.dockerImage) + protected lazy val jobDockerImage: String = + jobDescriptor.maybeCallCachingEligible.dockerHash.getOrElse(runtimeAttributes.dockerImage) override lazy val dockerImageUsed: Option[String] = Option(jobDockerImage) @@ -152,7 +182,8 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta override def 
requestsAbortAndDiesImmediately: Boolean = false /*_*/ // Silence an errant IntelliJ warning - override def receive: Receive = pollingActorClientReceive orElse runCreationClientReceive orElse abortActorClientReceive orElse kvClientReceive orElse super.receive + override def receive: Receive = + pollingActorClientReceive orElse runCreationClientReceive orElse abortActorClientReceive orElse kvClientReceive orElse super.receive /*_*/ // https://stackoverflow.com/questions/36679973/controlling-false-intellij-code-editor-error-in-scala-plugin /** @@ -161,12 +192,17 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta protected def pipelinesApiInputsFromWomFiles(jesNamePrefix: String, remotePathArray: Seq[WomFile], localPathArray: Seq[WomFile], - jobDescriptor: BackendJobDescriptor): Iterable[PipelinesApiInput] = { - (remotePathArray zip localPathArray zipWithIndex) flatMap { - case ((remotePath, localPath), index) => - Seq(PipelinesApiFileInput(s"$jesNamePrefix-$index", getPath(remotePath.valueString).get, DefaultPathBuilder.get(localPath.valueString), workingDisk)) + jobDescriptor: BackendJobDescriptor + ): Iterable[PipelinesApiInput] = + (remotePathArray zip localPathArray zipWithIndex) flatMap { case ((remotePath, localPath), index) => + Seq( + PipelinesApiFileInput(s"$jesNamePrefix-$index", + getPath(remotePath.valueString).get, + DefaultPathBuilder.get(localPath.valueString), + workingDisk + ) + ) } - } /** * Turns WomFiles into relative paths. These paths are relative to the working disk. @@ -174,7 +210,7 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta * relativeLocalizationPath("foo/bar.txt") -> "foo/bar.txt" * relativeLocalizationPath("gs://some/bucket/foo.txt") -> "some/bucket/foo.txt" */ - override protected def relativeLocalizationPath(file: WomFile): WomFile = { + override protected def relativeLocalizationPath(file: WomFile): WomFile = file.mapFile(value => getPath(value) match { case Success(drsPath: DrsPath) => DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() @@ -182,17 +218,16 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta case _ => value } ) - } - override protected def fileName(file: WomFile): WomFile = { + override protected def fileName(file: WomFile): WomFile = file.mapFile(value => getPath(value) match { - case Success(drsPath: DrsPath) => DefaultPathBuilder.get(DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync()).name + case Success(drsPath: DrsPath) => + DefaultPathBuilder.get(DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync()).name case Success(path) => path.name case _ => value } ) - } override lazy val inputsToNotLocalize: Set[WomFile] = { val localizeOptional = jobDescriptor.findInputFilesByParameterMeta { @@ -204,23 +239,21 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta localizeSkipped ++ localizeMapped } - protected def callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = { - - jobDescriptor.fullyQualifiedInputs map { - case (key, womFile) => - val arrays: Seq[WomArray] = womFile collectAsSeq { - case womFile: WomFile if !inputsToNotLocalize.contains(womFile) => - val files: List[WomSingleFile] = DirectoryFunctions - .listWomSingleFiles(womFile, pipelinesApiCallPaths.workflowPaths) - .toTry(s"Error getting single files for $womFile").get - WomArray(WomArrayType(WomSingleFileType), files) - } + protected def callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = + 
jobDescriptor.fullyQualifiedInputs map { case (key, womFile) => + val arrays: Seq[WomArray] = womFile collectAsSeq { + case womFile: WomFile if !inputsToNotLocalize.contains(womFile) => + val files: List[WomSingleFile] = DirectoryFunctions + .listWomSingleFiles(womFile, pipelinesApiCallPaths.workflowPaths) + .toTry(s"Error getting single files for $womFile") + .get + WomArray(WomArrayType(WomSingleFileType), files) + } - key -> arrays.flatMap(_.value).collect { - case womFile: WomFile => womFile - } + key -> arrays.flatMap(_.value).collect { case womFile: WomFile => + womFile + } } - } private[pipelines] def generateInputs(jobDescriptor: BackendJobDescriptor): Set[PipelinesApiInput] = { // We need to tell PAPI about files that were created as part of command instantiation (these need to be defined @@ -228,12 +261,12 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta // md5's of their paths. val writeFunctionFiles = instantiatedCommand.createdFiles map { f => f.file.value.md5SumShort -> List(f) } toMap - val writeFunctionInputs = writeFunctionFiles flatMap { - case (name, files) => pipelinesApiInputsFromWomFiles(name, files.map(_.file), files.map(localizationPath), jobDescriptor) + val writeFunctionInputs = writeFunctionFiles flatMap { case (name, files) => + pipelinesApiInputsFromWomFiles(name, files.map(_.file), files.map(localizationPath), jobDescriptor) } - val callInputInputs = callInputFiles flatMap { - case (name, files) => pipelinesApiInputsFromWomFiles(name, files, files.map(relativeLocalizationPath), jobDescriptor) + val callInputInputs = callInputFiles flatMap { case (name, files) => + pipelinesApiInputsFromWomFiles(name, files, files.map(relativeLocalizationPath), jobDescriptor) } (writeFunctionInputs ++ callInputInputs).toSet @@ -245,7 +278,9 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta * * @throws Exception if the `path` does not live in one of the supplied `disks` */ - protected def relativePathAndAttachedDisk(path: String, disks: Seq[PipelinesApiAttachedDisk]): (Path, PipelinesApiAttachedDisk) = { + protected def relativePathAndAttachedDisk(path: String, + disks: Seq[PipelinesApiAttachedDisk] + ): (Path, PipelinesApiAttachedDisk) = { val absolutePath = DefaultPathBuilder.get(path) match { case p if !p.isAbsolute => PipelinesApiWorkingDisk.MountPoint.resolve(p) case p => p @@ -254,7 +289,9 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta disks.find(d => absolutePath.startsWith(d.mountPoint)) match { case Some(disk) => (disk.mountPoint.relativize(absolutePath), disk) case None => - throw new Exception(s"Absolute path $path doesn't appear to be under any mount points: ${disks.map(_.toString).mkString(", ")}") + throw new Exception( + s"Absolute path $path doesn't appear to be under any mount points: ${disks.map(_.toString).mkString(", ")}" + ) } } @@ -262,39 +299,40 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta * If the desired reference name is too long, we don't want to break JES or risk collisions by arbitrary truncation. So, * just use a hash. We only do this when needed to give better traceability in the normal case. 
*/ - protected def makeSafeReferenceName(referenceName: String): String = { + protected def makeSafeReferenceName(referenceName: String): String = if (referenceName.length <= 127) referenceName else referenceName.md5Sum - } - protected [pipelines] def generateOutputs(jobDescriptor: BackendJobDescriptor): Set[PipelinesApiOutput] = { + protected[pipelines] def generateOutputs(jobDescriptor: BackendJobDescriptor): Set[PipelinesApiOutput] = { import cats.syntax.validated._ - def evaluateFiles(output: OutputDefinition): List[FileEvaluation] = { + def evaluateFiles(output: OutputDefinition): List[FileEvaluation] = Try( output.expression.evaluateFiles(jobDescriptor.localInputs, NoIoFunctionSet, output.womType).map(_.toList) ).getOrElse(List.empty[FileEvaluation].validNel) .getOrElse(List.empty) - } - def relativeFileEvaluation(evaluation: FileEvaluation): FileEvaluation = { + def relativeFileEvaluation(evaluation: FileEvaluation): FileEvaluation = evaluation.copy(file = relativeLocalizationPath(evaluation.file)) - } val womFileOutputs = jobDescriptor.taskCall.callable.outputs.flatMap(evaluateFiles) map relativeFileEvaluation val outputs: Seq[PipelinesApiOutput] = womFileOutputs.distinct flatMap { fileEvaluation => fileEvaluation.file.flattenFiles flatMap { - case unlistedDirectory: WomUnlistedDirectory => generateUnlistedDirectoryOutputs(unlistedDirectory, fileEvaluation) + case unlistedDirectory: WomUnlistedDirectory => + generateUnlistedDirectoryOutputs(unlistedDirectory, fileEvaluation) case singleFile: WomSingleFile => generateSingleFileOutputs(singleFile, fileEvaluation) case globFile: WomGlobFile => generateGlobFileOutputs(globFile) // Assumes optional = false for globs. } } - val additionalGlobOutput = jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(generateGlobFileOutputs).toSet + val additionalGlobOutput = + jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(generateGlobFileOutputs).toSet outputs.toSet ++ additionalGlobOutput } - protected def generateUnlistedDirectoryOutputs(womFile: WomUnlistedDirectory, fileEvaluation: FileEvaluation): List[PipelinesApiOutput] = { + protected def generateUnlistedDirectoryOutputs(womFile: WomUnlistedDirectory, + fileEvaluation: FileEvaluation + ): List[PipelinesApiOutput] = { val directoryPath = womFile.value.ensureSlashed val directoryListFile = womFile.value.ensureUnslashed + ".list" val gcsDirDestinationPath = callRootPath.resolve(directoryPath) @@ -325,10 +363,18 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta ) } - protected def generateSingleFileOutputs(womFile: WomSingleFile, fileEvaluation: FileEvaluation): List[PipelinesApiFileOutput] = { + protected def generateSingleFileOutputs(womFile: WomSingleFile, + fileEvaluation: FileEvaluation + ): List[PipelinesApiFileOutput] = { val destination = callRootPath.resolve(womFile.value.stripPrefix("/")) val (relpath, disk) = relativePathAndAttachedDisk(womFile.value, runtimeAttributes.disks) - val jesFileOutput = PipelinesApiFileOutput(makeSafeReferenceName(womFile.value), destination, relpath, disk, fileEvaluation.optional, fileEvaluation.secondary) + val jesFileOutput = PipelinesApiFileOutput(makeSafeReferenceName(womFile.value), + destination, + relpath, + disk, + fileEvaluation.optional, + fileEvaluation.secondary + ) List(jesFileOutput) } @@ -344,11 +390,23 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta // We need both the glob directory and the glob list: List( // The glob directory: - 
PipelinesApiFileOutput(makeSafeReferenceName(globDirectory), gcsGlobDirectoryDestinationPath, DefaultPathBuilder.get(globDirectory + "*"), globDirectoryDisk, - optional = false, secondary = false), + PipelinesApiFileOutput( + makeSafeReferenceName(globDirectory), + gcsGlobDirectoryDestinationPath, + DefaultPathBuilder.get(globDirectory + "*"), + globDirectoryDisk, + optional = false, + secondary = false + ), // The glob list file: - PipelinesApiFileOutput(makeSafeReferenceName(globListFile), gcsGlobListFileDestinationPath, DefaultPathBuilder.get(globListFile), globDirectoryDisk, - optional = false, secondary = false) + PipelinesApiFileOutput( + makeSafeReferenceName(globListFile), + gcsGlobListFileDestinationPath, + DefaultPathBuilder.get(globListFile), + globDirectoryDisk, + optional = false, + secondary = false + ) ) } @@ -358,71 +416,73 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta lazy val localMonitoringImageScriptPath: Path = DefaultPathBuilder.get(pipelinesApiCallPaths.jesMonitoringImageScriptFilename) - lazy val monitoringScript: Option[PipelinesApiFileInput] = { + lazy val monitoringScript: Option[PipelinesApiFileInput] = pipelinesApiCallPaths.workflowPaths.monitoringScriptPath map { path => PipelinesApiFileInput(s"$jesMonitoringParamName-in", path, localMonitoringScriptPath, workingDisk) } - } lazy val monitoringOutput: Option[PipelinesApiFileOutput] = monitoringScript map { _ => - PipelinesApiFileOutput(s"$jesMonitoringParamName-out", - pipelinesApiCallPaths.jesMonitoringLogPath, localMonitoringLogPath, workingDisk, optional = false, secondary = false, - contentType = plainTextContentType) + PipelinesApiFileOutput( + s"$jesMonitoringParamName-out", + pipelinesApiCallPaths.jesMonitoringLogPath, + localMonitoringLogPath, + workingDisk, + optional = false, + secondary = false, + contentType = plainTextContentType + ) } override lazy val commandDirectory: Path = PipelinesApiWorkingDisk.MountPoint - private val DockerMonitoringLogPath: Path = PipelinesApiWorkingDisk.MountPoint.resolve(pipelinesApiCallPaths.jesMonitoringLogFilename) - private val DockerMonitoringScriptPath: Path = PipelinesApiWorkingDisk.MountPoint.resolve(pipelinesApiCallPaths.jesMonitoringScriptFilename) - //noinspection ActorMutableStateInspection + private val DockerMonitoringLogPath: Path = + PipelinesApiWorkingDisk.MountPoint.resolve(pipelinesApiCallPaths.jesMonitoringLogFilename) + private val DockerMonitoringScriptPath: Path = + PipelinesApiWorkingDisk.MountPoint.resolve(pipelinesApiCallPaths.jesMonitoringScriptFilename) + // noinspection ActorMutableStateInspection private var hasDockerCredentials: Boolean = false - private lazy val isDockerImageCacheUsageRequested = runtimeAttributes.useDockerImageCache.getOrElse(useDockerImageCache(jobDescriptor.workflowDescriptor)) + private lazy val isDockerImageCacheUsageRequested = + runtimeAttributes.useDockerImageCache.getOrElse(useDockerImageCache(jobDescriptor.workflowDescriptor)) - override def scriptPreamble: ErrorOr[ScriptPreambleData] = { + override def scriptPreamble: ErrorOr[ScriptPreambleData] = if (monitoringOutput.isDefined) - ScriptPreambleData( - s"""|touch $DockerMonitoringLogPath - |chmod u+x $DockerMonitoringScriptPath - |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin).valid - else ScriptPreambleData("").valid - } + ScriptPreambleData(s"""|touch $DockerMonitoringLogPath + |chmod u+x $DockerMonitoringScriptPath + |$DockerMonitoringScriptPath > $DockerMonitoringLogPath &""".stripMargin).valid 
+ else ScriptPreambleData("").valid override def globParentDirectory(womGlobFile: WomGlobFile): Path = { val (_, disk) = relativePathAndAttachedDisk(womGlobFile.value, runtimeAttributes.disks) disk.mountPoint } - protected def googleProject(descriptor: BackendWorkflowDescriptor): String = { + protected def googleProject(descriptor: BackendWorkflowDescriptor): String = descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleProject, jesAttributes.project) - } - protected def computeServiceAccount(descriptor: BackendWorkflowDescriptor): String = { - descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleComputeServiceAccount, jesAttributes.computeServiceAccount) - } + protected def computeServiceAccount(descriptor: BackendWorkflowDescriptor): String = + descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleComputeServiceAccount, + jesAttributes.computeServiceAccount + ) - protected def fuseEnabled(descriptor: BackendWorkflowDescriptor): Boolean = { + protected def fuseEnabled(descriptor: BackendWorkflowDescriptor): Boolean = descriptor.workflowOptions.getBoolean(WorkflowOptionKeys.EnableFuse).toOption.getOrElse(jesAttributes.enableFuse) - } - protected def googleLegacyMachineSelection(descriptor: BackendWorkflowDescriptor): Boolean = { + protected def googleLegacyMachineSelection(descriptor: BackendWorkflowDescriptor): Boolean = descriptor.workflowOptions.getBoolean(WorkflowOptionKeys.GoogleLegacyMachineSelection).getOrElse(false) - } - protected def useDockerImageCache(descriptor: BackendWorkflowDescriptor): Boolean = { + protected def useDockerImageCache(descriptor: BackendWorkflowDescriptor): Boolean = descriptor.workflowOptions.getBoolean(WorkflowOptionKeys.UseDockerImageCache).getOrElse(false) - } - override def isTerminal(runStatus: RunStatus): Boolean = { + override def isTerminal(runStatus: RunStatus): Boolean = runStatus match { case _: TerminalRunStatus => true case _ => false } - } private def createPipelineParameters(inputOutputParameters: InputOutputParameters, - customLabels: Seq[GoogleLabel], - ): CreatePipelineParameters = { + customLabels: Seq[GoogleLabel] + ): CreatePipelineParameters = standardParams.backendInitializationDataOption match { case Some(data: PipelinesApiBackendInitializationData) => val dockerKeyAndToken: Option[CreatePipelineDockerKeyAndToken] = for { @@ -431,24 +491,28 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta } yield CreatePipelineDockerKeyAndToken(key, token) /* - * Right now this doesn't cost anything, because sizeOption returns the size if it was previously already fetched - * for some reason (expression evaluation for instance), but otherwise does not retrieve it and returns None. - * In CWL-land we tend to be aggressive in pre-fetching the size in order to be able to evaluate JS expressions, - * but less in WDL as we can get it last minute and on demand because size is a WDL function, whereas in CWL - * we don't inspect the JS to know if size is called and therefore always pre-fetch it. - * - * We could decide to call withSize before in which case we would retrieve the size for all files and have - * a guaranteed more accurate total size, but there might be performance impacts ? - */ + * Right now this doesn't cost anything, because sizeOption returns the size if it was previously already fetched + * for some reason (expression evaluation for instance), but otherwise does not retrieve it and returns None. 
+ * In CWL-land we tend to be aggressive in pre-fetching the size in order to be able to evaluate JS expressions, + * but less in WDL as we can get it last minute and on demand because size is a WDL function, whereas in CWL + * we don't inspect the JS to know if size is called and therefore always pre-fetch it. + * + * We could decide to call withSize before in which case we would retrieve the size for all files and have + * a guaranteed more accurate total size, but there might be performance impacts ? + */ val inputFileSize = Option(callInputFiles.values.flatMap(_.flatMap(_.sizeOption)).sum) // Attempt to adjust the disk size by taking into account the size of input files - val adjustedSizeDisks = inputFileSize.map(size => MemorySize.apply(size.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB)) map { inputFileSizeInformation => - runtimeAttributes.disks.adjustWorkingDiskWithNewMin( - inputFileSizeInformation, - jobLogger.info(s"Adjusted working disk size to ${inputFileSizeInformation.amount} GB to account for input files") - ) - } getOrElse runtimeAttributes.disks + val adjustedSizeDisks = + inputFileSize.map(size => MemorySize.apply(size.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB)) map { + inputFileSizeInformation => + runtimeAttributes.disks.adjustWorkingDiskWithNewMin( + inputFileSizeInformation, + jobLogger.info( + s"Adjusted working disk size to ${inputFileSizeInformation.amount} GB to account for input files" + ) + ) + } getOrElse runtimeAttributes.disks val inputFilePaths = inputOutputParameters.jobInputParameters.map(_.cloudPath.pathAsString).toSet val referenceDisksToMount = @@ -465,7 +529,7 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta workflowPaths = workflowPaths, commandDirectory = commandDirectory, workingDisk = workingDisk, - localMonitoringImageScriptPath = localMonitoringImageScriptPath, + localMonitoringImageScriptPath = localMonitoringImageScriptPath ) val checkpointingConfiguration = @@ -523,7 +587,6 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta case None => throw new RuntimeException("No pipelines API backend initialization data found?") } - } override def isFatal(throwable: Throwable): Boolean = super.isFatal(throwable) || isFatalJesException(throwable) @@ -531,33 +594,44 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta override def executeAsync(): Future[ExecutionHandle] = createNewJob() - val futureKvJobKey: KvJobKey = KvJobKey(jobDescriptor.key.call.fullyQualifiedName, jobDescriptor.key.index, jobDescriptor.key.attempt + 1) + val futureKvJobKey: KvJobKey = + KvJobKey(jobDescriptor.key.call.fullyQualifiedName, jobDescriptor.key.index, jobDescriptor.key.attempt + 1) override def recoverAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = reconnectToExistingJob(jobId) override def reconnectAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = reconnectToExistingJob(jobId) - override def reconnectToAbortAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = reconnectToExistingJob(jobId, forceAbort = true) + override def reconnectToAbortAsync(jobId: StandardAsyncJob): Future[ExecutionHandle] = + reconnectToExistingJob(jobId, forceAbort = true) private def reconnectToExistingJob(jobForResumption: StandardAsyncJob, forceAbort: Boolean = false) = { if (forceAbort) tryAbort(jobForResumption) - Future.successful(PendingExecutionHandle(jobDescriptor, jobForResumption, Option(Run(jobForResumption)), previousState = None)) + Future.successful( + 
PendingExecutionHandle(jobDescriptor, jobForResumption, Option(Run(jobForResumption)), previousState = None) + ) } - protected def uploadDrsLocalizationManifest(createPipelineParameters: CreatePipelineParameters, cloudPath: Path): Future[Unit] = Future.successful(()) + protected def uploadDrsLocalizationManifest(createPipelineParameters: CreatePipelineParameters, + cloudPath: Path + ): Future[Unit] = Future.successful(()) - protected def uploadGcsTransferLibrary(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = Future.successful(()) + protected def uploadGcsTransferLibrary(createPipelineParameters: CreatePipelineParameters, + cloudPath: Path, + gcsTransferConfiguration: GcsTransferConfiguration + ): Future[Unit] = Future.successful(()) protected def uploadGcsLocalizationScript(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, transferLibraryContainerPath: Path, gcsTransferConfiguration: GcsTransferConfiguration, - referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]]): Future[Unit] = Future.successful(()) + referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]] + ): Future[Unit] = Future.successful(()) protected def uploadGcsDelocalizationScript(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, transferLibraryContainerPath: Path, - gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = Future.successful(()) + gcsTransferConfiguration: GcsTransferConfiguration + ): Future[Unit] = Future.successful(()) protected val useReferenceDisks: Boolean = { val optionName = WorkflowOptions.UseReferenceDisks.name @@ -566,7 +640,10 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta case Failure(OptionNotFoundException(_)) => false case Failure(f) => // Should not happen, this case should have been screened for and fast-failed during workflow materialization. 
- log.error(f, s"Programmer error: unexpected failure attempting to read value for workflow option '$optionName' as a Boolean") + log.error( + f, + s"Programmer error: unexpected failure attempting to read value for workflow option '$optionName' as a Boolean" + ) false } } @@ -576,8 +653,15 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta def evaluateRuntimeAttributes = Future.fromTry(Try(runtimeAttributes)) def generateInputOutputParameters: Future[InputOutputParameters] = Future.fromTry(Try { - val rcFileOutput = PipelinesApiFileOutput(returnCodeFilename, returnCodeGcsPath, DefaultPathBuilder.get(returnCodeFilename), workingDisk, optional = false, secondary = false, - contentType = plainTextContentType) + val rcFileOutput = PipelinesApiFileOutput( + returnCodeFilename, + returnCodeGcsPath, + DefaultPathBuilder.get(returnCodeFilename), + workingDisk, + optional = false, + secondary = false, + contentType = plainTextContentType + ) val memoryRetryRCFileOutput = PipelinesApiFileOutput( memoryRetryRCFilename, @@ -597,8 +681,16 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta StandardStream("stdout", _.output), StandardStream("stderr", _.error) ) map { s => - PipelinesApiFileOutput(s.name, returnCodeGcsPath.sibling(s.filename), DefaultPathBuilder.get(s.filename), - workingDisk, optional = false, secondary = false, uploadPeriod = jesAttributes.logFlushPeriod, contentType = plainTextContentType) + PipelinesApiFileOutput( + s.name, + returnCodeGcsPath.sibling(s.filename), + DefaultPathBuilder.get(s.filename), + workingDisk, + optional = false, + secondary = false, + uploadPeriod = jesAttributes.logFlushPeriod, + contentType = plainTextContentType + ) } InputOutputParameters( @@ -623,19 +715,24 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta ) def sendGoogleLabelsToMetadata(customLabels: Seq[GoogleLabel]): Unit = { - lazy val backendLabelEvents: Map[String, String] = ((backendLabels ++ customLabels) map { l => s"${CallMetadataKeys.BackendLabels}:${l.key}" -> l.value }).toMap + lazy val backendLabelEvents: Map[String, String] = + ((backendLabels ++ customLabels) map { l => s"${CallMetadataKeys.BackendLabels}:${l.key}" -> l.value }).toMap tellMetadata(backendLabelEvents) } - def getReferenceInputsToMountedPathsOpt(createPipelinesParameters: CreatePipelineParameters): Option[Map[PipelinesApiInput, String]] = { + def getReferenceInputsToMountedPathsOpt( + createPipelinesParameters: CreatePipelineParameters + ): Option[Map[PipelinesApiInput, String]] = if (useReferenceDisks) { - jesAttributes - .referenceFileToDiskImageMappingOpt - .map(getReferenceInputsToMountedPathMappings(_, createPipelinesParameters.inputOutputParameters.fileInputParameters)) + jesAttributes.referenceFileToDiskImageMappingOpt + .map( + getReferenceInputsToMountedPathMappings(_, + createPipelinesParameters.inputOutputParameters.fileInputParameters + ) + ) } else { None } - } val runPipelineResponse = for { _ <- evaluateRuntimeAttributes @@ -651,9 +748,18 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta _ <- uploadGcsTransferLibrary(createParameters, gcsTransferLibraryCloudPath, gcsTransferConfiguration) gcsLocalizationScriptCloudPath = jobPaths.callExecutionRoot / PipelinesApiJobPaths.GcsLocalizationScriptName referenceInputsToMountedPathsOpt = getReferenceInputsToMountedPathsOpt(createParameters) - _ <- uploadGcsLocalizationScript(createParameters, gcsLocalizationScriptCloudPath, 
transferLibraryContainerPath, gcsTransferConfiguration, referenceInputsToMountedPathsOpt) + _ <- uploadGcsLocalizationScript(createParameters, + gcsLocalizationScriptCloudPath, + transferLibraryContainerPath, + gcsTransferConfiguration, + referenceInputsToMountedPathsOpt + ) gcsDelocalizationScriptCloudPath = jobPaths.callExecutionRoot / PipelinesApiJobPaths.GcsDelocalizationScriptName - _ <- uploadGcsDelocalizationScript(createParameters, gcsDelocalizationScriptCloudPath, transferLibraryContainerPath, gcsTransferConfiguration) + _ <- uploadGcsDelocalizationScript(createParameters, + gcsDelocalizationScriptCloudPath, + transferLibraryContainerPath, + gcsTransferConfiguration + ) _ = this.hasDockerCredentials = createParameters.privateDockerKeyAndEncryptedToken.isDefined runId <- runPipeline(workflowId, createParameters, jobLogger) _ = sendGoogleLabelsToMetadata(customLabels) @@ -667,48 +773,50 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta runPipelineResponse map { runId => PendingExecutionHandle(jobDescriptor, runId, Option(Run(runId)), previousState = None) - } recover { - case JobAbortedException => AbortedExecutionHandle + } recover { case JobAbortedException => + AbortedExecutionHandle } } - protected def sendIncrementMetricsForReferenceFiles(referenceInputFilesOpt: Option[Set[PipelinesApiInput]]): Unit = { + protected def sendIncrementMetricsForReferenceFiles(referenceInputFilesOpt: Option[Set[PipelinesApiInput]]): Unit = referenceInputFilesOpt match { case Some(referenceInputFiles) => referenceInputFiles.foreach { referenceInputFile => increment(NonEmptyList.of("referencefiles", referenceInputFile.relativeHostPath.pathAsString)) } case _ => - // do nothing - reference disks feature is either not configured in Cromwell or disabled in workflow options + // do nothing - reference disks feature is either not configured in Cromwell or disabled in workflow options } - } protected def sendIncrementMetricsForDockerImageCache(dockerImageCacheDiskOpt: Option[String], dockerImageAsSpecifiedByUser: String, - isDockerImageCacheUsageRequested: Boolean): Unit = { + isDockerImageCacheUsageRequested: Boolean + ): Unit = (isDockerImageCacheUsageRequested, dockerImageCacheDiskOpt) match { - case (true, None) => increment(NonEmptyList("docker", List("image", "cache", "image_not_in_cache", dockerImageAsSpecifiedByUser))) - case (true, Some(_)) => increment(NonEmptyList("docker", List("image", "cache", "used_image_from_cache", dockerImageAsSpecifiedByUser))) - case (false, Some(_)) => increment(NonEmptyList("docker", List("image", "cache", "cached_image_not_used", dockerImageAsSpecifiedByUser))) + case (true, None) => + increment(NonEmptyList("docker", List("image", "cache", "image_not_in_cache", dockerImageAsSpecifiedByUser))) + case (true, Some(_)) => + increment(NonEmptyList("docker", List("image", "cache", "used_image_from_cache", dockerImageAsSpecifiedByUser))) + case (false, Some(_)) => + increment(NonEmptyList("docker", List("image", "cache", "cached_image_not_used", dockerImageAsSpecifiedByUser))) case _ => // docker image cache not requested and image is not in cache anyway - do nothing } - } - override def pollStatusAsync(handle: JesPendingExecutionHandle): Future[RunStatus] = { + override def pollStatusAsync(handle: JesPendingExecutionHandle): Future[RunStatus] = super[PipelinesApiStatusRequestClient].pollStatus(workflowId, handle.pendingJob) - } override def customPollStatusFailure: PartialFunction[(ExecutionHandle, Exception), ExecutionHandle] = { - case 
(_: JesPendingExecutionHandle @unchecked, JobAbortedException) =>
       AbortedExecutionHandle
-    case (oldHandle: JesPendingExecutionHandle@unchecked, e: GoogleJsonResponseException) if e.getStatusCode == 404 =>
+    case (oldHandle: JesPendingExecutionHandle @unchecked, e: GoogleJsonResponseException) if e.getStatusCode == 404 =>
       jobLogger.error(s"JES Job ID ${oldHandle.runInfo.get.job} has not been found, failing call")
       FailedNonRetryableExecutionHandle(e, kvPairsToSave = None)
   }

-  override lazy val startMetadataKeyValues: Map[String, Any] = super[PipelinesApiJobCachingActorHelper].startMetadataKeyValues
+  override lazy val startMetadataKeyValues: Map[String, Any] =
+    super[PipelinesApiJobCachingActorHelper].startMetadataKeyValues

-  override def getTerminalMetadata(runStatus: RunStatus): Map[String, Any] = {
+  override def getTerminalMetadata(runStatus: RunStatus): Map[String, Any] =
     runStatus match {
       case terminalRunStatus: TerminalRunStatus =>
         Map(
@@ -718,66 +826,61 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta
         )
       case unknown => throw new RuntimeException(s"Attempt to get terminal metadata from non terminal status: $unknown")
     }
-  }

-  override def mapOutputWomFile(womFile: WomFile): WomFile = {
+  override def mapOutputWomFile(womFile: WomFile): WomFile =
     womFileToGcsPath(generateOutputs(jobDescriptor))(womFile)
-  }

-  protected [pipelines] def womFileToGcsPath(jesOutputs: Set[PipelinesApiOutput])(womFile: WomFile): WomFile = {
+  protected[pipelines] def womFileToGcsPath(jesOutputs: Set[PipelinesApiOutput])(womFile: WomFile): WomFile =
     womFile mapFile { path =>
       jesOutputs collectFirst {
         case jesOutput if jesOutput.name == makeSafeReferenceName(path) => jesOutput.cloudPath.pathAsString
       } getOrElse path
     }
-  }

-  override def isDone(runStatus: RunStatus): Boolean = {
+  override def isDone(runStatus: RunStatus): Boolean =
     runStatus match {
       case _: RunStatus.Success => true
       case _: RunStatus.UnsuccessfulRunStatus => false
-      case _ => throw new RuntimeException(s"Cromwell programmer blunder: isSuccess was called on an incomplete RunStatus ($runStatus).")
+      case _ =>
+        throw new RuntimeException(
+          s"Cromwell programmer blunder: isDone was called on an incomplete RunStatus ($runStatus)."
+        )
     }
-  }

-  override def getTerminalEvents(runStatus: RunStatus): Seq[ExecutionEvent] = {
+  override def getTerminalEvents(runStatus: RunStatus): Seq[ExecutionEvent] =
     runStatus match {
       case successStatus: RunStatus.Success => successStatus.eventList
       case unknown => throw new RuntimeException(s"handleExecutionSuccess not called with RunStatus.Success. 
Instead got $unknown") } - } - override def retryEvaluateOutputs(exception: Exception): Boolean = { + override def retryEvaluateOutputs(exception: Exception): Boolean = exception match { case aggregated: CromwellAggregatedException => aggregated.throwables.collectFirst { case s: SocketTimeoutException => s }.isDefined case _ => false } - } private lazy val standardPaths = jobPaths.standardPaths - override def handleExecutionFailure(runStatus: RunStatus, - returnCode: Option[Int]): Future[ExecutionHandle] = { + override def handleExecutionFailure(runStatus: RunStatus, returnCode: Option[Int]): Future[ExecutionHandle] = { - def generateBetterErrorMsg(runStatus: RunStatus.UnsuccessfulRunStatus, errorMsg: String): String = { - if (runStatus.errorCode.getCode.value == PapiFailedPreConditionErrorCode - && errorMsg.contains("Execution failed") - && (errorMsg.contains("Localization") || errorMsg.contains("Delocalization"))) { + def generateBetterErrorMsg(runStatus: RunStatus.UnsuccessfulRunStatus, errorMsg: String): String = + if ( + runStatus.errorCode.getCode.value == PapiFailedPreConditionErrorCode + && errorMsg.contains("Execution failed") + && (errorMsg.contains("Localization") || errorMsg.contains("Delocalization")) + ) { s"Please check the log file for more details: $jesLogPath." } - //If error code 10, add some extra messaging to the server logging + // If error code 10, add some extra messaging to the server logging else if (runStatus.errorCode.getCode.value == PapiMysteriouslyCrashedErrorCode) { jobLogger.info(s"Job Failed with Error Code 10 for a machine where Preemptible is set to $preemptible") errorMsg - } - else errorMsg - } + } else errorMsg // Inner function: Handles a 'Failed' runStatus (or Preempted if preemptible was false) - def handleFailedRunStatus(runStatus: RunStatus.UnsuccessfulRunStatus, - returnCode: Option[Int]): ExecutionHandle = { + def handleFailedRunStatus(runStatus: RunStatus.UnsuccessfulRunStatus, returnCode: Option[Int]): ExecutionHandle = { lazy val prettyError = runStatus.prettyPrintedError @@ -793,10 +896,12 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta handleUnexpectedTermination(runStatus.errorCode, prettyError, returnCode) case _ if isDockerPullFailure => val unable = s"Unable to pull Docker image '$jobDockerImage' " - val details = if (hasDockerCredentials) - "but Docker credentials are present; is this Docker account authorized to pull the image? " else - "and there are effectively no Docker credentials present (one or more of token, authorization, or Google KMS key may be missing). " + - "Please check your private Docker configuration and/or the pull access for this image. " + val details = + if (hasDockerCredentials) + "but Docker credentials are present; is this Docker account authorized to pull the image? " + else + "and there are effectively no Docker credentials present (one or more of token, authorization, or Google KMS key may be missing). " + + "Please check your private Docker configuration and/or the pull access for this image. 
" val message = unable + details + prettyError FailedNonRetryableExecutionHandle( StandardException(runStatus.errorCode, message, jobTag, returnCode, standardPaths.error), @@ -819,34 +924,43 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta case preemptedStatus: RunStatus.Preempted if preemptible => handlePreemption(preemptedStatus, returnCode) case _: RunStatus.Cancelled => AbortedExecutionHandle case failedStatus: RunStatus.UnsuccessfulRunStatus => handleFailedRunStatus(failedStatus, returnCode) - case unknown => throw new RuntimeException(s"handleExecutionFailure not called with RunStatus.Failed or RunStatus.Preempted. Instead got $unknown") + case unknown => + throw new RuntimeException( + s"handleExecutionFailure not called with RunStatus.Failed or RunStatus.Preempted. Instead got $unknown" + ) } } } } - private def nextAttemptPreemptedAndUnexpectedRetryCountsToKvPairs(p: Int, ur: Int): Seq[KvPair] = { + private def nextAttemptPreemptedAndUnexpectedRetryCountsToKvPairs(p: Int, ur: Int): Seq[KvPair] = Seq( - KvPair(ScopedKey(workflowId, futureKvJobKey, PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey), ur.toString), - KvPair(ScopedKey(workflowId, futureKvJobKey, PipelinesApiBackendLifecycleActorFactory.preemptionCountKey), p.toString) + KvPair(ScopedKey(workflowId, futureKvJobKey, PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey), + ur.toString + ), + KvPair(ScopedKey(workflowId, futureKvJobKey, PipelinesApiBackendLifecycleActorFactory.preemptionCountKey), + p.toString + ) ) - } - private def handleUnexpectedTermination(errorCode: Status, errorMessage: String, jobReturnCode: Option[Int]): ExecutionHandle = { + private def handleUnexpectedTermination(errorCode: Status, + errorMessage: String, + jobReturnCode: Option[Int] + ): ExecutionHandle = { val msg = s"Retrying. $errorMessage" previousRetryReasons match { case Valid(PreviousRetryReasons(p, ur)) => val thisUnexpectedRetry = ur + 1 if (thisUnexpectedRetry <= maxUnexpectedRetries) { - val preemptionAndUnexpectedRetryCountsKvPairs = nextAttemptPreemptedAndUnexpectedRetryCountsToKvPairs(p, thisUnexpectedRetry) + val preemptionAndUnexpectedRetryCountsKvPairs = + nextAttemptPreemptedAndUnexpectedRetryCountsToKvPairs(p, thisUnexpectedRetry) // Increment unexpected retry count and preemption count stays the same FailedRetryableExecutionHandle( StandardException(errorCode, msg, jobTag, jobReturnCode, standardPaths.error), jobReturnCode, kvPairsToSave = Option(preemptionAndUnexpectedRetryCountsKvPairs) ) - } - else { + } else { FailedNonRetryableExecutionHandle( StandardException(errorCode, errorMessage, jobTag, jobReturnCode, standardPaths.error), jobReturnCode, @@ -873,7 +987,8 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta val taskName = s"${workflowDescriptor.id}:${call.localName}" val baseMsg = s"Task $taskName was preempted for the ${thisPreemption.toOrdinal} time." 
- val preemptionAndUnexpectedRetryCountsKvPairs = nextAttemptPreemptedAndUnexpectedRetryCountsToKvPairs(thisPreemption, ur) + val preemptionAndUnexpectedRetryCountsKvPairs = + nextAttemptPreemptedAndUnexpectedRetryCountsToKvPairs(thisPreemption, ur) if (thisPreemption < maxPreemption) { // Increment preemption count and unexpectedRetryCount stays the same val msg = @@ -884,12 +999,14 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta jobReturnCode, kvPairsToSave = Option(preemptionAndUnexpectedRetryCountsKvPairs) ) - } - else { + } else { val msg = s"$baseMsg The maximum number of preemptible attempts ($maxPreemption) has been reached. The " + s"call will be restarted with a non-preemptible VM. Error code $errorCode.$prettyPrintedError)" - FailedRetryableExecutionHandle(StandardException( - errorCode, msg, jobTag, jobReturnCode, standardPaths.error), jobReturnCode, kvPairsToSave = Option(preemptionAndUnexpectedRetryCountsKvPairs)) + FailedRetryableExecutionHandle( + StandardException(errorCode, msg, jobTag, jobReturnCode, standardPaths.error), + jobReturnCode, + kvPairsToSave = Option(preemptionAndUnexpectedRetryCountsKvPairs) + ) } case Invalid(_) => FailedNonRetryableExecutionHandle( @@ -916,7 +1033,7 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta canSkipLocalize } - override def cloudResolveWomFile(womFile: WomFile): WomFile = { + override def cloudResolveWomFile(womFile: WomFile): WomFile = womFile.mapFile { value => getPath(value) match { case Success(drsPath: DrsPath) => DrsResolver.getSimpleGsUri(drsPath).unsafeRunSync().getOrElse(value) @@ -924,16 +1041,15 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta case _ => value } } - } - override def mapCommandLineWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineWomFile(womFile: WomFile): WomFile = womFile.mapFile { value => (getPath(value), asAdHocFile(womFile)) match { case (Success(gcsPath: GcsPath), Some(adHocFile)) => // Ad hoc files will be placed directly at the root ("/cromwell_root/ad_hoc_file.txt") unlike other input files // for which the full path is being propagated ("/cromwell_root/path/to/input_file.txt") workingDisk.mountPoint.resolve(adHocFile.alternativeName.getOrElse(gcsPath.name)).pathAsString - case (Success(path @ ( _: GcsPath | _: HttpPath)), _) => + case (Success(path @ (_: GcsPath | _: HttpPath)), _) => workingDisk.mountPoint.resolve(path.pathWithoutScheme).pathAsString case (Success(drsPath: DrsPath), _) => val filePath = DrsResolver.getContainerRelativePath(drsPath).unsafeRunSync() @@ -943,9 +1059,8 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta case _ => value } } - } - override def mapCommandLineJobInputWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineJobInputWomFile(womFile: WomFile): WomFile = womFile.mapFile(value => getPath(value) match { case Success(gcsPath: GcsPath) => workingDisk.mountPoint.resolve(gcsPath.pathWithoutScheme).pathAsString @@ -955,8 +1070,8 @@ class PipelinesApiAsyncBackendJobExecutionActor(override val standardParams: Sta case _ => value } ) - } // No need for Cromwell-performed localization in the PAPI backend, ad hoc values are localized directly from GCS to the VM by PAPI. 
- override lazy val localizeAdHocValues: List[AdHocValue] => ErrorOr[List[StandardAdHocValue]] = _.map(Coproduct[StandardAdHocValue](_)).validNel + override lazy val localizeAdHocValues: List[AdHocValue] => ErrorOr[List[StandardAdHocValue]] = + _.map(Coproduct[StandardAdHocValue](_)).validNel } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendInitializationData.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendInitializationData.scala index d8634fcf016..22a298a9738 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendInitializationData.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendInitializationData.scala @@ -4,8 +4,7 @@ import com.google.auth.Credentials import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory import cromwell.backend.standard.{StandardInitializationData, StandardValidatedRuntimeAttributesBuilder} -case class PipelinesApiBackendInitializationData -( +case class PipelinesApiBackendInitializationData( override val workflowPaths: PipelinesApiWorkflowPaths, override val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, papiConfiguration: PipelinesApiConfiguration, diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactory.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactory.scala index 91061161ac9..e993985f985 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactory.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactory.scala @@ -4,9 +4,16 @@ import akka.actor.{ActorRef, Props} import com.google.api.client.util.ExponentialBackOff import com.typesafe.scalalogging.StrictLogging import cromwell.backend._ -import cromwell.backend.google.pipelines.common.PipelinesApiBackendLifecycleActorFactory.{preemptionCountKey, robustBuildAttributes, unexpectedRetryCountKey} +import cromwell.backend.google.pipelines.common.PipelinesApiBackendLifecycleActorFactory.{ + preemptionCountKey, + robustBuildAttributes, + unexpectedRetryCountKey +} import cromwell.backend.google.pipelines.common.authentication.PipelinesApiDockerCredentials -import cromwell.backend.google.pipelines.common.callcaching.{PipelinesApiBackendCacheHitCopyingActor, PipelinesApiBackendFileHashingActor} +import cromwell.backend.google.pipelines.common.callcaching.{ + PipelinesApiBackendCacheHitCopyingActor, + PipelinesApiBackendFileHashingActor +} import cromwell.backend.standard._ import cromwell.backend.standard.callcaching.{StandardCacheHitCopyingActor, StandardFileHashingActor} import cromwell.cloudsupport.gcp.GoogleConfiguration @@ -16,8 +23,10 @@ import wom.graph.CommandCallNode import scala.util.{Failure, Success, Try} -abstract class PipelinesApiBackendLifecycleActorFactory(override val name: String, override val configurationDescriptor: BackendConfigurationDescriptor) - extends StandardLifecycleActorFactory { +abstract class PipelinesApiBackendLifecycleActorFactory( + override val name: String, + 
override val configurationDescriptor: BackendConfigurationDescriptor +) extends StandardLifecycleActorFactory { // Abstract members protected def requiredBackendSingletonActor(serviceRegistryActor: ActorRef): Props @@ -34,7 +43,8 @@ abstract class PipelinesApiBackendLifecycleActorFactory(override val name: Strin robustBuildAttributes(defaultBuildAttributes) } - override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = classOf[PipelinesApiInitializationActor] + override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = + classOf[PipelinesApiInitializationActor] override lazy val asyncExecutionActorClass: Class[_ <: StandardAsyncExecutionActor] = classOf[PipelinesApiAsyncBackendJobExecutionActor] @@ -42,36 +52,61 @@ abstract class PipelinesApiBackendLifecycleActorFactory(override val name: Strin Option(classOf[PipelinesApiFinalizationActor]) override lazy val jobIdKey: String = PipelinesApiAsyncBackendJobExecutionActor.JesOperationIdKey - override def backendSingletonActorProps(serviceRegistryActor: ActorRef): Option[Props] = Option(requiredBackendSingletonActor(serviceRegistryActor)) - - override def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - serviceRegistryActor: ActorRef, restart: Boolean): StandardInitializationActorParams = { - PipelinesApiInitializationActorParams(workflowDescriptor, ioActor, calls, jesConfiguration, serviceRegistryActor, restart) - } - - override def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - jobExecutionMap: JobExecutionMap, workflowOutputs: CallOutputs, - initializationDataOption: Option[BackendInitializationData]): - StandardFinalizationActorParams = { + override def backendSingletonActorProps(serviceRegistryActor: ActorRef): Option[Props] = Option( + requiredBackendSingletonActor(serviceRegistryActor) + ) + + override def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + serviceRegistryActor: ActorRef, + restart: Boolean + ): StandardInitializationActorParams = + PipelinesApiInitializationActorParams(workflowDescriptor, + ioActor, + calls, + jesConfiguration, + serviceRegistryActor, + restart + ) + + override def workflowFinalizationActorParams(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + jobExecutionMap: JobExecutionMap, + workflowOutputs: CallOutputs, + initializationDataOption: Option[BackendInitializationData] + ): StandardFinalizationActorParams = // The `PipelinesApiInitializationActor` will only return a non-`Empty` `PipelinesApiBackendInitializationData` from a successful `beforeAll` // invocation. HOWEVER, the finalization actor is created regardless of whether workflow initialization was successful // or not. So the finalization actor must be able to handle an empty `PipelinesApiBackendInitializationData` option, and there is no // `.get` on the initialization data as there is with the execution or cache hit copying actor methods. 
- PipelinesApiFinalizationActorParams(workflowDescriptor, ioActor, calls, jesConfiguration, jobExecutionMap, workflowOutputs, - initializationDataOption) - } - - override lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = { + PipelinesApiFinalizationActorParams(workflowDescriptor, + ioActor, + calls, + jesConfiguration, + jobExecutionMap, + workflowOutputs, + initializationDataOption + ) + + override lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = Option(classOf[PipelinesApiBackendCacheHitCopyingActor]) - } - override lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option(classOf[PipelinesApiBackendFileHashingActor]) + override lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option( + classOf[PipelinesApiBackendFileHashingActor] + ) - override def dockerHashCredentials(workflowDescriptor: BackendWorkflowDescriptor, initializationData: Option[BackendInitializationData]): List[Any] = { + override def dockerHashCredentials(workflowDescriptor: BackendWorkflowDescriptor, + initializationData: Option[BackendInitializationData] + ): List[Any] = Try(BackendInitializationData.as[PipelinesApiBackendInitializationData](initializationData)) match { case Success(papiData) => - val tokenFromWorkflowOptions = workflowDescriptor.workflowOptions.get(GoogleAuthMode.DockerCredentialsTokenKey).toOption - val effectiveToken = tokenFromWorkflowOptions.orElse(papiData.papiConfiguration.dockerCredentials map { _.token }) + val tokenFromWorkflowOptions = + workflowDescriptor.workflowOptions.get(GoogleAuthMode.DockerCredentialsTokenKey).toOption + val effectiveToken = tokenFromWorkflowOptions.orElse(papiData.papiConfiguration.dockerCredentials map { + _.token + }) val dockerCredentials: Option[PipelinesApiDockerCredentials] = effectiveToken map { token => // These credentials are being returned for hashing and all that matters in this context is the token @@ -83,19 +118,19 @@ abstract class PipelinesApiBackendLifecycleActorFactory(override val name: Strin List(dockerCredentials, googleCredentials).flatten case _ => List.empty[Any] } - } } object PipelinesApiBackendLifecycleActorFactory extends StrictLogging { val preemptionCountKey = "PreemptionCount" val unexpectedRetryCountKey = "UnexpectedRetryCount" - private [common] def robustBuildAttributes(buildAttributes: () => PipelinesApiConfigurationAttributes, - maxAttempts: Int = 3, - initialIntervalMillis: Int = 5000, - maxIntervalMillis: Int = 10000, - multiplier: Double = 1.5, - randomizationFactor: Double = 0.5): PipelinesApiConfigurationAttributes = { + private[common] def robustBuildAttributes(buildAttributes: () => PipelinesApiConfigurationAttributes, + maxAttempts: Int = 3, + initialIntervalMillis: Int = 5000, + maxIntervalMillis: Int = 10000, + multiplier: Double = 1.5, + randomizationFactor: Double = 0.5 + ): PipelinesApiConfigurationAttributes = { val backoff = new ExponentialBackOff.Builder() .setInitialIntervalMillis(initialIntervalMillis) .setMaxIntervalMillis(maxIntervalMillis) @@ -104,18 +139,26 @@ object PipelinesApiBackendLifecycleActorFactory extends StrictLogging { .build() // `attempt` is 1-based - def build(attempt: Int): Try[PipelinesApiConfigurationAttributes] = { + def build(attempt: Int): Try[PipelinesApiConfigurationAttributes] = Try { buildAttributes() } recoverWith { // Try again if this was an Exception (as opposed to an Error) and we have not hit maxAttempts case ex: Exception if 
attempt < maxAttempts => - logger.warn(s"Failed to build PipelinesApiConfigurationAttributes on attempt $attempt of $maxAttempts, retrying.", ex) + logger.warn( + s"Failed to build PipelinesApiConfigurationAttributes on attempt $attempt of $maxAttempts, retrying.", + ex + ) Thread.sleep(backoff.nextBackOffMillis()) build(attempt + 1) - case e => Failure(new RuntimeException(s"Failed to build PipelinesApiConfigurationAttributes on attempt $attempt of $maxAttempts", e)) + case e => + Failure( + new RuntimeException( + s"Failed to build PipelinesApiConfigurationAttributes on attempt $attempt of $maxAttempts", + e + ) + ) } - } // This intentionally throws if the final result of `build` is a `Failure`. build(attempt = 1).get } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendSingletonActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendSingletonActor.scala index 468e5006cf7..2e5bcccb0a8 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendSingletonActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendSingletonActor.scala @@ -9,10 +9,17 @@ import cromwell.core.Mailbox import eu.timepit.refined.api.Refined import eu.timepit.refined.numeric.Positive -final class PipelinesApiBackendSingletonActor(qps: Int Refined Positive, requestWorkers: Int Refined Positive, serviceRegistryActor: ActorRef) - (implicit batchHandler: PipelinesApiRequestHandler) extends Actor with ActorLogging { +final class PipelinesApiBackendSingletonActor(qps: Int Refined Positive, + requestWorkers: Int Refined Positive, + serviceRegistryActor: ActorRef +)(implicit batchHandler: PipelinesApiRequestHandler) + extends Actor + with ActorLogging { - val jesApiQueryManager = context.actorOf(PipelinesApiRequestManager.props(qps, requestWorkers, serviceRegistryActor).withMailbox(Mailbox.PriorityMailbox), "PAPIQueryManager") + val jesApiQueryManager = context.actorOf( + PipelinesApiRequestManager.props(qps, requestWorkers, serviceRegistryActor).withMailbox(Mailbox.PriorityMailbox), + "PAPIQueryManager" + ) override def receive = { case abort: BackendSingletonActorAbortWorkflow => jesApiQueryManager.forward(abort) @@ -23,6 +30,8 @@ final class PipelinesApiBackendSingletonActor(qps: Int Refined Positive, request } object PipelinesApiBackendSingletonActor { - def props[O](qps: Int Refined Positive, requestWorkers: Int Refined Positive, serviceRegistryActor: ActorRef) - (implicit batchHandler: PipelinesApiRequestHandler): Props = Props(new PipelinesApiBackendSingletonActor(qps, requestWorkers, serviceRegistryActor)).withDispatcher(BackendDispatcher) + def props[O](qps: Int Refined Positive, requestWorkers: Int Refined Positive, serviceRegistryActor: ActorRef)(implicit + batchHandler: PipelinesApiRequestHandler + ): Props = Props(new PipelinesApiBackendSingletonActor(qps, requestWorkers, serviceRegistryActor)) + .withDispatcher(BackendDispatcher) } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfiguration.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfiguration.scala index 2b380143441..f279728d102 100644 --- 
a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfiguration.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfiguration.scala @@ -14,23 +14,24 @@ import scala.concurrent.duration.FiniteDuration class PipelinesApiConfiguration(val configurationDescriptor: BackendConfigurationDescriptor, val genomicsFactory: PipelinesApiFactoryInterface, val googleConfig: GoogleConfiguration, - val papiAttributes: PipelinesApiConfigurationAttributes) extends DefaultJsonProtocol { + val papiAttributes: PipelinesApiConfigurationAttributes +) extends DefaultJsonProtocol { val jesAuths: PipelinesApiAuths = papiAttributes.auths val root: String = configurationDescriptor.backendConfig.getString("root") val pipelineTimeout: FiniteDuration = papiAttributes.pipelineTimeout val runtimeConfig: Option[Config] = configurationDescriptor.backendRuntimeAttributesConfig - val dockerCredentials: Option[PipelinesApiDockerCredentials] = { + val dockerCredentials: Option[PipelinesApiDockerCredentials] = BackendDockerConfiguration.build(configurationDescriptor.backendConfig).dockerCredentials map { creds => PipelinesApiDockerCredentials.apply(creds, googleConfig) } - } val dockerEncryptionKeyName: Option[String] = dockerCredentials flatMap { _.keyName } val dockerEncryptionAuthName: Option[String] = dockerCredentials flatMap { _.authName } val dockerToken: Option[String] = dockerCredentials map { _.token } - val jobShell: String = configurationDescriptor.backendConfig.as[Option[String]]("job-shell").getOrElse( - configurationDescriptor.globalConfig.getOrElse("system.job-shell", "/bin/bash")) + val jobShell: String = configurationDescriptor.backendConfig + .as[Option[String]]("job-shell") + .getOrElse(configurationDescriptor.globalConfig.getOrElse("system.job-shell", "/bin/bash")) } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala index adfaa54c1d5..9f279c4350c 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala @@ -9,9 +9,17 @@ import common.exception.MessageAggregation import common.validation.ErrorOr._ import common.validation.Validation._ import cromwell.backend.CommonBackendConfigurationAttributes -import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes.{BatchRequestTimeoutConfiguration, GcsTransferConfiguration, VirtualPrivateCloudConfiguration} +import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes.{ + BatchRequestTimeoutConfiguration, + GcsTransferConfiguration, + VirtualPrivateCloudConfiguration +} import cromwell.backend.google.pipelines.common.authentication.PipelinesApiAuths -import cromwell.backend.google.pipelines.common.callcaching.{CopyCachedOutputs, PipelinesCacheHitDuplicationStrategy, UseOriginalCachedOutputs} +import cromwell.backend.google.pipelines.common.callcaching.{ + CopyCachedOutputs, + PipelinesCacheHitDuplicationStrategy, + UseOriginalCachedOutputs +} import 
cromwell.backend.google.pipelines.common.io.PipelinesApiReferenceFilesDisk import cromwell.cloudsupport.gcp.GoogleConfiguration import cromwell.cloudsupport.gcp.auth.GoogleAuthMode @@ -29,30 +37,31 @@ import scala.concurrent.duration._ import scala.util.matching.Regex import scala.util.{Failure, Success, Try} -case class PipelinesApiConfigurationAttributes(project: String, - computeServiceAccount: String, - auths: PipelinesApiAuths, - restrictMetadataAccess: Boolean, - enableFuse: Boolean, - executionBucket: String, - endpointUrl: URL, - location: String, - maxPollingInterval: Int, - qps: Int Refined Positive, - cacheHitDuplicationStrategy: PipelinesCacheHitDuplicationStrategy, - requestWorkers: Int Refined Positive, - pipelineTimeout: FiniteDuration, - logFlushPeriod: Option[FiniteDuration], - gcsTransferConfiguration: GcsTransferConfiguration, - virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, - batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, - referenceFileToDiskImageMappingOpt: Option[Map[String, PipelinesApiReferenceFilesDisk]], - dockerImageToCacheDiskImageMappingOpt: Option[Map[String, DockerImageCacheEntry]], - checkpointingInterval: FiniteDuration - ) +case class PipelinesApiConfigurationAttributes( + project: String, + computeServiceAccount: String, + auths: PipelinesApiAuths, + restrictMetadataAccess: Boolean, + enableFuse: Boolean, + executionBucket: String, + endpointUrl: URL, + location: String, + maxPollingInterval: Int, + qps: Int Refined Positive, + cacheHitDuplicationStrategy: PipelinesCacheHitDuplicationStrategy, + requestWorkers: Int Refined Positive, + pipelineTimeout: FiniteDuration, + logFlushPeriod: Option[FiniteDuration], + gcsTransferConfiguration: GcsTransferConfiguration, + virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, + batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, + referenceFileToDiskImageMappingOpt: Option[Map[String, PipelinesApiReferenceFilesDisk]], + dockerImageToCacheDiskImageMappingOpt: Option[Map[String, DockerImageCacheEntry]], + checkpointingInterval: FiniteDuration +) object PipelinesApiConfigurationAttributes - extends PipelinesApiDockerCacheMappingOperations + extends PipelinesApiDockerCacheMappingOperations with PipelinesApiReferenceFilesMappingOperations with StrictLogging { @@ -64,10 +73,11 @@ object PipelinesApiConfigurationAttributes final case class VirtualPrivateCloudLabels(network: String, subnetwork: Option[String], auth: GoogleAuthMode) final case class VirtualPrivateCloudLiterals(network: String, subnetwork: Option[String]) final case class VirtualPrivateCloudConfiguration(labelsOption: Option[VirtualPrivateCloudLabels], - literalsOption: Option[VirtualPrivateCloudLiterals], - ) - final case class BatchRequestTimeoutConfiguration(readTimeoutMillis: Option[Int Refined Positive], connectTimeoutMillis: Option[Int Refined Positive]) - + literalsOption: Option[VirtualPrivateCloudLiterals] + ) + final case class BatchRequestTimeoutConfiguration(readTimeoutMillis: Option[Int Refined Positive], + connectTimeoutMillis: Option[Int Refined Positive] + ) lazy val Logger: Logger = LoggerFactory.getLogger("PipelinesApiConfiguration") @@ -119,43 +129,47 @@ object PipelinesApiConfigurationAttributes "genomics.default-zones" -> "default-runtime-attributes.zones" ) - def apply(googleConfig: GoogleConfiguration, backendConfig: Config, backendName: String): PipelinesApiConfigurationAttributes = { + def apply(googleConfig: GoogleConfiguration, + backendConfig: Config, + 
backendName: String + ): PipelinesApiConfigurationAttributes = { - def vpcErrorMessage(missingKeys: List[String]) = s"Virtual Private Cloud configuration is invalid. Missing keys: `${missingKeys.mkString(",")}`.".invalidNel + def vpcErrorMessage(missingKeys: List[String]) = + s"Virtual Private Cloud configuration is invalid. Missing keys: `${missingKeys.mkString(",")}`.".invalidNel def validateVPCLabelsConfig(networkOption: Option[String], subnetworkOption: Option[String], - authOption: Option[String], - ): ErrorOr[Option[VirtualPrivateCloudLabels]] = { + authOption: Option[String] + ): ErrorOr[Option[VirtualPrivateCloudLabels]] = (networkOption, subnetworkOption, authOption) match { - case (Some(network), _, Some(auth)) => googleConfig.auth(auth) match { - case Valid(validAuth) => - Option(VirtualPrivateCloudLabels(network, subnetworkOption, validAuth)).validNel - case Invalid(error) => s"Auth $auth is not valid for Virtual Private Cloud configuration. Reason: $error" .invalidNel - } + case (Some(network), _, Some(auth)) => + googleConfig.auth(auth) match { + case Valid(validAuth) => + Option(VirtualPrivateCloudLabels(network, subnetworkOption, validAuth)).validNel + case Invalid(error) => + s"Auth $auth is not valid for Virtual Private Cloud configuration. Reason: $error".invalidNel + } case (Some(_), _, None) => vpcErrorMessage(List("auth")) case (None, _, Some(_)) => vpcErrorMessage(List("network-label-key")) case (None, Some(_), None) => vpcErrorMessage(List("network-label-key", "auth")) case (None, None, None) => None.validNel } - } def validateVPCLiteralsConfig(networkNameOption: Option[String], - subnetworkNameOption: Option[String], - ): ErrorOr[Option[VirtualPrivateCloudLiterals]] = { + subnetworkNameOption: Option[String] + ): ErrorOr[Option[VirtualPrivateCloudLiterals]] = (networkNameOption, subnetworkNameOption) match { case (None, Some(_)) => vpcErrorMessage(List("network-name")) case (Some(networkName), _) => Option(VirtualPrivateCloudLiterals(networkName, subnetworkNameOption)).valid case (None, None) => None.valid } - } def validateVPCConfig(networkNameOption: Option[String], subnetworkNameOption: Option[String], networkLabelOption: Option[String], subnetworkLabelOption: Option[String], - authOption: Option[String], - ): ErrorOr[VirtualPrivateCloudConfiguration] = { + authOption: Option[String] + ): ErrorOr[VirtualPrivateCloudConfiguration] = { val vpcLabelsValidation = validateVPCLabelsConfig(networkLabelOption, subnetworkLabelOption, authOption) val vpcLiteralsValidation = @@ -163,33 +177,49 @@ object PipelinesApiConfigurationAttributes (vpcLabelsValidation, vpcLiteralsValidation) mapN VirtualPrivateCloudConfiguration } - val configKeys = backendConfig.entrySet().asScala.toSet map { entry: java.util.Map.Entry[String, ConfigValue] => entry.getKey } + val configKeys = backendConfig.entrySet().asScala.toSet map { entry: java.util.Map.Entry[String, ConfigValue] => + entry.getKey + } warnNotRecognized(configKeys, papiKeys, backendName, Logger) def warnDeprecated(keys: Set[String], deprecated: Map[String, String], context: String, logger: Logger): Unit = { val deprecatedKeys = keys.intersect(deprecated.keySet) - deprecatedKeys foreach { key => logger.warn(s"Found deprecated configuration key $key, replaced with ${deprecated.get(key)}") } + deprecatedKeys foreach { key => + logger.warn(s"Found deprecated configuration key $key, replaced with ${deprecated.get(key)}") + } } warnDeprecated(configKeys, deprecatedJesKeys, backendName, Logger) - val project: ErrorOr[String] = 
validate { backendConfig.as[String]("project") } - val executionBucket: ErrorOr[String] = validate { backendConfig.as[String]("root") } - val endpointUrl: ErrorOr[URL] = validate { backendConfig.as[URL]("genomics.endpoint-url") } - val location: ErrorOr[String] = validateGenomicsLocation(endpointUrl, backendConfig.as[Option[String]]("genomics.location")) + val project: ErrorOr[String] = validate(backendConfig.as[String]("project")) + val executionBucket: ErrorOr[String] = validate(backendConfig.as[String]("root")) + val endpointUrl: ErrorOr[URL] = validate(backendConfig.as[URL]("genomics.endpoint-url")) + val location: ErrorOr[String] = + validateGenomicsLocation(endpointUrl, backendConfig.as[Option[String]]("genomics.location")) val maxPollingInterval: Int = backendConfig.as[Option[Int]]("maximum-polling-interval").getOrElse(600) - val computeServiceAccount: String = backendConfig.as[Option[String]]("genomics.compute-service-account").getOrElse("default") - val genomicsAuthName: ErrorOr[String] = validate { backendConfig.as[String]("genomics.auth") } - val genomicsRestrictMetadataAccess: ErrorOr[Boolean] = validate { backendConfig.as[Option[Boolean]]("genomics.restrict-metadata-access").getOrElse(false) } - val genomicsEnableFuse: ErrorOr[Boolean] = validate { backendConfig.as[Option[Boolean]]("genomics.enable-fuse").getOrElse(false) } - val gcsFilesystemAuthName: ErrorOr[String] = validate { backendConfig.as[String]("filesystems.gcs.auth") } + val computeServiceAccount: String = + backendConfig.as[Option[String]]("genomics.compute-service-account").getOrElse("default") + val genomicsAuthName: ErrorOr[String] = validate(backendConfig.as[String]("genomics.auth")) + val genomicsRestrictMetadataAccess: ErrorOr[Boolean] = validate { + backendConfig.as[Option[Boolean]]("genomics.restrict-metadata-access").getOrElse(false) + } + val genomicsEnableFuse: ErrorOr[Boolean] = validate { + backendConfig.as[Option[Boolean]]("genomics.enable-fuse").getOrElse(false) + } + val gcsFilesystemAuthName: ErrorOr[String] = validate(backendConfig.as[String]("filesystems.gcs.auth")) val qpsValidation = validateQps(backendConfig) - val duplicationStrategy = validate { backendConfig.as[Option[String]]("filesystems.gcs.caching.duplication-strategy").getOrElse("copy") match { - case "copy" => CopyCachedOutputs - case "reference" => UseOriginalCachedOutputs - case other => throw new IllegalArgumentException(s"Unrecognized caching duplication strategy: $other. Supported strategies are copy and reference. See reference.conf for more details.") - } } - val requestWorkers: ErrorOr[Int Refined Positive] = validatePositiveInt(backendConfig.as[Option[Int]]("request-workers").getOrElse(3), "request-workers") + val duplicationStrategy = validate { + backendConfig.as[Option[String]]("filesystems.gcs.caching.duplication-strategy").getOrElse("copy") match { + case "copy" => CopyCachedOutputs + case "reference" => UseOriginalCachedOutputs + case other => + throw new IllegalArgumentException( + s"Unrecognized caching duplication strategy: $other. Supported strategies are copy and reference. See reference.conf for more details." 
+ ) + } + } + val requestWorkers: ErrorOr[Int Refined Positive] = + validatePositiveInt(backendConfig.as[Option[Int]]("request-workers").getOrElse(3), "request-workers") val pipelineTimeout: FiniteDuration = backendConfig.getOrElse("pipeline-timeout", 7.days) @@ -201,9 +231,11 @@ object PipelinesApiConfigurationAttributes case None => Option(1.minute) } - val parallelCompositeUploadThreshold = validateGsutilMemorySpecification(backendConfig, "genomics.parallel-composite-upload-threshold") + val parallelCompositeUploadThreshold = + validateGsutilMemorySpecification(backendConfig, "genomics.parallel-composite-upload-threshold") - val localizationAttempts: ErrorOr[Int Refined Positive] = backendConfig.as[Option[Int]]("genomics.localization-attempts") + val localizationAttempts: ErrorOr[Int Refined Positive] = backendConfig + .as[Option[Int]]("genomics.localization-attempts") .map(attempts => validatePositiveInt(attempts, "genomics.localization-attempts")) .getOrElse(DefaultGcsTransferAttempts.validNel) @@ -216,91 +248,101 @@ object PipelinesApiConfigurationAttributes val vpcSubnetworkName: ErrorOr[Option[String]] = validate { backendConfig.getAs[String]("virtual-private-cloud.subnetwork-name") } - val vpcNetworkLabel: ErrorOr[Option[String]] = validate { backendConfig.getAs[String]("virtual-private-cloud.network-label-key") } - val vpcSubnetworkLabel: ErrorOr[Option[String]] = validate { backendConfig.getAs[String]("virtual-private-cloud.subnetwork-label-key") } - val vpcAuth: ErrorOr[Option[String]] = validate { backendConfig.getAs[String]("virtual-private-cloud.auth")} + val vpcNetworkLabel: ErrorOr[Option[String]] = validate { + backendConfig.getAs[String]("virtual-private-cloud.network-label-key") + } + val vpcSubnetworkLabel: ErrorOr[Option[String]] = validate { + backendConfig.getAs[String]("virtual-private-cloud.subnetwork-label-key") + } + val vpcAuth: ErrorOr[Option[String]] = validate(backendConfig.getAs[String]("virtual-private-cloud.auth")) - val virtualPrivateCloudConfiguration: ErrorOr[VirtualPrivateCloudConfiguration] = { + val virtualPrivateCloudConfiguration: ErrorOr[VirtualPrivateCloudConfiguration] = (vpcNetworkName, vpcSubnetworkName, vpcNetworkLabel, vpcSubnetworkLabel, vpcAuth) flatMapN validateVPCConfig - } - val batchRequestsReadTimeout = readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.read") - val batchRequestsConnectTimeout = readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.connect") + val batchRequestsReadTimeout = + readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.read") + val batchRequestsConnectTimeout = + readOptionalPositiveMillisecondsIntFromDuration(backendConfig, "batch-requests.timeouts.connect") - val batchRequestTimeoutConfigurationValidation = (batchRequestsReadTimeout, batchRequestsConnectTimeout) mapN { (read, connect) => - BatchRequestTimeoutConfiguration(readTimeoutMillis = read, connectTimeoutMillis = connect) + val batchRequestTimeoutConfigurationValidation = (batchRequestsReadTimeout, batchRequestsConnectTimeout) mapN { + (read, connect) => + BatchRequestTimeoutConfiguration(readTimeoutMillis = read, connectTimeoutMillis = connect) } - val referenceDiskLocalizationManifestFiles: ErrorOr[Option[List[ManifestFile]]] = validateReferenceDiskManifestConfigs(backendConfig, backendName) + val referenceDiskLocalizationManifestFiles: ErrorOr[Option[List[ManifestFile]]] = + validateReferenceDiskManifestConfigs(backendConfig, backendName) - 
val dockerImageCacheManifestFile: ErrorOr[Option[ValidFullGcsPath]] = validateGcsPathToDockerImageCacheManifestFile(backendConfig) + val dockerImageCacheManifestFile: ErrorOr[Option[ValidFullGcsPath]] = validateGcsPathToDockerImageCacheManifestFile( + backendConfig + ) val checkpointingInterval: FiniteDuration = backendConfig.getOrElse(checkpointingIntervalKey, 10.minutes) - def authGoogleConfigForPapiConfigurationAttributes(project: String, - bucket: String, - endpointUrl: URL, - genomicsName: String, - location: String, - restrictMetadata: Boolean, - enableFuse: Boolean, - gcsName: String, - qps: Int Refined Positive, - cacheHitDuplicationStrategy: PipelinesCacheHitDuplicationStrategy, - requestWorkers: Int Refined Positive, - gcsTransferConfiguration: GcsTransferConfiguration, - virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, - batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, - referenceDiskLocalizationManifestFilesOpt: Option[List[ManifestFile]], - dockerImageCacheManifestFileOpt: Option[ValidFullGcsPath]): ErrorOr[PipelinesApiConfigurationAttributes] = - (googleConfig.auth(genomicsName), googleConfig.auth(gcsName)) mapN { - (genomicsAuth, gcsAuth) => - val generatedReferenceFilesMappingOpt = referenceDiskLocalizationManifestFilesOpt map { - generateReferenceFilesMapping(genomicsAuth, _) - } - val dockerImageToCacheDiskImageMappingOpt = dockerImageCacheManifestFileOpt map { - generateDockerImageToDiskImageMapping(genomicsAuth, _) - } - PipelinesApiConfigurationAttributes( - project = project, - computeServiceAccount = computeServiceAccount, - auths = PipelinesApiAuths(genomicsAuth, gcsAuth), - restrictMetadataAccess = restrictMetadata, - enableFuse = enableFuse, - executionBucket = bucket, - endpointUrl = endpointUrl, - location = location, - maxPollingInterval = maxPollingInterval, - qps = qps, - cacheHitDuplicationStrategy = cacheHitDuplicationStrategy, - requestWorkers = requestWorkers, - pipelineTimeout = pipelineTimeout, - logFlushPeriod = logFlushPeriod, - gcsTransferConfiguration = gcsTransferConfiguration, - virtualPrivateCloudConfiguration = virtualPrivateCloudConfiguration, - batchRequestTimeoutConfiguration = batchRequestTimeoutConfiguration, - referenceFileToDiskImageMappingOpt = generatedReferenceFilesMappingOpt, - dockerImageToCacheDiskImageMappingOpt = dockerImageToCacheDiskImageMappingOpt, - checkpointingInterval = checkpointingInterval - ) - } + def authGoogleConfigForPapiConfigurationAttributes( + project: String, + bucket: String, + endpointUrl: URL, + genomicsName: String, + location: String, + restrictMetadata: Boolean, + enableFuse: Boolean, + gcsName: String, + qps: Int Refined Positive, + cacheHitDuplicationStrategy: PipelinesCacheHitDuplicationStrategy, + requestWorkers: Int Refined Positive, + gcsTransferConfiguration: GcsTransferConfiguration, + virtualPrivateCloudConfiguration: VirtualPrivateCloudConfiguration, + batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration, + referenceDiskLocalizationManifestFilesOpt: Option[List[ManifestFile]], + dockerImageCacheManifestFileOpt: Option[ValidFullGcsPath] + ): ErrorOr[PipelinesApiConfigurationAttributes] = + (googleConfig.auth(genomicsName), googleConfig.auth(gcsName)) mapN { (genomicsAuth, gcsAuth) => + val generatedReferenceFilesMappingOpt = referenceDiskLocalizationManifestFilesOpt map { + generateReferenceFilesMapping(genomicsAuth, _) + } + val dockerImageToCacheDiskImageMappingOpt = dockerImageCacheManifestFileOpt map { + 
generateDockerImageToDiskImageMapping(genomicsAuth, _) + } + PipelinesApiConfigurationAttributes( + project = project, + computeServiceAccount = computeServiceAccount, + auths = PipelinesApiAuths(genomicsAuth, gcsAuth), + restrictMetadataAccess = restrictMetadata, + enableFuse = enableFuse, + executionBucket = bucket, + endpointUrl = endpointUrl, + location = location, + maxPollingInterval = maxPollingInterval, + qps = qps, + cacheHitDuplicationStrategy = cacheHitDuplicationStrategy, + requestWorkers = requestWorkers, + pipelineTimeout = pipelineTimeout, + logFlushPeriod = logFlushPeriod, + gcsTransferConfiguration = gcsTransferConfiguration, + virtualPrivateCloudConfiguration = virtualPrivateCloudConfiguration, + batchRequestTimeoutConfiguration = batchRequestTimeoutConfiguration, + referenceFileToDiskImageMappingOpt = generatedReferenceFilesMappingOpt, + dockerImageToCacheDiskImageMappingOpt = dockerImageToCacheDiskImageMappingOpt, + checkpointingInterval = checkpointingInterval + ) + } (project, - executionBucket, - endpointUrl, - genomicsAuthName, - location, - genomicsRestrictMetadataAccess, - genomicsEnableFuse, - gcsFilesystemAuthName, - qpsValidation, - duplicationStrategy, - requestWorkers, - gcsTransferConfiguration, - virtualPrivateCloudConfiguration, - batchRequestTimeoutConfigurationValidation, - referenceDiskLocalizationManifestFiles, - dockerImageCacheManifestFile + executionBucket, + endpointUrl, + genomicsAuthName, + location, + genomicsRestrictMetadataAccess, + genomicsEnableFuse, + gcsFilesystemAuthName, + qpsValidation, + duplicationStrategy, + requestWorkers, + gcsTransferConfiguration, + virtualPrivateCloudConfiguration, + batchRequestTimeoutConfigurationValidation, + referenceDiskLocalizationManifestFiles, + dockerImageCacheManifestFile ) flatMapN authGoogleConfigForPapiConfigurationAttributes match { case Valid(r) => r case Invalid(f) => @@ -311,25 +353,27 @@ object PipelinesApiConfigurationAttributes } } - private def validateSingleGcsPath(gcsPath: String): ErrorOr[ValidFullGcsPath] = { + private def validateSingleGcsPath(gcsPath: String): ErrorOr[ValidFullGcsPath] = GcsPathBuilder.validateGcsPath(gcsPath) match { case validPath: ValidFullGcsPath => validPath.validNel case invalidPath => s"Invalid GCS path: $invalidPath".invalidNel } - } - private [common] def validateGcsPathToDockerImageCacheManifestFile(backendConfig: Config): ErrorOr[Option[ValidFullGcsPath]] = { + private[common] def validateGcsPathToDockerImageCacheManifestFile( + backendConfig: Config + ): ErrorOr[Option[ValidFullGcsPath]] = backendConfig.getAs[String]("docker-image-cache-manifest-file") match { case Some(gcsPath) => validateSingleGcsPath(gcsPath).map(Option.apply) case None => None.validNel } - } /** * Validate that the entries corresponding to "reference-disk-localization-manifests" in the specified * backend are parseable as `ManifestFile`s. 
*/ - private [common] def validateReferenceDiskManifestConfigs(backendConfig: Config, backendName: String): ErrorOr[Option[List[ManifestFile]]] = { + private[common] def validateReferenceDiskManifestConfigs(backendConfig: Config, + backendName: String + ): ErrorOr[Option[List[ManifestFile]]] = Try(backendConfig.getAs[List[Config]]("reference-disk-localization-manifests")) match { case Failure(e) => ("Error attempting to parse value for 'reference-disk-localization-manifests' as List[Config]: " + @@ -344,10 +388,13 @@ object PipelinesApiConfigurationAttributes // equivalent using clunkier syntax: configs traverse parser.decode[ManifestFile] match { case Right(manifests) => - logger.info(s"Reference disks feature for $backendName backend is configured with the following reference images: ${manifests.map(_.imageIdentifier).mkString(", ")}.") + logger.info( + s"Reference disks feature for $backendName backend is configured with the following reference images: ${manifests.map(_.imageIdentifier).mkString(", ")}." + ) Option(manifests).validNel case Left(err) => - val message = s"Reference disks misconfigured for backend $backendName, could not parse as List[ManifestFile]" + val message = + s"Reference disks misconfigured for backend $backendName, could not parse as List[ManifestFile]" logger.error(message, err.getCause) s"$message: ${err.getMessage}".invalidNel } @@ -356,7 +403,6 @@ object PipelinesApiConfigurationAttributes None.validNel } } - } def validateQps(config: Config): ErrorOr[Int Refined Positive] = { import eu.timepit.refined._ @@ -365,12 +411,13 @@ object PipelinesApiConfigurationAttributes val qpsCandidate = qp100s / 100 refineV[Positive](qpsCandidate) match { - case Left(_) => s"Calculated QPS for Google Genomics API ($qpsCandidate/s) was not a positive integer (supplied value was $qp100s per 100s)".invalidNel + case Left(_) => + s"Calculated QPS for Google Genomics API ($qpsCandidate/s) was not a positive integer (supplied value was $qp100s per 100s)".invalidNel case Right(refined) => refined.validNel } } - def validateGenomicsLocation(genomicsUrl: ErrorOr[URL], location: Option[String]): ErrorOr[String] = { + def validateGenomicsLocation(genomicsUrl: ErrorOr[URL], location: Option[String]): ErrorOr[String] = genomicsUrl match { case Valid(url) if url.toString.contains("lifesciences") => location match { @@ -379,25 +426,26 @@ object PipelinesApiConfigurationAttributes } case _ => location.getOrElse("").validNel } - } def validateGsutilMemorySpecification(config: Config, configPath: String): ErrorOr[String] = { val entry = config.as[Option[String]](configPath) entry match { case None => "0".validNel case Some(v @ GsutilHumanBytes(_, _)) => v.validNel - case Some(bad) => s"Invalid gsutil memory specification in Cromwell configuration at path '$configPath': '$bad'".invalidNel + case Some(bad) => + s"Invalid gsutil memory specification in Cromwell configuration at path '$configPath': '$bad'".invalidNel } } - def validatePositiveInt(n: Int, configPath: String): Validated[NonEmptyList[String], Refined[Int, Positive]] = { + def validatePositiveInt(n: Int, configPath: String): Validated[NonEmptyList[String], Refined[Int, Positive]] = refineV[Positive](n) match { case Left(_) => s"Value $n for $configPath is not strictly positive".invalidNel case Right(refined) => refined.validNel } - } - def readOptionalPositiveMillisecondsIntFromDuration(backendConfig: Config, configPath: String): ErrorOr[Option[Int Refined Positive]] = { + def 
readOptionalPositiveMillisecondsIntFromDuration(backendConfig: Config, + configPath: String + ): ErrorOr[Option[Int Refined Positive]] = { def validate(n: FiniteDuration) = { val result: ErrorOr[Int Refined Positive] = Try(n.toMillis.toInt).toErrorOr flatMap { millisInt => @@ -417,7 +465,7 @@ object PipelinesApiConfigurationAttributes } // Copy/port of gsutil's "_GenerateSuffixRegex" - private [common] lazy val GsutilHumanBytes: Regex = { + private[common] lazy val GsutilHumanBytes: Regex = { val _EXP_STRINGS = List( List("B", "bit"), List("KiB", "Kibit", "K"), @@ -425,7 +473,7 @@ object PipelinesApiConfigurationAttributes List("GiB", "Gibit", "G"), List("TiB", "Tibit", "T"), List("PiB", "Pibit", "P"), - List("EiB", "Eibit", "E"), + List("EiB", "Eibit", "E") ) val suffixes = for { diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperations.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperations.scala index 97a0b1a1da5..b9728272153 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperations.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperations.scala @@ -20,7 +20,8 @@ trait PipelinesApiDockerCacheMappingOperations { private class DockerImageManifestVersionError(message: String) extends RuntimeException(message) with NoStackTrace def generateDockerImageToDiskImageMapping(auth: GoogleAuthMode, - dockerImageCacheManifestFile: ValidFullGcsPath): Map[String, DockerImageCacheEntry] = { + dockerImageCacheManifestFile: ValidFullGcsPath + ): Map[String, DockerImageCacheEntry] = { val gcsClient = StorageOptions .newBuilder() @@ -34,35 +35,42 @@ trait PipelinesApiDockerCacheMappingOperations { def getDockerCacheDiskImageForAJob(dockerImageToCacheDiskImageMappingOpt: Option[Map[String, DockerImageCacheEntry]], dockerImageAsSpecifiedByUser: String, dockerImageWithDigest: String, - jobLogger: JobLogger): Option[String] = { + jobLogger: JobLogger + ): Option[String] = dockerImageToCacheDiskImageMappingOpt .flatMap(_.get(dockerImageAsSpecifiedByUser)) .filter { cachedDockerImageDigestAndDiskName => val hashStartingPositionInActualDockerImage = dockerImageWithDigest.indexOf('@') if (hashStartingPositionInActualDockerImage != -1) { - val actualDigestOfDesiredDockerImage = dockerImageWithDigest.substring(hashStartingPositionInActualDockerImage + 1) + val actualDigestOfDesiredDockerImage = + dockerImageWithDigest.substring(hashStartingPositionInActualDockerImage + 1) if (cachedDockerImageDigestAndDiskName.dockerImageDigest == actualDigestOfDesiredDockerImage) { true } else { - jobLogger.info(s"Cached Docker image digest mismatch. Requested docker image $dockerImageAsSpecifiedByUser has different digest than " + - s"corresponding cached image located at the ${cachedDockerImageDigestAndDiskName.diskImageName} disk image. " + - s"Digest of requested image is $actualDigestOfDesiredDockerImage, but digest of cached image is ${cachedDockerImageDigestAndDiskName.dockerImageDigest}. " + - s"Docker image cache feature will not be used for this task.") + jobLogger.info( + s"Cached Docker image digest mismatch. 
Requested docker image $dockerImageAsSpecifiedByUser has different digest than " + + s"corresponding cached image located at the ${cachedDockerImageDigestAndDiskName.diskImageName} disk image. " + + s"Digest of requested image is $actualDigestOfDesiredDockerImage, but digest of cached image is ${cachedDockerImageDigestAndDiskName.dockerImageDigest}. " + + s"Docker image cache feature will not be used for this task." + ) false } } else { - jobLogger.error(s"Programmer error ! Odd docker image name where supposed to be name with digest: $dockerImageWithDigest") + jobLogger.error( + s"Programmer error ! Odd docker image name where supposed to be name with digest: $dockerImageWithDigest" + ) false } } .map(_.diskImageName) - } - private[common] def readDockerImageCacheManifestFileFromGCS(gcsClient: Storage, gcsPath: ValidFullGcsPath): IO[DockerImageCacheManifest] = { - val manifestFileBlobIo = IO { gcsClient.get(BlobId.of(gcsPath.bucket, gcsPath.path.substring(1))) } + private[common] def readDockerImageCacheManifestFileFromGCS(gcsClient: Storage, + gcsPath: ValidFullGcsPath + ): IO[DockerImageCacheManifest] = { + val manifestFileBlobIo = IO(gcsClient.get(BlobId.of(gcsPath.bucket, gcsPath.path.substring(1)))) manifestFileBlobIo flatMap { manifestFileBlob => - val jsonStringIo = IO { manifestFileBlob.getContent().map(_.toChar).mkString } + val jsonStringIo = IO(manifestFileBlob.getContent().map(_.toChar).mkString) jsonStringIo.flatMap { jsonStr => decode[DockerImageCacheManifest](jsonStr) match { case Left(error) => IO.raiseError(error) @@ -70,8 +78,12 @@ trait PipelinesApiDockerCacheMappingOperations { if (parsedManifest.manifestFormatVersion == CURRENT_SUPPORTED_MANIFEST_FORMAT_VERSION) { IO.pure(parsedManifest) } else { - IO.raiseError(new DockerImageManifestVersionError(s"Current supported docker image cache manifest format version " + - s"is $CURRENT_SUPPORTED_MANIFEST_FORMAT_VERSION, but got ${parsedManifest.manifestFormatVersion}")) + IO.raiseError( + new DockerImageManifestVersionError( + s"Current supported docker image cache manifest format version " + + s"is $CURRENT_SUPPORTED_MANIFEST_FORMAT_VERSION, but got ${parsedManifest.manifestFormatVersion}" + ) + ) } } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiExpressionFunctions.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiExpressionFunctions.scala index bbd1aed9ecc..e90042bad2c 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiExpressionFunctions.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiExpressionFunctions.scala @@ -10,26 +10,25 @@ import cromwell.filesystems.gcs.GcsPathBuilder import cromwell.filesystems.gcs.GcsPathBuilder.{InvalidGcsPath, PossiblyValidRelativeGcsPath, ValidFullGcsPath} import cromwell.filesystems.gcs.batch.GcsBatchCommandBuilder -class PipelinesApiPathFunctions(pathBuilders: PathBuilders, callContext: CallContext) extends CallCorePathFunctionSet(pathBuilders, callContext) { - override def relativeToHostCallRoot(path: String) = { +class PipelinesApiPathFunctions(pathBuilders: PathBuilders, callContext: CallContext) + extends CallCorePathFunctionSet(pathBuilders, callContext) { + override def relativeToHostCallRoot(path: String) = GcsPathBuilder.validateGcsPath(path) match { case _: ValidFullGcsPath => path case _ => 
callContext.root.resolve(path.stripPrefix("file://").stripPrefix("/")).pathAsString } - } } class PipelinesApiExpressionFunctions(standardParams: StandardExpressionFunctionsParams) - extends StandardExpressionFunctions(standardParams) { + extends StandardExpressionFunctions(standardParams) { override lazy val ioCommandBuilder: IoCommandBuilder = GcsBatchCommandBuilder - override def preMapping(str: String) = { + override def preMapping(str: String) = GcsPathBuilder.validateGcsPath(str) match { case _: ValidFullGcsPath => str case PossiblyValidRelativeGcsPath => callContext.root.resolve(str.stripPrefix("/")).pathAsString case _: InvalidGcsPath => str } - } override lazy val pathFunctions = new PipelinesApiPathFunctions(pathBuilders, callContext) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiFinalizationActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiFinalizationActor.scala index 9be35d11bd9..08d91b414dd 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiFinalizationActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiFinalizationActor.scala @@ -8,8 +8,7 @@ import cromwell.core.io.AsyncIoActorClient import cromwell.filesystems.gcs.batch.GcsBatchCommandBuilder import wom.graph.CommandCallNode -case class PipelinesApiFinalizationActorParams -( +case class PipelinesApiFinalizationActorParams( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], @@ -22,7 +21,8 @@ case class PipelinesApiFinalizationActorParams } class PipelinesApiFinalizationActor(val pipelinesParams: PipelinesApiFinalizationActorParams) - extends StandardFinalizationActor(pipelinesParams) with AsyncIoActorClient { + extends StandardFinalizationActor(pipelinesParams) + with AsyncIoActorClient { lazy val jesConfiguration: PipelinesApiConfiguration = pipelinesParams.jesConfiguration diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActor.scala index a97b8f7f9df..b3533c05b97 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActor.scala @@ -14,7 +14,11 @@ import com.google.auth.oauth2.OAuth2Credentials import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes._ import cromwell.backend.google.pipelines.common.PipelinesApiInitializationActor._ import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory -import cromwell.backend.standard.{StandardInitializationActor, StandardInitializationActorParams, StandardValidatedRuntimeAttributesBuilder} +import cromwell.backend.standard.{ + StandardInitializationActor, + StandardInitializationActorParams, + StandardValidatedRuntimeAttributesBuilder +} import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor} import cromwell.cloudsupport.gcp.auth.GoogleAuthMode.{httpTransport, jsonFactory} import 
cromwell.cloudsupport.gcp.auth.{GoogleAuthMode, UserServiceAccountMode} @@ -33,8 +37,7 @@ import wom.graph.CommandCallNode import scala.concurrent.Future import scala.util.Try -case class PipelinesApiInitializationActorParams -( +case class PipelinesApiInitializationActorParams( workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], @@ -46,7 +49,8 @@ case class PipelinesApiInitializationActorParams } class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializationActorParams) - extends StandardInitializationActor(pipelinesParams) with AsyncIoActorClient { + extends StandardInitializationActor(pipelinesParams) + with AsyncIoActorClient { override lazy val ioActor: ActorRef = pipelinesParams.ioActor protected val pipelinesConfiguration: PipelinesApiConfiguration = pipelinesParams.jesConfiguration @@ -58,19 +62,20 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio // Credentials object for the GCS API private lazy val gcsCredentials: Future[Credentials] = pipelinesConfiguration.papiAttributes.auths.gcs - .retryCredentials(workflowOptions, List(StorageScopes.DEVSTORAGE_FULL_CONTROL)) + .retryCredentials(workflowOptions, List(StorageScopes.DEVSTORAGE_FULL_CONTROL)) // Credentials object for the Genomics API private lazy val genomicsCredentials: Future[Credentials] = pipelinesConfiguration.papiAttributes.auths.genomics - .retryCredentials(workflowOptions, List( - CloudLifeSciencesScopes.CLOUD_PLATFORM, - GenomicsScopes.GENOMICS - )) + .retryCredentials(workflowOptions, + List( + CloudLifeSciencesScopes.CLOUD_PLATFORM, + GenomicsScopes.GENOMICS + ) + ) // Genomics object to access the Genomics API - private lazy val genomics: Future[PipelinesApiRequestFactory] = { + private lazy val genomics: Future[PipelinesApiRequestFactory] = genomicsCredentials map pipelinesConfiguration.genomicsFactory.fromCredentials - } val privateDockerEncryptionKeyName: Option[String] = { val optionsEncryptionKey = workflowOptions.get(GoogleAuthMode.DockerCredentialsEncryptionKeyNameKey).toOption @@ -90,11 +95,14 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio // That doesn't seem great but it's effectively what the existing code around user service accounts appears to be doing. val userServiceAccountAuth: Option[GoogleAuthMode] = for { _ <- workflowOptions.get(GoogleAuthMode.UserServiceAccountKey).toOption - usaAuth <- pipelinesConfiguration.googleConfig.authsByName.values collectFirst { case u: UserServiceAccountMode => u } + usaAuth <- pipelinesConfiguration.googleConfig.authsByName.values collectFirst { + case u: UserServiceAccountMode => u + } } yield usaAuth - def encryptionAuthFromConfig: Option[GoogleAuthMode] = pipelinesConfiguration.dockerEncryptionAuthName.flatMap { name => - pipelinesConfiguration.googleConfig.auth(name).toOption + def encryptionAuthFromConfig: Option[GoogleAuthMode] = pipelinesConfiguration.dockerEncryptionAuthName.flatMap { + name => + pipelinesConfiguration.googleConfig.auth(name).toOption } // If there's no user service account auth in the workflow options fall back to an auth specified in config. 
userServiceAccountAuth orElse encryptionAuthFromConfig @@ -104,12 +112,18 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio new String(Base64.decodeBase64(dockerToken)).split(':') match { case Array(username, password) => // unencrypted tokens are base64-encoded username:password - Option(JsObject( - Map( - "username" -> JsString(username), - "password" -> JsString(password) - )).compactPrint) - case _ => throw new RuntimeException(s"provided dockerhub token '$dockerToken' is not a base64-encoded username:password") + Option( + JsObject( + Map( + "username" -> JsString(username), + "password" -> JsString(password) + ) + ).compactPrint + ) + case _ => + throw new RuntimeException( + s"provided dockerhub token '$dockerToken' is not a base64-encoded username:password" + ) } } @@ -123,17 +137,21 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio } private def vpcNetworkAndSubnetworkProjectLabelsFuture(): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { - def googleProject(descriptor: BackendWorkflowDescriptor): String = { - descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleProject, pipelinesParams.jesConfiguration.papiAttributes.project) - } + def googleProject(descriptor: BackendWorkflowDescriptor): String = + descriptor.workflowOptions.getOrElse(WorkflowOptionKeys.GoogleProject, + pipelinesParams.jesConfiguration.papiAttributes.project + ) - def projectMetadataRequest(vpcConfig: VirtualPrivateCloudLabels): Future[HttpRequest] = { + def projectMetadataRequest(vpcConfig: VirtualPrivateCloudLabels): Future[HttpRequest] = Future { - val credentials = vpcConfig.auth.credentials(workflowOptions.get(_).get, List(CloudLifeSciencesScopes.CLOUD_PLATFORM)) + val credentials = + vpcConfig.auth.credentials(workflowOptions.get(_).get, List(CloudLifeSciencesScopes.CLOUD_PLATFORM)) val httpCredentialsAdapter = new HttpCredentialsAdapter(credentials) - val cloudResourceManagerBuilder = new CloudResourceManager - .Builder(GoogleAuthMode.httpTransport, GoogleAuthMode.jsonFactory, httpCredentialsAdapter) + val cloudResourceManagerBuilder = new CloudResourceManager.Builder(GoogleAuthMode.httpTransport, + GoogleAuthMode.jsonFactory, + httpCredentialsAdapter + ) .setApplicationName(pipelinesConfiguration.googleConfig.applicationName) .build() @@ -141,19 +159,23 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio project.buildHttpRequest() } - } def projectMetadataResponseToLabels(httpResponse: HttpResponse): Future[ProjectLabels] = { implicit val googleProjectMetadataLabelDecoder: Decoder[ProjectLabels] = deriveDecoder - Future.fromTry(decode[ProjectLabels](httpResponse.parseAsString()).toTry).recoverWith { - case e: Throwable => Future.failed(new RuntimeException(s"Failed to parse labels from project metadata response from Google Cloud Resource Manager API. " + - s"${ExceptionUtils.getMessage(e)}", e)) + Future.fromTry(decode[ProjectLabels](httpResponse.parseAsString()).toTry).recoverWith { case e: Throwable => + Future.failed( + new RuntimeException( + s"Failed to parse labels from project metadata response from Google Cloud Resource Manager API. 
" + + s"${ExceptionUtils.getMessage(e)}", + e + ) + ) } } def networkLabelsFromProjectLabels(vpcConfig: VirtualPrivateCloudLabels, - projectLabels: ProjectLabels, - ): Option[VpcAndSubnetworkProjectLabelValues] = { + projectLabels: ProjectLabels + ): Option[VpcAndSubnetworkProjectLabelValues] = projectLabels.labels.get(vpcConfig.network) map { vpcNetworkLabelValue => val subnetworkLabelOption = vpcConfig.subnetwork.flatMap { s => projectLabels.labels.collectFirst { @@ -163,22 +185,22 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio VpcAndSubnetworkProjectLabelValues(vpcNetworkLabelValue, subnetworkLabelOption) } - } - def fetchVpcLabelsFromProjectMetadata(vpcConfig: VirtualPrivateCloudLabels - ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { + def fetchVpcLabelsFromProjectMetadata( + vpcConfig: VirtualPrivateCloudLabels + ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = for { projectMetadataResponse <- projectMetadataRequest(vpcConfig).map(_.executeAsync().get()) projectLabels <- projectMetadataResponseToLabels(projectMetadataResponse) } yield networkLabelsFromProjectLabels(vpcConfig, projectLabels) - } /* First, try to fetch the network information from labels, where that fetch may still return None. Then, if we did not discover a network via labels for whatever reason try to look for literal values. */ - def fetchVpcLabels(vpcConfig: VirtualPrivateCloudConfiguration - ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { + def fetchVpcLabels( + vpcConfig: VirtualPrivateCloudConfiguration + ): Future[Option[VpcAndSubnetworkProjectLabelValues]] = { // Added explicit types to hopefully help future devs who stumble across this two-step code val fetchedFromLabels: Future[Option[VpcAndSubnetworkProjectLabelValues]] = vpcConfig.labelsOption match { case Some(labels: VirtualPrivateCloudLabels) => fetchVpcLabelsFromProjectMetadata(labels) @@ -202,8 +224,13 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio gcsCred <- gcsCredentials genomicsCred <- genomicsCredentials validatedPathBuilders <- pathBuilders - } yield new PipelinesApiWorkflowPaths( - workflowDescriptor, gcsCred, genomicsCred, pipelinesConfiguration, validatedPathBuilders, standardStreamNameToFileNameMetadataMapper)(ioEc) + } yield new PipelinesApiWorkflowPaths(workflowDescriptor, + gcsCred, + genomicsCred, + pipelinesConfiguration, + validatedPathBuilders, + standardStreamNameToFileNameMetadataMapper + )(ioEc) override lazy val initializationData: Future[PipelinesApiBackendInitializationData] = for { jesWorkflowPaths <- workflowPaths @@ -218,19 +245,21 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio genomicsRequestFactory = genomicsFactory, privateDockerEncryptionKeyName = privateDockerEncryptionKeyName, privateDockerEncryptedToken = privateDockerEncryptedToken, - vpcNetworkAndSubnetworkProjectLabels = vpcNetworkAndSubnetworkProjectLabels) + vpcNetworkAndSubnetworkProjectLabels = vpcNetworkAndSubnetworkProjectLabels + ) override def validateWorkflowOptions(): Try[Unit] = GoogleLabels.fromWorkflowOptions(workflowOptions).map(_ => ()) - override def beforeAll(): Future[Option[BackendInitializationData]] = { + override def beforeAll(): Future[Option[BackendInitializationData]] = for { paths <- workflowPaths _ = publishWorkflowRoot(paths.workflowRoot.pathAsString) data <- initializationData } yield Option(data) - } - def standardStreamNameToFileNameMetadataMapper(pipelinesApiJobPaths: PipelinesApiJobPaths, 
streamName: String): String = + def standardStreamNameToFileNameMetadataMapper(pipelinesApiJobPaths: PipelinesApiJobPaths, + streamName: String + ): String = PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper(pipelinesApiJobPaths, streamName) override lazy val ioCommandBuilder: GcsBatchCommandBuilder.type = GcsBatchCommandBuilder @@ -238,7 +267,9 @@ class PipelinesApiInitializationActor(pipelinesParams: PipelinesApiInitializatio object PipelinesApiInitializationActor { // For metadata publishing purposes default to using the name of a standard stream as the stream's filename. - def defaultStandardStreamNameToFileNameMetadataMapper(pipelinesApiJobPaths: PipelinesApiJobPaths, streamName: String): String = streamName + def defaultStandardStreamNameToFileNameMetadataMapper(pipelinesApiJobPaths: PipelinesApiJobPaths, + streamName: String + ): String = streamName def encryptKms(keyName: String, credentials: OAuth2Credentials, plainText: String): String = { val httpCredentialsAdapter = new HttpCredentialsAdapter(credentials) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobCachingActorHelper.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobCachingActorHelper.scala index 353e33a5019..74fb944fe51 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobCachingActorHelper.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobCachingActorHelper.scala @@ -12,9 +12,8 @@ import scala.language.postfixOps trait PipelinesApiJobCachingActorHelper extends StandardCachingActorHelper { this: PipelinesApiAsyncBackendJobExecutionActor with JobLogging => - lazy val initializationData: PipelinesApiBackendInitializationData = { + lazy val initializationData: PipelinesApiBackendInitializationData = backendInitializationDataAs[PipelinesApiBackendInitializationData] - } lazy val pipelinesConfiguration: PipelinesApiConfiguration = initializationData.papiConfiguration @@ -26,7 +25,8 @@ trait PipelinesApiJobCachingActorHelper extends StandardCachingActorHelper { googleLegacyMachineSelection(jobDescriptor.workflowDescriptor) ) - lazy val workingDisk: PipelinesApiAttachedDisk = runtimeAttributes.disks.find(_.name == PipelinesApiWorkingDisk.Name).get + lazy val workingDisk: PipelinesApiAttachedDisk = + runtimeAttributes.disks.find(_.name == PipelinesApiWorkingDisk.Name).get lazy val callRootPath: Path = pipelinesApiCallPaths.callExecutionRoot lazy val returnCodeFilename: String = pipelinesApiCallPaths.returnCodeFilename @@ -44,16 +44,18 @@ trait PipelinesApiJobCachingActorHelper extends StandardCachingActorHelper { val workflow = jobDescriptor.workflowDescriptor val call = jobDescriptor.taskCall val subWorkflow = workflow.callable - val subWorkflowLabels = if (!subWorkflow.equals(workflow.rootWorkflow)) - Labels("cromwell-sub-workflow-name" -> subWorkflow.name) - else - Labels.empty + val subWorkflowLabels = + if (!subWorkflow.equals(workflow.rootWorkflow)) + Labels("cromwell-sub-workflow-name" -> subWorkflow.name) + else + Labels.empty val alias = call.localName - val aliasLabels = if (!alias.equals(call.callable.name)) - Labels("wdl-call-alias" -> alias) - else - Labels.empty + val aliasLabels = + if (!alias.equals(call.callable.name)) + Labels("wdl-call-alias" -> alias) + else + Labels.empty Labels( 
"cromwell-workflow-id" -> s"cromwell-${workflow.rootWorkflowId}", @@ -63,15 +65,14 @@ trait PipelinesApiJobCachingActorHelper extends StandardCachingActorHelper { lazy val originalLabels: Labels = defaultLabels - lazy val backendLabels: Seq[GoogleLabel] = GoogleLabels.safeLabels(originalLabels.asTuple :_*) + lazy val backendLabels: Seq[GoogleLabel] = GoogleLabels.safeLabels(originalLabels.asTuple: _*) - lazy val originalLabelEvents: Map[String, String] = originalLabels.value map { l => s"${CallMetadataKeys.Labels}:${l.key}" -> l.value } toMap + lazy val originalLabelEvents: Map[String, String] = originalLabels.value map { l => + s"${CallMetadataKeys.Labels}:${l.key}" -> l.value + } toMap override protected def nonStandardMetadata: Map[String, Any] = { - val googleProject = initializationData - .workflowPaths - .workflowDescriptor - .workflowOptions + val googleProject = initializationData.workflowPaths.workflowDescriptor.workflowOptions .get(WorkflowOptionKeys.GoogleProject) .getOrElse(jesAttributes.project) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobPaths.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobPaths.scala index ae047187825..ec4af5344cf 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobPaths.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobPaths.scala @@ -19,7 +19,10 @@ object PipelinesApiJobPaths { // Non-`final` as this is mocked for testing since using a real instance proved too difficult. // Do not subclass this or other case classes in production code, at least without understanding the pitfalls: // https://nrinaudo.github.io/scala-best-practices/tricky_behaviours/final_case_classes.html -case class PipelinesApiJobPaths(override val workflowPaths: PipelinesApiWorkflowPaths, jobKey: BackendJobDescriptorKey, override val isCallCacheCopyAttempt: Boolean = false) extends JobPaths { +case class PipelinesApiJobPaths(override val workflowPaths: PipelinesApiWorkflowPaths, + jobKey: BackendJobDescriptorKey, + override val isCallCacheCopyAttempt: Boolean = false +) extends JobPaths { // `jesLogBasename` is a `def` rather than a `val` because it is referenced polymorphically from // the initialization code of the extended `JobPaths` trait, but this class will not have initialized its `val`s @@ -41,8 +44,11 @@ case class PipelinesApiJobPaths(override val workflowPaths: PipelinesApiWorkflow override lazy val customMetadataPaths = Map( CallMetadataKeys.BackendLogsPrefix + ":log" -> jesLogPath ) ++ ( - workflowPaths.monitoringScriptPath map { p => Map(PipelinesApiMetadataKeys.MonitoringScript -> p, - PipelinesApiMetadataKeys.MonitoringLog -> jesMonitoringLogPath) } getOrElse Map.empty + workflowPaths.monitoringScriptPath map { p => + Map(PipelinesApiMetadataKeys.MonitoringScript -> p, + PipelinesApiMetadataKeys.MonitoringLog -> jesMonitoringLogPath + ) + } getOrElse Map.empty ) override lazy val customDetritusPaths: Map[String, Path] = Map( @@ -53,12 +59,11 @@ case class PipelinesApiJobPaths(override val workflowPaths: PipelinesApiWorkflow PipelinesApiJobPaths.JesLogPathKey -> jesLogPath ) - override def standardOutputAndErrorPaths: Map[String, Path] = { + override def standardOutputAndErrorPaths: Map[String, Path] = super.standardOutputAndErrorPaths map { case (k, v) => val updated = 
workflowPaths.standardStreamNameToFileNameMetadataMapper(this, k) k -> v.parent.resolve(updated) } - } override def forCallCacheCopyAttempts: JobPaths = this.copy(isCallCacheCopyAttempt = true) } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiParameters.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiParameters.scala index 88fb0cdaafc..9076e08e0d0 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiParameters.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiParameters.scala @@ -55,12 +55,14 @@ sealed trait PipelinesApiOutput extends PipelinesParameter { final case class PipelinesApiFileInput(name: String, cloudPath: Path, relativeHostPath: Path, - mount: PipelinesApiAttachedDisk) extends PipelinesApiInput + mount: PipelinesApiAttachedDisk +) extends PipelinesApiInput final case class PipelinesApiDirectoryInput(name: String, cloudPath: Path, relativeHostPath: Path, - mount: PipelinesApiAttachedDisk) extends PipelinesApiInput + mount: PipelinesApiAttachedDisk +) extends PipelinesApiInput final case class PipelinesApiFileOutput(name: String, cloudPath: Path, @@ -69,7 +71,8 @@ final case class PipelinesApiFileOutput(name: String, optional: Boolean, secondary: Boolean, uploadPeriod: Option[FiniteDuration] = None, - override val contentType: Option[ContentType] = None) extends PipelinesApiOutput + override val contentType: Option[ContentType] = None +) extends PipelinesApiOutput final case class PipelinesApiDirectoryOutput(name: String, cloudPath: Path, @@ -77,7 +80,8 @@ final case class PipelinesApiDirectoryOutput(name: String, mount: PipelinesApiAttachedDisk, optional: Boolean, secondary: Boolean, - override val contentType: Option[ContentType] = None) extends PipelinesApiOutput + override val contentType: Option[ContentType] = None +) extends PipelinesApiOutput // TODO: Remove when support for V1 is stopped, this is only used to pass the extra_param auth file final case class PipelinesApiLiteralInput(name: String, value: String) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala index d190325f0cd..e4a6d6e2d0d 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala @@ -27,7 +27,8 @@ trait PipelinesApiReferenceFilesMappingOperations { * may take a significant amount of time. 
*/ def generateReferenceFilesMapping(auth: GoogleAuthMode, - referenceDiskLocalizationManifests: List[ManifestFile]): Map[String, PipelinesApiReferenceFilesDisk] = { + referenceDiskLocalizationManifests: List[ManifestFile] + ): Map[String, PipelinesApiReferenceFilesDisk] = { val gcsClient = StorageOptions .newBuilder() .setCredentials(auth.credentials(Set(StorageScopes.DEVSTORAGE_READ_ONLY))) @@ -35,33 +36,39 @@ trait PipelinesApiReferenceFilesMappingOperations { .getService val validReferenceFilesMapIO = referenceDiskLocalizationManifests - .traverse(manifestFile => getMapOfValidReferenceFilePathsToDisks(gcsClient, manifestFile)) - .map(_.flatten.toMap) + .traverse(manifestFile => getMapOfValidReferenceFilePathsToDisks(gcsClient, manifestFile)) + .map(_.flatten.toMap) validReferenceFilesMapIO.unsafeRunSync() } - def getReferenceInputsToMountedPathMappings(referenceFileToDiskImageMapping: Map[String, PipelinesApiReferenceFilesDisk], - inputFiles: List[PipelinesApiInput]): Map[PipelinesApiInput, String] = { - val gcsPathsToInputs = inputFiles.collect { case i if i.cloudPath.isInstanceOf[GcsPath] => (i.cloudPath.asInstanceOf[GcsPath].pathAsString, i) }.toMap + def getReferenceInputsToMountedPathMappings( + referenceFileToDiskImageMapping: Map[String, PipelinesApiReferenceFilesDisk], + inputFiles: List[PipelinesApiInput] + ): Map[PipelinesApiInput, String] = { + val gcsPathsToInputs = inputFiles.collect { + case i if i.cloudPath.isInstanceOf[GcsPath] => (i.cloudPath.asInstanceOf[GcsPath].pathAsString, i) + }.toMap referenceFileToDiskImageMapping.collect { - case (path, disk) if gcsPathsToInputs.keySet.contains(s"gs://$path") => + case (path, disk) if gcsPathsToInputs.keySet.contains(s"gs://$path") => (gcsPathsToInputs(s"gs://$path"), s"${disk.mountPoint.pathAsString}/$path") } } def getReferenceDisksToMount(referenceFileToDiskImageMapping: Map[String, PipelinesApiReferenceFilesDisk], - inputFilePaths: Set[String]): List[PipelinesApiReferenceFilesDisk] = { + inputFilePaths: Set[String] + ): List[PipelinesApiReferenceFilesDisk] = referenceFileToDiskImageMapping.view.filterKeys(key => inputFilePaths.contains(s"gs://$key")).values.toList.distinct - } - private def getReferenceFileToValidatedGcsPathMap(referenceFiles: Set[ReferenceFile]): IO[Map[ReferenceFile, ValidFullGcsPath]] = { - val filesAndValidatedPaths = referenceFiles.map { - referenceFile => (referenceFile, GcsPathBuilder.validateGcsPath(s"gs://${referenceFile.path}")) + private def getReferenceFileToValidatedGcsPathMap( + referenceFiles: Set[ReferenceFile] + ): IO[Map[ReferenceFile, ValidFullGcsPath]] = { + val filesAndValidatedPaths = referenceFiles.map { referenceFile => + (referenceFile, GcsPathBuilder.validateGcsPath(s"gs://${referenceFile.path}")) }.toMap - val filesWithValidPaths = filesAndValidatedPaths.collect { - case (referenceFile, validPath: ValidFullGcsPath) => (referenceFile, validPath) + val filesWithValidPaths = filesAndValidatedPaths.collect { case (referenceFile, validPath: ValidFullGcsPath) => + (referenceFile, validPath) } val filesWithInvalidPaths = filesAndValidatedPaths.collect { case (referenceFile, invalidPath: InvalidFullGcsPath) => (referenceFile, invalidPath) @@ -75,30 +82,30 @@ trait PipelinesApiReferenceFilesMappingOperations { } protected def bulkValidateCrc32cs(gcsClient: Storage, - filesWithValidPaths: Map[ReferenceFile, ValidFullGcsPath]): IO[Map[ReferenceFile, Boolean]] = { + filesWithValidPaths: Map[ReferenceFile, ValidFullGcsPath] + ): IO[Map[ReferenceFile, Boolean]] = IO { val gcsBatch = 
gcsClient.batch() - val filesAndBlobResults = filesWithValidPaths map { - case (referenceFile, ValidFullGcsPath(bucket, path)) => - val blobGetResult = gcsBatch.get(BlobId.of(bucket, path.substring(1)), BlobGetOption.fields(BlobField.CRC32C)) - (referenceFile, blobGetResult) + val filesAndBlobResults = filesWithValidPaths map { case (referenceFile, ValidFullGcsPath(bucket, path)) => + val blobGetResult = gcsBatch.get(BlobId.of(bucket, path.substring(1)), BlobGetOption.fields(BlobField.CRC32C)) + (referenceFile, blobGetResult) } gcsBatch.submit() - filesAndBlobResults map { - case (referenceFile, blobGetResult) => - val crc32cFromManifest = BaseEncoding.base64.encode( - // drop 4 leading bytes from Long crc32c value - // https://stackoverflow.com/a/25111119/1794750 - util.Arrays.copyOfRange(Longs.toByteArray(referenceFile.crc32c), 4, 8) - ) + filesAndBlobResults map { case (referenceFile, blobGetResult) => + val crc32cFromManifest = BaseEncoding.base64.encode( + // drop 4 leading bytes from Long crc32c value + // https://stackoverflow.com/a/25111119/1794750 + util.Arrays.copyOfRange(Longs.toByteArray(referenceFile.crc32c), 4, 8) + ) - (referenceFile, crc32cFromManifest === blobGetResult.get().getCrc32c) + (referenceFile, crc32cFromManifest === blobGetResult.get().getCrc32c) } } - } - private def getMapOfValidReferenceFilePathsToDisks(gcsClient: Storage, manifestFile: ManifestFile): IO[Map[String, PipelinesApiReferenceFilesDisk]] = { + private def getMapOfValidReferenceFilePathsToDisks(gcsClient: Storage, + manifestFile: ManifestFile + ): IO[Map[String, PipelinesApiReferenceFilesDisk]] = { val refDisk = PipelinesApiReferenceFilesDisk(manifestFile.imageIdentifier, manifestFile.diskSizeGb) val allReferenceFilesFromManifestMap = manifestFile.files.map(refFile => (refFile, refDisk)).toMap @@ -111,10 +118,13 @@ trait PipelinesApiReferenceFilesMappingOperations { validReferenceFilesFromManifestMapIo map { validReferenceFilesFromManifestMap => val invalidReferenceFiles = allReferenceFilesFromManifestMap.keySet -- validReferenceFilesFromManifestMap.keySet if (invalidReferenceFiles.nonEmpty) { - logger.warn(s"The following files listed in references manifest have checksum mismatch with actual files in GCS: ${invalidReferenceFiles.mkString(",")}") + logger.warn( + s"The following files listed in references manifest have checksum mismatch with actual files in GCS: ${invalidReferenceFiles + .mkString(",")}" + ) } - validReferenceFilesFromManifestMap.map { - case (refFile, disk) => (refFile.path, disk) + validReferenceFilesFromManifestMap.map { case (refFile, disk) => + (refFile.path, disk) }.toMap } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala index db87797de37..3e2ee2446e0 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala @@ -35,7 +35,10 @@ object GpuResource { } } -final case class GpuResource(gpuType: GpuType, gpuCount: Int Refined Positive, nvidiaDriverVersion: String = GpuResource.DefaultNvidiaDriverVersion) +final case class GpuResource(gpuType: GpuType, + gpuCount: Int Refined Positive, + nvidiaDriverVersion: String = 
GpuResource.DefaultNvidiaDriverVersion +) final case class PipelinesApiRuntimeAttributes(cpu: Int Refined Positive, cpuPlatform: Option[String], @@ -51,7 +54,8 @@ final case class PipelinesApiRuntimeAttributes(cpu: Int Refined Positive, noAddress: Boolean, googleLegacyMachineSelection: Boolean, useDockerImageCache: Option[Boolean], - checkpointFilename: Option[String]) + checkpointFilename: Option[String] +) object PipelinesApiRuntimeAttributes { @@ -81,138 +85,193 @@ object PipelinesApiRuntimeAttributes { val CpuPlatformIntelIceLakeValue = "Intel Ice Lake" val UseDockerImageCacheKey = "useDockerImageCache" - private val useDockerImageCacheValidationInstance = new BooleanRuntimeAttributesValidation(UseDockerImageCacheKey).optional + private val useDockerImageCacheValidationInstance = new BooleanRuntimeAttributesValidation( + UseDockerImageCacheKey + ).optional val CheckpointFileKey = "checkpointFile" private val checkpointFileValidationInstance = new StringRuntimeAttributesValidation(CheckpointFileKey).optional private val MemoryDefaultValue = "2048 MB" - private def cpuValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = CpuValidation.instance - .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) + private def cpuValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = + CpuValidation.instance + .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) - private def cpuPlatformValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = cpuPlatformValidationInstance + private def cpuPlatformValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = + cpuPlatformValidationInstance - private def cpuMinValidation(runtimeConfig: Option[Config]):RuntimeAttributesValidation[Int Refined Positive] = CpuValidation.instanceMin - .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) + private def cpuMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int Refined Positive] = + CpuValidation.instanceMin + .withDefault(CpuValidation.configDefaultWomValue(runtimeConfig) getOrElse CpuValidation.defaultMin) - private def gpuTypeValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[GpuType] = GpuTypeValidation.optional + private def gpuTypeValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[GpuType] = + GpuTypeValidation.optional val GpuDriverVersionKey = "nvidiaDriverVersion" - private def gpuDriverValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = new StringRuntimeAttributesValidation(GpuDriverVersionKey).optional + private def gpuDriverValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = + new StringRuntimeAttributesValidation(GpuDriverVersionKey).optional - private def gpuCountValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optional + private def gpuCountValidation( + runtimeConfig: Option[Config] + ): OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optional - private def gpuMinValidation(runtimeConfig: Option[Config]):OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optionalMin + private def gpuMinValidation( + runtimeConfig: Option[Config] + ): 
OptionalRuntimeAttributesValidation[Int Refined Positive] = GpuValidation.optionalMin private def failOnStderrValidation(runtimeConfig: Option[Config]) = FailOnStderrValidation.default(runtimeConfig) - private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = ContinueOnReturnCodeValidation.default(runtimeConfig) + private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = + ContinueOnReturnCodeValidation.default(runtimeConfig) - private def disksValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Seq[PipelinesApiAttachedDisk]] = DisksValidation + private def disksValidation( + runtimeConfig: Option[Config] + ): RuntimeAttributesValidation[Seq[PipelinesApiAttachedDisk]] = DisksValidation .withDefault(DisksValidation.configDefaultWomValue(runtimeConfig) getOrElse DisksDefaultValue) - private def zonesValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Vector[String]] = ZonesValidation - .withDefault(ZonesValidation.configDefaultWomValue(runtimeConfig) getOrElse ZonesDefaultValue) + private def zonesValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Vector[String]] = + ZonesValidation + .withDefault(ZonesValidation.configDefaultWomValue(runtimeConfig) getOrElse ZonesDefaultValue) - private def preemptibleValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = preemptibleValidationInstance - .withDefault(preemptibleValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse PreemptibleDefaultValue) + private def preemptibleValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = + preemptibleValidationInstance + .withDefault(preemptibleValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse PreemptibleDefaultValue) - private def memoryValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = { + private def memoryValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = MemoryValidation.withDefaultMemory( RuntimeAttributesKeys.MemoryKey, - MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, runtimeConfig) getOrElse MemoryDefaultValue) - } + MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryKey, runtimeConfig) getOrElse MemoryDefaultValue + ) - private def memoryMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = { - MemoryValidation.withDefaultMemory( - RuntimeAttributesKeys.MemoryMinKey, - MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryMinKey, runtimeConfig) getOrElse MemoryDefaultValue) - } + private def memoryMinValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[MemorySize] = + MemoryValidation.withDefaultMemory(RuntimeAttributesKeys.MemoryMinKey, + MemoryValidation.configDefaultString(RuntimeAttributesKeys.MemoryMinKey, + runtimeConfig + ) getOrElse MemoryDefaultValue + ) - private def bootDiskSizeValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = bootDiskValidationInstance - .withDefault(bootDiskValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse BootDiskDefaultValue) + private def bootDiskSizeValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Int] = + bootDiskValidationInstance + .withDefault(bootDiskValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse BootDiskDefaultValue) - private def noAddressValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = noAddressValidationInstance - 
.withDefault(noAddressValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse NoAddressDefaultValue) + private def noAddressValidation(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = + noAddressValidationInstance + .withDefault(noAddressValidationInstance.configDefaultWomValue(runtimeConfig) getOrElse NoAddressDefaultValue) - private def useDockerImageCacheValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = + private def useDockerImageCacheValidation( + runtimeConfig: Option[Config] + ): OptionalRuntimeAttributesValidation[Boolean] = useDockerImageCacheValidationInstance - private val dockerValidation: RuntimeAttributesValidation[String] = DockerValidation.instance - private val outDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = { + private val outDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = InformationValidation.optional(RuntimeAttributesKeys.OutDirMinKey, MemoryUnit.MB, allowZero = true) - } - private val tmpDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = { + private val tmpDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = InformationValidation.optional(RuntimeAttributesKeys.TmpDirMinKey, MemoryUnit.MB, allowZero = true) - } - private val inputDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = { + private val inputDirMinValidation: OptionalRuntimeAttributesValidation[MemorySize] = InformationValidation.optional(RuntimeAttributesKeys.DnaNexusInputDirMinKey, MemoryUnit.MB, allowZero = true) - } - def runtimeAttributesBuilder(jesConfiguration: PipelinesApiConfiguration): StandardValidatedRuntimeAttributesBuilder = { + def runtimeAttributesBuilder( + jesConfiguration: PipelinesApiConfiguration + ): StandardValidatedRuntimeAttributesBuilder = { val runtimeConfig = jesConfiguration.runtimeConfig - StandardValidatedRuntimeAttributesBuilder.default(runtimeConfig).withValidation( - gpuCountValidation(runtimeConfig), - gpuTypeValidation(runtimeConfig), - gpuDriverValidation(runtimeConfig), - cpuValidation(runtimeConfig), - cpuMinValidation(runtimeConfig), - gpuMinValidation(runtimeConfig), - disksValidation(runtimeConfig), - zonesValidation(runtimeConfig), - preemptibleValidation(runtimeConfig), - memoryValidation(runtimeConfig), - memoryMinValidation(runtimeConfig), - bootDiskSizeValidation(runtimeConfig), - noAddressValidation(runtimeConfig), - cpuPlatformValidation(runtimeConfig), - useDockerImageCacheValidation(runtimeConfig), - checkpointFileValidationInstance, - dockerValidation, - outDirMinValidation, - tmpDirMinValidation, - inputDirMinValidation - ) + StandardValidatedRuntimeAttributesBuilder + .default(runtimeConfig) + .withValidation( + gpuCountValidation(runtimeConfig), + gpuTypeValidation(runtimeConfig), + gpuDriverValidation(runtimeConfig), + cpuValidation(runtimeConfig), + cpuMinValidation(runtimeConfig), + gpuMinValidation(runtimeConfig), + disksValidation(runtimeConfig), + zonesValidation(runtimeConfig), + preemptibleValidation(runtimeConfig), + memoryValidation(runtimeConfig), + memoryMinValidation(runtimeConfig), + bootDiskSizeValidation(runtimeConfig), + noAddressValidation(runtimeConfig), + cpuPlatformValidation(runtimeConfig), + useDockerImageCacheValidation(runtimeConfig), + checkpointFileValidationInstance, + dockerValidation, + outDirMinValidation, + tmpDirMinValidation, + inputDirMinValidation + ) } - def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, runtimeAttrsConfig: Option[Config], 
googleLegacyMachineSelection: Boolean = false): PipelinesApiRuntimeAttributes = { - val cpu: Int Refined Positive = RuntimeAttributesValidation.extract(cpuValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val cpuPlatform: Option[String] = RuntimeAttributesValidation.extractOption(cpuPlatformValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) + def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, + runtimeAttrsConfig: Option[Config], + googleLegacyMachineSelection: Boolean = false + ): PipelinesApiRuntimeAttributes = { + val cpu: Int Refined Positive = + RuntimeAttributesValidation.extract(cpuValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val cpuPlatform: Option[String] = RuntimeAttributesValidation.extractOption( + cpuPlatformValidation(runtimeAttrsConfig).key, + validatedRuntimeAttributes + ) - val checkpointFileName: Option[String] = RuntimeAttributesValidation.extractOption(checkpointFileValidationInstance.key, validatedRuntimeAttributes) + val checkpointFileName: Option[String] = + RuntimeAttributesValidation.extractOption(checkpointFileValidationInstance.key, validatedRuntimeAttributes) // GPU - lazy val gpuType: Option[GpuType] = RuntimeAttributesValidation.extractOption(gpuTypeValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) - lazy val gpuCount: Option[Int Refined Positive] = RuntimeAttributesValidation.extractOption(gpuCountValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) - lazy val gpuDriver: Option[String] = RuntimeAttributesValidation.extractOption(gpuDriverValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) + lazy val gpuType: Option[GpuType] = + RuntimeAttributesValidation.extractOption(gpuTypeValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) + lazy val gpuCount: Option[Int Refined Positive] = + RuntimeAttributesValidation.extractOption(gpuCountValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) + lazy val gpuDriver: Option[String] = + RuntimeAttributesValidation.extractOption(gpuDriverValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) val gpuResource: Option[GpuResource] = if (gpuType.isDefined || gpuCount.isDefined || gpuDriver.isDefined) { - Option(GpuResource(gpuType.getOrElse(GpuType.DefaultGpuType), gpuCount.getOrElse(GpuType.DefaultGpuCount), gpuDriver.getOrElse(GpuResource.DefaultNvidiaDriverVersion))) + Option( + GpuResource(gpuType.getOrElse(GpuType.DefaultGpuType), + gpuCount.getOrElse(GpuType.DefaultGpuCount), + gpuDriver.getOrElse(GpuResource.DefaultNvidiaDriverVersion) + ) + ) } else { None } val zones: Vector[String] = RuntimeAttributesValidation.extract(ZonesValidation, validatedRuntimeAttributes) - val preemptible: Int = RuntimeAttributesValidation.extract(preemptibleValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val bootDiskSize: Int = RuntimeAttributesValidation.extract(bootDiskSizeValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val memory: MemorySize = RuntimeAttributesValidation.extract(memoryValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val disks: Seq[PipelinesApiAttachedDisk] = RuntimeAttributesValidation.extract(disksValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val preemptible: Int = + RuntimeAttributesValidation.extract(preemptibleValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val bootDiskSize: Int = + RuntimeAttributesValidation.extract(bootDiskSizeValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val memory: MemorySize = + 
RuntimeAttributesValidation.extract(memoryValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val disks: Seq[PipelinesApiAttachedDisk] = + RuntimeAttributesValidation.extract(disksValidation(runtimeAttrsConfig), validatedRuntimeAttributes) val docker: String = RuntimeAttributesValidation.extract(dockerValidation, validatedRuntimeAttributes) - val failOnStderr: Boolean = RuntimeAttributesValidation.extract(failOnStderrValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract(continueOnReturnCodeValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val noAddress: Boolean = RuntimeAttributesValidation.extract(noAddressValidation(runtimeAttrsConfig), validatedRuntimeAttributes) - val useDockerImageCache: Option[Boolean] = RuntimeAttributesValidation.extractOption(useDockerImageCacheValidation(runtimeAttrsConfig).key, validatedRuntimeAttributes) - - val outDirMin: Option[MemorySize] = RuntimeAttributesValidation.extractOption(outDirMinValidation.key, validatedRuntimeAttributes) - val tmpDirMin: Option[MemorySize] = RuntimeAttributesValidation.extractOption(tmpDirMinValidation.key, validatedRuntimeAttributes) - val inputDirMin: Option[MemorySize] = RuntimeAttributesValidation.extractOption(inputDirMinValidation.key, validatedRuntimeAttributes) + val failOnStderr: Boolean = + RuntimeAttributesValidation.extract(failOnStderrValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val continueOnReturnCode: ContinueOnReturnCode = RuntimeAttributesValidation.extract( + continueOnReturnCodeValidation(runtimeAttrsConfig), + validatedRuntimeAttributes + ) + val noAddress: Boolean = + RuntimeAttributesValidation.extract(noAddressValidation(runtimeAttrsConfig), validatedRuntimeAttributes) + val useDockerImageCache: Option[Boolean] = RuntimeAttributesValidation.extractOption( + useDockerImageCacheValidation(runtimeAttrsConfig).key, + validatedRuntimeAttributes + ) - val totalExecutionDiskSizeBytes = List(inputDirMin.map(_.bytes), outDirMin.map(_.bytes), tmpDirMin.map(_.bytes)).flatten.fold(MemorySize(0, MemoryUnit.Bytes).bytes)(_ + _) + val outDirMin: Option[MemorySize] = + RuntimeAttributesValidation.extractOption(outDirMinValidation.key, validatedRuntimeAttributes) + val tmpDirMin: Option[MemorySize] = + RuntimeAttributesValidation.extractOption(tmpDirMinValidation.key, validatedRuntimeAttributes) + val inputDirMin: Option[MemorySize] = + RuntimeAttributesValidation.extractOption(inputDirMinValidation.key, validatedRuntimeAttributes) + + val totalExecutionDiskSizeBytes = List(inputDirMin.map(_.bytes), + outDirMin.map(_.bytes), + tmpDirMin.map(_.bytes) + ).flatten.fold(MemorySize(0, MemoryUnit.Bytes).bytes)(_ + _) val totalExecutionDiskSize = MemorySize(totalExecutionDiskSizeBytes, MemoryUnit.Bytes) val adjustedDisks = disks.adjustWorkingDiskWithNewMin(totalExecutionDiskSize, ()) @@ -264,24 +323,23 @@ object DisksValidation extends RuntimeAttributesValidation[Seq[PipelinesApiAttac } private def validateLocalDisks(disks: Seq[String]): ErrorOr[Seq[PipelinesApiAttachedDisk]] = { - val diskNels: ErrorOr[Seq[PipelinesApiAttachedDisk]] = disks.toList.traverse[ErrorOr, PipelinesApiAttachedDisk](validateLocalDisk) + val diskNels: ErrorOr[Seq[PipelinesApiAttachedDisk]] = + disks.toList.traverse[ErrorOr, PipelinesApiAttachedDisk](validateLocalDisk) val defaulted: ErrorOr[Seq[PipelinesApiAttachedDisk]] = addDefault(diskNels) defaulted } - private def validateLocalDisk(disk: String): 
ErrorOr[PipelinesApiAttachedDisk] = { + private def validateLocalDisk(disk: String): ErrorOr[PipelinesApiAttachedDisk] = PipelinesApiAttachedDisk.parse(disk) match { case scala.util.Success(attachedDisk) => attachedDisk.validNel case scala.util.Failure(ex) => ex.getMessage.invalidNel } - } - private def addDefault(disksNel: ErrorOr[Seq[PipelinesApiAttachedDisk]]): ErrorOr[Seq[PipelinesApiAttachedDisk]] = { + private def addDefault(disksNel: ErrorOr[Seq[PipelinesApiAttachedDisk]]): ErrorOr[Seq[PipelinesApiAttachedDisk]] = disksNel map { case disks if disks.exists(_.name == PipelinesApiWorkingDisk.Name) => disks case disks => disks :+ PipelinesApiWorkingDisk.Default } - } override protected def missingValueMessage: String = s"Expecting $key runtime attribute to be a comma separated String or Array[String]" diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPaths.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPaths.scala index 959112587ab..aad39a6f565 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPaths.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPaths.scala @@ -20,34 +20,41 @@ object PipelinesApiWorkflowPaths { private val AuthFilePathOptionKey = "auth_bucket" private val GcsPrefix = "gs://" - private[common] def callCachePathPrefixFromExecutionRoot(executionRoot: String): String = { + private[common] def callCachePathPrefixFromExecutionRoot(executionRoot: String): String = // If the root looks like gs://bucket/stuff-under-bucket this should return gs://bucket GcsPrefix + executionRoot.substring(GcsPrefix.length).takeWhile(_ != '/') - } } -case class PipelinesApiWorkflowPaths(workflowDescriptor: BackendWorkflowDescriptor, - gcsCredentials: Credentials, - genomicsCredentials: Credentials, - papiConfiguration: PipelinesApiConfiguration, - override val pathBuilders: PathBuilders, - // This allows for the adjustment of the standard stream file names in PAPI v1 to match the - // combined controller + job standard output and error files. PAPI v1 controls the periodic - // delocalization of these files so the metadata Cromwell publishes for these files needs - // to match the PAPI v1 names. - standardStreamNameToFileNameMetadataMapper: (PipelinesApiJobPaths, String) => String)(implicit ec: ExecutionContext) extends WorkflowPaths { +case class PipelinesApiWorkflowPaths( + workflowDescriptor: BackendWorkflowDescriptor, + gcsCredentials: Credentials, + genomicsCredentials: Credentials, + papiConfiguration: PipelinesApiConfiguration, + override val pathBuilders: PathBuilders, + // This allows for the adjustment of the standard stream file names in PAPI v1 to match the + // combined controller + job standard output and error files. PAPI v1 controls the periodic + // delocalization of these files so the metadata Cromwell publishes for these files needs + // to match the PAPI v1 names. 
+ standardStreamNameToFileNameMetadataMapper: (PipelinesApiJobPaths, String) => String +)(implicit ec: ExecutionContext) + extends WorkflowPaths { override lazy val executionRootString: String = workflowDescriptor.workflowOptions.getOrElse(PipelinesApiWorkflowPaths.GcsRootOptionKey, papiConfiguration.root) - override lazy val callCacheRootPrefix: Option[String] = Option(callCachePathPrefixFromExecutionRoot(executionRootString)) + override lazy val callCacheRootPrefix: Option[String] = Option( + callCachePathPrefixFromExecutionRoot(executionRootString) + ) private val workflowOptions: WorkflowOptions = workflowDescriptor.workflowOptions val gcsAuthFilePath: Path = { // The default auth file bucket is always at the root of the root workflow - val defaultBucket = executionRoot.resolve(workflowDescriptor.rootWorkflow.name).resolve(workflowDescriptor.rootWorkflowId.toString) - val bucket = workflowDescriptor.workflowOptions.get(PipelinesApiWorkflowPaths.AuthFilePathOptionKey) getOrElse defaultBucket.pathAsString + val defaultBucket = + executionRoot.resolve(workflowDescriptor.rootWorkflow.name).resolve(workflowDescriptor.rootWorkflowId.toString) + val bucket = workflowDescriptor.workflowOptions.get( + PipelinesApiWorkflowPaths.AuthFilePathOptionKey + ) getOrElse defaultBucket.pathAsString /* * This is an "exception". The filesystem used here is built from genomicsAuth @@ -62,23 +69,24 @@ case class PipelinesApiWorkflowPaths(workflowDescriptor: BackendWorkflowDescript Option(papiConfiguration.papiAttributes.project) ) - val authBucket = pathBuilderWithGenomicsAuth.build(bucket) recover { - case ex => throw new Exception(s"Invalid gcs auth_bucket path $bucket", ex) + val authBucket = pathBuilderWithGenomicsAuth.build(bucket) recover { case ex => + throw new Exception(s"Invalid gcs auth_bucket path $bucket", ex) } get authBucket.resolve(s"${workflowDescriptor.rootWorkflowId}_auth.json") } - val monitoringScriptPath: Option[Path] = workflowOptions.get(WorkflowOptionKeys.MonitoringScript).toOption map { path => - // Fail here if the path exists but can't be built - getPath(path).get + val monitoringScriptPath: Option[Path] = workflowOptions.get(WorkflowOptionKeys.MonitoringScript).toOption map { + path => + // Fail here if the path exists but can't be built + getPath(path).get } - override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): PipelinesApiJobPaths = { + override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): PipelinesApiJobPaths = new PipelinesApiJobPaths(workflowPaths.asInstanceOf[PipelinesApiWorkflowPaths], jobKey) - } - override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = this.copy(workflowDescriptor = workflowDescriptor) + override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = + this.copy(workflowDescriptor = workflowDescriptor) override def config: Config = papiConfiguration.configurationDescriptor.backendConfig } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala index 01819ce0082..50a372bc027 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala +++ 
b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala @@ -3,7 +3,10 @@ package cromwell.google.pipelines.common import cats.syntax.apply._ import cats.syntax.validated._ import common.validation.ErrorOr.ErrorOr -import cromwell.backend.google.pipelines.common.PipelinesApiBackendLifecycleActorFactory.{preemptionCountKey, unexpectedRetryCountKey} +import cromwell.backend.google.pipelines.common.PipelinesApiBackendLifecycleActorFactory.{ + preemptionCountKey, + unexpectedRetryCountKey +} import cromwell.services.keyvalue.KeyValueServiceActor._ import scala.util.{Failure, Success, Try} @@ -14,9 +17,10 @@ object PreviousRetryReasons { def tryApply(prefetchedKvEntries: Map[String, KvResponse], attemptNumber: Int): ErrorOr[PreviousRetryReasons] = { val validatedPreemptionCount = validatedKvResponse(prefetchedKvEntries.get(preemptionCountKey), preemptionCountKey) - val validatedUnexpectedRetryCount = validatedKvResponse(prefetchedKvEntries.get(unexpectedRetryCountKey), unexpectedRetryCountKey) + val validatedUnexpectedRetryCount = + validatedKvResponse(prefetchedKvEntries.get(unexpectedRetryCountKey), unexpectedRetryCountKey) - (validatedPreemptionCount, validatedUnexpectedRetryCount) mapN { PreviousRetryReasons.apply } + (validatedPreemptionCount, validatedUnexpectedRetryCount) mapN PreviousRetryReasons.apply } def apply(knownPreemptedCount: Int, knownUnexpectedRetryCount: Int, attempt: Int): PreviousRetryReasons = { @@ -35,10 +39,9 @@ object PreviousRetryReasons { case None => s"Programmer Error: Engine made no effort to prefetch $fromKey".invalidNel } - private def validatedInt(s: String, fromKey: String): ErrorOr[Int] = { + private def validatedInt(s: String, fromKey: String): ErrorOr[Int] = Try(s.toInt) match { case Success(i) => i.validNel case Failure(_) => s"Unexpected value found in the KV store: $fromKey='$s'".invalidNel } - } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValues.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValues.scala index 53e4203c6a9..267d360376a 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValues.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValues.scala @@ -3,6 +3,7 @@ package cromwell.backend.google.pipelines.common import cromwell.backend.google.pipelines.common.VpcAndSubnetworkProjectLabelValues._ final case class VpcAndSubnetworkProjectLabelValues(vpcName: String, subnetNameOpt: Option[String]) { + /** * Returns a qualified network name replacing the string `\${projectId}` in the network name if found. */ @@ -20,9 +21,8 @@ final case class VpcAndSubnetworkProjectLabelValues(vpcName: String, subnetNameO /** * Replaces the string `\${projectId}` in the subnet name if found. 
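   * For example (values illustrative, not taken from this patch): a subnet label such as
   * `projects/\${projectId}/regions/us-central1/subnetworks/my-subnet` would become
   * `projects/my-gcp-project/regions/us-central1/subnetworks/my-subnet`.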
*/
-  def subnetNameOption(projectId: String): Option[String] = {
+  def subnetNameOption(projectId: String): Option[String] =
     subnetNameOpt map { _.replace(ProjectIdToken, projectId) }
-  }
 }
 
 object VpcAndSubnetworkProjectLabelValues {
diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionCommands.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionCommands.scala
index 2357db6cf25..54af3200acb 100644
--- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionCommands.scala
+++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionCommands.scala
@@ -33,19 +33,20 @@ object ActionCommands {
     def escape: String = StringEscapeUtils.escapeXSI(path.pathAsString)
   }
 
-  private def makeContentTypeFlag(contentType: Option[ContentType]) = contentType.map(ct => s"""-h "Content-Type: $ct"""").getOrElse("")
+  private def makeContentTypeFlag(contentType: Option[ContentType]) =
+    contentType.map(ct => s"""-h "Content-Type: $ct"""").getOrElse("")
 
   def makeContainerDirectory(containerPath: Path) = s"mkdir -p ${containerPath.escape}"
 
-  def delocalizeDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])
-                         (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = {
+  def delocalizeDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit
+    gcsTransferConfiguration: GcsTransferConfiguration
+  ): String =
     retry {
       recoverRequesterPaysError(cloudPath) { flag =>
         s"rm -f $$HOME/.config/gcloud/gce && " +
           s"gsutil $flag ${contentType |> makeContentTypeFlag} -m rsync -r ${containerPath.escape} ${cloudPath.escape}"
       }
     }
-  }
 
   /**
    * As per https://cloud.google.com/storage/docs/gsutil/addlhelp/HowSubdirectoriesWork, rule #2
@@ -59,29 +60,29 @@ object ActionCommands {
    * By instead using the parent directory (and ensuring it ends with a slash), gsutil will treat that as a directory and put the file under it.
    * So the final gsutil command will look something like gsutil cp /local/file.txt gs://bucket/subdir/
    */
-  def delocalizeFile(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])
-                    (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = {
+  def delocalizeFile(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit
+    gcsTransferConfiguration: GcsTransferConfiguration
+  ): String =
     retry {
       recoverRequesterPaysError(cloudPath) { flag =>
         s"rm -f $$HOME/.config/gcloud/gce && " +
           s"gsutil $flag ${contentType |> makeContentTypeFlag} cp ${containerPath.escape} ${cloudPath.parent.escape.ensureSlashed}"
       }
     }
-  }
 
   /**
    * delocalizeFile necessarily copies the file to the same name. Use this if you want to specify a name different from the original
    * Make sure that there's no object named "yourfinalname_something" (see above) in the same cloud directory. 
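   * For example (paths illustrative, not taken from this patch): delocalizing
   * /cromwell_root/out.txt to gs://bucket/dir/renamed.txt runs roughly
   * `gsutil cp /cromwell_root/out.txt gs://bucket/dir/renamed.txt`, whereas delocalizeFile
   * would instead target the parent directory gs://bucket/dir/ as described above.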
*/ - def delocalizeFileTo(containerPath: Path, cloudPath: Path, contentType: Option[ContentType]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def delocalizeFileTo(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"rm -f $$HOME/.config/gcloud/gce && " + s"gsutil $flag ${contentType |> makeContentTypeFlag} cp ${containerPath.escape} ${cloudPath.escape}" } } - } def ifExist(containerPath: Path)(f: => String) = s"if [ -e ${containerPath.escape} ]; then $f; fi" @@ -93,7 +94,7 @@ object ActionCommands { | sleep ${duration.toSeconds} |done""".stripMargin - def retry(f: => String)(implicit gcsTransferConfiguration: GcsTransferConfiguration, wait: FiniteDuration): String = { + def retry(f: => String)(implicit gcsTransferConfiguration: GcsTransferConfiguration, wait: FiniteDuration): String = s"""for i in $$(seq ${gcsTransferConfiguration.transferAttempts}); do | ( | $f @@ -108,35 +109,34 @@ object ActionCommands { | fi |done |exit "$$RC"""".stripMargin - } - def delocalizeFileOrDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def delocalizeFileOrDirectory(containerPath: Path, cloudPath: Path, contentType: Option[ContentType])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = s"""if [ -d ${containerPath.escape} ]; then | ${delocalizeDirectory(containerPath, cloudPath, contentType)} |else | ${delocalizeFile(containerPath, cloudPath, contentType)} |fi""".stripMargin - } - def localizeDirectory(cloudPath: Path, containerPath: Path) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def localizeDirectory(cloudPath: Path, containerPath: Path)(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"${containerPath |> makeContainerDirectory} && " + s"rm -f $$HOME/.config/gcloud/gce && gsutil $flag -m rsync -r ${cloudPath.escape} ${containerPath.escape}" } } - } - def localizeFile(cloudPath: Path, containerPath: Path) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + def localizeFile(cloudPath: Path, containerPath: Path)(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): String = retry { recoverRequesterPaysError(cloudPath) { flag => s"rm -f $$HOME/.config/gcloud/gce && gsutil $flag cp ${cloudPath.escape} ${containerPath.escape}" } } - } def recoverRequesterPaysError(path: Path)(f: String => String): String = { val commandWithoutProject = f("") diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionLabels.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionLabels.scala index 09941f5b6ba..6e136215398 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionLabels.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionLabels.scala @@ -2,6 +2,7 @@ package cromwell.backend.google.pipelines.common.action object ActionLabels { object Key { + /** * Very short description of the action */ diff --git 
a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionUtils.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionUtils.scala index a30e27f48ca..b05971a13ad 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionUtils.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/action/ActionUtils.scala @@ -5,6 +5,7 @@ import org.apache.commons.text.StringEscapeUtils import net.ceedubs.ficus.Ficus._ object ActionUtils { + /** Image to use for ssh access. */ val sshImage = "gcr.io/cloud-genomics-pipelines/tools" @@ -72,7 +73,8 @@ object ActionUtils { */ private val backgroundActionTerminationGraceTime = 10 - val terminateAllBackgroundActionsCommand: String = s"kill -TERM -1 && sleep $backgroundActionTerminationGraceTime || true" + val terminateAllBackgroundActionsCommand: String = + s"kill -TERM -1 && sleep $backgroundActionTerminationGraceTime || true" def timestampedMessage(message: String): String = s"""printf '%s %s\\n' "$$(date -u '+%Y/%m/%d %H:%M:%S')" ${shellEscaped(message)}""" @@ -88,12 +90,12 @@ object ActionUtils { checkpointingStart: List[Action], checkpointingShutdown: List[Action], sshAccess: List[Action], - isBackground: Action => Boolean, - ): List[Action] = { + isBackground: Action => Boolean + ): List[Action] = { val toBeSortedActions = localization ++ userAction ++ memoryRetryAction ++ deLocalization - val sortedActions = toBeSortedActions.sortWith({ - case (action, _) => isBackground(action) - }) + val sortedActions = toBeSortedActions.sortWith { case (action, _) => + isBackground(action) + } sshAccess ++ containerSetup ++ monitoringSetup ++ checkpointingStart ++ sortedActions ++ checkpointingShutdown ++ monitoringShutdown } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiBatchHandler.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiBatchHandler.scala index c88d52e33c6..0dbb4e50466 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiBatchHandler.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiBatchHandler.scala @@ -9,5 +9,8 @@ import scala.util.Try trait PipelinesApiBatchHandler { def makeBatchRequest: BatchRequest - def enqueue[T <: PAPIApiRequest](papiApiRequest: T, batchRequest: BatchRequest, pollingManager: ActorRef): Future[Try[Unit]] + def enqueue[T <: PAPIApiRequest](papiApiRequest: T, + batchRequest: BatchRequest, + pollingManager: ActorRef + ): Future[Try[Unit]] } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiFactoryInterface.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiFactoryInterface.scala index fe5908fae9b..4e0ec2cba5b 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiFactoryInterface.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiFactoryInterface.scala @@ -16,7 +16,7 @@ abstract class PipelinesApiFactoryInterface 
{ val httpCredentialsAdapter = new HttpCredentialsAdapter(credentials) build(httpCredentialsAdapter) } - + protected def build(httpRequestInitializer: HttpRequestInitializer): PipelinesApiRequestFactory def usesEncryptedDocker: Boolean diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestFactory.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestFactory.scala index 2ed2e50c6cf..71bc4203b62 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestFactory.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestFactory.scala @@ -31,9 +31,9 @@ object PipelinesApiRequestFactory { * Input parameters that are not strictly needed by the user's command but are Cromwell byproducts. */ case class DetritusInputParameters( - executionScriptInputParameter: PipelinesApiFileInput, - monitoringScriptInputParameter: Option[PipelinesApiFileInput] - ) { + executionScriptInputParameter: PipelinesApiFileInput, + monitoringScriptInputParameter: Option[PipelinesApiFileInput] + ) { def all: List[PipelinesApiFileInput] = List(executionScriptInputParameter) ++ monitoringScriptInputParameter } @@ -41,11 +41,12 @@ object PipelinesApiRequestFactory { * Output parameters that are not produced by the user's command but are Cromwell byproducts. */ case class DetritusOutputParameters( - monitoringScriptOutputParameter: Option[PipelinesApiFileOutput], - rcFileOutputParameter: PipelinesApiFileOutput, - memoryRetryRCFileOutputParameter: PipelinesApiFileOutput - ) { - def all: List[PipelinesApiFileOutput] = memoryRetryRCFileOutputParameter :: List(rcFileOutputParameter) ++ monitoringScriptOutputParameter + monitoringScriptOutputParameter: Option[PipelinesApiFileOutput], + rcFileOutputParameter: PipelinesApiFileOutput, + memoryRetryRCFileOutputParameter: PipelinesApiFileOutput + ) { + def all: List[PipelinesApiFileOutput] = + memoryRetryRCFileOutputParameter :: List(rcFileOutputParameter) ++ monitoringScriptOutputParameter } /** @@ -54,12 +55,12 @@ object PipelinesApiRequestFactory { * to treat them differently. 
*/ case class InputOutputParameters( - detritusInputParameters: DetritusInputParameters, - jobInputParameters: List[PipelinesApiInput], - jobOutputParameters: List[PipelinesApiOutput], - detritusOutputParameters: DetritusOutputParameters, - literalInputParameters: List[PipelinesApiLiteralInput] - ) { + detritusInputParameters: DetritusInputParameters, + jobInputParameters: List[PipelinesApiInput], + jobOutputParameters: List[PipelinesApiOutput], + detritusOutputParameters: DetritusOutputParameters, + literalInputParameters: List[PipelinesApiLiteralInput] + ) { lazy val fileInputParameters: List[PipelinesApiInput] = jobInputParameters ++ detritusInputParameters.all lazy val fileOutputParameters: List[PipelinesApiOutput] = detritusOutputParameters.all ++ jobOutputParameters } @@ -92,7 +93,7 @@ object PipelinesApiRequestFactory { enableSshAccess: Boolean, vpcNetworkAndSubnetworkProjectLabels: Option[VpcAndSubnetworkProjectLabelValues], dockerImageCacheDiskOpt: Option[String] - ) { + ) { def literalInputs = inputOutputParameters.literalInputParameters def inputParameters = inputOutputParameters.fileInputParameters def outputParameters = inputOutputParameters.fileOutputParameters diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestHandler.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestHandler.scala index efae1a922bb..e5426b1c339 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestHandler.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestHandler.scala @@ -10,16 +10,19 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.Try trait PipelinesApiRequestHandler { - def initializeHttpRequest(batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration) - (httpRequest: HttpRequest): Unit = { - batchRequestTimeoutConfiguration.readTimeoutMillis foreach { - timeout => httpRequest.setReadTimeout(timeout.value) + def initializeHttpRequest( + batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration + )(httpRequest: HttpRequest): Unit = { + batchRequestTimeoutConfiguration.readTimeoutMillis foreach { timeout => + httpRequest.setReadTimeout(timeout.value) } - batchRequestTimeoutConfiguration.connectTimeoutMillis foreach { - timeout => httpRequest.setConnectTimeout(timeout.value) + batchRequestTimeoutConfiguration.connectTimeoutMillis foreach { timeout => + httpRequest.setConnectTimeout(timeout.value) } } def makeBatchRequest: BatchRequest - def enqueue[T <: PAPIApiRequest](papiApiRequest: T, batchRequest: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] + def enqueue[T <: PAPIApiRequest](papiApiRequest: T, batchRequest: BatchRequest, pollingManager: ActorRef)(implicit + ec: ExecutionContext + ): Future[Try[Unit]] } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala index 81e3c2d9fda..f67b027f0c4 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala +++ 
b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala @@ -28,53 +28,62 @@ import scala.util.control.NoStackTrace /** * Holds a set of PAPI request until a PipelinesApiRequestActor pulls the work. */ -class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: Int Refined Positive, override val serviceRegistryActor: ActorRef) - (implicit batchHandler: PipelinesApiRequestHandler) extends Actor - with ActorLogging with PapiInstrumentation with CromwellInstrumentationScheduler with Timers { +class PipelinesApiRequestManager(val qps: Int Refined Positive, + requestWorkers: Int Refined Positive, + override val serviceRegistryActor: ActorRef +)(implicit batchHandler: PipelinesApiRequestHandler) + extends Actor + with ActorLogging + with PapiInstrumentation + with CromwellInstrumentationScheduler + with Timers { override val supervisorStrategy = SupervisorStrategy.stoppingStrategy private val maxRetries = 10 /* - * Context: the batch.execute() method throws an IOException("insufficient data written") in certain conditions. - * Here is what we know about it and how this attempts to address the issue. - * - * It was determined empirically that errors start to be thrown when the batch request approaches 15MB. - * Looking more closely at timing it appears that the exception takes almost exactly 60 seconds to be thrown - * from the batch.execute method, which suggests that this might be time related rather than byte size related and that - * the 15MB limit is just an artifact of how much data can be sent / received in 60 seconds by the client / server. - * - * In an attempt to provide a fix for this issue, the total size of the batch size is limited to 14MB, which is a rather - * arbitrary value only supported by local testing. - * - * Result of further investigation on the cause: - * IOException("insufficient data written") is being thrown because the http request attempts to close() its output stream. - * The close() method throws the "insufficient data written" exception because it still had data to send. - * The close() method was called as part of a finally, because an exception was thrown earlier when attempting to write to the - * stream. This exception is however swallowed by the one thrown in the close(). - * This commit https://github.com/google/google-http-java-client/pull/333 fixes the swallowing issue so the original - * exception is thrown instead: IOException(“Error writing request body to server”). - * Tracing back why this exception is being thrown, it appears that at some point the socket gets closed externally - * (maybe the google server closes it ?) - * which results in a SocketException("broken pipe") being thrown and eventually bubbles up to the IOExceptions above. - * - * see sun.net.www.protocol.http.HttpURLConnection - * and com.google.api.client.http.javanet.NetHttpRequest - * - */ + * Context: the batch.execute() method throws an IOException("insufficient data written") in certain conditions. + * Here is what we know about it and how this attempts to address the issue. + * + * It was determined empirically that errors start to be thrown when the batch request approaches 15MB. 
+ * Looking more closely at timing it appears that the exception takes almost exactly 60 seconds to be thrown + * from the batch.execute method, which suggests that this might be time related rather than byte size related and that + * the 15MB limit is just an artifact of how much data can be sent / received in 60 seconds by the client / server. + * + * In an attempt to provide a fix for this issue, the total size of the batch size is limited to 14MB, which is a rather + * arbitrary value only supported by local testing. + * + * Result of further investigation on the cause: + * IOException("insufficient data written") is being thrown because the http request attempts to close() its output stream. + * The close() method throws the "insufficient data written" exception because it still had data to send. + * The close() method was called as part of a finally, because an exception was thrown earlier when attempting to write to the + * stream. This exception is however swallowed by the one thrown in the close(). + * This commit https://github.com/google/google-http-java-client/pull/333 fixes the swallowing issue so the original + * exception is thrown instead: IOException(“Error writing request body to server”). + * Tracing back why this exception is being thrown, it appears that at some point the socket gets closed externally + * (maybe the google server closes it ?) + * which results in a SocketException("broken pipe") being thrown and eventually bubbles up to the IOExceptions above. + * + * see sun.net.www.protocol.http.HttpURLConnection + * and com.google.api.client.http.javanet.NetHttpRequest + * + */ private val maxBatchRequestSize: Long = 14L * 1024L * 1024L private val requestTooLargeException = new UserPAPIApiException( - cause = new IllegalArgumentException(s"The task run request has exceeded the maximum PAPI request size ($maxBatchRequestSize bytes)."), - helpfulHint = Option("If you have a task with a very large number of inputs and / or outputs in your workflow you should try to reduce it. " + - "Depending on your case you could: 1) Zip your input files together and unzip them in the command. 2) Use a file of file names " + - "and localize the files yourself.") + cause = new IllegalArgumentException( + s"The task run request has exceeded the maximum PAPI request size ($maxBatchRequestSize bytes)." + ), + helpfulHint = Option( + "If you have a task with a very large number of inputs and / or outputs in your workflow you should try to reduce it. " + + "Depending on your case you could: 1) Zip your input files together and unzip them in the command. 2) Use a file of file names " + + "and localize the files yourself." + ) ) private[api] lazy val nbWorkers = requestWorkers.value private lazy val workerBatchInterval = determineBatchInterval(qps) * nbWorkers.toLong - // workQueue is protected for the unit tests, not intended to be generally overridden protected[api] var workQueue: Queue[PAPIApiRequest] = Queue.empty private var workInProgress: Map[ActorRef, PipelinesApiWorkBatch] = Map.empty @@ -82,7 +91,9 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: // the scheduled delay, unless the workflow is aborted in the meantime in which case they will be cancelled. 
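// Illustrative sketch only, not part of this patch: the 14MB cap above is applied when work
// is dequeued, by taking a prefix of the queue whose accumulated content length stays under
// the limit. "Request" below is a hypothetical stand-in for PAPIApiRequest; the real logic
// lives in common.collections.EnhancedCollections.takeWhileWeighted, used by beheadWorkQueue.
object WeightedBatchSketch {
  final case class Request(contentLength: Long)

  // Returns (batch, remainder): requests are taken from the head while the running byte total
  // stays within maxBytes and the batch size stays within maxCount.
  def takeWhileWeighted(queue: List[Request], maxBytes: Long, maxCount: Int): (List[Request], List[Request]) = {
    @scala.annotation.tailrec
    def go(rest: List[Request], acc: List[Request], bytes: Long): (List[Request], List[Request]) =
      rest match {
        case head :: tail if acc.size < maxCount && bytes + head.contentLength <= maxBytes =>
          go(tail, head :: acc, bytes + head.contentLength)
        case _ => (acc.reverse, rest) // stop at the first request that would overflow the batch
      }
    go(queue, Nil, 0L)
  }
}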
private var queriesWaitingForRetry: Set[PAPIApiRequest] = Set.empty[PAPIApiRequest]
 
-  private def papiRequestWorkerProps = PipelinesApiRequestWorker.props(self, workerBatchInterval, serviceRegistryActor).withMailbox(Mailbox.PriorityMailbox)
+  private def papiRequestWorkerProps = PipelinesApiRequestWorker
+    .props(self, workerBatchInterval, serviceRegistryActor)
+    .withMailbox(Mailbox.PriorityMailbox)
 
   // statusPollers is protected for the unit tests, not intended to be generally overridden
   protected[api] var statusPollers: Vector[ActorRef] = Vector.empty
@@ -100,7 +111,9 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers:
     val newLoad = if (workQueue.size > LoadConfig.PAPIThreshold) HighLoad else NormalLoad
 
     if (previousLoad == NormalLoad && newLoad == HighLoad)
-      log.warning(s"PAPI Request Manager transitioned to HighLoad with queue size ${workQueue.size} exceeding limit of ${LoadConfig.PAPIThreshold}")
+      log.warning(
+        s"PAPI Request Manager transitioned to HighLoad with queue size ${workQueue.size} exceeding limit of ${LoadConfig.PAPIThreshold}"
+      )
     else if (previousLoad == HighLoad && newLoad == NormalLoad)
       log.info("PAPI Request Manager transitioned back to NormalLoad")
 
@@ -122,30 +135,35 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers:
     case PipelinesWorkerRequestWork(maxBatchSize) => handleWorkerAskingForWork(sender(), maxBatchSize)
     case failure: PAPIApiRequestFailed => handleQueryFailure(failure)
-    case Terminated(actorRef) => onFailure(actorRef, new RuntimeException("PipelinesApiRequestHandler actor termination caught by manager") with NoStackTrace)
-    case other => log.error(s"Unexpected message from {} to ${this.getClass.getSimpleName}: {}", sender().path.name, other)
+    case Terminated(actorRef) =>
+      onFailure(actorRef,
+                new RuntimeException("PipelinesApiRequestHandler actor termination caught by manager") with NoStackTrace
+      )
+    case other =>
+      log.error(s"Unexpected message from {} to ${this.getClass.getSimpleName}: {}", sender().path.name, other)
   }
 
   override def receive = instrumentationReceive(monitorQueueSize _).orElse(requestManagerReceive)
 
   private def abort(workflowId: WorkflowId) = {
-    def aborted(query: PAPIRunCreationRequest) = query.requester ! PipelinesApiRunCreationQueryFailed(query, JobAbortedException)
+    def aborted(query: PAPIRunCreationRequest) =
+      query.requester ! 
PipelinesApiRunCreationQueryFailed(query, JobAbortedException) - workQueue = workQueue.filterNot({ + workQueue = workQueue.filterNot { case query: PAPIRunCreationRequest if query.workflowId == workflowId => aborted(query) true case _ => false - }) + } - queriesWaitingForRetry = queriesWaitingForRetry.filterNot({ + queriesWaitingForRetry = queriesWaitingForRetry.filterNot { case query: PAPIRunCreationRequest if query.workflowId == workflowId => timers.cancel(query) queriesWaitingForRetry = queriesWaitingForRetry - query aborted(query) true case _ => false - }) + } } private def handleQueryFailure(failure: PAPIApiRequestFailed) = { @@ -155,7 +173,9 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: // NB: we don't count user errors towards the PAPI failed queries count in our metrics: if (!userError) { failedQuery(failure) - log.warning(s"PAPI request workers tried and failed ${failure.query.failedAttempts} times to make ${failure.query.getClass.getSimpleName} request to PAPI") + log.warning( + s"PAPI request workers tried and failed ${failure.query.failedAttempts} times to make ${failure.query.getClass.getSimpleName} request to PAPI" + ) } failure.query.requester ! failure @@ -169,7 +189,7 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: timers.startSingleTimer(nextRequest, nextRequest, delay) } - if (userError || failure.query.failedAttempts >= maxRetries ) { + if (userError || failure.query.failedAttempts >= maxRetries) { failQuery() } else { retryQuery() @@ -177,7 +197,12 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: } private def handleWorkerAskingForWork(papiRequestWorkerActor: ActorRef, maxBatchSize: Int) = { - log.debug("Request for PAPI requests received from {} (max batch size is {}, current queue size is {})", papiRequestWorkerActor.path.name, maxBatchSize, workQueue.size) + log.debug( + "Request for PAPI requests received from {} (max batch size is {}, current queue size is {})", + papiRequestWorkerActor.path.name, + maxBatchSize, + workQueue.size + ) workInProgress -= papiRequestWorkerActor val beheaded = beheadWorkQueue(maxBatchSize) @@ -196,10 +221,13 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: } // Intentionally not final, this runs afoul of SI-4440 (I believe) - private case class BeheadedWorkQueue(workToDo: Option[NonEmptyList[PAPIApiRequest]], newWorkQueue: Queue[PAPIApiRequest]) + private case class BeheadedWorkQueue(workToDo: Option[NonEmptyList[PAPIApiRequest]], + newWorkQueue: Queue[PAPIApiRequest] + ) private def beheadWorkQueue(maxBatchSize: Int): BeheadedWorkQueue = { import common.collections.EnhancedCollections._ - val DeQueued(head, tail) = workQueue.takeWhileWeighted(maxBatchRequestSize, _.contentLength, Option(maxBatchSize), strict = true) + val DeQueued(head, tail) = + workQueue.takeWhileWeighted(maxBatchRequestSize, _.contentLength, Option(maxBatchSize), strict = true) head.toList match { case h :: t => BeheadedWorkQueue(Option(NonEmptyList(h, t)), tail) @@ -240,7 +268,12 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: s"Exception details: $throwable" ) case None => - log.error(throwable, "The PAPI request worker '{}' terminated ({}). The request manager did not know what work the actor was doing so cannot resubmit any requests or inform any requesters of failures. 
This should never happen - please report this as a programmer error.", terminee.path.name, throwable.getMessage) + log.error( + throwable, + "The PAPI request worker '{}' terminated ({}). The request manager did not know what work the actor was doing so cannot resubmit any requests or inform any requesters of failures. This should never happen - please report this as a programmer error.", + terminee.path.name, + throwable.getMessage + ) } resetWorker(terminee) @@ -249,7 +282,10 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: private def resetWorker(worker: ActorRef): Unit = { if (!statusPollers.contains(worker)) { - log.warning("PAPI request worker {} is being reset but was never registered in the pool of workers. This should never happen - please report this as a programmer error.", worker.path.name) + log.warning( + "PAPI request worker {} is being reset but was never registered in the pool of workers. This should never happen - please report this as a programmer error.", + worker.path.name + ) } val stillGoodWorkers = statusPollers.filterNot(_ == worker) @@ -257,12 +293,16 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: statusPollers = stillGoodWorkers :+ newWorker - log.info("PAPI request worker {} has been removed and replaced by {} in the pool of {} workers", worker.path.name, newWorker.path.name, statusPollers.size) + log.info("PAPI request worker {} has been removed and replaced by {} in the pool of {} workers", + worker.path.name, + newWorker.path.name, + statusPollers.size + ) } private[api] def resetAllWorkers() = { log.info("'resetAllWorkers()' called to fill vector with {} new workers", nbWorkers) - val result = Vector.fill(nbWorkers) { makeAndWatchWorkerActor() } + val result = Vector.fill(nbWorkers)(makeAndWatchWorkerActor()) statusPollers = result } @@ -275,7 +315,9 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: // Separate method to allow overriding in tests: private[api] def makeWorkerActor(): ActorRef = { val result = context.actorOf(papiRequestWorkerProps, s"PAPIQueryWorker-${UUID.randomUUID()}") - log.info(s"Request manager ${self.path.name} created new PAPI request worker ${result.path.name} with batch interval of ${workerBatchInterval}") + log.info( + s"Request manager ${self.path.name} created new PAPI request worker ${result.path.name} with batch interval of ${workerBatchInterval}" + ) result } } @@ -284,8 +326,10 @@ object PipelinesApiRequestManager { case object ResetAllRequestWorkers case object QueueMonitoringTimerKey case object QueueMonitoringTimerAction extends ControlMessage - def props(qps: Int Refined Positive, requestWorkers: Int Refined Positive, serviceRegistryActor: ActorRef) - (implicit batchHandler: PipelinesApiRequestHandler): Props = Props(new PipelinesApiRequestManager(qps, requestWorkers, serviceRegistryActor)).withDispatcher(BackendDispatcher) + def props(qps: Int Refined Positive, requestWorkers: Int Refined Positive, serviceRegistryActor: ActorRef)(implicit + batchHandler: PipelinesApiRequestHandler + ): Props = + Props(new PipelinesApiRequestManager(qps, requestWorkers, serviceRegistryActor)).withDispatcher(BackendDispatcher) /** * Given the Genomics API queries per 100 seconds and given MaxBatchSize will determine a batch interval which @@ -320,7 +364,8 @@ object PipelinesApiRequestManager { httpRequest: HttpRequest, jobId: StandardAsyncJob, failedAttempts: Int = 0, - backoff: Backoff = PAPIApiRequest.backoff) extends PAPIApiRequest { + 
backoff: Backoff = PAPIApiRequest.backoff + ) extends PAPIApiRequest { override def withFailedAttempt = this.copy(failedAttempts = failedAttempts + 1, backoff = backoff.next) } @@ -328,7 +373,8 @@ object PipelinesApiRequestManager { requester: ActorRef, httpRequest: HttpRequest, failedAttempts: Int = 0, - backoff: Backoff = PAPIApiRequest.backoff) extends PAPIApiRequest { + backoff: Backoff = PAPIApiRequest.backoff + ) extends PAPIApiRequest { override def withFailedAttempt = this.copy(failedAttempts = failedAttempts + 1, backoff = backoff.next) } @@ -337,7 +383,8 @@ object PipelinesApiRequestManager { httpRequest: HttpRequest, jobId: StandardAsyncJob, failedAttempts: Int = 0, - backoff: Backoff = PAPIApiRequest.backoff) extends PAPIApiRequest { + backoff: Backoff = PAPIApiRequest.backoff + ) extends PAPIApiRequest { override def withFailedAttempt = this.copy(failedAttempts = failedAttempts + 1, backoff = backoff.next) } @@ -346,16 +393,21 @@ object PipelinesApiRequestManager { val cause: PAPIApiException } - final case class PipelinesApiStatusQueryFailed(query: PAPIApiRequest, cause: PAPIApiException) extends PAPIApiRequestFailed - final case class PipelinesApiRunCreationQueryFailed(query: PAPIApiRequest, cause: PAPIApiException) extends PAPIApiRequestFailed - final case class PipelinesApiAbortQueryFailed(query: PAPIApiRequest, cause: PAPIApiException) extends PAPIApiRequestFailed + final case class PipelinesApiStatusQueryFailed(query: PAPIApiRequest, cause: PAPIApiException) + extends PAPIApiRequestFailed + final case class PipelinesApiRunCreationQueryFailed(query: PAPIApiRequest, cause: PAPIApiException) + extends PAPIApiRequestFailed + final case class PipelinesApiAbortQueryFailed(query: PAPIApiRequest, cause: PAPIApiException) + extends PAPIApiRequestFailed - private[api] final case class PipelinesApiWorkBatch(workBatch: NonEmptyList[PAPIApiRequest]) + final private[api] case class PipelinesApiWorkBatch(workBatch: NonEmptyList[PAPIApiRequest]) private[api] case object NoWorkToDo - private[api] final case class PipelinesWorkerRequestWork(maxBatchSize: Int) extends ControlMessage + final private[api] case class PipelinesWorkerRequestWork(maxBatchSize: Int) extends ControlMessage - final case class GoogleJsonException(e: GoogleJsonError, responseHeaders: HttpHeaders) extends IOException with CromwellFatalExceptionMarker { + final case class GoogleJsonException(e: GoogleJsonError, responseHeaders: HttpHeaders) + extends IOException + with CromwellFatalExceptionMarker { override def getMessage: String = e.getMessage } @@ -365,10 +417,12 @@ object PipelinesApiRequestManager { } class SystemPAPIApiException(val cause: Throwable) extends PAPIApiException { - override def getMessage: String = s"Unable to complete PAPI request due to system or connection error (${cause.getMessage})" + override def getMessage: String = + s"Unable to complete PAPI request due to system or connection error (${cause.getMessage})" } final class UserPAPIApiException(val cause: Throwable, helpfulHint: Option[String]) extends PAPIApiException { - override def getMessage: String = s"Unable to complete PAPI request due to a problem with the request (${cause.getMessage}). ${helpfulHint.getOrElse("")}" + override def getMessage: String = + s"Unable to complete PAPI request due to a problem with the request (${cause.getMessage}). 
${helpfulHint.getOrElse("")}" } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorker.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorker.scala index 2ccadf54ed5..694dc68390e 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorker.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorker.scala @@ -17,9 +17,13 @@ import scala.util.{Failure, Success, Try} /** * Sends batched requests to JES as a worker to the JesApiQueryManager */ -class PipelinesApiRequestWorker(val pollingManager: ActorRef, val batchInterval: FiniteDuration, override val serviceRegistryActor: ActorRef) - (implicit val batchHandler: PipelinesApiRequestHandler) - extends Actor with ActorLogging with CromwellInstrumentationActor { +class PipelinesApiRequestWorker(val pollingManager: ActorRef, + val batchInterval: FiniteDuration, + override val serviceRegistryActor: ActorRef +)(implicit val batchHandler: PipelinesApiRequestHandler) + extends Actor + with ActorLogging + with CromwellInstrumentationActor { self ! NoWorkToDo // Starts the check-for-work cycle when the actor is fully initialized. @@ -51,18 +55,18 @@ class PipelinesApiRequestWorker(val pollingManager: ActorRef, val batchInterval: runBatch(batch) Future.sequence(batchFutures.toList) } - + // These are separate functions so that the tests can hook in and replace the JES-side stuff private[api] def createBatch(): BatchRequest = batch - private[api] def runBatch(batch: BatchRequest): Unit = { - try { + private[api] def runBatch(batch: BatchRequest): Unit = + try if (batch.size() > 0) batch.execute() - } catch { + catch { case e: java.io.IOException => - val msg = s"A batch of PAPI status requests failed. The request manager will retry automatically up to 10 times. The error was: ${e.getMessage}" + val msg = + s"A batch of PAPI status requests failed. The request manager will retry automatically up to 10 times. The error was: ${e.getMessage}" throw new Exception(msg, e.getCause) with NoStackTrace } - } // TODO: FSMify this actor? private def interstitialRecombobulation: PartialFunction[Try[List[Try[Unit]]], Unit] = { @@ -71,11 +75,17 @@ class PipelinesApiRequestWorker(val pollingManager: ActorRef, val batchInterval: scheduleCheckForWork() case Success(someFailures) => val errors = someFailures collect { case Failure(t) => t.getMessage } - log.warning("PAPI request worker had {} failures making {} requests: {}", errors.size, someFailures.size, errors.mkString(System.lineSeparator, "," + System.lineSeparator, "")) + log.warning( + "PAPI request worker had {} failures making {} requests: {}", + errors.size, + someFailures.size, + errors.mkString(System.lineSeparator, "," + System.lineSeparator, "") + ) scheduleCheckForWork() case Failure(t) => // NB: Should be impossible since we only ever do completionPromise.trySuccess() - val msg = "Programmer Error: Completion promise unexpectedly set to Failure: {}. Don't do this, otherwise the Future.sequence is short-circuited on the first failure" + val msg = + "Programmer Error: Completion promise unexpectedly set to Failure: {}. 
Don't do this, otherwise the Future.sequence is short-circuited on the first failure" log.error(msg, t.getMessage) scheduleCheckForWork() } @@ -85,16 +95,19 @@ class PipelinesApiRequestWorker(val pollingManager: ActorRef, val batchInterval: * Warning: Only use this from inside a receive method. */ private def scheduleCheckForWork(): Unit = { - context.system.scheduler.scheduleOnce(batchInterval) { pollingManager ! PipelinesApiRequestManager.PipelinesWorkerRequestWork(MaxBatchSize) } + context.system.scheduler.scheduleOnce(batchInterval) { + pollingManager ! PipelinesApiRequestManager.PipelinesWorkerRequestWork(MaxBatchSize) + } () } } object PipelinesApiRequestWorker { - def props(pollingManager: ActorRef, batchInterval: FiniteDuration, serviceRegistryActor: ActorRef) - (implicit batchHandler: PipelinesApiRequestHandler) = { - Props(new PipelinesApiRequestWorker(pollingManager, batchInterval, serviceRegistryActor)).withDispatcher(BackendDispatcher) - } + def props(pollingManager: ActorRef, batchInterval: FiniteDuration, serviceRegistryActor: ActorRef)(implicit + batchHandler: PipelinesApiRequestHandler + ) = + Props(new PipelinesApiRequestWorker(pollingManager, batchInterval, serviceRegistryActor)) + .withDispatcher(BackendDispatcher) // The Batch API limits us to 100 at a time val MaxBatchSize = 100 diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/RunStatus.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/RunStatus.scala index 51064deff21..933a6139344 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/RunStatus.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/RunStatus.scala @@ -34,7 +34,8 @@ object RunStatus { case class Success(eventList: Seq[ExecutionEvent], machineType: Option[String], zone: Option[String], - instanceName: Option[String]) extends TerminalRunStatus { + instanceName: Option[String] + ) extends TerminalRunStatus { override def toString = "Success" } @@ -45,19 +46,36 @@ object RunStatus { machineType: Option[String], zone: Option[String], instanceName: Option[String], - wasPreemptible: Boolean): UnsuccessfulRunStatus = { + wasPreemptible: Boolean + ): UnsuccessfulRunStatus = { val jesCode: Option[Int] = errorMessage flatMap { em => Try(em.substring(0, em.indexOf(':')).toInt).toOption } // Because of Reasons, sometimes errors which aren't indicative of preemptions are treated as preemptions. 
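// Illustrative sketch only, not part of this patch: the match below chooses a status
// constructor from the gRPC-style error code, the numeric "jesCode" parsed from the message
// prefix, and whether the VM was preemptible. A simplified standalone version, where
// preemptionJesCode and preemptionSubstring are placeholders for the real constants on
// PipelinesApiAsyncBackendJobExecutionActor (not reproduced here):
object StatusClassificationSketch {
  sealed trait Outcome
  case object WasPreempted extends Outcome
  case object WasCancelled extends Outcome
  case object WasFailed extends Outcome

  def classify(statusName: String,
               jesCode: Option[Int],
               errorMessage: Option[String],
               wasPreemptible: Boolean,
               preemptionJesCode: Int,
               preemptionSubstring: String
  ): Outcome =
    statusName match {
      case "ABORTED" if jesCode.contains(preemptionJesCode) => WasPreempted
      case "ABORTED" if wasPreemptible && errorMessage.exists(_.contains(preemptionSubstring)) => WasPreempted
      case "CANCELLED" => WasCancelled
      case _ => WasFailed
    }
}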
val unsuccessfulStatusBuilder = errorCode match { - case Status.ABORTED if jesCode.contains(PipelinesApiAsyncBackendJobExecutionActor.JesPreemption) => Preempted.apply _ - case Status.ABORTED if jesCode.contains(PipelinesApiAsyncBackendJobExecutionActor.JesUnexpectedTermination) && wasPreemptible => Preempted.apply _ - case Status.ABORTED if errorMessage.exists(_.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style)) => Preempted.apply _ - case Status.UNKNOWN if errorMessage.exists(_.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedToStartDueToPreemptionSubstring)) => Preempted.apply _ + case Status.ABORTED if jesCode.contains(PipelinesApiAsyncBackendJobExecutionActor.JesPreemption) => + Preempted.apply _ + case Status.ABORTED + if jesCode.contains(PipelinesApiAsyncBackendJobExecutionActor.JesUnexpectedTermination) && wasPreemptible => + Preempted.apply _ + case Status.ABORTED + if errorMessage.exists(_.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style)) => + Preempted.apply _ + case Status.UNKNOWN + if errorMessage.exists( + _.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedToStartDueToPreemptionSubstring) + ) => + Preempted.apply _ case Status.CANCELLED => Cancelled.apply _ case _ => Failed.apply _ } - unsuccessfulStatusBuilder.apply(errorCode, jesCode, errorMessage.toList, eventList, machineType, zone, instanceName) + unsuccessfulStatusBuilder.apply(errorCode, + jesCode, + errorMessage.toList, + eventList, + machineType, + zone, + instanceName + ) } } @@ -67,7 +85,8 @@ object RunStatus { eventList: Seq[ExecutionEvent], machineType: Option[String], zone: Option[String], - instanceName: Option[String]) extends UnsuccessfulRunStatus { + instanceName: Option[String] + ) extends UnsuccessfulRunStatus { override def toString = "Failed" } @@ -75,12 +94,13 @@ object RunStatus { * What Cromwell calls Aborted, PAPI calls Cancelled. 
This means the job was "cancelled" by the user */ final case class Cancelled(errorCode: Status, - jesCode: Option[Int], - errorMessages: List[String], - eventList: Seq[ExecutionEvent], - machineType: Option[String], - zone: Option[String], - instanceName: Option[String]) extends UnsuccessfulRunStatus { + jesCode: Option[Int], + errorMessages: List[String], + eventList: Seq[ExecutionEvent], + machineType: Option[String], + zone: Option[String], + instanceName: Option[String] + ) extends UnsuccessfulRunStatus { override def toString = "Cancelled" } @@ -90,7 +110,8 @@ object RunStatus { eventList: Seq[ExecutionEvent], machineType: Option[String], zone: Option[String], - instanceName: Option[String]) extends UnsuccessfulRunStatus { + instanceName: Option[String] + ) extends UnsuccessfulRunStatus { override def toString = "Preempted" } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiAbortClient.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiAbortClient.scala index 0654f2699c5..90fcd293f07 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiAbortClient.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiAbortClient.scala @@ -3,8 +3,14 @@ package cromwell.backend.google.pipelines.common.api.clients import akka.actor.{Actor, ActorLogging, ActorRef} import cromwell.backend.google.pipelines.common.PapiInstrumentation import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory -import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{PAPIAbortRequest, PipelinesApiAbortQueryFailed} -import cromwell.backend.google.pipelines.common.api.clients.PipelinesApiAbortClient.{PAPIAbortRequestSuccessful, PAPIOperationIsAlreadyTerminal} +import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{ + PAPIAbortRequest, + PipelinesApiAbortQueryFailed +} +import cromwell.backend.google.pipelines.common.api.clients.PipelinesApiAbortClient.{ + PAPIAbortRequestSuccessful, + PAPIOperationIsAlreadyTerminal +} import cromwell.backend.standard.StandardAsyncJob import cromwell.core.WorkflowId import cromwell.core.logging.JobLogging @@ -23,9 +29,8 @@ trait PipelinesApiAbortClient { this: Actor with ActorLogging with JobLogging wi val papiApiActor: ActorRef val requestFactory: PipelinesApiRequestFactory - def abortJob(jobId: StandardAsyncJob) = { + def abortJob(jobId: StandardAsyncJob) = papiApiActor ! 
PAPIAbortRequest(workflowId, self, requestFactory.cancelRequest(jobId), jobId) - } def abortActorClientReceive: Actor.Receive = { case PAPIAbortRequestSuccessful(jobId) => diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiRunCreationClient.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiRunCreationClient.scala index 0ff1920b732..ef585129f3b 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiRunCreationClient.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiRunCreationClient.scala @@ -3,7 +3,10 @@ package cromwell.backend.google.pipelines.common.api.clients import akka.actor.{Actor, ActorLogging, ActorRef} import cromwell.backend.google.pipelines.common.PapiInstrumentation import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters -import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{PipelinesApiRunCreationQueryFailed, SystemPAPIApiException} +import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{ + PipelinesApiRunCreationQueryFailed, + SystemPAPIApiException +} import cromwell.backend.google.pipelines.common.api.{PipelinesApiRequestFactory, PipelinesApiRequestManager} import cromwell.backend.standard.StandardAsyncJob import cromwell.core.WorkflowId @@ -18,7 +21,10 @@ object PipelinesApiRunCreationClient { * Exception used to represent the fact that a job was aborted before a creation attempt was made. * Meaning it was in the queue when the abort request was made, so it was just removed from the queue. */ - case object JobAbortedException extends SystemPAPIApiException(new Exception("The job was removed from the queue before a PAPI creation request was made")) + case object JobAbortedException + extends SystemPAPIApiException( + new Exception("The job was removed from the queue before a PAPI creation request was made") + ) } /** @@ -45,15 +51,21 @@ trait PipelinesApiRunCreationClient { this: Actor with ActorLogging with PapiIns runCreationClientPromise = None } - def runPipeline(workflowId: WorkflowId, createPipelineParameters: CreatePipelineParameters, jobLogger: JobLogger): Future[StandardAsyncJob] = { + def runPipeline(workflowId: WorkflowId, + createPipelineParameters: CreatePipelineParameters, + jobLogger: JobLogger + ): Future[StandardAsyncJob] = runCreationClientPromise match { case Some(p) => p.future case None => - papiApiActor ! PipelinesApiRequestManager.PAPIRunCreationRequest(workflowId, self, requestFactory.runRequest(createPipelineParameters, jobLogger)) + papiApiActor ! 
PipelinesApiRequestManager.PAPIRunCreationRequest( + workflowId, + self, + requestFactory.runRequest(createPipelineParameters, jobLogger) + ) val newPromise = Promise[StandardAsyncJob]() runCreationClientPromise = Option(newPromise) newPromise.future } - } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiStatusRequestClient.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiStatusRequestClient.scala index cb6cf2f9940..1f266484489 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiStatusRequestClient.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/clients/PipelinesApiStatusRequestClient.scala @@ -38,14 +38,17 @@ trait PipelinesApiStatusRequestClient { this: Actor with ActorLogging with PapiI pollingActorClientPromise = None } - def pollStatus(workflowId: WorkflowId, jobId: StandardAsyncJob): Future[RunStatus] = { + def pollStatus(workflowId: WorkflowId, jobId: StandardAsyncJob): Future[RunStatus] = pollingActorClientPromise match { case Some(p) => p.future case None => - papiApiActor ! PipelinesApiRequestManager.PAPIStatusPollRequest(workflowId, self, requestFactory.getRequest(jobId), jobId) + papiApiActor ! PipelinesApiRequestManager.PAPIStatusPollRequest(workflowId, + self, + requestFactory.getRequest(jobId), + jobId + ) val newPromise = Promise[RunStatus]() pollingActorClientPromise = Option(newPromise) newPromise.future } - } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/authentication/PipelinesApiVMAuthentication.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/authentication/PipelinesApiVMAuthentication.scala index 583ac380851..c00e2a6c16a 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/authentication/PipelinesApiVMAuthentication.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/authentication/PipelinesApiVMAuthentication.scala @@ -7,6 +7,7 @@ import common.validation.Validation._ import cromwell.cloudsupport.gcp.GoogleConfiguration import cromwell.core.DockerCredentials import spray.json.{JsString, JsValue} + /** * Interface for Authentication information that can be included as a json object in the file uploaded to GCS * upon workflow creation and used in the VM. 
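// Illustrative sketch only, not part of this patch: each auth object nests its key/value
// pairs under its context string, so a docker credential serializes as
// Map("docker" -> Map("token" -> JsString(...))). The token value below is a placeholder.
object AuthObjectSketch {
  import spray.json.{JsString, JsValue}

  trait AuthObject {
    def context: String
    def map: Map[String, JsValue]
    def toMap: Map[String, Map[String, JsValue]] = Map(context -> map)
  }

  final case class DockerAuth(token: String) extends AuthObject {
    override val context = "docker"
    override val map = Map("token" -> JsString(token))
  }

  // DockerAuth("placeholder-token").toMap
  //   == Map("docker" -> Map("token" -> JsString("placeholder-token")))
}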
@@ -15,7 +16,7 @@ sealed trait PipelinesApiAuthObject { def context: String def map: Map[String, JsValue] - def toMap: Map[String, Map[String, JsValue]] = Map(context -> map) + def toMap: Map[String, Map[String, JsValue]] = Map(context -> map) } object PipelinesApiDockerCredentials { @@ -26,7 +27,9 @@ object PipelinesApiDockerCredentials { case None => ().validNel // fine case _ => for { - authName <- dockerCredentials.authName.toErrorOr("KMS Encryption key defined for private Docker but no auth specified") + authName <- dockerCredentials.authName.toErrorOr( + "KMS Encryption key defined for private Docker but no auth specified" + ) _ <- googleConfig.auth(authName) } yield () } @@ -35,10 +38,10 @@ object PipelinesApiDockerCredentials { case Invalid(errors) => throw new RuntimeException(errors.toList.mkString(", ")) case Valid(_) => - new PipelinesApiDockerCredentials( - token = dockerCredentials.token, - keyName = dockerCredentials.keyName, - authName = dockerCredentials.authName) + new PipelinesApiDockerCredentials(token = dockerCredentials.token, + keyName = dockerCredentials.keyName, + authName = dockerCredentials.authName + ) } } } @@ -48,8 +51,9 @@ object PipelinesApiDockerCredentials { */ case class PipelinesApiDockerCredentials(override val token: String, override val keyName: Option[String], - override val authName: Option[String]) - extends DockerCredentials(token = token, keyName = keyName, authName = authName) with PipelinesApiAuthObject { + override val authName: Option[String] +) extends DockerCredentials(token = token, keyName = keyName, authName = authName) + with PipelinesApiAuthObject { override val context = "docker" override val map = Map( diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActor.scala index 90d9fd88d2d..55f2ab0a278 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActor.scala @@ -16,54 +16,62 @@ import wom.values.WomFile import scala.language.postfixOps import scala.util.Try -class PipelinesApiBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) extends StandardCacheHitCopyingActor(standardParams) { +class PipelinesApiBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) + extends StandardCacheHitCopyingActor(standardParams) { override protected val commandBuilder: GcsBatchCommandBuilder.type = GcsBatchCommandBuilder private val cachingStrategy = BackendInitializationData .as[PipelinesApiBackendInitializationData](standardParams.backendInitializationDataOption) - .papiConfiguration.papiAttributes.cacheHitDuplicationStrategy - + .papiConfiguration + .papiAttributes + .cacheHitDuplicationStrategy + override def processSimpletons(womValueSimpletons: Seq[WomValueSimpleton], - sourceCallRootPath: Path, - ): Try[(CallOutputs, Set[IoCommand[_]])] = + sourceCallRootPath: Path + ): Try[(CallOutputs, Set[IoCommand[_]])] = cachingStrategy match { - case CopyCachedOutputs => super.processSimpletons(womValueSimpletons, sourceCallRootPath) - case UseOriginalCachedOutputs => - val 
touchCommands: Seq[Try[IoTouchCommand]] = womValueSimpletons collect { - case WomValueSimpleton(_, wdlFile: WomFile) => getPath(wdlFile.value) flatMap GcsBatchCommandBuilder.touchCommand - } - - TryUtil.sequence(touchCommands) map { - WomValueBuilder.toJobOutputs(jobDescriptor.taskCall.outputPorts, womValueSimpletons) -> _.toSet - } - } + case CopyCachedOutputs => super.processSimpletons(womValueSimpletons, sourceCallRootPath) + case UseOriginalCachedOutputs => + val touchCommands: Seq[Try[IoTouchCommand]] = womValueSimpletons collect { + case WomValueSimpleton(_, wdlFile: WomFile) => + getPath(wdlFile.value) flatMap GcsBatchCommandBuilder.touchCommand + } + + TryUtil.sequence(touchCommands) map { + WomValueBuilder.toJobOutputs(jobDescriptor.taskCall.outputPorts, womValueSimpletons) -> _.toSet + } + } - override def extractBlacklistPrefix(path: String): Option[String] = Option(path.stripPrefix("gs://").takeWhile(_ != '/')) + override def extractBlacklistPrefix(path: String): Option[String] = Option( + path.stripPrefix("gs://").takeWhile(_ != '/') + ) - override def processDetritus(sourceJobDetritusFiles: Map[String, String] - ): Try[(Map[String, Path], Set[IoCommand[_]])] = + override def processDetritus( + sourceJobDetritusFiles: Map[String, String] + ): Try[(Map[String, Path], Set[IoCommand[_]])] = cachingStrategy match { - case CopyCachedOutputs => super.processDetritus(sourceJobDetritusFiles) - case UseOriginalCachedOutputs => - // apply getPath on each detritus string file - val detritusAsPaths = detritusFileKeys(sourceJobDetritusFiles).toSeq map { key => - key -> getPath(sourceJobDetritusFiles(key)) - } toMap + case CopyCachedOutputs => super.processDetritus(sourceJobDetritusFiles) + case UseOriginalCachedOutputs => + // apply getPath on each detritus string file + val detritusAsPaths = detritusFileKeys(sourceJobDetritusFiles).toSeq map { key => + key -> getPath(sourceJobDetritusFiles(key)) + } toMap - // Don't forget to re-add the CallRootPathKey that has been filtered out by detritusFileKeys - TryUtil.sequenceMap(detritusAsPaths, "Failed to make paths out of job detritus") flatMap { newDetritus => - Try { - // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap - (newDetritus + (JobPaths.CallRootPathKey -> destinationCallRootPath)) -> - newDetritus.values.map(GcsBatchCommandBuilder.touchCommand(_).get).toSet + // Don't forget to re-add the CallRootPathKey that has been filtered out by detritusFileKeys + TryUtil.sequenceMap(detritusAsPaths, "Failed to make paths out of job detritus") flatMap { newDetritus => + Try { + // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap + (newDetritus + (JobPaths.CallRootPathKey -> destinationCallRootPath)) -> + newDetritus.values.map(GcsBatchCommandBuilder.touchCommand(_).get).toSet + } } - } - } + } override protected def additionalIoCommands(sourceCallRootPath: Path, originalSimpletons: Seq[WomValueSimpleton], newOutputs: CallOutputs, - originalDetritus: Map[String, String], - newDetritus: Map[String, Path]): Try[List[Set[IoCommand[_]]]] = Try { + originalDetritus: Map[String, String], + newDetritus: Map[String, Path] + ): Try[List[Set[IoCommand[_]]]] = Try { cachingStrategy match { case UseOriginalCachedOutputs => val content = @@ -74,13 +82,17 @@ class PipelinesApiBackendCacheHitCopyingActor(standardParams: StandardCacheHitCo """.stripMargin // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap - 
List(Set( - GcsBatchCommandBuilder.writeCommand( - path = jobPaths.forCallCacheCopyAttempts.callExecutionRoot / "call_caching_placeholder.txt", - content = content, - options = Seq(CloudStorageOptions.withMimeType("text/plain")), - ).get - )) + List( + Set( + GcsBatchCommandBuilder + .writeCommand( + path = jobPaths.forCallCacheCopyAttempts.callExecutionRoot / "call_caching_placeholder.txt", + content = content, + options = Seq(CloudStorageOptions.withMimeType("text/plain")) + ) + .get + ) + ) case CopyCachedOutputs => List.empty } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendFileHashingActor.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendFileHashingActor.scala index 030b8cf8a70..e275a322ea4 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendFileHashingActor.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendFileHashingActor.scala @@ -3,6 +3,7 @@ package cromwell.backend.google.pipelines.common.callcaching import cromwell.backend.standard.callcaching.{StandardFileHashingActor, StandardFileHashingActorParams} import cromwell.filesystems.gcs.batch.GcsBatchCommandBuilder -class PipelinesApiBackendFileHashingActor(standardParams: StandardFileHashingActorParams) extends StandardFileHashingActor(standardParams) { +class PipelinesApiBackendFileHashingActor(standardParams: StandardFileHashingActorParams) + extends StandardFileHashingActor(standardParams) { override val ioCommandBuilder = GcsBatchCommandBuilder } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/InvalidGcsPathsInManifestFileException.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/InvalidGcsPathsInManifestFileException.scala index 982caa9f80a..14344a2a45a 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/InvalidGcsPathsInManifestFileException.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/InvalidGcsPathsInManifestFileException.scala @@ -3,5 +3,6 @@ package cromwell.backend.google.pipelines.common.errors import scala.util.control.NoStackTrace class InvalidGcsPathsInManifestFileException(paths: List[String]) extends Exception with NoStackTrace { - override def getMessage: String = s"Some of the paths in manifest file are not valid GCS paths: \n${paths.mkString("\n")}" + override def getMessage: String = + s"Some of the paths in manifest file are not valid GCS paths: \n${paths.mkString("\n")}" } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/PipelinesApiKnownJobFailure.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/PipelinesApiKnownJobFailure.scala index 7c232c04700..e2842a78e95 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/PipelinesApiKnownJobFailure.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/errors/PipelinesApiKnownJobFailure.scala 
@@ -10,7 +10,7 @@ case class FailToLocalizeFailure(messages: List[String]) extends PipelinesApiKno } case class FailedToDelocalizeFailure(message: String, jobTag: String, stderrPath: Option[Path]) - extends PipelinesApiKnownJobFailure { + extends PipelinesApiKnownJobFailure { lazy val stderrMessage = stderrPath map { p => s"3) Look into the stderr (${p.pathAsString}) file for evidence that some of the output files the command is expected to create were not created." } getOrElse "" diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/PipelinesApiAttachedDisk.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/PipelinesApiAttachedDisk.scala index fb686ec4371..fec1ad22c52 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/PipelinesApiAttachedDisk.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/PipelinesApiAttachedDisk.scala @@ -20,9 +20,14 @@ object PipelinesApiAttachedDisk { def diskTypeValidation(diskTypeString: String): ErrorOr[DiskType] = validateDiskType(diskTypeString) val validation: ErrorOr[PipelinesApiAttachedDisk] = s match { - case WorkingDiskPattern(sizeGb, diskType) => (validateDiskType(diskType), sizeGbValidation(sizeGb)) mapN { PipelinesApiWorkingDisk.apply } - case MountedDiskPattern(mountPoint, sizeGb, diskType) => (sizeGbValidation(sizeGb), diskTypeValidation(diskType)) mapN { (s, dt) => PipelinesApiEmptyMountedDisk(dt, s, DefaultPathBuilder.get(mountPoint)) } - case _ => s"Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: '$s'".invalidNel + case WorkingDiskPattern(sizeGb, diskType) => + (validateDiskType(diskType), sizeGbValidation(sizeGb)) mapN PipelinesApiWorkingDisk.apply + case MountedDiskPattern(mountPoint, sizeGb, diskType) => + (sizeGbValidation(sizeGb), diskTypeValidation(diskType)) mapN { (s, dt) => + PipelinesApiEmptyMountedDisk(dt, s, DefaultPathBuilder.get(mountPoint)) + } + case _ => + s"Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: '$s'".invalidNel } Try(validation match { @@ -35,30 +40,30 @@ object PipelinesApiAttachedDisk { }) } - private def validateDiskType(diskTypeName: String): ErrorOr[DiskType] = { + private def validateDiskType(diskTypeName: String): ErrorOr[DiskType] = DiskType.values().find(_.diskTypeName == diskTypeName) match { case Some(diskType) => diskType.validNel case None => val diskTypeNames = DiskType.values.map(_.diskTypeName).mkString(", ") s"Disk TYPE $diskTypeName should be one of $diskTypeNames".invalidNel } - } - private def validateLong(value: String): ErrorOr[Long] = { - try { + private def validateLong(value: String): ErrorOr[Long] = + try value.toLong.validNel - } catch { + catch { case _: IllegalArgumentException => s"$value not convertible to a Long".invalidNel } - } - + implicit class EnhancedDisks(val disks: Seq[PipelinesApiAttachedDisk]) extends AnyVal { - def adjustWorkingDiskWithNewMin(minimum: MemorySize, onAdjustment: => Unit): Seq[PipelinesApiAttachedDisk] = disks map { - case disk: PipelinesApiWorkingDisk if disk == PipelinesApiWorkingDisk.Default && disk.sizeGb < minimum.to(MemoryUnit.GB).amount.toInt => - onAdjustment - disk.copy(sizeGb = minimum.to(MemoryUnit.GB).amount.toInt) - case other => other - } + def adjustWorkingDiskWithNewMin(minimum: MemorySize, onAdjustment: => Unit): 
Seq[PipelinesApiAttachedDisk] = + disks map { + case disk: PipelinesApiWorkingDisk + if disk == PipelinesApiWorkingDisk.Default && disk.sizeGb < minimum.to(MemoryUnit.GB).amount.toInt => + onAdjustment + disk.copy(sizeGb = minimum.to(MemoryUnit.GB).amount.toInt) + case other => other + } } } @@ -69,7 +74,8 @@ trait PipelinesApiAttachedDisk { def mountPoint: Path } -case class PipelinesApiEmptyMountedDisk(diskType: DiskType, sizeGb: Int, mountPoint: Path) extends PipelinesApiAttachedDisk { +case class PipelinesApiEmptyMountedDisk(diskType: DiskType, sizeGb: Int, mountPoint: Path) + extends PipelinesApiAttachedDisk { val name = s"d-${mountPoint.pathAsString.md5Sum}" override def toString: String = s"$mountPoint $sizeGb ${diskType.diskTypeName}" } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala index 01bbc913e87..56b70e049fd 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala @@ -7,22 +7,20 @@ import cromwell.core.path.Path package object io { implicit class PathEnhanced(val path: Path) extends AnyVal { - def writeAsJson(content: String): Path = { + def writeAsJson(content: String): Path = path.writeBytes(content.getBytes.iterator)(Seq(CloudStorageOptions.withMimeType("application/json"))) - } - def writeAsText(content: String): Path = { + def writeAsText(content: String): Path = path.writeBytes(content.getBytes.iterator)(Seq(CloudStorageOptions.withMimeType("text/plain"))) - } } - private [pipelines] def isFatalJesException(t: Throwable): Boolean = t match { + private[pipelines] def isFatalJesException(t: Throwable): Boolean = t match { case e: HttpResponseException if e.getStatusCode == 403 => true case e: HttpResponseException if e.getStatusCode == 400 && e.getContent.contains("INVALID_ARGUMENT") => true case _ => false } - private [pipelines] def isTransientJesException(t: Throwable): Boolean = t match { + private[pipelines] def isTransientJesException(t: Throwable): Boolean = t match { // Quota exceeded case e: HttpResponseException if e.getStatusCode == 429 => true case _ => false diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/CheckpointingConfiguration.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/CheckpointingConfiguration.scala index c55ccdcda0a..95351449060 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/CheckpointingConfiguration.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/CheckpointingConfiguration.scala @@ -10,18 +10,21 @@ final class CheckpointingConfiguration(jobDescriptor: BackendJobDescriptor, workflowPaths: WorkflowPaths, commandDirectory: Path, checkpointInterval: FiniteDuration - ) { - def checkpointFileCloud(checkpointFileName: String): String = { +) { + def checkpointFileCloud(checkpointFileName: String): String = // The checkpoint file for ANY attempt always goes in the "attempt 1" directory. 
That way we guarantee that // every attempt is able to recover from the single source of checkpointing truth. - workflowPaths.toJobPaths(jobDescriptor.key.copy(attempt = 1), jobDescriptor.workflowDescriptor) - .callExecutionRoot.resolve("__checkpointing").resolve(checkpointFileName).toAbsolutePath.pathAsString - } + workflowPaths + .toJobPaths(jobDescriptor.key.copy(attempt = 1), jobDescriptor.workflowDescriptor) + .callExecutionRoot + .resolve("__checkpointing") + .resolve(checkpointFileName) + .toAbsolutePath + .pathAsString def tmpCheckpointFileCloud(checkpointFileName: String): String = checkpointFileCloud(checkpointFileName) + "-tmp" - def checkpointFileLocal(checkpointFileName: String): String = { + def checkpointFileLocal(checkpointFileName: String): String = commandDirectory.resolve(checkpointFileName).toAbsolutePath.pathAsString - } def tmpCheckpointFileLocal(checkpointFileName: String): String = checkpointFileLocal(checkpointFileName) + "-tmp" def localizePreviousCheckpointCommand(checkpointFileName: String): String = { diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/Env.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/Env.scala index 62bb2c2d12a..a886cc7a819 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/Env.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/Env.scala @@ -3,6 +3,7 @@ package cromwell.backend.google.pipelines.common.monitoring import cromwell.backend.BackendJobDescriptor object Env { + /** * Name of an environmental variable */ @@ -12,13 +13,12 @@ object Env { val TaskCallAttempt = "TASK_CALL_ATTEMPT" val DiskMounts = "DISK_MOUNTS" - def monitoringImageEnvironment(jobDescriptor: BackendJobDescriptor) - (mountPaths: List[String]): Map[String, String] = + def monitoringImageEnvironment(jobDescriptor: BackendJobDescriptor)(mountPaths: List[String]): Map[String, String] = Map( Env.WorkflowId -> jobDescriptor.workflowDescriptor.id.toString, Env.TaskCallName -> jobDescriptor.taskCall.localName, Env.TaskCallIndex -> jobDescriptor.key.index.map(_.toString).getOrElse("NA"), Env.TaskCallAttempt -> jobDescriptor.key.attempt.toString, - Env.DiskMounts -> mountPaths.mkString(" "), + Env.DiskMounts -> mountPaths.mkString(" ") ) } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/MonitoringImage.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/MonitoringImage.scala index 24e7e424e80..d953b1a2168 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/MonitoringImage.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/monitoring/MonitoringImage.scala @@ -13,8 +13,8 @@ final class MonitoringImage(jobDescriptor: BackendJobDescriptor, workflowPaths: WorkflowPaths, commandDirectory: Path, workingDisk: PipelinesApiAttachedDisk, - localMonitoringImageScriptPath: Path, - ) { + localMonitoringImageScriptPath: Path +) { val monitoringImageOption: Option[String] = workflowOptions.get(WorkflowOptionKeys.MonitoringImage).toOption @@ -24,22 +24,21 @@ final class MonitoringImage(jobDescriptor: BackendJobDescriptor, for { _ <- monitoringImageOption // 
Only use the monitoring_image_script when monitoring_image provided monitoringImageScript <- workflowOptions.get(WorkflowOptionKeys.MonitoringImageScript).toOption - } yield { - PathFactory.buildPath( - monitoringImageScript, - workflowPaths.pathBuilders, - ) - } + } yield PathFactory.buildPath( + monitoringImageScript, + workflowPaths.pathBuilders + ) val monitoringImageCommand: List[String] = monitoringImageScriptOption match { - case Some(_) => List( - "/bin/sh", - "-c", - s"cd '${commandDirectory.pathAsString}' && " + - s"chmod +x '${monitoringImageScriptContainerPath.pathAsString}' && " + - s"'${monitoringImageScriptContainerPath.pathAsString}'" - ) + case Some(_) => + List( + "/bin/sh", + "-c", + s"cd '${commandDirectory.pathAsString}' && " + + s"chmod +x '${monitoringImageScriptContainerPath.pathAsString}' && " + + s"'${monitoringImageScriptContainerPath.pathAsString}'" + ) case None => Nil } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/IoSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/IoSpec.scala index 80cfc3543b9..de2434ca90b 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/IoSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/IoSpec.scala @@ -1,7 +1,12 @@ package cromwell.backend.google.pipelines.common import com.google.api.client.http.HttpResponseException -import com.google.api.client.testing.http.{HttpTesting, MockHttpTransport, MockLowLevelHttpRequest, MockLowLevelHttpResponse} +import com.google.api.client.testing.http.{ + HttpTesting, + MockHttpTransport, + MockLowLevelHttpRequest, + MockLowLevelHttpResponse +} import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -25,10 +30,9 @@ class IoSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } private def mockTransport(statusCode: Int) = new MockHttpTransport() { - override def buildRequest(method: String, url: String) = { + override def buildRequest(method: String, url: String) = new MockLowLevelHttpRequest() { override def execute() = new MockLowLevelHttpResponse().setStatusCode(statusCode) } - } } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala index 48ace9666e0..919ee49a255 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/MachineConstraintsSpec.scala @@ -74,16 +74,16 @@ class MachineConstraintsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M (MemorySize(1024.0, MemoryUnit.MB), refineMV[Positive](1), n2OptionIceLake, false, "n2-custom-2-2048"), (MemorySize(2, MemoryUnit.GB), refineMV[Positive](33), n2OptionIceLake, false, "n2-custom-36-36864"), - // Same tests but with AMD Rome (n2d) #cpu > 16 are in increments of 16 + // Same tests but with AMD Rome (n2d) #cpu > 16 are in increments of 16 (MemorySize(1024, MemoryUnit.MB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1024"), - (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), n2dOption, false, 
"n2d-custom-4-4096"), + (MemorySize(4, MemoryUnit.GB), refineMV[Positive](3), n2dOption, false, "n2d-custom-4-4096"), (MemorySize(1, MemoryUnit.GB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1024"), - (MemorySize(1 , MemoryUnit.GB), refineMV[Positive](4), n2dOption, false, "n2d-custom-4-2048"), + (MemorySize(1, MemoryUnit.GB), refineMV[Positive](4), n2dOption, false, "n2d-custom-4-2048"), (MemorySize(14, MemoryUnit.GB), refineMV[Positive](16), n2dOption, false, "n2d-custom-16-14336"), (MemorySize(13.65, MemoryUnit.GB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-14080"), (MemorySize(1520.96, MemoryUnit.MB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1536"), (MemorySize(1024.0, MemoryUnit.MB), refineMV[Positive](1), n2dOption, false, "n2d-custom-2-1024"), - (MemorySize(2, MemoryUnit.GB), refineMV[Positive](33), n2dOption, false, "n2d-custom-48-24576"), + (MemorySize(2, MemoryUnit.GB), refineMV[Positive](33), n2dOption, false, "n2d-custom-48-24576"), (MemorySize(2, MemoryUnit.GB), refineMV[Positive](81), n2dOption, false, "n2d-custom-96-49152"), (MemorySize(256, MemoryUnit.GB), refineMV[Positive](128), n2dOption, false, "n2d-custom-96-262144") ) @@ -94,7 +94,7 @@ class MachineConstraintsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M cpu = cpu, cpuPlatformOption = cpuPlatformOption, googleLegacyMachineSelection = googleLegacyMachineSelection, - jobLogger = NOPLogger.NOP_LOGGER, + jobLogger = NOPLogger.NOP_LOGGER ) shouldBe expected } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala index 828112b557d..37221ff8534 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala @@ -11,17 +11,31 @@ import cats.data.NonEmptyList import com.google.api.client.http.HttpRequest import com.google.cloud.NoCredentials import common.collections.EnhancedCollections._ -import cromwell.backend.BackendJobExecutionActor.{BackendJobExecutionResponse, JobFailedNonRetryableResponse, JobFailedRetryableResponse} +import cromwell.backend.BackendJobExecutionActor.{ + BackendJobExecutionResponse, + JobFailedNonRetryableResponse, + JobFailedRetryableResponse +} import cromwell.backend._ import cromwell.backend.async.AsyncBackendJobExecutionActor.{Execute, ExecutionMode} -import cromwell.backend.async.{AbortedExecutionHandle, ExecutionHandle, FailedNonRetryableExecutionHandle, FailedRetryableExecutionHandle} +import cromwell.backend.async.{ + AbortedExecutionHandle, + ExecutionHandle, + FailedNonRetryableExecutionHandle, + FailedRetryableExecutionHandle +} import cromwell.backend.google.pipelines.common.PipelinesApiAsyncBackendJobExecutionActor.JesPendingExecutionHandle import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.PAPIStatusPollRequest import cromwell.backend.google.pipelines.common.api.RunStatus.UnsuccessfulRunStatus import cromwell.backend.google.pipelines.common.io.{DiskType, PipelinesApiWorkingDisk} import cromwell.backend.io.JobPathsSpecHelper._ 
-import cromwell.backend.standard.{DefaultStandardAsyncExecutionActorParams, StandardAsyncExecutionActorParams, StandardAsyncJob, StandardExpressionFunctionsParams} +import cromwell.backend.standard.{ + DefaultStandardAsyncExecutionActorParams, + StandardAsyncExecutionActorParams, + StandardAsyncJob, + StandardExpressionFunctionsParams +} import cromwell.core._ import cromwell.core.callcaching.NoDocker import cromwell.core.labels.Labels @@ -55,13 +69,14 @@ import scala.concurrent.{Await, ExecutionContext, Future, Promise} import scala.language.postfixOps import scala.util.Success -class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite - with AnyFlatSpecLike - with Matchers - with ImplicitSender - with BackendSpec - with BeforeAndAfter - with DefaultJsonProtocol { +class PipelinesApiAsyncBackendJobExecutionActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with ImplicitSender + with BackendSpec + with BeforeAndAfter + with DefaultJsonProtocol { val mockPathBuilder: GcsPathBuilder = MockGcsPathBuilder.instance import MockGcsPathBuilder._ @@ -75,60 +90,76 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val YoSup: String = s""" - |task sup { - | String addressee - | command { - | echo "yo sup $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | runtime { - | docker: "ubuntu:latest" - | [PREEMPTIBLE] - | } - |} - | - |workflow wf_sup { - | call sup - |} + |task sup { + | String addressee + | command { + | echo "yo sup $${addressee}!" + | } + | output { + | String salutation = read_string(stdout()) + | } + | runtime { + | docker: "ubuntu:latest" + | [PREEMPTIBLE] + | } + |} + | + |workflow wf_sup { + | call sup + |} """.stripMargin val Inputs: Map[FullyQualifiedName, WomValue] = Map("wf_sup.sup.addressee" -> WomString("dog")) private val NoOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])) - private lazy val TestableCallContext = CallContext(mockPathBuilder.build("gs://root").get, DummyStandardPaths, isDocker = false) + private lazy val TestableCallContext = + CallContext(mockPathBuilder.build("gs://root").get, DummyStandardPaths, isDocker = false) - private lazy val TestableStandardExpressionFunctionsParams: StandardExpressionFunctionsParams - = new StandardExpressionFunctionsParams { - override lazy val pathBuilders: List[PathBuilder] = List(mockPathBuilder) - override lazy val callContext: CallContext = TestableCallContext - override val ioActorProxy: ActorRef = simpleIoActor - override val executionContext: ExecutionContext = system.dispatcher - } + private lazy val TestableStandardExpressionFunctionsParams: StandardExpressionFunctionsParams = + new StandardExpressionFunctionsParams { + override lazy val pathBuilders: List[PathBuilder] = List(mockPathBuilder) + override lazy val callContext: CallContext = TestableCallContext + override val ioActorProxy: ActorRef = simpleIoActor + override val executionContext: ExecutionContext = system.dispatcher + } - lazy val TestableJesExpressionFunctions: PipelinesApiExpressionFunctions = { + lazy val TestableJesExpressionFunctions: PipelinesApiExpressionFunctions = new PipelinesApiExpressionFunctions(TestableStandardExpressionFunctionsParams) - } private def buildInitializationData(jobDescriptor: BackendJobDescriptor, configuration: PipelinesApiConfiguration) = { - val pathBuilders = Await.result(configuration.configurationDescriptor.pathBuilders(WorkflowOptions.empty), 5.seconds) + val pathBuilders = + 
Await.result(configuration.configurationDescriptor.pathBuilders(WorkflowOptions.empty), 5.seconds) val workflowPaths = PipelinesApiWorkflowPaths( - jobDescriptor.workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), configuration, pathBuilders, PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + jobDescriptor.workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + configuration, + pathBuilders, + PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val runtimeAttributesBuilder = PipelinesApiRuntimeAttributes.runtimeAttributesBuilder(configuration) val requestFactory: PipelinesApiRequestFactory = new PipelinesApiRequestFactory { override def cancelRequest(job: StandardAsyncJob): HttpRequest = null override def getRequest(job: StandardAsyncJob): HttpRequest = null override def runRequest(createPipelineParameters: PipelinesApiRequestFactory.CreatePipelineParameters, - jobLogger: JobLogger, - ): HttpRequest = null + jobLogger: JobLogger + ): HttpRequest = null } - PipelinesApiBackendInitializationData(workflowPaths, runtimeAttributesBuilder, configuration, null, requestFactory, None, None, None) + PipelinesApiBackendInitializationData(workflowPaths, + runtimeAttributesBuilder, + configuration, + null, + requestFactory, + None, + None, + None + ) } - class TestablePipelinesApiJobExecutionActor(params: StandardAsyncExecutionActorParams, functions: PipelinesApiExpressionFunctions) - extends PipelinesApiAsyncBackendJobExecutionActor(params) { + class TestablePipelinesApiJobExecutionActor(params: StandardAsyncExecutionActorParams, + functions: PipelinesApiExpressionFunctions + ) extends PipelinesApiAsyncBackendJobExecutionActor(params) { def this(jobDescriptor: BackendJobDescriptor, promise: Promise[BackendJobExecutionResponse], @@ -136,8 +167,8 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite functions: PipelinesApiExpressionFunctions = TestableJesExpressionFunctions, jesSingletonActor: ActorRef = emptyActor, ioActor: ActorRef = mockIoActor, - serviceRegistryActor: ActorRef = kvService) = { - + serviceRegistryActor: ActorRef = kvService + ) = this( DefaultStandardAsyncExecutionActorParams( jobIdKey = PipelinesApiAsyncBackendJobExecutionActor.JesOperationIdKey, @@ -152,7 +183,6 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite ), functions ) - } override lazy val jobLogger: JobLogger = new JobLogger( loggerName = "TestLogger", @@ -168,7 +198,6 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite override lazy val backendEngineFunctions: PipelinesApiExpressionFunctions = functions } - private val runtimeAttributesBuilder = PipelinesApiRuntimeAttributes.runtimeAttributesBuilder(papiConfiguration) private val workingDisk = PipelinesApiWorkingDisk(DiskType.SSD, 200) @@ -180,11 +209,18 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite |} """.stripMargin - private def buildPreemptibleJobDescriptor(preemptible: Int, previousPreemptions: Int, previousUnexpectedRetries: Int, failedRetriesCountOpt: Option[Int] = None): BackendJobDescriptor = { + private def buildPreemptibleJobDescriptor(preemptible: Int, + previousPreemptions: Int, + previousUnexpectedRetries: Int, + failedRetriesCountOpt: Option[Int] = None + ): BackendJobDescriptor = { val attempt = previousPreemptions + previousUnexpectedRetries + 1 - val wdlNamespace = WdlNamespaceWithWorkflow.load(YoSup.replace("[PREEMPTIBLE]", 
s"preemptible: $preemptible"), - Seq.empty[Draft2ImportResolver]).get - val womDefinition = wdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + val wdlNamespace = WdlNamespaceWithWorkflow + .load(YoSup.replace("[PREEMPTIBLE]", s"preemptible: $preemptible"), Seq.empty[Draft2ImportResolver]) + .get + val womDefinition = wdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) wdlNamespace.toWomExecutable(Option(Inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { case Right(womExecutable) => @@ -209,21 +245,45 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val key = BackendJobDescriptorKey(job, None, attempt) val runtimeAttributes = makeRuntimeAttributes(job) val prefetchedKvEntries = Map( - PipelinesApiBackendLifecycleActorFactory.preemptionCountKey -> KvPair(ScopedKey(workflowDescriptor.id, KvJobKey(key), PipelinesApiBackendLifecycleActorFactory.preemptionCountKey), previousPreemptions.toString), - PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey -> KvPair(ScopedKey(workflowDescriptor.id, KvJobKey(key), PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey), previousUnexpectedRetries.toString)) - val prefetchedKvEntriesUpd = if(failedRetriesCountOpt.isEmpty) { + PipelinesApiBackendLifecycleActorFactory.preemptionCountKey -> KvPair( + ScopedKey(workflowDescriptor.id, + KvJobKey(key), + PipelinesApiBackendLifecycleActorFactory.preemptionCountKey + ), + previousPreemptions.toString + ), + PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey -> KvPair( + ScopedKey(workflowDescriptor.id, + KvJobKey(key), + PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey + ), + previousUnexpectedRetries.toString + ) + ) + val prefetchedKvEntriesUpd = if (failedRetriesCountOpt.isEmpty) { prefetchedKvEntries } else { - prefetchedKvEntries + (BackendLifecycleActorFactory.FailedRetryCountKey -> KvPair(ScopedKey(workflowDescriptor.id, KvJobKey(key), BackendLifecycleActorFactory.FailedRetryCountKey), failedRetriesCountOpt.get.toString )) + prefetchedKvEntries + (BackendLifecycleActorFactory.FailedRetryCountKey -> KvPair( + ScopedKey(workflowDescriptor.id, KvJobKey(key), BackendLifecycleActorFactory.FailedRetryCountKey), + failedRetriesCountOpt.get.toString + )) } - BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(Inputs), NoDocker, None, prefetchedKvEntriesUpd) + BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(Inputs), + NoDocker, + None, + prefetchedKvEntriesUpd + ) case Left(badtimes) => fail(badtimes.toList.mkString(", ")) } } private case class DockerImageCacheTestingParameters(dockerImageCacheDiskOpt: Option[String], dockerImageAsSpecifiedByUser: String, - isDockerImageCacheUsageRequested: Boolean) + isDockerImageCacheUsageRequested: Boolean + ) private def executionActor(jobDescriptor: BackendJobDescriptor, promise: Promise[BackendJobExecutionResponse], @@ -232,13 +292,19 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite serviceRegistryActor: ActorRef = kvService, referenceInputFilesOpt: Option[Set[PipelinesApiInput]] = None, dockerImageCacheTestingParamsOpt: Option[DockerImageCacheTestingParameters] = None - ): ActorRef = { + ): ActorRef = { val job = StandardAsyncJob(UUID.randomUUID().toString) val run = Run(job) val 
handle = new JesPendingExecutionHandle(jobDescriptor, run.job, Option(run), None) - class ExecuteOrRecoverActor extends TestablePipelinesApiJobExecutionActor(jobDescriptor, promise, papiConfiguration, jesSingletonActor = jesSingletonActor, serviceRegistryActor = serviceRegistryActor) { + class ExecuteOrRecoverActor + extends TestablePipelinesApiJobExecutionActor(jobDescriptor, + promise, + papiConfiguration, + jesSingletonActor = jesSingletonActor, + serviceRegistryActor = serviceRegistryActor + ) { override def executeOrRecover(mode: ExecutionMode)(implicit ec: ExecutionContext): Future[ExecutionHandle] = { sendIncrementMetricsForReferenceFiles(referenceInputFilesOpt) dockerImageCacheTestingParamsOpt.foreach { dockerImageCacheTestingParams => @@ -257,34 +323,58 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite system.actorOf(Props(new ExecuteOrRecoverActor), "ExecuteOrRecoverActor-" + UUID.randomUUID) } - private def runAndFail(previousPreemptions: Int, previousUnexpectedRetries: Int, preemptible: Int, errorCode: Status, innerErrorMessage: String, expectPreemptible: Boolean): BackendJobExecutionResponse = { - - val runStatus = UnsuccessfulRunStatus(errorCode, Option(innerErrorMessage), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), expectPreemptible) + private def runAndFail(previousPreemptions: Int, + previousUnexpectedRetries: Int, + preemptible: Int, + errorCode: Status, + innerErrorMessage: String, + expectPreemptible: Boolean + ): BackendJobExecutionResponse = { + + val runStatus = UnsuccessfulRunStatus(errorCode, + Option(innerErrorMessage), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + expectPreemptible + ) val statusPoller = TestProbe("statusPoller") val promise = Promise[BackendJobExecutionResponse]() - val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, previousPreemptions, previousUnexpectedRetries) + val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, previousPreemptions, previousUnexpectedRetries) // TODO: Use this to check the new KV entries are there! - //val kvProbe = TestProbe("kvProbe") + // val kvProbe = TestProbe("kvProbe") val backend = executionActor(jobDescriptor, promise, statusPoller.ref, expectPreemptible) backend ! Execute - statusPoller.expectMsgPF(max = Timeout, hint = "awaiting status poll") { - case _: PAPIStatusPollRequest => backend ! runStatus + statusPoller.expectMsgPF(max = Timeout, hint = "awaiting status poll") { case _: PAPIStatusPollRequest => + backend ! 
runStatus } Await.result(promise.future, Timeout) } - def buildPreemptibleTestActorRef(attempt: Int, preemptible: Int, failedRetriesCountOpt: Option[Int] = None): TestActorRef[TestablePipelinesApiJobExecutionActor] = { + def buildPreemptibleTestActorRef(attempt: Int, + preemptible: Int, + failedRetriesCountOpt: Option[Int] = None + ): TestActorRef[TestablePipelinesApiJobExecutionActor] = { // For this test we say that all previous attempts were preempted: - val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, attempt - 1, previousUnexpectedRetries = 0, failedRetriesCountOpt = failedRetriesCountOpt) - val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), - papiConfiguration, - TestableJesExpressionFunctions, - emptyActor, - failIoActor)) + val jobDescriptor = buildPreemptibleJobDescriptor(preemptible, + attempt - 1, + previousUnexpectedRetries = 0, + failedRetriesCountOpt = failedRetriesCountOpt + ) + val props = Props( + new TestablePipelinesApiJobExecutionActor(jobDescriptor, + Promise(), + papiConfiguration, + TestableJesExpressionFunctions, + emptyActor, + failIoActor + ) + ) TestActorRef(props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") } @@ -294,12 +384,33 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite { // Set of "handle call failures appropriately with respect to preemption and failure" tests val expectations = Table( - ("previous_preemptions", "previous_unexpectedRetries", "preemptible", "errorCode", "message", "shouldRunAsPreemptible", "shouldRetry"), + ("previous_preemptions", + "previous_unexpectedRetries", + "preemptible", + "errorCode", + "message", + "shouldRunAsPreemptible", + "shouldRetry" + ), // No preemptible attempts allowed, but standard failures should be retried. - (0, 0, 0, Status.ABORTED, "13: retryable error", false, true), // This is the new "unexpected failure" mode, which is now retried + (0, + 0, + 0, + Status.ABORTED, + "13: retryable error", + false, + true + ), // This is the new "unexpected failure" mode, which is now retried (0, 1, 0, Status.ABORTED, "13: retryable error", false, true), (0, 2, 0, Status.ABORTED, "13: retryable error", false, false), // The third unexpected failure is a real failure. - (0, 0, 0, Status.ABORTED, "14: usually means preempted...?", false, false), // Usually means "preempted', but this wasn't a preemptible VM, so this should just be a failure. + (0, + 0, + 0, + Status.ABORTED, + "14: usually means preempted...?", + false, + false + ), // Usually means "preempted', but this wasn't a preemptible VM, so this should just be a failure. 
(0, 0, 0, Status.ABORTED, "15: other error", false, false), (0, 0, 0, Status.OUT_OF_RANGE, "13: unexpected error", false, false), (0, 0, 0, Status.OUT_OF_RANGE, "14: test error msg", false, false), @@ -328,27 +439,62 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite (1, 0, 1, Status.OUT_OF_RANGE, "Instance failed to start due to preemption.", false, false) ) - expectations foreach { case (previousPreemptions, previousUnexpectedRetries, preemptible, errorCode, innerErrorMessage, shouldBePreemptible, shouldRetry) => - val descriptor = s"previousPreemptions=$previousPreemptions, previousUnexpectedRetries=$previousUnexpectedRetries preemptible=$preemptible, errorCode=$errorCode, innerErrorMessage=$innerErrorMessage" - it should s"handle call failures appropriately with respect to preemption and failure ($descriptor)" in { - runAndFail(previousPreemptions, previousUnexpectedRetries, preemptible, errorCode, innerErrorMessage, shouldBePreemptible) match { - case response: JobFailedNonRetryableResponse => - if(shouldRetry) fail(s"A should-be-retried job ($descriptor) was sent back to the engine with: $response") - case response: JobFailedRetryableResponse => - if(!shouldRetry) fail(s"A shouldn't-be-retried job ($descriptor) was sent back to the engine with $response") - case huh => fail(s"Unexpected response: $huh") + expectations foreach { + case (previousPreemptions, + previousUnexpectedRetries, + preemptible, + errorCode, + innerErrorMessage, + shouldBePreemptible, + shouldRetry + ) => + val descriptor = + s"previousPreemptions=$previousPreemptions, previousUnexpectedRetries=$previousUnexpectedRetries preemptible=$preemptible, errorCode=$errorCode, innerErrorMessage=$innerErrorMessage" + it should s"handle call failures appropriately with respect to preemption and failure ($descriptor)" in { + runAndFail(previousPreemptions, + previousUnexpectedRetries, + preemptible, + errorCode, + innerErrorMessage, + shouldBePreemptible + ) match { + case response: JobFailedNonRetryableResponse => + if (shouldRetry) + fail(s"A should-be-retried job ($descriptor) was sent back to the engine with: $response") + case response: JobFailedRetryableResponse => + if (!shouldRetry) + fail(s"A shouldn't-be-retried job ($descriptor) was sent back to the engine with $response") + case huh => fail(s"Unexpected response: $huh") + } } - } } } it should "send proper value for \"number of reference files used gauge\" metric, or don't send anything if reference disks feature is disabled" in { - val expectedInput1 = PipelinesApiFileInput(name = "testfile1", relativeHostPath = DefaultPathBuilder.build(Paths.get(s"test/reference/path/file1")), mount = null, cloudPath = null) - val expectedInput2 = PipelinesApiFileInput(name = "testfile2", relativeHostPath = DefaultPathBuilder.build(Paths.get(s"test/reference/path/file2")), mount = null, cloudPath = null) + val expectedInput1 = PipelinesApiFileInput(name = "testfile1", + relativeHostPath = + DefaultPathBuilder.build(Paths.get(s"test/reference/path/file1")), + mount = null, + cloudPath = null + ) + val expectedInput2 = PipelinesApiFileInput(name = "testfile2", + relativeHostPath = + DefaultPathBuilder.build(Paths.get(s"test/reference/path/file2")), + mount = null, + cloudPath = null + ) val expectedReferenceInputFiles = Set[PipelinesApiInput](expectedInput1, expectedInput2) - val expectedMsg1 = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput1.relativeHostPath.pathAsString)))) - val 
expectedMsg2 = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput2.relativeHostPath.pathAsString)))) + val expectedMsg1 = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput1.relativeHostPath.pathAsString)) + ) + ) + val expectedMsg2 = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, NonEmptyList.of("referencefiles", expectedInput2.relativeHostPath.pathAsString)) + ) + ) val jobDescriptor = buildPreemptibleJobDescriptor(0, 0, 0) val serviceRegistryProbe = TestProbe() @@ -381,59 +527,74 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val serviceRegistryProbe = TestProbe() val madeUpDockerImageName = "test_madeup_docker_image_name" - val expectedMessageWhenRequestedNotFound = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList("docker", List("image", "cache", "image_not_in_cache", madeUpDockerImageName))))) + val expectedMessageWhenRequestedNotFound = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, + NonEmptyList("docker", List("image", "cache", "image_not_in_cache", madeUpDockerImageName)) + ) + ) + ) val backendDockerCacheRequestedButNotFound = executionActor( jobDescriptor, Promise[BackendJobExecutionResponse](), TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - None, - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = true - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + None, + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = true ) + ) ) backendDockerCacheRequestedButNotFound ! Execute serviceRegistryProbe.expectMsg(expectedMessageWhenRequestedNotFound) - val expectedMessageWhenRequestedAndFound = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList("docker", List("image", "cache", "used_image_from_cache", madeUpDockerImageName))))) + val expectedMessageWhenRequestedAndFound = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, + NonEmptyList("docker", List("image", "cache", "used_image_from_cache", madeUpDockerImageName)) + ) + ) + ) val backendDockerCacheRequestedAndFound = executionActor( jobDescriptor, Promise[BackendJobExecutionResponse](), TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - Option("test_madeup_disk_image_name"), - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = true - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + Option("test_madeup_disk_image_name"), + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = true ) + ) ) backendDockerCacheRequestedAndFound ! 
Execute serviceRegistryProbe.expectMsg(expectedMessageWhenRequestedAndFound) - val expectedMessageWhenNotRequestedButFound = InstrumentationServiceMessage(CromwellIncrement(CromwellBucket(List.empty, NonEmptyList("docker", List("image", "cache", "cached_image_not_used", madeUpDockerImageName))))) + val expectedMessageWhenNotRequestedButFound = InstrumentationServiceMessage( + CromwellIncrement( + CromwellBucket(List.empty, + NonEmptyList("docker", List("image", "cache", "cached_image_not_used", madeUpDockerImageName)) + ) + ) + ) val backendDockerCacheNotRequestedButFound = executionActor( jobDescriptor, Promise[BackendJobExecutionResponse](), TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - Option("test_madeup_disk_image_name"), - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = false - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + Option("test_madeup_disk_image_name"), + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = false ) + ) ) backendDockerCacheNotRequestedButFound ! Execute serviceRegistryProbe.expectMsg(expectedMessageWhenNotRequestedButFound) @@ -444,14 +605,13 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite TestProbe().ref, shouldBePreemptible = false, serviceRegistryActor = serviceRegistryProbe.ref, - dockerImageCacheTestingParamsOpt = - Option( - DockerImageCacheTestingParameters( - None, - "test_madeup_docker_image_name", - isDockerImageCacheUsageRequested = false - ) + dockerImageCacheTestingParamsOpt = Option( + DockerImageCacheTestingParameters( + None, + "test_madeup_docker_image_name", + isDockerImageCacheUsageRequested = false ) + ) ) backendDockerCacheNotRequestedNotFound ! 
Execute serviceRegistryProbe.expectNoMessage(timeout) @@ -463,7 +623,15 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val runId = StandardAsyncJob(UUID.randomUUID().toString) val handle = new JesPendingExecutionHandle(null, runId, None, None) - val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, Option("14: VM XXX shut down unexpectedly."), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failedStatus = UnsuccessfulRunStatus( + Status.ABORTED, + Option("14: VM XXX shut down unexpectedly."), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) val executionResult = jesBackend.handleExecutionResult(failedStatus, handle) val result = Await.result(executionResult, timeout) result.isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true @@ -477,7 +645,15 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val runId = StandardAsyncJob(UUID.randomUUID().toString) val handle = new JesPendingExecutionHandle(null, runId, None, None) - val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, Option("14: VM XXX shut down unexpectedly."), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failedStatus = UnsuccessfulRunStatus( + Status.ABORTED, + Option("14: VM XXX shut down unexpectedly."), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) val executionResult = jesBackend.handleExecutionResult(failedStatus, handle) val result = Await.result(executionResult, timeout) result.isInstanceOf[FailedRetryableExecutionHandle] shouldBe true @@ -492,7 +668,15 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val runId = StandardAsyncJob(UUID.randomUUID().toString) val handle = new JesPendingExecutionHandle(null, runId, None, None) - val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, Option("14: VM XXX shut down unexpectedly."), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failedStatus = UnsuccessfulRunStatus( + Status.ABORTED, + Option("14: VM XXX shut down unexpectedly."), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) val executionResult = jesBackend.handleExecutionResult(failedStatus, handle) val result = Await.result(executionResult, timeout) result.isInstanceOf[FailedRetryableExecutionHandle] shouldBe true @@ -507,7 +691,14 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val runId = StandardAsyncJob(UUID.randomUUID().toString) val handle = new JesPendingExecutionHandle(null, runId, None, None) - val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, Option("13: Retryable Error."), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, + Option("13: Retryable Error."), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) val executionResult = jesBackend.handleExecutionResult(failedStatus, handle) val result = Await.result(executionResult, timeout) result.isInstanceOf[FailedRetryableExecutionHandle] shouldBe true @@ -522,7 +713,15 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val runId = StandardAsyncJob(UUID.randomUUID().toString) 
val handle = new JesPendingExecutionHandle(null, runId, None, None) - val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, Option(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failedStatus = UnsuccessfulRunStatus( + Status.ABORTED, + Option(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) val executionResult = jesBackend.handleExecutionResult(failedStatus, handle) val result = Await.result(executionResult, timeout) result.isInstanceOf[FailedRetryableExecutionHandle] shouldBe true @@ -537,7 +736,15 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val runId = StandardAsyncJob(UUID.randomUUID().toString) val handle = new JesPendingExecutionHandle(null, runId, None, None) - val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, Option(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failedStatus = UnsuccessfulRunStatus( + Status.ABORTED, + Option(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) val executionResult = jesBackend.handleExecutionResult(failedStatus, handle) val result = Await.result(executionResult, timeout) result.isInstanceOf[FailedRetryableExecutionHandle] shouldBe true @@ -552,32 +759,42 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val runId = StandardAsyncJob(UUID.randomUUID().toString) val handle = new JesPendingExecutionHandle(null, runId, None, None) - val failedStatus = UnsuccessfulRunStatus(Status.ABORTED, Option(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style), Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failedStatus = UnsuccessfulRunStatus( + Status.ABORTED, + Option(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style), + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) val executionResult = jesBackend.handleExecutionResult(failedStatus, handle) val result = Await.result(executionResult, timeout) result.isInstanceOf[FailedRetryableExecutionHandle] shouldBe true - val probe = TestProbe("probe") val job = jesBackend.workflowDescriptor.callable.taskCallNodes.head val key = BackendJobDescriptorKey(job, None, attempt = 2) - val scopedKeyPreempt = ScopedKey(jesBackend.workflowId, KvJobKey(key), PipelinesApiBackendLifecycleActorFactory.preemptionCountKey) + val scopedKeyPreempt = + ScopedKey(jesBackend.workflowId, KvJobKey(key), PipelinesApiBackendLifecycleActorFactory.preemptionCountKey) probe.send(kvService, KvGet(scopedKeyPreempt)) - probe.expectMsgPF(5.seconds) { - case kvPreempt: KvPair => kvPreempt.value shouldBe 1.toString + probe.expectMsgPF(5.seconds) { case kvPreempt: KvPair => + kvPreempt.value shouldBe 1.toString } - val scopedKeyUnexpected = ScopedKey(jesBackend.workflowId, KvJobKey(key), PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey) + val scopedKeyUnexpected = + ScopedKey(jesBackend.workflowId, KvJobKey(key), PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey) probe.send(kvService, KvGet(scopedKeyUnexpected)) - probe.expectMsgPF(5.seconds) { - case 
kvUnexpected: KvPair => kvUnexpected.value shouldBe 0.toString + probe.expectMsgPF(5.seconds) { case kvUnexpected: KvPair => + kvUnexpected.value shouldBe 0.toString } - val scopedKeyFailedAttempts = ScopedKey(jesBackend.workflowId, KvJobKey(key), BackendLifecycleActorFactory.FailedRetryCountKey) + val scopedKeyFailedAttempts = + ScopedKey(jesBackend.workflowId, KvJobKey(key), BackendLifecycleActorFactory.FailedRetryCountKey) probe.send(kvService, KvGet(scopedKeyFailedAttempts)) - probe.expectMsgPF(5.seconds) { - case kvFailedRetry: KvPair => kvFailedRetry.value shouldBe 1.toString + probe.expectMsgPF(5.seconds) { case kvFailedRetry: KvPair => + kvFailedRetry.value shouldBe 1.toString } } @@ -588,14 +805,23 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val handle = new JesPendingExecutionHandle(null, runId, None, None) def checkFailedResult(errorCode: Status, errorMessage: Option[String]): ExecutionHandle = { - val failed = UnsuccessfulRunStatus(errorCode, errorMessage, Seq.empty, Option("fakeMachine"), Option("fakeZone"), Option("fakeInstance"), wasPreemptible = true) + val failed = UnsuccessfulRunStatus(errorCode, + errorMessage, + Seq.empty, + Option("fakeMachine"), + Option("fakeZone"), + Option("fakeInstance"), + wasPreemptible = true + ) Await.result(jesBackend.handleExecutionResult(failed, handle), timeout) } checkFailedResult(Status.ABORTED, Option("15: Other type of error.")) .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true - checkFailedResult(Status.OUT_OF_RANGE, Option("14: Wrong errorCode.")).isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true - checkFailedResult(Status.ABORTED, Option("Weird error message.")).isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true + checkFailedResult(Status.OUT_OF_RANGE, Option("14: Wrong errorCode.")) + .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true + checkFailedResult(Status.ABORTED, Option("Weird error message.")) + .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true checkFailedResult(Status.ABORTED, Option("UnparsableInt: Even weirder error message.")) .isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true checkFailedResult(Status.ABORTED, None).isInstanceOf[FailedNonRetryableExecutionHandle] shouldBe true @@ -632,14 +858,20 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite gcsFileKey -> gcsFileVal ) - val wdlNamespace = WdlNamespaceWithWorkflow.load( - wdlString, - Seq.empty[Draft2ImportResolver], - ).get - val womWorkflow = wdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) + val wdlNamespace = WdlNamespaceWithWorkflow + .load( + wdlString, + Seq.empty[Draft2ImportResolver] + ) + .get + val womWorkflow = wdlNamespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) wdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), @@ -652,19 +884,28 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite None ) - val call: CommandCallNode = 
workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t }).get + val call: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => + t + }.get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") - + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) - def gcsPathToLocal(womValue: WomValue): WomValue = { + def gcsPathToLocal(womValue: WomValue): WomValue = WomFileMapper.mapWomFiles(testActorRef.underlyingActor.mapCommandLineWomFile)(womValue).get - } val mappedInputs = jobDescriptor.localInputs safeMapValues gcsPathToLocal @@ -687,50 +928,67 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite } private val dockerAndDiskMapsWdlNamespace = - WdlNamespaceWithWorkflow.load( - SampleWdl.CurrentDirectoryMaps.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver], - ).get + WdlNamespaceWithWorkflow + .load( + SampleWdl.CurrentDirectoryMaps.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get private val dockerAndDiskArrayWdlNamespace = - WdlNamespaceWithWorkflow.load( - SampleWdl.CurrentDirectoryArray.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver], - ).get + WdlNamespaceWithWorkflow + .load( + SampleWdl.CurrentDirectoryArray.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get private val dockerAndDiskFilesWdlNamespace = - WdlNamespaceWithWorkflow.load( - SampleWdl.CurrentDirectoryFiles.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver], - ).get + WdlNamespaceWithWorkflow + .load( + SampleWdl.CurrentDirectoryFiles.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get it should "generate correct JesFileInputs from a WdlMap" in { val inputs: Map[String, WomValue] = Map( - "stringToFileMap" -> WomMap(WomMapType(WomStringType, WomSingleFileType), Map( - WomString("stringTofile1") -> WomSingleFile("gs://path/to/stringTofile1"), - WomString("stringTofile2") -> WomSingleFile("gs://path/to/stringTofile2") - )), - "fileToStringMap" -> WomMap(WomMapType(WomSingleFileType, WomStringType), Map( - WomSingleFile("gs://path/to/fileToString1") -> WomString("fileToString1"), - WomSingleFile("gs://path/to/fileToString2") -> WomString("fileToString2") - )), - "fileToFileMap" -> WomMap(WomMapType(WomSingleFileType, WomSingleFileType), Map( - WomSingleFile("gs://path/to/fileToFile1Key") -> WomSingleFile("gs://path/to/fileToFile1Value"), - WomSingleFile("gs://path/to/fileToFile2Key") -> WomSingleFile("gs://path/to/fileToFile2Value") - )), - "stringToString" -> WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("stringToString1") -> WomString("path/to/stringToString1"), - 
WomString("stringToString2") -> WomString("path/to/stringToString2") - )) + "stringToFileMap" -> WomMap( + WomMapType(WomStringType, WomSingleFileType), + Map( + WomString("stringTofile1") -> WomSingleFile("gs://path/to/stringTofile1"), + WomString("stringTofile2") -> WomSingleFile("gs://path/to/stringTofile2") + ) + ), + "fileToStringMap" -> WomMap( + WomMapType(WomSingleFileType, WomStringType), + Map( + WomSingleFile("gs://path/to/fileToString1") -> WomString("fileToString1"), + WomSingleFile("gs://path/to/fileToString2") -> WomString("fileToString2") + ) + ), + "fileToFileMap" -> WomMap( + WomMapType(WomSingleFileType, WomSingleFileType), + Map( + WomSingleFile("gs://path/to/fileToFile1Key") -> WomSingleFile("gs://path/to/fileToFile1Value"), + WomSingleFile("gs://path/to/fileToFile2Key") -> WomSingleFile("gs://path/to/fileToFile2Value") + ) + ), + "stringToString" -> WomMap( + WomMapType(WomStringType, WomStringType), + Map( + WomString("stringToString1") -> WomString("path/to/stringToString1"), + WomString("stringToString2") -> WomString("path/to/stringToString2") + ) + ) ) - val workflowInputs = inputs map { - case (key, value) => (s"wf_whereami.whereami.$key", value) + val workflowInputs = inputs map { case (key, value) => + (s"wf_whereami.whereami.$key", value) } val womWorkflow = - dockerAndDiskMapsWdlNamespace - .workflow + dockerAndDiskMapsWdlNamespace.workflow .toWomWorkflowDefinition(isASubworkflow = false) .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) val womExecutableChecked = @@ -738,7 +996,9 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite .toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) womExecutableChecked match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -753,62 +1013,87 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val jesInputs = testActorRef.underlyingActor.generateInputs(jobDescriptor) jesInputs should have size 8 - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.stringToFileMap-0", - cloudPath = gcsPath("gs://path/to/stringTofile1"), - relativeHostPath = DefaultPathBuilder.get("path/to/stringTofile1"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.stringToFileMap-1", - cloudPath = 
gcsPath("gs://path/to/stringTofile2"), - relativeHostPath = DefaultPathBuilder.get("path/to/stringTofile2"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileToStringMap-0", - cloudPath = gcsPath("gs://path/to/fileToString1"), - relativeHostPath = DefaultPathBuilder.get("path/to/fileToString1"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileToStringMap-1", - cloudPath = gcsPath("gs://path/to/fileToString2"), - relativeHostPath = DefaultPathBuilder.get("path/to/fileToString2"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileToFileMap-0", - cloudPath = gcsPath("gs://path/to/fileToFile1Key"), - relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile1Key"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileToFileMap-1", - cloudPath = gcsPath("gs://path/to/fileToFile1Value"), - relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile1Value"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileToFileMap-2", - cloudPath = gcsPath("gs://path/to/fileToFile2Key"), - relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile2Key"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileToFileMap-3", - cloudPath = gcsPath("gs://path/to/fileToFile2Value"), - relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile2Value"), - mount = workingDisk, - )) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.stringToFileMap-0", + cloudPath = gcsPath("gs://path/to/stringTofile1"), + relativeHostPath = DefaultPathBuilder.get("path/to/stringTofile1"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.stringToFileMap-1", + cloudPath = gcsPath("gs://path/to/stringTofile2"), + relativeHostPath = DefaultPathBuilder.get("path/to/stringTofile2"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.fileToStringMap-0", + cloudPath = gcsPath("gs://path/to/fileToString1"), + relativeHostPath = DefaultPathBuilder.get("path/to/fileToString1"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.fileToStringMap-1", + cloudPath = gcsPath("gs://path/to/fileToString2"), + relativeHostPath = DefaultPathBuilder.get("path/to/fileToString2"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.fileToFileMap-0", + cloudPath = gcsPath("gs://path/to/fileToFile1Key"), + relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile1Key"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.fileToFileMap-1", + cloudPath = gcsPath("gs://path/to/fileToFile1Value"), + relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile1Value"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.fileToFileMap-2", + cloudPath = gcsPath("gs://path/to/fileToFile2Key"), + relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile2Key"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = 
"wf_whereami.whereami.fileToFileMap-3", + cloudPath = gcsPath("gs://path/to/fileToFile2Value"), + relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile2Value"), + mount = workingDisk + ) + ) case Left(badness) => fail(badness.toList.mkString(", ")) } @@ -818,17 +1103,17 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite workflowInputs: Map[FullyQualifiedName, WomValue], callName: LocallyQualifiedName, callInputs: Map[LocallyQualifiedName, WomValue], - functions: PipelinesApiExpressionFunctions = TestableJesExpressionFunctions, - ): - TestActorRef[TestablePipelinesApiJobExecutionActor] = { + functions: PipelinesApiExpressionFunctions = TestableJesExpressionFunctions + ): TestActorRef[TestablePipelinesApiJobExecutionActor] = { val wdlNamespaceWithWorkflow = - WdlNamespaceWithWorkflow.load( - sampleWdl.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver], - ).get + WdlNamespaceWithWorkflow + .load( + sampleWdl.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get val womWorkflow = - wdlNamespaceWithWorkflow - .workflow + wdlNamespaceWithWorkflow.workflow .toWomWorkflowDefinition(isASubworkflow = false) .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) val womExecutableChecked = @@ -836,11 +1121,13 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite .toWomExecutable( Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, - strictValidation = true, + strictValidation = true ) womExecutableChecked match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -863,11 +1150,16 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite evaluatedTaskInputs = fqnWdlMapToDeclarationMap(callInputs), maybeCallCachingEligible = NoDocker, dockerSize = None, - prefetchedKvStoreEntries = Map.empty, + prefetchedKvStoreEntries = Map.empty ) - val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration, functions)) - TestActorRef[TestablePipelinesApiJobExecutionActor](props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + val props = Props( + new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration, functions) + ) + TestActorRef[TestablePipelinesApiJobExecutionActor]( + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) case Left(badness) => fail(badness.toList.mkString(", ")) } } @@ -877,23 +1169,33 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val workflowInputs = Map("file_passing.f" -> womFile) val callInputs = Map( "in" -> womFile, // how does one programmatically map the wf inputs to the call inputs? - "out_name" -> WomString("out"), // is it expected that this isn't using the default? + "out_name" -> WomString("out") // is it expected that this isn't using the default? 
) val jesBackend = makeJesActorRef(SampleWdl.FilePassingWorkflow, workflowInputs, "a", callInputs).underlyingActor val jobDescriptor = jesBackend.jobDescriptor val workflowId = jesBackend.workflowId val jesInputs = jesBackend.generateInputs(jobDescriptor) jesInputs should have size 1 - jesInputs should contain(PipelinesApiFileInput( - name = "file_passing.a.in-0", - cloudPath = gcsPath("gs://blah/b/c.txt"), - relativeHostPath = DefaultPathBuilder.get("blah/b/c.txt"), - mount = workingDisk, - )) + jesInputs should contain( + PipelinesApiFileInput( + name = "file_passing.a.in-0", + cloudPath = gcsPath("gs://blah/b/c.txt"), + relativeHostPath = DefaultPathBuilder.get("blah/b/c.txt"), + mount = workingDisk + ) + ) val jesOutputs = jesBackend.generateOutputs(jobDescriptor) jesOutputs should have size 1 - jesOutputs should contain(PipelinesApiFileOutput("out", - gcsPath(s"gs://my-cromwell-workflows-bucket/file_passing/$workflowId/call-a/out"), DefaultPathBuilder.get("out"), workingDisk, optional = false, secondary = false)) + jesOutputs should contain( + PipelinesApiFileOutput( + "out", + gcsPath(s"gs://my-cromwell-workflows-bucket/file_passing/$workflowId/call-a/out"), + DefaultPathBuilder.get("out"), + workingDisk, + optional = false, + secondary = false + ) + ) } it should "generate correct JesInputs when a command line contains a write_lines call in it" in { @@ -901,10 +1203,10 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite "strs" -> WomArray(WomArrayType(WomStringType), Seq("A", "B", "C").map(WomString)) ) - class TestPipelinesApiExpressionFunctions extends PipelinesApiExpressionFunctions(TestableStandardExpressionFunctionsParams) { - override def writeFile(path: String, content: String): Future[WomSingleFile] = { + class TestPipelinesApiExpressionFunctions + extends PipelinesApiExpressionFunctions(TestableStandardExpressionFunctionsParams) { + override def writeFile(path: String, content: String): Future[WomSingleFile] = Future.fromTry(Success(WomSingleFile(s"gs://some/path/file.txt"))) - } } val functions = new TestPipelinesApiExpressionFunctions @@ -912,12 +1214,14 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val jobDescriptor = jesBackend.jobDescriptor val jesInputs = jesBackend.generateInputs(jobDescriptor) jesInputs should have size 1 - jesInputs should contain(PipelinesApiFileInput( - name = "c35ad8d3-0", - cloudPath = gcsPath("gs://some/path/file.txt"), - relativeHostPath = DefaultPathBuilder.get("some/path/file.txt"), - mount = workingDisk, - )) + jesInputs should contain( + PipelinesApiFileInput( + name = "c35ad8d3-0", + cloudPath = gcsPath("gs://some/path/file.txt"), + relativeHostPath = DefaultPathBuilder.get("some/path/file.txt"), + mount = workingDisk + ) + ) val jesOutputs = jesBackend.generateOutputs(jobDescriptor) jesOutputs should have size 0 } @@ -925,16 +1229,17 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "generate correct JesFileInputs from a WdlArray" in { val inputs: Map[String, WomValue] = Map( "fileArray" -> - WomArray(WomArrayType(WomSingleFileType), Seq(WomSingleFile("gs://path/to/file1"), WomSingleFile("gs://path/to/file2"))) + WomArray(WomArrayType(WomSingleFileType), + Seq(WomSingleFile("gs://path/to/file1"), WomSingleFile("gs://path/to/file2")) + ) ) - val workflowInputs = inputs map { - case (key, value) => (s"wf_whereami.whereami.$key", value) + val workflowInputs = inputs map { case (key, value) => + (s"wf_whereami.whereami.$key", value) } val womWorkflow = 
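    // Editor's note (annotation, not part of the original patch): this hunk re-wraps the
    // same WDL-to-WOM fixture pipeline used throughout this spec; nothing behavioral
    // changes. The recurring steps, as they appear in the surrounding code, are:
    //   1. WdlNamespaceWithWorkflow.load(source, Seq.empty[Draft2ImportResolver]).get
    //   2. .workflow.toWomWorkflowDefinition(isASubworkflow = false)
    //   3. .toWomExecutable(Option(inputsJson), NoIoFunctionSet, strictValidation = true)
    //   4. womExecutable.resolvedExecutableInputs, flat-mapped to port -> WomValue pairs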
- dockerAndDiskArrayWdlNamespace - .workflow + dockerAndDiskArrayWdlNamespace.workflow .toWomWorkflowDefinition(isASubworkflow = false) .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) val womExecutableChecked = @@ -942,7 +1247,9 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite .toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) womExecutableChecked match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -957,26 +1264,39 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val jesInputs = testActorRef.underlyingActor.generateInputs(jobDescriptor) jesInputs should have size 2 - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileArray-0", - cloudPath = gcsPath("gs://path/to/file1"), - relativeHostPath = DefaultPathBuilder.get("path/to/file1"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.fileArray-1", - cloudPath = gcsPath("gs://path/to/file2"), - relativeHostPath = DefaultPathBuilder.get("path/to/file2"), - mount = workingDisk, - )) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.fileArray-0", + cloudPath = gcsPath("gs://path/to/file1"), + relativeHostPath = DefaultPathBuilder.get("path/to/file1"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.fileArray-1", + cloudPath = gcsPath("gs://path/to/file2"), + relativeHostPath = DefaultPathBuilder.get("path/to/file2"), + mount = workingDisk + ) + ) case Left(badness) => fail(badness.toList.mkString(", ")) } } @@ -987,13 +1307,12 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite "file2" -> WomSingleFile("gs://path/to/file2") ) - val workflowInputs = inputs map { - case (key, value) => (s"wf_whereami.whereami.$key", value) + val workflowInputs = inputs map { case (key, value) => + (s"wf_whereami.whereami.$key", value) } val womWorkflow = - dockerAndDiskFilesWdlNamespace - .workflow + dockerAndDiskFilesWdlNamespace.workflow .toWomWorkflowDefinition(isASubworkflow = false) .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")) val womExecutableChecked = @@ -1001,7 +1320,9 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite 
.toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) womExecutableChecked match { case Right(womExecutable) => - val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }}) + val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap { case (port, v) => + v.select[WomValue] map { port -> _ } + } val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), womWorkflow, @@ -1016,26 +1337,39 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) + val jobDescriptor = BackendJobDescriptor(workflowDescriptor, + key, + runtimeAttributes, + fqnWdlMapToDeclarationMap(inputs), + NoDocker, + None, + Map.empty + ) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val jesInputs = testActorRef.underlyingActor.generateInputs(jobDescriptor) jesInputs should have size 2 - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.file1-0", - cloudPath = gcsPath("gs://path/to/file1"), - relativeHostPath = DefaultPathBuilder.get("path/to/file1"), - mount = workingDisk, - )) - jesInputs should contain(PipelinesApiFileInput( - name = "wf_whereami.whereami.file2-0", - cloudPath = gcsPath("gs://path/to/file2"), - relativeHostPath = DefaultPathBuilder.get("path/to/file2"), - mount = workingDisk, - )) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.file1-0", + cloudPath = gcsPath("gs://path/to/file1"), + relativeHostPath = DefaultPathBuilder.get("path/to/file1"), + mount = workingDisk + ) + ) + jesInputs should contain( + PipelinesApiFileInput( + name = "wf_whereami.whereami.file2-0", + cloudPath = gcsPath("gs://path/to/file2"), + relativeHostPath = DefaultPathBuilder.get("path/to/file2"), + mount = workingDisk + ) + ) case Left(badness) => fail(badness.toList.mkString(", ")) } @@ -1043,30 +1377,69 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "convert local Paths back to corresponding GCS paths in JesOutputs" in { val jesOutputs = Set( - PipelinesApiFileOutput("/cromwell_root/path/to/file1", gcsPath("gs://path/to/file1"), - DefaultPathBuilder.get("/cromwell_root/path/to/file1"), workingDisk, optional = false, secondary = false), - PipelinesApiFileOutput("/cromwell_root/path/to/file2", gcsPath("gs://path/to/file2"), - DefaultPathBuilder.get("/cromwell_root/path/to/file2"), workingDisk, optional = false, secondary = false), - PipelinesApiFileOutput("/cromwell_root/path/to/file3", gcsPath("gs://path/to/file3"), - DefaultPathBuilder.get("/cromwell_root/path/to/file3"), workingDisk, optional = false, secondary = false), - PipelinesApiFileOutput("/cromwell_root/path/to/file4", gcsPath("gs://path/to/file4"), - DefaultPathBuilder.get("/cromwell_root/path/to/file4"), workingDisk, optional = false, secondary = false), - PipelinesApiFileOutput("/cromwell_root/path/to/file5", 
gcsPath("gs://path/to/file5"), - DefaultPathBuilder.get("/cromwell_root/path/to/file5"), workingDisk, optional = false, secondary = false) + PipelinesApiFileOutput( + "/cromwell_root/path/to/file1", + gcsPath("gs://path/to/file1"), + DefaultPathBuilder.get("/cromwell_root/path/to/file1"), + workingDisk, + optional = false, + secondary = false + ), + PipelinesApiFileOutput( + "/cromwell_root/path/to/file2", + gcsPath("gs://path/to/file2"), + DefaultPathBuilder.get("/cromwell_root/path/to/file2"), + workingDisk, + optional = false, + secondary = false + ), + PipelinesApiFileOutput( + "/cromwell_root/path/to/file3", + gcsPath("gs://path/to/file3"), + DefaultPathBuilder.get("/cromwell_root/path/to/file3"), + workingDisk, + optional = false, + secondary = false + ), + PipelinesApiFileOutput( + "/cromwell_root/path/to/file4", + gcsPath("gs://path/to/file4"), + DefaultPathBuilder.get("/cromwell_root/path/to/file4"), + workingDisk, + optional = false, + secondary = false + ), + PipelinesApiFileOutput( + "/cromwell_root/path/to/file5", + gcsPath("gs://path/to/file5"), + DefaultPathBuilder.get("/cromwell_root/path/to/file5"), + workingDisk, + optional = false, + secondary = false + ) ) val outputValues = Seq( WomSingleFile("/cromwell_root/path/to/file1"), - WomArray(WomArrayType(WomSingleFileType), Seq( - WomSingleFile("/cromwell_root/path/to/file2"), WomSingleFile("/cromwell_root/path/to/file3"))), - WomMap(WomMapType(WomSingleFileType, WomSingleFileType), Map( - WomSingleFile("/cromwell_root/path/to/file4") -> WomSingleFile("/cromwell_root/path/to/file5") - )) + WomArray(WomArrayType(WomSingleFileType), + Seq(WomSingleFile("/cromwell_root/path/to/file2"), WomSingleFile("/cromwell_root/path/to/file3")) + ), + WomMap(WomMapType(WomSingleFileType, WomSingleFileType), + Map( + WomSingleFile("/cromwell_root/path/to/file4") -> WomSingleFile("/cromwell_root/path/to/file5") + ) + ) ) val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), - WdlNamespaceWithWorkflow.load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, NoOptions, Labels.empty, @@ -1078,32 +1451,44 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) - def wdlValueToGcsPath(jesOutputs: Set[PipelinesApiFileOutput])(womValue: WomValue): WomValue = { + def 
wdlValueToGcsPath(jesOutputs: Set[PipelinesApiFileOutput])(womValue: WomValue): WomValue = WomFileMapper.mapWomFiles(testActorRef.underlyingActor.womFileToGcsPath(jesOutputs.toSet))(womValue).get - } val result = outputValues map wdlValueToGcsPath(jesOutputs) result should have size 3 result should contain(WomSingleFile("gs://path/to/file1")) - result should contain(WomArray(WomArrayType(WomSingleFileType), - Seq(WomSingleFile("gs://path/to/file2"), WomSingleFile("gs://path/to/file3"))) + result should contain( + WomArray(WomArrayType(WomSingleFileType), + Seq(WomSingleFile("gs://path/to/file2"), WomSingleFile("gs://path/to/file3")) + ) ) - result should contain(WomMap(WomMapType(WomSingleFileType, WomSingleFileType), - Map(WomSingleFile("gs://path/to/file4") -> WomSingleFile("gs://path/to/file5"))) + result should contain( + WomMap(WomMapType(WomSingleFileType, WomSingleFileType), + Map(WomSingleFile("gs://path/to/file4") -> WomSingleFile("gs://path/to/file5")) + ) ) } it should "create a JesFileInput for the monitoring script, when specified" in { val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), - WdlNamespaceWithWorkflow.load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, WorkflowOptions.fromJsonString("""{"monitoring_script": "gs://path/to/script"}""").get, Labels.empty, @@ -1115,21 +1500,36 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val job: CommandCallNode = workflowDescriptor.callable.taskCallNodes.head val runtimeAttributes = makeRuntimeAttributes(job) val key = BackendJobDescriptorKey(job, None, 1) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) testActorRef.underlyingActor.monitoringScript shouldBe - Option(PipelinesApiFileInput("monitoring-in", gcsPath("gs://path/to/script"), DefaultPathBuilder.get("monitoring.sh"), workingDisk)) + Option( + PipelinesApiFileInput("monitoring-in", + gcsPath("gs://path/to/script"), + DefaultPathBuilder.get("monitoring.sh"), + workingDisk + ) + ) } it should "not create a JesFileInput for the monitoring script, when not specified" in { val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId.randomId(), - WdlNamespaceWithWorkflow.load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + 
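    // Editor's note (annotation, not part of the original patch): the two monitoring-script
    // tests here are driven purely by workflow options. Passing
    //   WorkflowOptions.fromJsonString("""{"monitoring_script": "gs://path/to/script"}""").get
    // makes the backend localize that object as monitoring.sh on the working disk via a
    // PipelinesApiFileInput named "monitoring-in"; with no such option, monitoringScript is None.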
.load(SampleWdl.EmptyString.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, NoOptions, Labels.empty, @@ -1138,14 +1538,17 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite None ) - val job: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst({case t: CommandCallNode => t}).get + val job: CommandCallNode = workflowDescriptor.callable.graph.nodes.collectFirst { case t: CommandCallNode => t }.get val key = BackendJobDescriptorKey(job, None, 1) val runtimeAttributes = makeRuntimeAttributes(job) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) testActorRef.underlyingActor.monitoringScript shouldBe None } @@ -1153,9 +1556,14 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "return JES log paths for non-scattered call" in { val workflowDescriptor = BackendWorkflowDescriptor( WorkflowId(UUID.fromString("e6236763-c518-41d0-9688-432549a8bf7c")), - WdlNamespaceWithWorkflow.load( - SampleWdl.HelloWorld.asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load(SampleWdl.HelloWorld.asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, WorkflowOptions.fromJsonString(""" {"jes_gcs_root": "gs://path/to/gcs_root"} """).get, Labels.empty, @@ -1167,11 +1575,14 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "hello").get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val jesBackend = testActorRef.underlyingActor @@ -1189,9 +1600,15 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite it should "return JES log paths for scattered call" in { val workflowDescriptor = BackendWorkflowDescriptor( 
WorkflowId(UUID.fromString("e6236763-c518-41d0-9688-432549a8bf7d")), - WdlNamespaceWithWorkflow.load( - new SampleWdl.ScatterWdl().asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, - Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), + WdlNamespaceWithWorkflow + .load( + new SampleWdl.ScatterWdl().asWorkflowSources(""" runtime {docker: "ubuntu:latest"} """).workflowSource.get, + Seq.empty[Draft2ImportResolver] + ) + .get + .workflow + .toWomWorkflowDefinition(isASubworkflow = false) + .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, WorkflowOptions.fromJsonString(""" {"jes_gcs_root": "gs://path/to/gcs_root"} """).get, Labels.empty, @@ -1203,11 +1620,14 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "B").get val key = BackendJobDescriptorKey(call, Option(2), 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val jesBackend = testActorRef.underlyingActor @@ -1223,9 +1643,8 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite } it should "return preemptible = true only in the correct cases" in { - def attempt(max: Int, attempt: Int): PipelinesApiAsyncBackendJobExecutionActor = { + def attempt(max: Int, attempt: Int): PipelinesApiAsyncBackendJobExecutionActor = buildPreemptibleTestActorRef(attempt, max).underlyingActor - } def attempt1(max: Int) = attempt(max, 1) def attempt2(max: Int) = attempt(max, 2) @@ -1261,13 +1680,15 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite .toWomWorkflowDefinition(isASubworkflow = false) .getOrElse(fail("failed to get WomDefinition from WdlWorkflow")), Map.empty, - WorkflowOptions.fromJsonString( - s"""|{ - | "google_project": "$googleProject", - | "jes_gcs_root": "$jesGcsRoot" - |} - |""".stripMargin - ).get, + WorkflowOptions + .fromJsonString( + s"""|{ + | "google_project": "$googleProject", + | "jes_gcs_root": "$jesGcsRoot" + |} + |""".stripMargin + ) + .get, Labels.empty, HogGroup("foo"), List.empty, @@ -1277,11 +1698,14 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == "goodbye").get val key = BackendJobDescriptorKey(call, None, 1) val runtimeAttributes = makeRuntimeAttributes(call) - val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) + val jobDescriptor = + BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, Map.empty, NoDocker, None, Map.empty) val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration)) val testActorRef = TestActorRef[TestablePipelinesApiJobExecutionActor]( - props, 
s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}") + props, + s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}" + ) val jesBackend = testActorRef.underlyingActor @@ -1317,8 +1741,10 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite } private def makeRuntimeAttributes(job: CommandCallNode) = { - val evaluatedAttributes = RuntimeAttributeDefinition.evaluateRuntimeAttributes(job.callable.runtimeAttributes, null, Map.empty) - RuntimeAttributeDefinition.addDefaultsToAttributes( - runtimeAttributesBuilder.definitions.toSet, NoOptions)(evaluatedAttributes.getOrElse(fail("Failed to evaluate runtime attributes"))) + val evaluatedAttributes = + RuntimeAttributeDefinition.evaluateRuntimeAttributes(job.callable.runtimeAttributes, null, Map.empty) + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributesBuilder.definitions.toSet, NoOptions)( + evaluatedAttributes.getOrElse(fail("Failed to evaluate runtime attributes")) + ) } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAttachedDiskSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAttachedDiskSpec.scala index 0d15ed46574..5b19bfe48f1 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAttachedDiskSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAttachedDiskSpec.scala @@ -1,7 +1,12 @@ package cromwell.backend.google.pipelines.common import common.assertion.CromwellTimeoutSpec -import cromwell.backend.google.pipelines.common.io.{DiskType, PipelinesApiAttachedDisk, PipelinesApiEmptyMountedDisk, PipelinesApiWorkingDisk} +import cromwell.backend.google.pipelines.common.io.{ + DiskType, + PipelinesApiAttachedDisk, + PipelinesApiEmptyMountedDisk, + PipelinesApiWorkingDisk +} import cromwell.core.path.DefaultPathBuilder import org.scalatest.TryValues import org.scalatest.flatspec.AnyFlatSpec @@ -41,7 +46,7 @@ class PipelinesApiAttachedDiskSpec extends AnyFlatSpec with CromwellTimeoutSpec ) it should "reject malformed disk mounts" in { - forAll(invalidTable) { (unparsed) => + forAll(invalidTable) { unparsed => PipelinesApiAttachedDisk.parse(unparsed) should be(a[Failure[_]]) } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala index d997bedbd1c..f2ba65c65db 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala @@ -9,7 +9,10 @@ import org.scalatest.prop.TableDrivenPropertyChecks import scala.concurrent.duration._ import scala.language.postfixOps -class PipelinesApiBackendLifecycleActorFactorySpec extends AnyFlatSpecLike with Matchers with TableDrivenPropertyChecks { +class PipelinesApiBackendLifecycleActorFactorySpec + extends AnyFlatSpecLike + with Matchers + with TableDrivenPropertyChecks { "PipelinesApiBackendLifecycleActorFactory" should "robustly build configuration 
attributes" in { @@ -33,7 +36,8 @@ class PipelinesApiBackendLifecycleActorFactorySpec extends AnyFlatSpecLike with batchRequestTimeoutConfiguration = null, referenceFileToDiskImageMappingOpt = None, dockerImageToCacheDiskImageMappingOpt = None, - checkpointingInterval = 1 second) + checkpointingInterval = 1 second + ) PipelinesApiBackendLifecycleActorFactory.robustBuildAttributes(() => attributes) shouldBe attributes } @@ -50,8 +54,11 @@ class PipelinesApiBackendLifecycleActorFactorySpec extends AnyFlatSpecLike with ) forAll(fails) { (attempts, description, function) => it should s"$description: make $attempts attribute creation attempts before giving up" in { - val e = the [RuntimeException] thrownBy { - PipelinesApiBackendLifecycleActorFactory.robustBuildAttributes(function, initialIntervalMillis = 1, maxIntervalMillis = 5) + val e = the[RuntimeException] thrownBy { + PipelinesApiBackendLifecycleActorFactory.robustBuildAttributes(function, + initialIntervalMillis = 1, + maxIntervalMillis = 5 + ) } e.getMessage should startWith(s"Failed to build PipelinesApiConfigurationAttributes on attempt $attempts of 3") } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala index 64971449373..80bc309bf35 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala @@ -24,7 +24,14 @@ class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with M inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = PipelinesApiWorkflowPaths( + workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + papiConfiguration, + pathBuilders(), + PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val callPaths = PipelinesApiJobPaths(workflowPaths, jobDescriptorKey) @@ -40,7 +47,14 @@ class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with M inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = PipelinesApiWorkflowPaths( + workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + papiConfiguration, + pathBuilders(), + PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val callPaths = PipelinesApiJobPaths(workflowPaths, jobDescriptorKey) @@ -60,7 +74,14 @@ class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with M inputFileAsJson = 
Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = PipelinesApiWorkflowPaths( + workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + papiConfiguration, + pathBuilders(), + PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) val callPaths = PipelinesApiJobPaths(workflowPaths, jobDescriptorKey) diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala index 59625afa4a0..c1c25199e4e 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala @@ -16,8 +16,11 @@ import org.scalatest.prop.TableDrivenPropertyChecks import java.net.URL import scala.concurrent.duration._ -class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers - with TableDrivenPropertyChecks { +class PipelinesApiConfigurationAttributesSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { import PipelinesApiTestConfig._ @@ -64,8 +67,12 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT val backendConfig = ConfigFactory.parseString(configString(customContent = customContent)) val pipelinesApiAttributes = PipelinesApiConfigurationAttributes(googleConfig, backendConfig, "papi") - pipelinesApiAttributes.batchRequestTimeoutConfiguration.readTimeoutMillis.get.value should be(100.hours.toMillis.toInt) - pipelinesApiAttributes.batchRequestTimeoutConfiguration.connectTimeoutMillis.get.value should be(10.seconds.toMillis.toInt) + pipelinesApiAttributes.batchRequestTimeoutConfiguration.readTimeoutMillis.get.value should be( + 100.hours.toMillis.toInt + ) + pipelinesApiAttributes.batchRequestTimeoutConfiguration.connectTimeoutMillis.get.value should be( + 10.seconds.toMillis.toInt + ) } it should "parse an empty batch-requests.timeouts section correctly" in { @@ -136,8 +143,8 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT |""".stripMargin, VirtualPrivateCloudConfiguration( Option(VirtualPrivateCloudLabels("my-network", Option("my-subnetwork"), mockAuth)), - None, - ), + None + ) ), ( "labels config without subnetwork key", @@ -148,8 +155,8 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT |""".stripMargin, VirtualPrivateCloudConfiguration( Option(VirtualPrivateCloudLabels("my-network", None, mockAuth)), - None, - ), + None + ) ), ( "literal config", @@ -160,8 +167,8 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT |""".stripMargin, VirtualPrivateCloudConfiguration( None, - Option(VirtualPrivateCloudLiterals("my-network", Option("my-subnetwork"))), - ), + 
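      // Editor's note (annotation, not part of the original patch): these tables cover the
      // two mutually exclusive VPC modes. Label-based lookup yields
      //   VirtualPrivateCloudConfiguration(Option(VirtualPrivateCloudLabels("my-network", Option("my-subnetwork"), mockAuth)), None)
      // while literal values yield
      //   VirtualPrivateCloudConfiguration(None, Option(VirtualPrivateCloudLiterals("my-network", Option("my-subnetwork"))))
      // (arguments shown positionally; parameter names are not visible in this diff). A config
      // missing `network-label-key` or `auth` fails validation with the "Missing keys" errors listed here.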
Option(VirtualPrivateCloudLiterals("my-network", Option("my-subnetwork"))) + ) ), ( "literal config without subnetwork name", @@ -171,9 +178,9 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT |""".stripMargin, VirtualPrivateCloudConfiguration( None, - Option(VirtualPrivateCloudLiterals("my-network", None)), - ), - ), + Option(VirtualPrivateCloudLiterals("my-network", None)) + ) + ) ) private val invalidVPCConfigTests = Table( @@ -184,7 +191,7 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT | network-label-key = my-network |} |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `auth`."), + List("Virtual Private Cloud configuration is invalid. Missing keys: `auth`.") ), ( "without network label-key", @@ -192,7 +199,7 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT | auth = mock |} |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`."), + List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`.") ), ( "with just a subnetwork label key", @@ -200,7 +207,7 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT | subnetwork-label-key = my-subnetwork |} |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key,auth`."), + List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key,auth`.") ), ( "with subnetwork label network key and auth", @@ -209,8 +216,8 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT | auth = mock | } |""".stripMargin, - List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`."), - ), + List("Virtual Private Cloud configuration is invalid. Missing keys: `network-label-key`.") + ) ) forAll(validVpcConfigTests) { (description, customConfig, vpcConfig) => @@ -233,13 +240,12 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT it should "not parse invalid config" in { val nakedConfig = - ConfigFactory.parseString( - """ - |{ - | genomics { - | endpoint-url = "myEndpoint" - | } - |} + ConfigFactory.parseString(""" + |{ + | genomics { + | endpoint-url = "myEndpoint" + | } + |} """.stripMargin) val exception = intercept[IllegalArgumentException with MessageAggregation] { @@ -255,27 +261,27 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT def configString(customContent: String = "", genomics: String = ""): String = s""" - |{ - | project = "myProject" - | root = "gs://myBucket" - | maximum-polling-interval = 600 - | $customContent - | genomics { - | // A reference to an auth defined in the `google` stanza at the top. This auth is used to create - | // Pipelines and manipulate auth JSONs. - | auth = "mock" - | $genomics - | endpoint-url = "http://myEndpoint" - | } - | - | filesystems = { - | gcs { - | // A reference to a potentially different auth for manipulating files via engine functions. - | auth = "mock" - | } - | } - |} - | """.stripMargin + |{ + | project = "myProject" + | root = "gs://myBucket" + | maximum-polling-interval = 600 + | $customContent + | genomics { + | // A reference to an auth defined in the `google` stanza at the top. This auth is used to create + | // Pipelines and manipulate auth JSONs. 
+ | auth = "mock" + | $genomics + | endpoint-url = "http://myEndpoint" + | } + | + | filesystems = { + | gcs { + | // A reference to a potentially different auth for manipulating files via engine functions. + | auth = "mock" + | } + | } + |} + | """.stripMargin it should "parse gsutil memory specifications" in { val valids = List("0", "150M", "14 PIBIT", "6kib") @@ -290,7 +296,8 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT val invalids = List("-1", "150MB", "14PB") invalids foreach { - case invalid@PipelinesApiConfigurationAttributes.GsutilHumanBytes(_, _) => fail(s"Memory specification $invalid not expected to be accepted") + case invalid @ PipelinesApiConfigurationAttributes.GsutilHumanBytes(_, _) => + fail(s"Memory specification $invalid not expected to be accepted") case _ => } } @@ -317,32 +324,32 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT // Highly abridged versions of hg19 and hg38 manifests just to test for correctness // of parsing. val manifestConfig = - """ - |reference-disk-localization-manifests = [ - |{ - | "imageIdentifier" : "hg19-public-2020-10-26", - | "diskSizeGb" : 10, - | "files" : [ { - | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.fasta.fai", - | "crc32c" : 159565724 - | }, { - | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.dict", - | "crc32c" : 1679459712 - | }] - |}, - |{ - | "imageIdentifier" : "hg38-public-2020-10-26", - | "diskSizeGb" : 20, - | "files" : [ { - | "path" : "gcp-public-data--broad-references/hg38/v0/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz", - | "crc32c" : 930173616 - | }, { - | "path" : "gcp-public-data--broad-references/hg38/v0/exome_evaluation_regions.v1.interval_list", - | "crc32c" : 289077232 - | }] - |} - |] - |""".stripMargin + """ + |reference-disk-localization-manifests = [ + |{ + | "imageIdentifier" : "hg19-public-2020-10-26", + | "diskSizeGb" : 10, + | "files" : [ { + | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.fasta.fai", + | "crc32c" : 159565724 + | }, { + | "path" : "gcp-public-data--broad-references/hg19/v0/Homo_sapiens_assembly19.dict", + | "crc32c" : 1679459712 + | }] + |}, + |{ + | "imageIdentifier" : "hg38-public-2020-10-26", + | "diskSizeGb" : 20, + | "files" : [ { + | "path" : "gcp-public-data--broad-references/hg38/v0/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz", + | "crc32c" : 930173616 + | }, { + | "path" : "gcp-public-data--broad-references/hg38/v0/exome_evaluation_regions.v1.interval_list", + | "crc32c" : 289077232 + | }] + |} + |] + |""".stripMargin val backendConfig = ConfigFactory.parseString(configString(manifestConfig)) val validation = PipelinesApiConfigurationAttributes.validateReferenceDiskManifestConfigs(backendConfig, "papi") val manifests: List[ManifestFile] = validation.toEither.toOption.get.get @@ -406,7 +413,7 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT | "imageIdentifier" : "hg19-public-2020-10-26", | "diskSizeGb" : 10, | # missing files - |}]""", + |}]""" ) badValues foreach { badValue => @@ -417,18 +424,20 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT } } - it should "parse correct existing docker-image-cache-manifest-file config" in { val dockerImageCacheManifest1Path = "gs://bucket/manifest1.json" val dockerImageCacheManifestConfigStr = s"""docker-image-cache-manifest-file = "$dockerImageCacheManifest1Path"""" val backendConfig = 
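    // Editor's sketch (annotation, not part of the original patch): each manifest above has
    // the shape consumed by validateReferenceDiskManifestConfigs and parsed into ManifestFile
    // values. Modeled as plain case classes (names below are hypothetical, chosen to mirror
    // the JSON keys), the structure is roughly:
    //   final case class ManifestEntry(path: String, crc32c: Long)
    //   final case class ReferenceManifest(imageIdentifier: String, diskSizeGb: Int, files: List[ManifestEntry])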
ConfigFactory.parseString(configString(dockerImageCacheManifestConfigStr)) - val validatedGcsPathToDockerImageCacheManifestFileErrorOr = PipelinesApiConfigurationAttributes.validateGcsPathToDockerImageCacheManifestFile(backendConfig) + val validatedGcsPathToDockerImageCacheManifestFileErrorOr = + PipelinesApiConfigurationAttributes.validateGcsPathToDockerImageCacheManifestFile(backendConfig) validatedGcsPathToDockerImageCacheManifestFileErrorOr match { case Valid(validatedGcsPathToDockerImageCacheManifestFileOpt) => validatedGcsPathToDockerImageCacheManifestFileOpt match { case Some(validatedGcsPathToDockerCacheManifestFile) => - validatedGcsPathToDockerCacheManifestFile shouldBe GcsPathBuilder.validateGcsPath(dockerImageCacheManifest1Path) + validatedGcsPathToDockerCacheManifestFile shouldBe GcsPathBuilder.validateGcsPath( + dockerImageCacheManifest1Path + ) case None => fail("GCS paths to docker image cache manifest files, parsed from config, should not be empty") } @@ -440,7 +449,8 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT it should "parse correct missing docker-image-cache-manifest-file config" in { val backendConfig = ConfigFactory.parseString(configString()) - val validatedGcsPathsToDockerImageCacheManifestFilesErrorOr = PipelinesApiConfigurationAttributes.validateReferenceDiskManifestConfigs(backendConfig, "unit-test-backend") + val validatedGcsPathsToDockerImageCacheManifestFilesErrorOr = + PipelinesApiConfigurationAttributes.validateReferenceDiskManifestConfigs(backendConfig, "unit-test-backend") validatedGcsPathsToDockerImageCacheManifestFilesErrorOr match { case Valid(validatedGcsPathsToDockerImageCacheManifestFilesOpt) => validatedGcsPathsToDockerImageCacheManifestFilesOpt shouldBe None diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationSpec.scala index 04dfdf72740..ee592d3ce1c 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationSpec.scala @@ -11,7 +11,12 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -class PipelinesApiConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks with BeforeAndAfterAll { +class PipelinesApiConfigurationSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks + with BeforeAndAfterAll { behavior of "PipelinesApiConfigurationSpec" @@ -22,26 +27,25 @@ class PipelinesApiConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec () } - val globalConfig = ConfigFactory.parseString( - s""" - |google { - | - | application-name = "cromwell" - | - | auths = [ - | { - | name = "application-default" - | scheme = "application_default" - | }, - | { - | name = "service-account" - | scheme = "service_account" - | service-account-id = "my-service-account" - | pem-file = "${mockFile.pathAsString}" - | } - | ] - |} - | + val globalConfig = ConfigFactory.parseString(s""" + |google { + | + | application-name = "cromwell" + | + | auths = [ + | { + | name = "application-default" + | scheme = 
"application_default" + | }, + | { + | name = "service-account" + | scheme = "service_account" + | service-account-id = "my-service-account" + | pem-file = "${mockFile.pathAsString}" + | } + | ] + |} + | """.stripMargin) val backendConfig = ConfigFactory.parseString( @@ -88,7 +92,8 @@ class PipelinesApiConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec | } | } | - """.stripMargin) + """.stripMargin + ) it should "fail to instantiate if any required configuration is missing" in { @@ -108,17 +113,29 @@ class PipelinesApiConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec an[Exception] shouldBe thrownBy { val failingGoogleConf = GoogleConfiguration(global) val failingAttributes = PipelinesApiConfigurationAttributes(failingGoogleConf, backend, "papi") - new PipelinesApiConfiguration(BackendConfigurationDescriptor(backend, global), genomicsFactory, failingGoogleConf, failingAttributes) + new PipelinesApiConfiguration(BackendConfigurationDescriptor(backend, global), + genomicsFactory, + failingGoogleConf, + failingAttributes + ) } } } it should "have correct root" in { - new PipelinesApiConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig), genomicsFactory, googleConfiguration, papiAttributes).root shouldBe "gs://my-cromwell-workflows-bucket" + new PipelinesApiConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig), + genomicsFactory, + googleConfiguration, + papiAttributes + ).root shouldBe "gs://my-cromwell-workflows-bucket" } it should "have correct docker" in { - val dockerConf = new PipelinesApiConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig), genomicsFactory, googleConfiguration, papiAttributes).dockerCredentials + val dockerConf = new PipelinesApiConfiguration(BackendConfigurationDescriptor(backendConfig, globalConfig), + genomicsFactory, + googleConfiguration, + papiAttributes + ).dockerCredentials dockerConf shouldBe defined dockerConf.get.token shouldBe "dockerToken" } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala index 8fd8d7dd581..dbd1d747390 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala @@ -14,7 +14,7 @@ import common.mock.MockSugar import scala.io.Source class PipelinesApiDockerCacheMappingOperationsSpec - extends AnyFlatSpecLike + extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers with MockSugar @@ -25,19 +25,18 @@ class PipelinesApiDockerCacheMappingOperationsSpec it should "successfully parse docker image cache manifest JSON file as instance of Map[String, String]" in { val expectedManifest = DockerImageCacheManifest( manifestFormatVersion = 2, - dockerImageCacheMap = - Map( - "project1/dockerImage1" -> - DockerImageCacheEntry( - "sha256:c73e11b1a7854f31ff12c607738bef7b0560a880dc9d0445dde084acc0e9da09", - "projects/some-google-project/global/images/dockerCacheDiskForDockerImage1" - ), - "project2/dockerImage2" -> - DockerImageCacheEntry( - "sha256:7eb386481d87e41ebddceb948f25379bb339784df5247a3dfbdea2ac101b10c0", - 
"projects/another-google-project/global/images/dockerCacheDiskForDockerImage2" - ) - ) + dockerImageCacheMap = Map( + "project1/dockerImage1" -> + DockerImageCacheEntry( + "sha256:c73e11b1a7854f31ff12c607738bef7b0560a880dc9d0445dde084acc0e9da09", + "projects/some-google-project/global/images/dockerCacheDiskForDockerImage1" + ), + "project2/dockerImage2" -> + DockerImageCacheEntry( + "sha256:7eb386481d87e41ebddceb948f25379bb339784df5247a3dfbdea2ac101b10c0", + "projects/another-google-project/global/images/dockerCacheDiskForDockerImage2" + ) + ) ) val testJsonFileName = "docker-image-cache-manifest.json" @@ -46,9 +45,12 @@ class PipelinesApiDockerCacheMappingOperationsSpec val mockJsonBlob = { val mockBlob = mock[Blob] - val testJsonAsByteArray = Source.fromInputStream( - Thread.currentThread.getContextClassLoader.getResourceAsStream(testJsonFileName) - ).mkString.getBytes + val testJsonAsByteArray = Source + .fromInputStream( + Thread.currentThread.getContextClassLoader.getResourceAsStream(testJsonFileName) + ) + .mkString + .getBytes when(mockBlob.getContent()).thenReturn(testJsonAsByteArray) mockBlob @@ -60,8 +62,12 @@ class PipelinesApiDockerCacheMappingOperationsSpec mockClient } - val readFileFromGcsPrivateMethod = PrivateMethod[IO[DockerImageCacheManifest]](Symbol("readDockerImageCacheManifestFileFromGCS")) - val parsedJsonAsManifestIO = pipelinesApiDockerCacheMappingOperationsMock invokePrivate readFileFromGcsPrivateMethod(mockGcsClient, testJsonGcsPath) + val readFileFromGcsPrivateMethod = + PrivateMethod[IO[DockerImageCacheManifest]](Symbol("readDockerImageCacheManifestFileFromGCS")) + val parsedJsonAsManifestIO = + pipelinesApiDockerCacheMappingOperationsMock invokePrivate readFileFromGcsPrivateMethod(mockGcsClient, + testJsonGcsPath + ) val parsedJsonAsManifest = parsedJsonAsManifestIO.unsafeRunSync() parsedJsonAsManifest.equals(expectedManifest) shouldBe true @@ -72,7 +78,8 @@ class PipelinesApiDockerCacheMappingOperationsSpec val testDockerImageName = "test_madeup_docker_image" val testDockerImageDigest = "fake_docker_image_digest" val testDiskImageName = "fake_disk_image_name" - val dockerImageToCacheDiskImageMapping = Map(testDockerImageName -> DockerImageCacheEntry(testDockerImageDigest, testDiskImageName)) + val dockerImageToCacheDiskImageMapping = + Map(testDockerImageName -> DockerImageCacheEntry(testDockerImageDigest, testDiskImageName)) val dockerImageCacheDiskOpt = pipelinesApiDockerCacheMappingOperationsMock.getDockerCacheDiskImageForAJob( dockerImageToCacheDiskImageMappingOpt = Option(dockerImageToCacheDiskImageMapping), @@ -89,7 +96,8 @@ class PipelinesApiDockerCacheMappingOperationsSpec val testDockerImageName = "test_madeup_docker_image" val testDockerImageDigest = "fake_docker_image_digest" val expectedDiskImageName = "fake_disk_image_name" - val dockerImageToCacheDiskImageMapping = Map(testDockerImageName -> DockerImageCacheEntry(testDockerImageDigest, expectedDiskImageName)) + val dockerImageToCacheDiskImageMapping = + Map(testDockerImageName -> DockerImageCacheEntry(testDockerImageDigest, expectedDiskImageName)) val dockerImageCacheDiskOpt = pipelinesApiDockerCacheMappingOperationsMock.getDockerCacheDiskImageForAJob( dockerImageToCacheDiskImageMappingOpt = Option(dockerImageToCacheDiskImageMapping), @@ -107,7 +115,8 @@ class PipelinesApiDockerCacheMappingOperationsSpec val testDockerImageNameSpecifiedByUser = "test_non_cached_docker_image" val testDockerImageDigest = "fake_docker_image_digest" val testDiskImageName = "fake_disk_image_name" - val 
dockerImageToCacheDiskImageMapping = Map(testCachedDockerImageName -> DockerImageCacheEntry(testDockerImageDigest, testDiskImageName)) + val dockerImageToCacheDiskImageMapping = + Map(testCachedDockerImageName -> DockerImageCacheEntry(testDockerImageDigest, testDiskImageName)) val dockerImageCacheDiskOpt = pipelinesApiDockerCacheMappingOperationsMock.getDockerCacheDiskImageForAJob( dockerImageToCacheDiskImageMappingOpt = Option(dockerImageToCacheDiskImageMapping), diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiGpuAttributesSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiGpuAttributesSpec.scala index 23f4659b49c..52507e3d256 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiGpuAttributesSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiGpuAttributesSpec.scala @@ -6,28 +6,22 @@ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import wom.values.{WomFloat, WomInteger, WomSingleFile, WomString, WomValue} -class PipelinesApiGpuAttributesSpec - extends AnyWordSpecLike - with Matchers - with PipelinesApiRuntimeAttributesSpecsMixin { +class PipelinesApiGpuAttributesSpec extends AnyWordSpecLike with Matchers with PipelinesApiRuntimeAttributesSpecsMixin { val validGpuTypes = List( (Option(WomString("nvidia-tesla-k80")), Option(GpuType.NVIDIATeslaK80)), - (Option(WomString("nvidia-tesla-p100")), Option( GpuType.NVIDIATeslaP100)), - (Option(WomString("custom-gpu-24601")), Option( GpuType("custom-gpu-24601"))), - (None, None)) - val invalidGpuTypes = List( - WomSingleFile("nvidia-tesla-k80"), - WomInteger(100)) + (Option(WomString("nvidia-tesla-p100")), Option(GpuType.NVIDIATeslaP100)), + (Option(WomString("custom-gpu-24601")), Option(GpuType("custom-gpu-24601"))), + (None, None) + ) + val invalidGpuTypes = List(WomSingleFile("nvidia-tesla-k80"), WomInteger(100)) val validGpuCounts = List( (Option(WomInteger(1)), Option(1)), (Option(WomInteger(100)), Option(100)), (None, None) ) - val invalidGpuCounts = List( - WomString("ten"), - WomFloat(1.0)) + val invalidGpuCounts = List(WomString("ten"), WomFloat(1.0)) validGpuTypes foreach { case (validGpuType, expectedGpuTypeValue) => validGpuCounts foreach { case (validGpuCount, expectedGpuCountValue) => @@ -36,7 +30,8 @@ class PipelinesApiGpuAttributesSpec "docker" -> WomString("ubuntu:latest") ) ++ validGpuType.map(t => "gpuType" -> t) ++ validGpuCount.map(c => "gpuCount" -> c) - val actualRuntimeAttributes = toPapiRuntimeAttributes(runtimeAttributes, emptyWorkflowOptions, papiConfiguration) + val actualRuntimeAttributes = + toPapiRuntimeAttributes(runtimeAttributes, emptyWorkflowOptions, papiConfiguration) expectedGpuTypeValue match { case Some(v) => actualRuntimeAttributes.gpuResource.exists(_.gpuType == v) @@ -57,9 +52,9 @@ class PipelinesApiGpuAttributesSpec "docker" -> WomString("ubuntu:latest") ) ++ validGpuType.map(t => "gpuType" -> t) + ("gpuCount" -> invalidGpuCount) - assertPapiRuntimeAttributesFailedCreation( - runtimeAttributes, - s"Invalid gpu count. Expected positive Int but got") + assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, + s"Invalid gpu count. 
Expected positive Int but got" + ) } } } @@ -71,9 +66,9 @@ class PipelinesApiGpuAttributesSpec "docker" -> WomString("ubuntu:latest") ) + ("gpuType" -> invalidGpuType) + ("gpuCount" -> invalidGpuCount) - assertPapiRuntimeAttributesFailedCreation( - runtimeAttributes, - s"Invalid gpu count. Expected positive Int but got") + assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, + s"Invalid gpu count. Expected positive Int but got" + ) } } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala index 918a8a436ef..c51ab46e9f4 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala @@ -8,7 +8,12 @@ import com.typesafe.config.{Config, ConfigFactory} import cromwell.backend.BackendWorkflowInitializationActor.{InitializationFailed, InitializationSuccess, Initialize} import cromwell.backend.async.RuntimeAttributeValidationFailures import cromwell.backend.google.pipelines.common.PipelinesApiInitializationActorSpec._ -import cromwell.backend.google.pipelines.common.PipelinesApiTestConfig.{PapiGlobalConfig, genomicsFactory, googleConfiguration, papiAttributes} +import cromwell.backend.google.pipelines.common.PipelinesApiTestConfig.{ + genomicsFactory, + googleConfiguration, + papiAttributes, + PapiGlobalConfig +} import cromwell.backend.{BackendConfigurationDescriptor, BackendSpec, BackendWorkflowDescriptor} import cromwell.core.Dispatcher.BackendDispatcher import cromwell.core.TestKitSuite @@ -20,41 +25,54 @@ import wom.graph.CommandCallNode import scala.concurrent.duration._ -class PipelinesApiInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers - with ImplicitSender { +class PipelinesApiInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender { val Timeout: FiniteDuration = 30.second.dilated import BackendSpec._ val HelloWorld: String = s""" - |task hello { - | String addressee = "you" - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | - | RUNTIME - |} - | - |workflow wf_hello { - | call hello - |} + |task hello { + | String addressee = "you" + | command { + | echo "Hello $${addressee}!" 
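// ---------------------------------------------------------------------------
// Illustrative sketch (not from the patch): the initialization-actor spec in
// this hunk asserts on log output via akka-testkit's EventFilter. A minimal
// self-contained example, assuming akka-testkit and scalatest; note that the
// TestEventListener logger must be configured or the filter never sees events.
// ---------------------------------------------------------------------------
import akka.actor.{Actor, ActorSystem, Props}
import akka.event.Logging
import akka.testkit.{EventFilter, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpecLike

// A trivial actor that logs a warning whenever it receives a message.
class WarnerSketch extends Actor {
  private val log = Logging(context.system, this)
  override def receive: Receive = { case _ => log.warning("unsupported runtime attribute") }
}

class WarnerSketchSpec
    extends TestKit(
      ActorSystem("warner-sketch",
                  ConfigFactory.parseString("""akka.loggers = ["akka.testkit.TestEventListener"]""")
      )
    )
    with AnyFlatSpecLike
    with BeforeAndAfterAll {

  override def afterAll(): Unit = TestKit.shutdownActorSystem(system)

  it should "log a warning when poked" in {
    val warner = system.actorOf(Props[WarnerSketch](), "warner")
    // intercept fails the test unless exactly one matching warning is logged
    // while the block runs.
    EventFilter.warning(pattern = "unsupported runtime attribute", occurrences = 1) intercept {
      warner ! "poke"
    }
  }
}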
+ | } + | output { + | String salutation = read_string(stdout()) + | } + | + | RUNTIME + |} + | + |workflow wf_hello { + | call hello + |} """.stripMargin private def getJesBackendProps(workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], - jesConfiguration: PipelinesApiConfiguration): Props = { + jesConfiguration: PipelinesApiConfiguration + ): Props = { val ioActor = mockIoActor - val params = PipelinesApiInitializationActorParams(workflowDescriptor, ioActor, calls, jesConfiguration, emptyActor, restarting = false) + val params = PipelinesApiInitializationActorParams(workflowDescriptor, + ioActor, + calls, + jesConfiguration, + emptyActor, + restarting = false + ) Props(new PipelinesApiInitializationActor(params)).withDispatcher(BackendDispatcher) } - private def getJesBackend(workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], conf: BackendConfigurationDescriptor) = { - val props = getJesBackendProps(workflowDescriptor, calls, new PipelinesApiConfiguration(conf, genomicsFactory, googleConfiguration, papiAttributes)) + private def getJesBackend(workflowDescriptor: BackendWorkflowDescriptor, + calls: Set[CommandCallNode], + conf: BackendConfigurationDescriptor + ) = { + val props = getJesBackendProps( + workflowDescriptor, + calls, + new PipelinesApiConfiguration(conf, genomicsFactory, googleConfiguration, papiAttributes) + ) system.actorOf(props, "TestableJesInitializationActor-" + UUID.randomUUID) } @@ -63,17 +81,16 @@ class PipelinesApiInitializationActorSpec extends TestKitSuite with AnyFlatSpecL it should "log a warning message when there are unsupported runtime attributes" in { within(Timeout) { - val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, - runtime = """runtime { docker: "ubuntu/latest" test: true }""") - val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, - defaultBackendConfig) + val workflowDescriptor = + buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { docker: "ubuntu/latest" test: true }""") + val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, defaultBackendConfig) val eventPattern = "Key/s [test] is/are not supported by backend. Unsupported attributes will not be part of job executions." EventFilter.warning(pattern = escapePattern(eventPattern), occurrences = 1) intercept { backend ! Initialize } expectMsgPF() { - case InitializationSuccess(_) => //Docker entry is present. + case InitializationSuccess(_) => // Docker entry is present. case InitializationFailed(failure) => fail(s"InitializationSuccess was expected but got $failure") } } @@ -82,36 +99,39 @@ class PipelinesApiInitializationActorSpec extends TestKitSuite with AnyFlatSpecL it should "return InitializationFailed when docker runtime attribute key is not present" in { within(Timeout) { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { }""") - val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, - defaultBackendConfig) + val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, defaultBackendConfig) backend ! Initialize - expectMsgPF() { - case InitializationFailed(failure) => - failure match { - case exception: RuntimeAttributeValidationFailures => - if (!exception.getMessage.equals("Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! 
NOT FOUND !!")) - fail("Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'.") - } + expectMsgPF() { case InitializationFailed(failure) => + failure match { + case exception: RuntimeAttributeValidationFailures => + if ( + !exception.getMessage.equals( + "Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!" + ) + ) + fail( + "Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'." + ) + } } } } } object PipelinesApiInitializationActorSpec { - val globalConfig: Config = ConfigFactory.parseString( - """ - |google { - | - | application-name = "cromwell" - | - | auths = [ - | { - | name = "application-default" - | scheme = "mock" - | } - | ] - |} - |""".stripMargin) + val globalConfig: Config = ConfigFactory.parseString(""" + |google { + | + | application-name = "cromwell" + | + | auths = [ + | { + | name = "application-default" + | scheme = "mock" + | } + | ] + |} + |""".stripMargin) val backendConfigTemplate: String = """ @@ -158,25 +178,38 @@ object PipelinesApiInitializationActorSpec { |[DOCKERHUBCONFIG] |""".stripMargin - val backendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[VPCCONFIG]", "").replace("[DOCKERHUBCONFIG]", "")) - - val dockerBackendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[VPCCONFIG]", "").replace("[DOCKERHUBCONFIG]", - """ - |dockerhub { - | account = "my@docker.account" - | # no secrets here guys this is just `echo -n username:password | base64` - | token = "dXNlcm5hbWU6cGFzc3dvcmQ=" - |} - | """.stripMargin)) - - val vpcBackendConfig: Config = ConfigFactory.parseString(backendConfigTemplate.replace("[DOCKERHUBCONFIG]", "").replace("[VPCCONFIG]", - """ - |virtual-private-cloud { - | network-label-key = "cromwell-ci-network" - | subnetwork-label-key = "cromwell-ci-subnetwork" - | auth = "service_account" - |} - | """.stripMargin)) + val backendConfig: Config = + ConfigFactory.parseString(backendConfigTemplate.replace("[VPCCONFIG]", "").replace("[DOCKERHUBCONFIG]", "")) + + val dockerBackendConfig: Config = ConfigFactory.parseString( + backendConfigTemplate + .replace("[VPCCONFIG]", "") + .replace( + "[DOCKERHUBCONFIG]", + """ + |dockerhub { + | account = "my@docker.account" + | # no secrets here guys this is just `echo -n username:password | base64` + | token = "dXNlcm5hbWU6cGFzc3dvcmQ=" + |} + | """.stripMargin + ) + ) + + val vpcBackendConfig: Config = ConfigFactory.parseString( + backendConfigTemplate + .replace("[DOCKERHUBCONFIG]", "") + .replace( + "[VPCCONFIG]", + """ + |virtual-private-cloud { + | network-label-key = "cromwell-ci-network" + | subnetwork-label-key = "cromwell-ci-subnetwork" + | auth = "service_account" + |} + | """.stripMargin + ) + ) private val defaultBackendConfig = new BackendConfigurationDescriptor(backendConfig, globalConfig) { override private[backend] lazy val cromwellFileSystems = new CromwellFileSystems(PapiGlobalConfig) diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala index 37b2ba6f02c..50b9a4d0862 100644 --- 
a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala @@ -4,7 +4,11 @@ import akka.actor.{Actor, ActorRef, Props} import akka.testkit._ import cromwell.backend.BackendJobExecutionActor.{ExecuteJobCommand, JobFailedNonRetryableResponse} import cromwell.backend.google.pipelines.common.ControllableFailingPabjea.JabjeaExplode -import cromwell.backend.standard.{DefaultStandardSyncExecutionActorParams, StandardSyncExecutionActor, StandardSyncExecutionActorParams} +import cromwell.backend.standard.{ + DefaultStandardSyncExecutionActorParams, + StandardSyncExecutionActor, + StandardSyncExecutionActorParams +} import cromwell.backend.{BackendJobDescriptor, MinimumRuntimeSettings} import cromwell.core.TestKitSuite import org.scalatest.flatspec.AnyFlatSpecLike @@ -36,12 +40,21 @@ class PipelinesApiJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLik val parent = TestProbe("parent") val deathwatch = TestProbe("deathwatch") - val params = DefaultStandardSyncExecutionActorParams(PipelinesApiAsyncBackendJobExecutionActor.JesOperationIdKey, serviceRegistryActor, ioActor, - jobDescriptor, null, Option(initializationData), jesBackendSingletonActor, - classOf[PipelinesApiAsyncBackendJobExecutionActor], MinimumRuntimeSettings()) + val params = DefaultStandardSyncExecutionActorParams( + PipelinesApiAsyncBackendJobExecutionActor.JesOperationIdKey, + serviceRegistryActor, + ioActor, + jobDescriptor, + null, + Option(initializationData), + jesBackendSingletonActor, + classOf[PipelinesApiAsyncBackendJobExecutionActor], + MinimumRuntimeSettings() + ) val testJJEA = TestActorRef[TestPipelinesApiJobExecutionActor]( props = Props(new TestPipelinesApiJobExecutionActor(params, Props(new ConstructorFailingJABJEA))), - supervisor = parent.ref) + supervisor = parent.ref + ) deathwatch watch testJJEA // Nothing happens: @@ -50,9 +63,10 @@ class PipelinesApiJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLik testJJEA.tell(msg = ExecuteJobCommand, sender = parent.ref) - parent.expectMsgPF(max = TimeoutDuration) { - case JobFailedNonRetryableResponse(_, throwable, _) => - throwable.getMessage should be("PipelinesApiAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed.") + parent.expectMsgPF(max = TimeoutDuration) { case JobFailedNonRetryableResponse(_, throwable, _) => + throwable.getMessage should be( + "PipelinesApiAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed." 
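// ---------------------------------------------------------------------------
// Illustrative sketch (not from the patch): the failure tests in this hunk
// drive an actor and then pattern-match its reply with TestProbe.expectMsgPF.
// A minimal example, assuming akka-testkit and scalatest; `FailedSketch` and
// `FailingJobSketch` are hypothetical stand-ins for the real job-execution
// messages.
// ---------------------------------------------------------------------------
import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.{TestKit, TestProbe}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpecLike
import scala.concurrent.duration._

final case class FailedSketch(reason: String)

// Replies with a failure message, mimicking the round-trip that the spec
// above asserts with JobFailedNonRetryableResponse.
class FailingJobSketch extends Actor {
  override def receive: Receive = { case "execute" => sender() ! FailedSketch("job exploded") }
}

class FailingJobSketchSpec
    extends TestKit(ActorSystem("failing-job-sketch"))
    with AnyFlatSpecLike
    with BeforeAndAfterAll {

  override def afterAll(): Unit = TestKit.shutdownActorSystem(system)

  it should "report failure to the probe" in {
    val parent = TestProbe("parent")
    val job = system.actorOf(Props[FailingJobSketch](), "job")

    // tell with an explicit sender so the reply lands on the probe; then
    // match on the reply's shape rather than comparing it wholesale.
    job.tell("execute", parent.ref)
    parent.expectMsgPF(max = 3.seconds) { case FailedSketch(reason) =>
      assert(reason.contains("exploded"))
    }
  }
}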
+ ) } } @@ -69,13 +83,23 @@ class PipelinesApiJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLik val parent = TestProbe("parent") val deathwatch = TestProbe("deathwatch") val jabjeaConstructionPromise = Promise[ActorRef]() - val params = DefaultStandardSyncExecutionActorParams(PipelinesApiAsyncBackendJobExecutionActor.JesOperationIdKey, serviceRegistryActor, ioActor, - jobDescriptor, null, Option(initializationData), jesBackendSingletonActor, + val params = DefaultStandardSyncExecutionActorParams( + PipelinesApiAsyncBackendJobExecutionActor.JesOperationIdKey, + serviceRegistryActor, + ioActor, + jobDescriptor, + null, + Option(initializationData), + jesBackendSingletonActor, classOf[PipelinesApiAsyncBackendJobExecutionActor], - MinimumRuntimeSettings()) + MinimumRuntimeSettings() + ) val testJJEA = TestActorRef[TestPipelinesApiJobExecutionActor]( - props = Props(new TestPipelinesApiJobExecutionActor(params, Props(new ControllableFailingPabjea(jabjeaConstructionPromise)))), - supervisor = parent.ref) + props = Props( + new TestPipelinesApiJobExecutionActor(params, Props(new ControllableFailingPabjea(jabjeaConstructionPromise))) + ), + supervisor = parent.ref + ) deathwatch watch testJJEA // Nothing happens: @@ -96,15 +120,16 @@ class PipelinesApiJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLik throw exception } - parent.expectMsgPF(max = TimeoutDuration) { - case JobFailedNonRetryableResponse(_, throwable, _) => - throwable.getMessage should be("PipelinesApiAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed.") + parent.expectMsgPF(max = TimeoutDuration) { case JobFailedNonRetryableResponse(_, throwable, _) => + throwable.getMessage should be( + "PipelinesApiAsyncBackendJobExecutionActor failed and didn't catch its exception. This condition has been handled and the job will be marked as failed." + ) } } } -class TestPipelinesApiJobExecutionActor(params: StandardSyncExecutionActorParams, - fakeJabjeaProps: Props) extends StandardSyncExecutionActor(params) { +class TestPipelinesApiJobExecutionActor(params: StandardSyncExecutionActorParams, fakeJabjeaProps: Props) + extends StandardSyncExecutionActor(params) { override def createAsyncProps(): Props = fakeJabjeaProps } @@ -116,12 +141,12 @@ class ConstructorFailingJABJEA extends ControllableFailingPabjea(Promise[ActorRe class ControllableFailingPabjea(constructionPromise: Promise[ActorRef]) extends Actor { def explode(): Unit = { val boom = 1 == 1 - if (boom) throw new RuntimeException("Test Exception! Don't panic if this appears during a test run!") - with NoStackTrace + if (boom) + throw new RuntimeException("Test Exception! 
Don't panic if this appears during a test run!") with NoStackTrace } constructionPromise.trySuccess(self) - override def receive: Receive = { - case JabjeaExplode => explode() + override def receive: Receive = { case JabjeaExplode => + explode() } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperationsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperationsSpec.scala index e9ce34f964a..7ae6944ed2f 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperationsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperationsSpec.scala @@ -22,41 +22,61 @@ class PipelinesApiReferenceFilesMappingOperationsSpec extends AnyFlatSpecLike wi private val papiReferenceFilesMappingOperationsMockObject: PipelinesApiReferenceFilesMappingOperations = new PipelinesApiReferenceFilesMappingOperations { - override def bulkValidateCrc32cs(gcsClient: Storage, filesWithValidPaths: Map[ReferenceFile, ValidFullGcsPath]): IO[Map[ReferenceFile, Boolean]] = + override def bulkValidateCrc32cs(gcsClient: Storage, + filesWithValidPaths: Map[ReferenceFile, ValidFullGcsPath] + ): IO[Map[ReferenceFile, Boolean]] = IO.pure(filesWithValidPaths.keySet.map(file => (file, file.path != refFile4Disk2MismatchingChecksum)).toMap) } private val refFileMappingsMock = papiReferenceFilesMappingOperationsMockObject.generateReferenceFilesMapping( MockAuthMode("default"), List( - ManifestFile(imageIdentifier = disk1.image, diskSizeGb = disk1.sizeGb, files = List( - ReferenceFile(path = refFile1Disk1, crc32c = 5), - ReferenceFile(path = refFile2Disk1, crc32c = 6) - )), - ManifestFile(imageIdentifier = disk2.image, diskSizeGb = disk2.sizeGb, files = List( - ReferenceFile(path = refFile3Disk2, crc32c = 7) - )) + ManifestFile(imageIdentifier = disk1.image, + diskSizeGb = disk1.sizeGb, + files = List( + ReferenceFile(path = refFile1Disk1, crc32c = 5), + ReferenceFile(path = refFile2Disk1, crc32c = 6) + ) + ), + ManifestFile(imageIdentifier = disk2.image, + diskSizeGb = disk2.sizeGb, + files = List( + ReferenceFile(path = refFile3Disk2, crc32c = 7) + ) + ) ) ) it should "correctly figure out which disks have to be mounted based on provided input file paths" in { val nonReferenceInputFilePaths = Set("gs://not/a/reference/file") - val forNonReferenceFile = papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, nonReferenceInputFilePaths) + val forNonReferenceFile = + papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, + nonReferenceInputFilePaths + ) forNonReferenceFile.isEmpty shouldBe true val referenceInputFilePathsFrom2Disks = Set(s"gs://$refFile1Disk1", s"gs://$refFile3Disk2") - val forReferencesFrom2Disks = papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, referenceInputFilePathsFrom2Disks) + val forReferencesFrom2Disks = + papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, + referenceInputFilePathsFrom2Disks + ) forReferencesFrom2Disks should contain theSameElementsAs List(disk1, disk2) val referenceInputFilePathsFromSingleDisk = Set(s"gs://$refFile1Disk1", s"gs://$refFile2Disk1") - val 
forReferencesFromSingleDisk = papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, referenceInputFilePathsFromSingleDisk) + val forReferencesFromSingleDisk = + papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, + referenceInputFilePathsFromSingleDisk + ) forReferencesFromSingleDisk.size shouldBe 1 forReferencesFromSingleDisk.head shouldBe disk1 } it should "not consider valid a reference file with mismatching checksum" in { val mismatchingChecksumReferenceFile = Set(refFile4Disk2MismatchingChecksum) - val forMismatchingChecksumReferenceFile = papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, mismatchingChecksumReferenceFile) + val forMismatchingChecksumReferenceFile = + papiReferenceFilesMappingOperationsMockObject.getReferenceDisksToMount(refFileMappingsMock, + mismatchingChecksumReferenceFile + ) forMismatchingChecksumReferenceFile.isEmpty shouldBe true } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala index aa9812e4b3c..a7a880cb28c 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala @@ -20,7 +20,7 @@ import wom.values._ import scala.util.{Failure, Success, Try} final class PipelinesApiRuntimeAttributesSpec - extends AnyWordSpecLike + extends AnyWordSpecLike with Matchers with PipelinesApiRuntimeAttributesSpecsMixin { @@ -34,7 +34,10 @@ final class PipelinesApiRuntimeAttributesSpec "use hardcoded defaults if not declared in task, workflow options, or config (except for docker)" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest")) val expectedRuntimeAttributes = expectedDefaults - assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, papiConfiguration = noDefaultsPapiConfiguration) + assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, + expectedRuntimeAttributes, + papiConfiguration = noDefaultsPapiConfiguration + ) } "validate a valid Docker entry" in { @@ -56,7 +59,10 @@ final class PipelinesApiRuntimeAttributesSpec "fail to validate an invalid failOnStderr entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "failOnStderr" -> WomString("yes")) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'") + assertPapiRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'" + ) } "validate a valid continueOnReturnCode integer entry" in { @@ -72,20 +78,29 @@ final class PipelinesApiRuntimeAttributesSpec } "validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), + "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), 
WomInteger(2))) + ) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "coerce then validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2")))) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), + "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2"))) + ) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid continueOnReturnCode entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomString("value")) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") + assertPapiRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" + ) } "validate a valid cpu entry" in { @@ -113,18 +128,30 @@ final class PipelinesApiRuntimeAttributesSpec "fail to validate an invalid zones entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomInteger(1)) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") + assertPapiRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]" + ) } "validate a valid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomStringType), List(WomString("us-central1-y"), WomString("us-central1-z")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "zones" -> WomArray(WomArrayType(WomStringType), + List(WomString("us-central1-y"), WomString("us-central1-z")) + ) + ) val expectedRuntimeAttributes = expectedDefaults.copy(zones = Vector("us-central1-y", "us-central1-z")) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))) + ) + assertPapiRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]" + ) } "validate a valid preemptible entry" in { @@ -136,7 +163,8 @@ final class PipelinesApiRuntimeAttributesSpec "fail to validate an invalid preemptible entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "preemptible" 
-> WomString("value")) assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, - "Expecting preemptible runtime attribute to be an Integer") + "Expecting preemptible runtime attribute to be an Integer" + ) } "validate a valid bootDiskSizeGb entry" in { @@ -147,29 +175,50 @@ final class PipelinesApiRuntimeAttributesSpec "fail to validate an invalid bootDiskSizeGb entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "bootDiskSizeGb" -> WomString("4GB")) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting bootDiskSizeGb runtime attribute to be an Integer") + assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, + "Expecting bootDiskSizeGb runtime attribute to be an Integer" + ) } "validate a valid disks entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomString("local-disk 20 SSD")) - val expectedRuntimeAttributes = expectedDefaults.copy(disks = Seq(PipelinesApiAttachedDisk.parse("local-disk 20 SSD").get)) + val expectedRuntimeAttributes = + expectedDefaults.copy(disks = Seq(PipelinesApiAttachedDisk.parse("local-disk 20 SSD").get)) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid disks entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomInteger(10)) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting disks runtime attribute to be a comma separated String or Array[String]") + assertPapiRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting disks runtime attribute to be a comma separated String or Array[String]" + ) } "validate a valid disks array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("local-disk 20 SSD"), WomString("local-disk 30 SSD")))) - val expectedRuntimeAttributes = expectedDefaults.copy(disks = Seq(PipelinesApiAttachedDisk.parse("local-disk 20 SSD").get, PipelinesApiAttachedDisk.parse("local-disk 30 SSD").get)) + val runtimeAttributes = Map( + "docker" -> WomString("ubuntu:latest"), + "disks" -> WomArray(WomArrayType(WomStringType), + List(WomString("local-disk 20 SSD"), WomString("local-disk 30 SSD")) + ) + ) + val expectedRuntimeAttributes = expectedDefaults.copy(disks = + Seq(PipelinesApiAttachedDisk.parse("local-disk 20 SSD").get, + PipelinesApiAttachedDisk.parse("local-disk 30 SSD").get + ) + ) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate a valid disks array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("blah"), WomString("blah blah")))) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE'") + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), + "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("blah"), WomString("blah blah"))) + ) + assertPapiRuntimeAttributesFailedCreation( + runtimeAttributes, + "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE'" + ) } "validate a valid memory entry" in { @@ -180,7 +229,10 @@ final class PipelinesApiRuntimeAttributesSpec "fail to validate an invalid memory entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "memory" -> 
WomString("blah")) - assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting memory runtime attribute to be an Integer or String with format '8 GB'") + assertPapiRuntimeAttributesFailedCreation( + runtimeAttributes, + "Expecting memory runtime attribute to be an Integer or String with format '8 GB'" + ) } "validate a valid noAddress entry" in { @@ -192,7 +244,8 @@ final class PipelinesApiRuntimeAttributesSpec "fail to validate an invalid noAddress entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "noAddress" -> WomInteger(1)) assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, - "Expecting noAddress runtime attribute to be a Boolean") + "Expecting noAddress runtime attribute to be a Boolean" + ) } "override config default attributes with default attributes declared in workflow options" in { @@ -253,21 +306,39 @@ final class PipelinesApiRuntimeAttributesSpec trait PipelinesApiRuntimeAttributesSpecsMixin { this: TestSuite => - def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]): WorkflowOptions = { - WorkflowOptions(JsObject(Map( - "default_runtime_attributes" -> JsObject(defaults) - ))) - } - - val expectedDefaults = new PipelinesApiRuntimeAttributes(refineMV(1), None, None, Vector("us-central1-b", "us-central1-a"), 0, 10, - MemorySize(2, MemoryUnit.GB), Vector(PipelinesApiWorkingDisk(DiskType.SSD, 10)), "ubuntu:latest", false, - ContinueOnReturnCodeSet(Set(0)), false, false, None, None) + def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]): WorkflowOptions = + WorkflowOptions( + JsObject( + Map( + "default_runtime_attributes" -> JsObject(defaults) + ) + ) + ) + + val expectedDefaults = new PipelinesApiRuntimeAttributes( + refineMV(1), + None, + None, + Vector("us-central1-b", "us-central1-a"), + 0, + 10, + MemorySize(2, MemoryUnit.GB), + Vector(PipelinesApiWorkingDisk(DiskType.SSD, 10)), + "ubuntu:latest", + false, + ContinueOnReturnCodeSet(Set(0)), + false, + false, + None, + None + ) def assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes: Map[String, WomValue], expectedRuntimeAttributes: PipelinesApiRuntimeAttributes, workflowOptions: WorkflowOptions = emptyWorkflowOptions, defaultZones: NonEmptyList[String] = defaultZones, - papiConfiguration: PipelinesApiConfiguration = papiConfiguration): Unit = { + papiConfiguration: PipelinesApiConfiguration = papiConfiguration + ): Unit = { try { val actualRuntimeAttributes = toPapiRuntimeAttributes(runtimeAttributes, workflowOptions, papiConfiguration) assert(actualRuntimeAttributes == expectedRuntimeAttributes) @@ -279,34 +350,45 @@ trait PipelinesApiRuntimeAttributesSpecsMixin { this: TestSuite => def assertPapiRuntimeAttributesFailedCreation(runtimeAttributes: Map[String, WomValue], exMsgs: List[String], - workflowOptions: WorkflowOptions): Unit = { + workflowOptions: WorkflowOptions + ): Unit = { Try(toPapiRuntimeAttributes(runtimeAttributes, workflowOptions, papiConfiguration)) match { case Success(oops) => - fail(s"Expected error containing strings: ${exMsgs.map(s => s"'$s'").mkString(", ")} but instead got Success($oops)") - case Failure(ex) => exMsgs foreach { exMsg => assert(ex.getMessage.contains(exMsg)) } + fail( + s"Expected error containing strings: ${exMsgs.map(s => s"'$s'").mkString(", ")} but instead got Success($oops)" + ) + case Failure(ex) => exMsgs foreach { exMsg => assert(ex.getMessage.contains(exMsg)) } } () } def assertPapiRuntimeAttributesFailedCreation(runtimeAttributes: Map[String, WomValue], exMsg: String, - workflowOptions: 
WorkflowOptions = emptyWorkflowOptions): Unit = { + workflowOptions: WorkflowOptions = emptyWorkflowOptions + ): Unit = assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, List(exMsg), workflowOptions) - } def toPapiRuntimeAttributes(runtimeAttributes: Map[String, WomValue], workflowOptions: WorkflowOptions, - papiConfiguration: PipelinesApiConfiguration): PipelinesApiRuntimeAttributes = { + papiConfiguration: PipelinesApiConfiguration + ): PipelinesApiRuntimeAttributes = { val runtimeAttributesBuilder = PipelinesApiRuntimeAttributes.runtimeAttributesBuilder(papiConfiguration) - val defaultedAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes( - staticRuntimeAttributeDefinitions, workflowOptions)(runtimeAttributes) + val defaultedAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(staticRuntimeAttributeDefinitions, workflowOptions)( + runtimeAttributes + ) val validatedRuntimeAttributes = runtimeAttributesBuilder.build(defaultedAttributes, NOPLogger.NOP_LOGGER) PipelinesApiRuntimeAttributes(validatedRuntimeAttributes, papiConfiguration.runtimeConfig) } val emptyWorkflowOptions: WorkflowOptions = WorkflowOptions.fromMap(Map.empty).get val defaultZones: NonEmptyList[String] = NonEmptyList.of("us-central1-b", "us-central1-a") - val noDefaultsPapiConfiguration = new PipelinesApiConfiguration(PipelinesApiTestConfig.NoDefaultsConfigurationDescriptor, genomicsFactory, googleConfiguration, papiAttributes) + val noDefaultsPapiConfiguration = new PipelinesApiConfiguration( + PipelinesApiTestConfig.NoDefaultsConfigurationDescriptor, + genomicsFactory, + googleConfiguration, + papiAttributes + ) val staticRuntimeAttributeDefinitions: Set[RuntimeAttributeDefinition] = PipelinesApiRuntimeAttributes.runtimeAttributesBuilder(PipelinesApiTestConfig.papiConfiguration).definitions.toSet } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiTestConfig.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiTestConfig.scala index 2df2de9467d..8de64e27ff9 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiTestConfig.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiTestConfig.scala @@ -104,24 +104,21 @@ object PipelinesApiTestConfig { val PapiBackendConfig: Config = ConfigFactory.parseString(PapiBackendConfigString) val PapiGlobalConfig: Config = ConfigFactory.parseString(PapiGlobalConfigString) val PapiBackendNoDefaultConfig: Config = ConfigFactory.parseString(NoDefaultsConfigString) - val PapiBackendConfigurationDescriptor: BackendConfigurationDescriptor = { + val PapiBackendConfigurationDescriptor: BackendConfigurationDescriptor = new BackendConfigurationDescriptor(PapiBackendConfig, PapiGlobalConfig) { override private[backend] lazy val cromwellFileSystems = new CromwellFileSystems(PapiGlobalConfig) } - } val NoDefaultsConfigurationDescriptor: BackendConfigurationDescriptor = BackendConfigurationDescriptor(PapiBackendNoDefaultConfig, PapiGlobalConfig) val genomicsFactory: PipelinesApiFactoryInterface = new PipelinesApiFactoryInterface { - override def build(httpRequestInitializer: HttpRequestInitializer): PipelinesApiRequestFactory = { + override def build(httpRequestInitializer: HttpRequestInitializer): PipelinesApiRequestFactory = new PipelinesApiRequestFactory { override def cancelRequest(job: 
StandardAsyncJob) = throw new UnsupportedOperationException override def getRequest(job: StandardAsyncJob) = throw new UnsupportedOperationException - override def runRequest(createPipelineParameters: - PipelinesApiRequestFactory.CreatePipelineParameters, - jobLogger: JobLogger, - ) = throw new UnsupportedOperationException + override def runRequest(createPipelineParameters: PipelinesApiRequestFactory.CreatePipelineParameters, + jobLogger: JobLogger + ) = throw new UnsupportedOperationException } - } override def usesEncryptedDocker: Boolean = false } def pathBuilders()(implicit as: ActorSystem): List[PathBuilder] = @@ -129,5 +126,9 @@ object PipelinesApiTestConfig { val googleConfiguration: GoogleConfiguration = GoogleConfiguration(PapiGlobalConfig) val papiAttributes: PipelinesApiConfigurationAttributes = PipelinesApiConfigurationAttributes(googleConfiguration, PapiBackendConfig, "papi") - val papiConfiguration = new PipelinesApiConfiguration(PapiBackendConfigurationDescriptor, genomicsFactory, googleConfiguration, papiAttributes) + val papiConfiguration = new PipelinesApiConfiguration(PapiBackendConfigurationDescriptor, + genomicsFactory, + googleConfiguration, + papiAttributes + ) } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala index db7b189e79e..5c68eedeb82 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala @@ -24,7 +24,14 @@ class PipelinesApiWorkflowPathsSpec extends TestKitSuite with AnyFlatSpecLike wi SampleWdl.HelloWorld.workflowSource(), inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) - workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + workflowPaths = PipelinesApiWorkflowPaths( + workflowDescriptor, + NoCredentials.getInstance(), + NoCredentials.getInstance(), + papiConfiguration, + pathBuilders(), + PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper + ) } it should "map the correct paths" in { diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValuesSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValuesSpec.scala index 4b4f2fe35a7..2c8572cf146 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValuesSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/VpcAndSubnetworkProjectLabelValuesSpec.scala @@ -21,15 +21,15 @@ class VpcAndSubnetworkProjectLabelValuesSpec extends AnyFlatSpec with Matchers w s"slashed/$${projectId}/net", None, "slashed/my-project/net", - None, + None ), ( "a subnet with a project token", "slashed/net", Option(s"slashed/$${projectId}/sub"), "slashed/net", - Option("slashed/my-project/sub"), - 
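// ---------------------------------------------------------------------------
// Illustrative sketch (not from the patch): the table rows in this hunk expect
// a `${projectId}` token inside network/subnetwork label values to be replaced
// with the concrete project name ("slashed/${projectId}/net" becomes
// "slashed/my-project/net"). A minimal take on that substitution; the real
// VpcAndSubnetworkProjectLabelValues logic may differ in detail.
// ---------------------------------------------------------------------------
object ProjectIdTokenSketch {
  private val Token = "${projectId}"

  // Plain string replacement: every occurrence of the token becomes the
  // concrete project id.
  def substitute(labelValue: String, projectId: String): String =
    labelValue.replace(Token, projectId)
}

object ProjectIdTokenSketchDemo extends App {
  // Mirrors the rows above: "slashed/${projectId}/net" -> "slashed/my-project/net"
  val network = ProjectIdTokenSketch.substitute("slashed/${projectId}/net", "my-project")
  assert(network == "slashed/my-project/net")
  println(network)
}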
), + Option("slashed/my-project/sub") + ) ) forAll(labelsTests) { (description, network, subnetOption, networkName, subnetNameOption) => diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManagerSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManagerSpec.scala index 7da9484167a..3b7ccd59038 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManagerSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManagerSpec.scala @@ -33,20 +33,22 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w val registryProbe: ActorRef = TestProbe("registryProbe").ref val workflowId: WorkflowId = WorkflowId.randomId() - private def makePollRequest(snd: ActorRef, jobId: StandardAsyncJob) = new PAPIStatusPollRequest(workflowId, snd, null, jobId) { - override def contentLength = 0 - } + private def makePollRequest(snd: ActorRef, jobId: StandardAsyncJob) = + new PAPIStatusPollRequest(workflowId, snd, null, jobId) { + override def contentLength = 0 + } - private def makeCreateRequest(contentSize: Long, snd: ActorRef, workflowId: WorkflowId = workflowId) = new PAPIRunCreationRequest(workflowId, snd, null) { - override def contentLength: Long = contentSize - } + private def makeCreateRequest(contentSize: Long, snd: ActorRef, workflowId: WorkflowId = workflowId) = + new PAPIRunCreationRequest(workflowId, snd, null) { + override def contentLength: Long = contentSize + } it should "queue up and dispense status poll requests, in order" in { val statusPoller = TestProbe(name = "StatusPoller") val jaqmActor: TestActorRef[TestPipelinesApiRequestManager] = TestActorRef( props = TestPipelinesApiRequestManager.props(registryProbe, statusPoller.ref), - name = "jaqmActor-queue", + name = "jaqmActor-queue" ) var statusRequesters = ((0 until BatchSize * 2) map { i => i -> TestProbe(name = s"StatusRequester_$i") }).toMap @@ -92,7 +94,7 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w val jaqmActor: TestActorRef[TestPipelinesApiRequestManager] = TestActorRef( props = TestPipelinesApiRequestManager.props(registryProbe, statusPoller.ref), - name = "jaqmActor-reject-create", + name = "jaqmActor-reject-create" ) val statusRequester = TestProbe("statusRequester") @@ -111,7 +113,7 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w // maxBatchSize is 14MB, which mean we can take 2 queries of 5MB but not 3 val jaqmActor: TestActorRef[TestPipelinesApiRequestManager] = TestActorRef( props = TestPipelinesApiRequestManager.props(registryProbe, statusPoller.ref), - name = "jaqmActor-maxBatchSize", + name = "jaqmActor-maxBatchSize" ) val statusRequester = TestProbe("statusRequester") @@ -149,22 +151,22 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w val statusPoller1 = TestActorRef[TestPapiWorkerActor]( props = Props(new TestPapiWorkerActor()), supervisor = TestActorRef(new AkkaTestUtil.StoppingSupervisor()), - name = s"statusPoller1-$name", + name = s"statusPoller1-$name" ) val statusPoller2 = TestActorRef[TestPapiWorkerActor]( props = Props(new TestPapiWorkerActor()), supervisor = TestActorRef(new AkkaTestUtil.StoppingSupervisor()), - name = s"statusPoller2-$name", + name = 
s"statusPoller2-$name" ) val statusPoller3 = TestActorRef[TestPapiWorkerActor]( props = Props(new TestPapiWorkerActor()), supervisor = TestActorRef(new AkkaTestUtil.StoppingSupervisor()), - name = s"statusPoller3-$name", + name = s"statusPoller3-$name" ) val jaqmActor: TestActorRef[TestPipelinesApiRequestManager] = TestActorRef( props = TestPipelinesApiRequestManager.props(registryProbe, statusPoller1, statusPoller2, statusPoller3), - name = s"TestJesApiQueryManage-$name", + name = s"TestJesApiQueryManage-$name" ) val emptyActor = system.actorOf(Props.empty, s"emptyActor-$name") @@ -177,8 +179,8 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w // The work queue should be filled and the manager should have one poller active: eventually { - jaqmActor.underlyingActor.queueSize should be (BatchSize) - jaqmActor.underlyingActor.testPollerCreations should be (1) + jaqmActor.underlyingActor.queueSize should be(BatchSize) + jaqmActor.underlyingActor.testPollerCreations should be(1) jaqmActor.underlyingActor.statusPollers.size should be(1) jaqmActor.underlyingActor.statusPollers.head should be(statusPoller1) } @@ -188,7 +190,7 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w // Therefore, there's no work left in the work queue because it's all been given to the worker: eventually { - jaqmActor.underlyingActor.queueSize should be (0) + jaqmActor.underlyingActor.queueSize should be(0) statusPoller1.underlyingActor.workToDo.map(_.size) should be(Some(BatchSize)) } @@ -197,8 +199,8 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w // The work queue receives all the work that couldn't be completed, and a new poller is created: eventually { - jaqmActor.underlyingActor.queueSize should be (BatchSize) - jaqmActor.underlyingActor.testPollerCreations should be (2) + jaqmActor.underlyingActor.queueSize should be(BatchSize) + jaqmActor.underlyingActor.testPollerCreations should be(2) jaqmActor.underlyingActor.statusPollers.size should be(1) jaqmActor.underlyingActor.statusPollers.head should be(statusPoller2) } @@ -208,7 +210,7 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w // The queue is emptied again and this time statusPoller2 has it: eventually { - jaqmActor.underlyingActor.queueSize should be (0) + jaqmActor.underlyingActor.queueSize should be(0) statusPoller2.underlyingActor.workToDo.map(_.size) should be(Some(BatchSize)) } @@ -217,8 +219,8 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w // The work queue receives all the work that couldn't be completed *again*, and a new poller is created *again*: eventually { - jaqmActor.underlyingActor.queueSize should be (BatchSize) - jaqmActor.underlyingActor.testPollerCreations should be (3) + jaqmActor.underlyingActor.queueSize should be(BatchSize) + jaqmActor.underlyingActor.testPollerCreations should be(3) jaqmActor.underlyingActor.statusPollers.size should be(1) jaqmActor.underlyingActor.statusPollers.head should be(statusPoller3) } @@ -231,7 +233,7 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w val jaqmActor: TestActorRef[TestPipelinesApiRequestManager] = TestActorRef( props = TestPipelinesApiRequestManager.props(registryProbe, statusPoller.ref), - name = "jaqmActor-run", + name = "jaqmActor-run" ) // Enqueue 3 create requests @@ -247,7 +249,9 @@ class PipelinesApiRequestManagerSpec extends TestKitSuite with AnyFlatSpecLike w // It should remove all 
and only run requests for workflow A eventually { jaqmActor.underlyingActor.queueSize shouldBe 1 - jaqmActor.underlyingActor.workQueue.head.asInstanceOf[PipelinesApiRequestManager.PAPIRunCreationRequest].workflowId shouldBe workflowIdB + jaqmActor.underlyingActor.workQueue.head + .asInstanceOf[PipelinesApiRequestManager.PAPIRunCreationRequest] + .workflowId shouldBe workflowIdB } } } @@ -265,14 +269,14 @@ object TestPipelinesApiRequestManagerSpec { class TestPipelinesApiRequestManager(qps: Int Refined Positive, requestWorkers: Int Refined Positive, registry: ActorRef, - availableRequestWorkers: ActorRef*) - extends PipelinesApiRequestManager(qps, requestWorkers, registry)(new MockPipelinesRequestHandler) { + availableRequestWorkers: ActorRef* +) extends PipelinesApiRequestManager(qps, requestWorkers, registry)(new MockPipelinesRequestHandler) { var testProbeQueue: Queue[ActorRef] = _ var testPollerCreations: Int = _ - private def askForWorkReceive: PartialFunction[Any, Unit] = { - case afw: SpecOnly_TriggerRequestForWork => statusPollers.head ! afw + private def askForWorkReceive: PartialFunction[Any, Unit] = { case afw: SpecOnly_TriggerRequestForWork => + statusPollers.head ! afw } override def receive: PartialFunction[Any, Unit] = askForWorkReceive orElse super.receive @@ -284,7 +288,7 @@ class TestPipelinesApiRequestManager(qps: Int Refined Positive, override private[api] lazy val nbWorkers = 1 override private[api] def resetAllWorkers(): Unit = { - val pollers = Vector.fill(1) { makeAndWatchWorkerActor() } + val pollers = Vector.fill(1)(makeAndWatchWorkerActor()) statusPollers = pollers } @@ -325,28 +329,29 @@ class TestPapiWorkerActor() extends DeathTestActor with ActorLogging { case SpecOnly_TriggerRequestForWork(manager, batchSize) => requestWork(manager, batchSize) } - def requestWork(papiRequestManager: ActorRef, batchSize: Int): Unit = { + def requestWork(papiRequestManager: ActorRef, batchSize: Int): Unit = papiRequestManager ! 
PipelinesWorkerRequestWork(batchSize) - } } case class SpecOnly_TriggerRequestForWork(papiRequestManager: ActorRef, batchSize: Int) class MockPipelinesRequestHandler extends PipelinesApiRequestHandler { override def makeBatchRequest = throw new UnsupportedOperationException - override def enqueue[T <: PipelinesApiRequestManager.PAPIApiRequest](papiApiRequest: T, batchRequest: BatchRequest, pollingManager: ActorRef) - (implicit ec: ExecutionContext)= throw new UnsupportedOperationException + override def enqueue[T <: PipelinesApiRequestManager.PAPIApiRequest](papiApiRequest: T, + batchRequest: BatchRequest, + pollingManager: ActorRef + )(implicit ec: ExecutionContext) = throw new UnsupportedOperationException } object TestPipelinesApiRequestManager { import PipelinesApiTestConfig._ - def props(registryProbe: ActorRef, statusPollers: ActorRef*): Props = { - Props(new TestPipelinesApiRequestManager( - papiConfiguration.papiAttributes.qps, - papiConfiguration.papiAttributes.requestWorkers, - registryProbe, - statusPollers: _*) + def props(registryProbe: ActorRef, statusPollers: ActorRef*): Props = + Props( + new TestPipelinesApiRequestManager(papiConfiguration.papiAttributes.qps, + papiConfiguration.papiAttributes.requestWorkers, + registryProbe, + statusPollers: _* + ) ) - } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala index d189735eab0..0736b5c051c 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala @@ -8,8 +8,17 @@ import com.google.api.client.googleapis.batch.json.JsonBatchCallback import com.google.api.client.googleapis.json.GoogleJsonError import com.google.api.client.http.HttpRequest import cromwell.backend.google.pipelines.common.PipelinesApiConfiguration -import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{PAPIApiException, PAPIApiRequestFailed, PAPIStatusPollRequest, PipelinesWorkerRequestWork} -import cromwell.backend.google.pipelines.common.api.TestPipelinesApiRequestWorker.{CallbackFailure, CallbackSuccess, PipelinesApiBatchCallbackResponse} +import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{ + PAPIApiException, + PAPIApiRequestFailed, + PAPIStatusPollRequest, + PipelinesWorkerRequestWork +} +import cromwell.backend.google.pipelines.common.api.TestPipelinesApiRequestWorker.{ + CallbackFailure, + CallbackSuccess, + PipelinesApiBatchCallbackResponse +} import cromwell.core.{ExecutionEvent, TestKitSuite} import eu.timepit.refined.api.Refined import eu.timepit.refined.numeric.Positive @@ -25,7 +34,11 @@ import scala.concurrent.{Future, Promise} import scala.util.Try abstract class PipelinesApiRequestWorkerSpec[O >: Null] - extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually with BeforeAndAfter { + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with Eventually + with BeforeAndAfter { implicit var batchHandler: TestPipelinesApiBatchHandler[O] @@ -45,12 +58,16 @@ abstract class PipelinesApiRequestWorkerSpec[O >: Null] } it should "query for work and wait for a reply" in { - managerProbe.expectMsgClass(max = 
TestExecutionTimeout, c = classOf[PipelinesApiRequestManager.PipelinesWorkerRequestWork]) + managerProbe.expectMsgClass(max = TestExecutionTimeout, + c = classOf[PipelinesApiRequestManager.PipelinesWorkerRequestWork] + ) managerProbe.expectNoMessage(max = AwaitAlmostNothing) } it should "respond correctly with various run statuses" in { - managerProbe.expectMsgClass(max = TestExecutionTimeout, c = classOf[PipelinesApiRequestManager.PipelinesWorkerRequestWork]) + managerProbe.expectMsgClass(max = TestExecutionTimeout, + c = classOf[PipelinesApiRequestManager.PipelinesWorkerRequestWork] + ) val requester1 = TestProbe("requester1") val query1 = PAPIStatusPollRequest(null, requester1.ref, null, null) @@ -72,13 +89,15 @@ abstract class PipelinesApiRequestWorkerSpec[O >: Null] machineType = None, zone = None, instanceName = None, - wasPreemptible = false, + wasPreemptible = false ) batchHandler.operationStatusResponses :+= successStatus batchHandler.operationStatusResponses :+= failureStatus - workerActor.tell(msg = PipelinesApiRequestManager.PipelinesApiWorkBatch(NonEmptyList(query1, List(query2, query3))), sender = managerProbe.ref) - eventually { batchHandler.runBatchRequested should be(true) } + workerActor.tell(msg = PipelinesApiRequestManager.PipelinesApiWorkBatch(NonEmptyList(query1, List(query2, query3))), + sender = managerProbe.ref + ) + eventually(batchHandler.runBatchRequested should be(true)) // The manager shouldn't have been asked for more work yet: managerProbe.expectNoMessage(max = AwaitAlmostNothing) @@ -91,10 +110,10 @@ abstract class PipelinesApiRequestWorkerSpec[O >: Null] requester3.expectNoMessage(max = AwaitAlmostNothing) // Requester3 expected nothing... Instead, the manager expects an API failure notification and then a request for more work: - managerProbe.expectMsgPF(TestExecutionTimeout) { - case failure: PAPIApiRequestFailed => - if (!failure.cause.isInstanceOf[PAPIApiException]) fail("Unexpected failure cause class: " + failure.cause.getClass.getSimpleName) - if (failure.query != query2 && failure.query != query3) fail("Unexpected query caused failure: " + failure.query) + managerProbe.expectMsgPF(TestExecutionTimeout) { case failure: PAPIApiRequestFailed => + if (!failure.cause.isInstanceOf[PAPIApiException]) + fail("Unexpected failure cause class: " + failure.cause.getClass.getSimpleName) + if (failure.query != query2 && failure.query != query3) fail("Unexpected query caused failure: " + failure.query) } managerProbe.expectMsg(PipelinesWorkerRequestWork(PipelinesApiRequestWorker.MaxBatchSize)) managerProbe.expectNoMessage(max = AwaitAlmostNothing) @@ -102,8 +121,9 @@ abstract class PipelinesApiRequestWorkerSpec[O >: Null] } //noinspection ScalaUnusedSymbol -class TestPipelinesApiRequestWorker(manager: ActorRef, qps: Int Refined Positive, registryProbe: ActorRef)(implicit batchHandler: TestPipelinesApiBatchHandler[_]) - extends PipelinesApiRequestWorker(manager, 10.milliseconds, registryProbe) { +class TestPipelinesApiRequestWorker(manager: ActorRef, qps: Int Refined Positive, registryProbe: ActorRef)(implicit + batchHandler: TestPipelinesApiBatchHandler[_] +) extends PipelinesApiRequestWorker(manager, 10.milliseconds, registryProbe) { override def createBatch(): BatchRequest = null override def runBatch(batch: BatchRequest): Unit = batchHandler.runBatch() } @@ -118,14 +138,15 @@ abstract class TestPipelinesApiBatchHandler[O >: Null] extends PipelinesApiReque def createBatch(genomicsInterface: O): BatchRequest = null def runBatch(): Unit = runBatchRequested = true 
- def executeBatch(): Unit = { - resultHandlers.zip(callbackResponses) foreach { case (handler, response) => response match { - case CallbackSuccess => handler.onSuccess(null, null) - case CallbackFailure => - val error: GoogleJsonError = new GoogleJsonError() - handler.onFailure(error, null) - }} - } + def executeBatch(): Unit = + resultHandlers.zip(callbackResponses) foreach { case (handler, response) => + response match { + case CallbackSuccess => handler.onSuccess(null, null) + case CallbackFailure => + val error: GoogleJsonError = new GoogleJsonError() + handler.onFailure(error, null) + } + } def enqueueStatusPollInBatch(pollingRequest: PAPIStatusPollRequest, batch: BatchRequest): Future[Try[Unit]] = { val completionPromise = Promise[Try[Unit]]() @@ -134,9 +155,12 @@ abstract class TestPipelinesApiBatchHandler[O >: Null] extends PipelinesApiReque completionPromise.future } - def statusPollResultHandler(pollRequest: PAPIStatusPollRequest, completionPromise: Promise[Try[Unit]]): JsonBatchCallback[O] + def statusPollResultHandler(pollRequest: PAPIStatusPollRequest, + completionPromise: Promise[Try[Unit]] + ): JsonBatchCallback[O] - def addStatusPollToBatch(httpRequest: HttpRequest, batch: BatchRequest, resultHandler: JsonBatchCallback[O]): Unit = resultHandlers :+= resultHandler + def addStatusPollToBatch(httpRequest: HttpRequest, batch: BatchRequest, resultHandler: JsonBatchCallback[O]): Unit = + resultHandlers :+= resultHandler def mockStatusInterpreter(operation: O): RunStatus = { val (status, newQueue) = operationStatusResponses.dequeue @@ -146,14 +170,16 @@ abstract class TestPipelinesApiBatchHandler[O >: Null] extends PipelinesApiReque } object TestPipelinesApiRequestWorker { - def props(manager: ActorRef, jesConfiguration: PipelinesApiConfiguration, registryProbe: ActorRef) - (implicit batchHandler: TestPipelinesApiBatchHandler[_]): Props = { - Props(new TestPipelinesApiRequestWorker( - manager, - jesConfiguration.papiAttributes.qps, - registryProbe - )) - } + def props(manager: ActorRef, jesConfiguration: PipelinesApiConfiguration, registryProbe: ActorRef)(implicit + batchHandler: TestPipelinesApiBatchHandler[_] + ): Props = + Props( + new TestPipelinesApiRequestWorker( + manager, + jesConfiguration.papiAttributes.qps, + registryProbe + ) + ) sealed trait PipelinesApiBatchCallbackResponse case object CallbackSuccess extends PipelinesApiBatchCallbackResponse diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala index 1c40fa38590..368b631c6ce 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala @@ -3,7 +3,7 @@ package cromwell.backend.google.pipelines.common.callcaching import akka.event.NoLogging import akka.testkit.{ImplicitSender, TestFSMRef, TestProbe} import com.typesafe.config.ConfigFactory -import cromwell.backend.BackendCacheHitCopyingActor.{CopyOutputsCommand, CopyingOutputsFailedResponse} +import cromwell.backend.BackendCacheHitCopyingActor.{CopyingOutputsFailedResponse, CopyOutputsCommand} import 
cromwell.backend.BackendJobExecutionActor.JobSucceededResponse import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes._ import cromwell.backend.google.pipelines.common._ @@ -38,9 +38,13 @@ import scala.concurrent.duration._ import scala.language.postfixOps import scala.util.{Success, Try} - -class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite - with AnyFlatSpecLike with Matchers with ImplicitSender with MockSugar with Eventually { +class PipelinesApiBackendCacheHitCopyingActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with Matchers + with ImplicitSender + with MockSugar + with Eventually { behavior of "PipelinesApiBackendCacheHitCopyingActor" @@ -72,7 +76,8 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite // Make sure we got the expected type of cache blacklistCache match { case _: GroupingBlacklistCache => - case bad => fail(s"Unexpected blacklist cache type, expected GroupingBlacklistCache: ${bad.getClass.getSimpleName}") + case bad => + fail(s"Unexpected blacklist cache type, expected GroupingBlacklistCache: ${bad.getClass.getSimpleName}") } { @@ -86,7 +91,8 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite fakeIoActor = ioActor, fakeServiceRegistryActor = serviceRegistryActor, supervisor = supervisor, - grouping = grouping) + grouping = grouping + ) val copyCommand = buildCopyCommand(hitId = 0, bucket = WideOpenBucket) supervisor watch copyActor @@ -97,9 +103,8 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite copyActor.underlyingActor.stateName shouldBe WaitingForIoResponses } - ioActor.expectMsgPF(5 seconds) { - case ioCommand: DefaultIoCopyCommand => - ioActor.reply(IoSuccess(ioCommand, ())) + ioActor.expectMsgPF(5 seconds) { case ioCommand: DefaultIoCopyCommand => + ioActor.reply(IoSuccess(ioCommand, ())) } supervisor.expectMsgPF(5 seconds) { case _: JobSucceededResponse => } @@ -135,7 +140,8 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite fakeIoActor = ioActor, fakeServiceRegistryActor = serviceRegistryActor, supervisor = supervisor, - grouping = grouping) + grouping = grouping + ) val command = buildCopyCommand(hitId = 1, bucket = LockedDownBucket) supervisor watch copyActor @@ -146,10 +152,9 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite copyActor.underlyingActor.stateName shouldBe WaitingForIoResponses } - ioActor.expectMsgPF(5 seconds) { - case ioCommand: DefaultIoCopyCommand => - val failedPath = command.jobDetritusFiles(JobPaths.ReturnCodePathKey) - ioActor.reply(IoReadForbiddenFailure(ioCommand, new RuntimeException(), failedPath)) + ioActor.expectMsgPF(5 seconds) { case ioCommand: DefaultIoCopyCommand => + val failedPath = command.jobDetritusFiles(JobPaths.ReturnCodePathKey) + ioActor.reply(IoReadForbiddenFailure(ioCommand, new RuntimeException(), failedPath)) } supervisor.expectMsgPF(5 seconds) { case _: CopyingOutputsFailedResponse => } @@ -193,7 +198,8 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite fakeIoActor = ioActor, fakeServiceRegistryActor = serviceRegistryActor, supervisor = supervisor, - grouping = grouping) + grouping = grouping + ) supervisor watch copyActor @@ -234,7 +240,8 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite fakeIoActor = ioActor, fakeServiceRegistryActor = serviceRegistryActor, supervisor = supervisor, - grouping = grouping) + grouping = grouping + ) supervisor watch copyActor @@ -245,12 +252,12 @@ class 
PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite copyActor.underlyingActor.stateName shouldBe WaitingForIoResponses } - ioActor.expectMsgPF(5 seconds) { - case ioCommand: DefaultIoCopyCommand => - ioActor.reply(IoFailure(ioCommand, new RuntimeException())) + ioActor.expectMsgPF(5 seconds) { case ioCommand: DefaultIoCopyCommand => + ioActor.reply(IoFailure(ioCommand, new RuntimeException())) } - val List(readHit, readBucket, writeHit) = instrumentationCounts(n = 3, serviceRegistryActor = serviceRegistryActor) + val List(readHit, readBucket, writeHit) = + instrumentationCounts(n = 3, serviceRegistryActor = serviceRegistryActor) readHit.bucket.path.toList shouldBe expectedMetric(Hit, Read, UntestedCacheResult) readBucket.bucket.path.toList shouldBe expectedMetric(Bucket, Read, GoodCacheResult) @@ -286,15 +293,15 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite fakeIoActor = ioActor, fakeServiceRegistryActor = serviceRegistryActor, supervisor = supervisor, - grouping = grouping) + grouping = grouping + ) supervisor watch copyActor val command = buildCopyCommand(hitId = 3, bucket = WideOpenBucket) copyActor ! command - supervisor.expectMsgPF(5 seconds) { - case _: CopyingOutputsFailedResponse => + supervisor.expectMsgPF(5 seconds) { case _: CopyingOutputsFailedResponse => } // The IoActor should not be consulted and the copying actor should simply stop itself without transitioning. supervisor.expectTerminated(copyActor) @@ -328,15 +335,15 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite fakeIoActor = ioActor, fakeServiceRegistryActor = serviceRegistryActor, supervisor = supervisor, - grouping = grouping) + grouping = grouping + ) supervisor watch copyActor val command = buildCopyCommand(hitId = 4, bucket = LockedDownBucket) copyActor ! command - supervisor.expectMsgPF(5 seconds) { - case _: CopyingOutputsFailedResponse => + supervisor.expectMsgPF(5 seconds) { case _: CopyingOutputsFailedResponse => } // The IoActor should not be consulted and the copying actor should simply stop itself without transitioning. 
supervisor.expectTerminated(copyActor) @@ -364,7 +371,9 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite private def instrumentationCounts(n: Int, serviceRegistryActor: TestProbe): List[CromwellCount] = { val received = serviceRegistryActor.receiveN(n = n, max = 5 seconds).toList - val instrumentationCounts = received collect { case InstrumentationServiceMessage(c) => c } collect { case c: CromwellCount => c } + val instrumentationCounts = received collect { case InstrumentationServiceMessage(c) => c } collect { + case c: CromwellCount => c + } instrumentationCounts foreach { c => c.value shouldBe 1; c.sampling shouldBe 1.0 } instrumentationCounts @@ -399,7 +408,8 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite fakeIoActor: TestProbe, fakeServiceRegistryActor: TestProbe, supervisor: TestProbe, - grouping: Option[String]): TestFSMRefPipelinesApiBackendCacheHitCopyingActor = { + grouping: Option[String] + ): TestFSMRefPipelinesApiBackendCacheHitCopyingActor = { // Couldn't mock this, possibly due to the use of `Refined` in two parameters: // // Underlying exception : java.lang.IllegalArgumentException: Cannot cast to primitive type: int @@ -449,7 +459,7 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite attempt = 1 ) - //noinspection ScalaUnusedSymbol + // noinspection ScalaUnusedSymbol def mapper(jobPaths: PipelinesApiJobPaths, originalPath: String): String = originalPath val workflowDescriptor = mock[BackendWorkflowDescriptor] @@ -524,9 +534,7 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite CopyOutputsCommand( womValueSimpletons = List.empty, - jobDetritusFiles = Map( - JobPaths.CallRootPathKey -> callRoot, - JobPaths.ReturnCodePathKey -> rcFile), + jobDetritusFiles = Map(JobPaths.CallRootPathKey -> callRoot, JobPaths.ReturnCodePathKey -> rcFile), returnCode = Option(0), cacheHit = CallCachingEntryId(hitId) ) @@ -540,10 +548,15 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite case object Read extends CacheAccessType case object Write extends CacheAccessType - private def expectedMetric(hitOrBucket: BlacklistingType, accessType: CacheAccessType, status: BlacklistStatus): List[String] = { - List("job", "callcaching", "blacklist", - accessType.metricFormat, - hitOrBucket.metricFormat, - status.getClass.getSimpleName.dropRight(1)) - } + private def expectedMetric(hitOrBucket: BlacklistingType, + accessType: CacheAccessType, + status: BlacklistStatus + ): List[String] = + List("job", + "callcaching", + "blacklist", + accessType.metricFormat, + hitOrBucket.metricFormat, + status.getClass.getSimpleName.dropRight(1) + ) } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala index 7710f3de98e..3b38f578d6a 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala @@ -26,32 +26,29 @@ import wom.format.MemorySize import scala.jdk.CollectionConverters._ -case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, endpointUrl: URL)(implicit gcsTransferConfiguration: GcsTransferConfiguration) extends PipelinesApiFactoryInterface - with ContainerSetup - 
with MonitoringAction - with CheckpointingAction - with Localization - with UserAction - with Delocalization - with MemoryRetryCheckAction - with SSHAccessAction { +case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, endpointUrl: URL)(implicit + gcsTransferConfiguration: GcsTransferConfiguration +) extends PipelinesApiFactoryInterface + with ContainerSetup + with MonitoringAction + with CheckpointingAction + with Localization + with UserAction + with Delocalization + with MemoryRetryCheckAction + with SSHAccessAction { override def build(initializer: HttpRequestInitializer): PipelinesApiRequestFactory = new PipelinesApiRequestFactory { - val genomics: Genomics = new Genomics.Builder( - GoogleAuthMode.httpTransport, - GoogleAuthMode.jsonFactory, - initializer) + val genomics: Genomics = new Genomics.Builder(GoogleAuthMode.httpTransport, GoogleAuthMode.jsonFactory, initializer) .setApplicationName(applicationName) .setRootUrl(endpointUrl.toString) .build - override def cancelRequest(job: StandardAsyncJob): HttpRequest = { + override def cancelRequest(job: StandardAsyncJob): HttpRequest = genomics.projects().operations().cancel(job.jobId, new CancelOperationRequest()).buildHttpRequest() - } - override def getRequest(job: StandardAsyncJob): HttpRequest = { + override def getRequest(job: StandardAsyncJob): HttpRequest = genomics.projects().operations().get(job.jobId).buildHttpRequest() - } override def runRequest(createPipelineParameters: CreatePipelineParameters, jobLogger: JobLogger): HttpRequest = { def createNetworkWithVPC(vpcAndSubnetworkProjectLabelValues: VpcAndSubnetworkProjectLabelValues): Network = { @@ -66,12 +63,11 @@ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, en network } - def createNetwork(): Network = { + def createNetwork(): Network = createPipelineParameters.vpcNetworkAndSubnetworkProjectLabels match { case Some(vpcAndSubnetworkProjectLabelValues) => createNetworkWithVPC(vpcAndSubnetworkProjectLabelValues) case _ => new Network().setUsePrivateAddress(createPipelineParameters.runtimeAttributes.noAddress) } - } val allDisksToBeMounted = createPipelineParameters.adjustedSizeDisks ++ createPipelineParameters.referenceDisksForLocalizationOpt.getOrElse(List.empty) @@ -95,7 +91,8 @@ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, en // adding memory as environment variables makes it easy for a user to retrieve the new value of memory // on the machine to utilize in their command blocks if needed val runtimeMemory = createPipelineParameters.runtimeAttributes.memory - val environment = Map("MEM_UNIT" -> runtimeMemory.unit.toString, "MEM_SIZE" -> runtimeMemory.amount.toString).asJava + val environment = + Map("MEM_UNIT" -> runtimeMemory.unit.toString, "MEM_SIZE" -> runtimeMemory.amount.toString).asJava val sortedActions = ActionUtils.sortActions[Action]( @@ -109,13 +106,12 @@ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, en checkpointingStart = checkpointingStart, checkpointingShutdown = checkpointingShutdown, sshAccess = sshAccess, - isBackground = - action => - Option(action.getFlags) - .map(_.asScala) - .toList - .flatten - .contains(ActionFlag.RunInBackground.toString), + isBackground = action => + Option(action.getFlags) + .map(_.asScala) + .toList + .flatten + .contains(ActionFlag.RunInBackground.toString) ) val serviceAccount = new ServiceAccount() @@ -138,8 +134,7 @@ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, en val 
network: Network = createNetwork()

-      val accelerators = createPipelineParameters.runtimeAttributes
-        .gpuResource.map(toAccelerator).toList.asJava
+      val accelerators = createPipelineParameters.runtimeAttributes.gpuResource.map(toAccelerator).toList.asJava

       /*
        * Adjust using docker images used by Cromwell as well as the tool's docker image size if available
@@ -147,12 +142,18 @@ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, en
       val adjustedBootDiskSize = {
         val fromRuntimeAttributes = createPipelineParameters.runtimeAttributes.bootDiskSize
         // Compute the decompressed size based on the information available
-        val userCommandImageSizeInBytes = createPipelineParameters.jobDescriptor.dockerSize.map(_.toFullSize(DockerConfiguration.instance.sizeCompressionFactor)).getOrElse(0L)
-        val userCommandImageSizeInGB = MemorySize(userCommandImageSizeInBytes.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB).amount
+        val userCommandImageSizeInBytes = createPipelineParameters.jobDescriptor.dockerSize
+          .map(_.toFullSize(DockerConfiguration.instance.sizeCompressionFactor))
+          .getOrElse(0L)
+        val userCommandImageSizeInGB =
+          MemorySize(userCommandImageSizeInBytes.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB).amount
         val userCommandImageSizeRoundedUpInGB = userCommandImageSizeInGB.ceil.toInt
-        val totalSize = fromRuntimeAttributes + userCommandImageSizeRoundedUpInGB + ActionUtils.cromwellImagesSizeRoundedUpInGB
-        jobLogger.info(s"Adjusting boot disk size to $totalSize GB: $fromRuntimeAttributes GB (runtime attributes) + $userCommandImageSizeRoundedUpInGB GB (user command image) + ${ActionUtils.cromwellImagesSizeRoundedUpInGB} GB (Cromwell support images)")
+        val totalSize =
+          fromRuntimeAttributes + userCommandImageSizeRoundedUpInGB + ActionUtils.cromwellImagesSizeRoundedUpInGB
+        jobLogger.info(
+          s"Adjusting boot disk size to $totalSize GB: $fromRuntimeAttributes GB (runtime attributes) + $userCommandImageSizeRoundedUpInGB GB (user command image) + ${ActionUtils.cromwellImagesSizeRoundedUpInGB} GB (Cromwell support images)"
+        )

         totalSize
       }
@@ -166,10 +167,12 @@ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, en
         .setNetwork(network)
         .setAccelerators(accelerators)

-      if(createPipelineParameters.dockerImageCacheDiskOpt.isDefined) {
-        jobLogger.info("Docker image cache requested for the job, but the job is being executed by Google " +
-          "Genomics API v2alpha1, while the feature is only supported by from Google Life Scielnces API starting from" +
-          "the version v2beta")
+      if (createPipelineParameters.dockerImageCacheDiskOpt.isDefined) {
+        jobLogger.info(
+          "Docker image cache requested for the job, but the job is being executed by the Google " +
+            "Genomics API v2alpha1, while the feature is only supported by the Google Life Sciences API " +
+            "starting from version v2beta"
+        )
       }

       createPipelineParameters.runtimeAttributes.gpuResource foreach { resource =>
diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActor.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActor.scala
index 481f7d6c5a8..d31d311fc28 100644
--- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActor.scala
+++ 
b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActor.scala @@ -17,7 +17,15 @@ import cromwell.filesystems.gcs.{GcsPath, GcsPathBuilder} import org.apache.commons.codec.digest.DigestUtils import wom.core.FullyQualifiedName import wom.expression.FileEvaluation -import wom.values.{GlobFunctions, WomFile, WomGlobFile, WomMaybeListedDirectory, WomMaybePopulatedFile, WomSingleFile, WomUnlistedDirectory} +import wom.values.{ + GlobFunctions, + WomFile, + WomGlobFile, + WomMaybeListedDirectory, + WomMaybePopulatedFile, + WomSingleFile, + WomUnlistedDirectory +} import java.io.FileNotFoundException import scala.concurrent.Future @@ -26,7 +34,7 @@ import scala.language.postfixOps import scala.util.control.NoStackTrace class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExecutionActorParams) - extends cromwell.backend.google.pipelines.common.PipelinesApiAsyncBackendJobExecutionActor(standardParams) + extends cromwell.backend.google.pipelines.common.PipelinesApiAsyncBackendJobExecutionActor(standardParams) with PipelinesApiReferenceFilesMappingOperations { // The original implementation assumes the WomFiles are all WomMaybePopulatedFiles and wraps everything in a PipelinesApiFileInput @@ -34,31 +42,45 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe override protected def pipelinesApiInputsFromWomFiles(inputName: String, remotePathArray: Seq[WomFile], localPathArray: Seq[WomFile], - jobDescriptor: BackendJobDescriptor): Iterable[PipelinesApiInput] = { + jobDescriptor: BackendJobDescriptor + ): Iterable[PipelinesApiInput] = (remotePathArray zip localPathArray) flatMap { case (remotePath: WomMaybeListedDirectory, localPath) => maybeListedDirectoryToPipelinesParameters(inputName, remotePath, localPath.valueString) case (remotePath: WomUnlistedDirectory, localPath) => - Seq(PipelinesApiDirectoryInput(inputName, getPath(remotePath.valueString).get, DefaultPathBuilder.get(localPath.valueString), workingDisk)) + Seq( + PipelinesApiDirectoryInput(inputName, + getPath(remotePath.valueString).get, + DefaultPathBuilder.get(localPath.valueString), + workingDisk + ) + ) case (remotePath: WomMaybePopulatedFile, localPath) => maybePopulatedFileToPipelinesParameters(inputName, remotePath, localPath.valueString) case (remotePath, localPath) => - Seq(PipelinesApiFileInput(inputName, getPath(remotePath.valueString).get, DefaultPathBuilder.get(localPath.valueString), workingDisk)) + Seq( + PipelinesApiFileInput(inputName, + getPath(remotePath.valueString).get, + DefaultPathBuilder.get(localPath.valueString), + workingDisk + ) + ) } - } // The original implementation recursively finds all non directory files, in V2 we can keep directory as is override protected lazy val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = jobDescriptor.localInputs map { case (key, womFile) => - key -> womFile.collectAsSeq({ + key -> womFile.collectAsSeq { case womFile: WomFile if !inputsToNotLocalize.contains(womFile) => womFile - }) + } } private lazy val gcsTransferLibrary = Source.fromInputStream(Thread.currentThread.getContextClassLoader.getResourceAsStream("gcs_transfer.sh")).mkString - private def gcsLocalizationTransferBundle[T <: PipelinesApiInput](gcsTransferConfiguration: GcsTransferConfiguration)(bucket: String, inputs: NonEmptyList[T]): String = { + private def gcsLocalizationTransferBundle[T <: PipelinesApiInput]( + gcsTransferConfiguration: GcsTransferConfiguration + 
)(bucket: String, inputs: NonEmptyList[T]): String = { val project = inputs.head.cloudPath.asInstanceOf[GcsPath].projectId val maxAttempts = gcsTransferConfiguration.transferAttempts @@ -73,27 +95,29 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe val filesByContainerParentDirectory = filesWithSameNames.groupBy(_.containerPath.parent.toString) // Deduplicate any inputs since parallel localization can't deal with this. - val uniqueFilesByContainerParentDirectory = filesByContainerParentDirectory map { case (p, fs) => p -> fs.toSet } + val uniqueFilesByContainerParentDirectory = filesByContainerParentDirectory map { case (p, fs) => p -> fs.toSet } - val filesWithSameNamesTransferBundles: List[String] = uniqueFilesByContainerParentDirectory.toList map { case (containerParent, filesWithSameParent) => - val arrayIdentifier = s"files_to_localize_" + DigestUtils.md5Hex(bucket + containerParent) - val entries = filesWithSameParent.map(_.cloudPath) mkString("\"", "\"\n| \"", "\"") + val filesWithSameNamesTransferBundles: List[String] = uniqueFilesByContainerParentDirectory.toList map { + case (containerParent, filesWithSameParent) => + val arrayIdentifier = s"files_to_localize_" + DigestUtils.md5Hex(bucket + containerParent) + val entries = filesWithSameParent.map(_.cloudPath) mkString ("\"", "\"\n| \"", "\"") - s""" - |# Localize files from source bucket '$bucket' to container parent directory '$containerParent'. - |$arrayIdentifier=( - | "$project" # project to use if requester pays - | "$maxAttempts" # max transfer attempts - | "${containerParent.ensureSlashed}" # container parent directory - | $entries - |) - | - |localize_files "$${$arrayIdentifier[@]}" + s""" + |# Localize files from source bucket '$bucket' to container parent directory '$containerParent'. + |$arrayIdentifier=( + | "$project" # project to use if requester pays + | "$maxAttempts" # max transfer attempts + | "${containerParent.ensureSlashed}" # container parent directory + | $entries + |) + | + |localize_files "$${$arrayIdentifier[@]}" """.stripMargin } val filesWithDifferentNamesTransferBundles = filesWithDifferentNames map { f => - val arrayIdentifier = s"singleton_file_to_localize_" + DigestUtils.md5Hex(f.cloudPath.pathAsString + f.containerPath.pathAsString) + val arrayIdentifier = + s"singleton_file_to_localize_" + DigestUtils.md5Hex(f.cloudPath.pathAsString + f.containerPath.pathAsString) s""" |# Localize singleton file '${f.cloudPath.pathAsString}' to '${f.containerPath.pathAsString}'. |$arrayIdentifier=( @@ -109,27 +133,31 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe // Only write a transfer bundle for directories if there are directories to be localized. Emptiness isn't a concern // for files since there is always at least the command script to be localized. - val directoryTransferBundle = if (directories.isEmpty) "" else { - val entries = directories flatMap { i => List(i.cloudPath, i.containerPath) } mkString("\"", "\"\n| \"", "\"") + val directoryTransferBundle = + if (directories.isEmpty) "" + else { + val entries = directories flatMap { i => List(i.cloudPath, i.containerPath) } mkString ("\"", "\"\n| \"", "\"") - val arrayIdentifier = s"directories_to_localize_" + DigestUtils.md5Hex(bucket) + val arrayIdentifier = s"directories_to_localize_" + DigestUtils.md5Hex(bucket) - s""" - |# Directories from source bucket '$bucket'. 
- |$arrayIdentifier=( - | "$project" # project to use if requester pays - | "$maxAttempts" # max transfer attempts - | $entries - |) - | - |localize_directories "$${$arrayIdentifier[@]}" + s""" + |# Directories from source bucket '$bucket'. + |$arrayIdentifier=( + | "$project" # project to use if requester pays + | "$maxAttempts" # max transfer attempts + | $entries + |) + | + |localize_directories "$${$arrayIdentifier[@]}" """.stripMargin - } + } (directoryTransferBundle :: (filesWithSameNamesTransferBundles ++ filesWithDifferentNamesTransferBundles)) mkString "\n\n" } - private def gcsDelocalizationTransferBundle[T <: PipelinesApiOutput](transferConfiguration: GcsTransferConfiguration)(bucket: String, outputs: NonEmptyList[T]): String = { + private def gcsDelocalizationTransferBundle[T <: PipelinesApiOutput]( + transferConfiguration: GcsTransferConfiguration + )(bucket: String, outputs: NonEmptyList[T]): String = { val project = outputs.head.cloudPath.asInstanceOf[GcsPath].projectId val maxAttempts = transferConfiguration.transferAttempts @@ -140,14 +168,16 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe case _: PipelinesApiDirectoryOutput => "directory" // a primary directory } - val optional = Option(output) collectFirst { case o: PipelinesApiFileOutput if o.secondary || o.optional => "optional" } getOrElse "required" + val optional = Option(output) collectFirst { + case o: PipelinesApiFileOutput if o.secondary || o.optional => "optional" + } getOrElse "required" val contentType = output.contentType.map(_.toString).getOrElse("") List(kind, output.cloudPath.toString, output.containerPath.toString, optional, contentType) - } mkString("\"", "\"\n| \"", "\"") + } mkString ("\"", "\"\n| \"", "\"") - val parallelCompositeUploadThreshold = jobDescriptor.workflowDescriptor.workflowOptions.getOrElse( - "parallel_composite_upload_threshold", transferConfiguration.parallelCompositeUploadThreshold) + val parallelCompositeUploadThreshold = jobDescriptor.workflowDescriptor.workflowOptions + .getOrElse("parallel_composite_upload_threshold", transferConfiguration.parallelCompositeUploadThreshold) // Use a digest as bucket names can contain characters that are not legal in bash identifiers. val arrayIdentifier = s"delocalize_" + DigestUtils.md5Hex(bucket) @@ -164,27 +194,27 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe """.stripMargin } - private def bracketTransfersWithMessages(activity: String)(transferBody: String): String = { + private def bracketTransfersWithMessages(activity: String)(transferBody: String): String = List( s"timestamped_message '$activity script execution started...'", transferBody, s"timestamped_message '$activity script execution complete.'" ) mkString "\n" - } import mouse.all._ private def generateGcsLocalizationScript(inputs: List[PipelinesApiInput], - referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]] + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { // Generate a mapping of reference inputs to their mounted paths and a section of the localization script to // "faux localize" these reference inputs with symlinks to their locations on mounted reference disks. 
import cromwell.backend.google.pipelines.common.action.ActionUtils.shellEscaped val referenceFilesLocalizationScript = { val symlinkCreationCommandsOpt = referenceInputsToMountedPathsOpt map { referenceInputsToMountedPaths => - referenceInputsToMountedPaths map { - case (input, absolutePathOnRefDisk) => - s"mkdir -p ${shellEscaped(input.containerPath.parent.pathAsString)} && ln -s ${shellEscaped(absolutePathOnRefDisk)} ${shellEscaped(input.containerPath.pathAsString)}" + referenceInputsToMountedPaths map { case (input, absolutePathOnRefDisk) => + s"mkdir -p ${shellEscaped(input.containerPath.parent.pathAsString)} && ln -s ${shellEscaped( + absolutePathOnRefDisk + )} ${shellEscaped(input.containerPath.pathAsString)}" } } @@ -206,9 +236,9 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe } val regularFilesLocalizationScript = { - val regularFiles = referenceInputsToMountedPathsOpt.map(maybeReferenceInputsToMountedPaths => - inputs diff maybeReferenceInputsToMountedPaths.keySet.toList - ).getOrElse(inputs) + val regularFiles = referenceInputsToMountedPathsOpt + .map(maybeReferenceInputsToMountedPaths => inputs diff maybeReferenceInputsToMountedPaths.keySet.toList) + .getOrElse(inputs) if (regularFiles.nonEmpty) { val bundleFunction = (gcsLocalizationTransferBundle(gcsTransferConfiguration) _).tupled generateGcsTransferScript(regularFiles, bundleFunction) @@ -227,45 +257,68 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe combinedLocalizationScript |> bracketTransfersWithMessages("Localization") } - private def generateGcsDelocalizationScript(outputs: List[PipelinesApiOutput])(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + private def generateGcsDelocalizationScript( + outputs: List[PipelinesApiOutput] + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { val bundleFunction = (gcsDelocalizationTransferBundle(gcsTransferConfiguration) _).tupled generateGcsTransferScript(outputs, bundleFunction) |> bracketTransfersWithMessages("Delocalization") } - private def generateGcsTransferScript[T <: PipelinesParameter](items: List[T], bundleFunction: ((String, NonEmptyList[T])) => String): String = { + private def generateGcsTransferScript[T <: PipelinesParameter](items: List[T], + bundleFunction: ((String, NonEmptyList[T])) => String + ): String = { val gcsItems = items collect { case i if i.cloudPath.isInstanceOf[GcsPath] => i } groupParametersByGcsBucket(gcsItems) map bundleFunction mkString "\n" } override protected def uploadGcsTransferLibrary(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, - gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = { - + gcsTransferConfiguration: GcsTransferConfiguration + ): Future[Unit] = asyncIo.writeAsync(cloudPath, gcsTransferLibrary, Seq(CloudStorageOptions.withMimeType("text/plain"))) - } override def uploadGcsLocalizationScript(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, transferLibraryContainerPath: Path, gcsTransferConfiguration: GcsTransferConfiguration, - referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]]): Future[Unit] = { - val content = generateGcsLocalizationScript(createPipelineParameters.inputOutputParameters.fileInputParameters, referenceInputsToMountedPathsOpt)(gcsTransferConfiguration) - asyncIo.writeAsync(cloudPath, s"source '$transferLibraryContainerPath'\n\n" + content, Seq(CloudStorageOptions.withMimeType("text/plain"))) + 
referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]] + ): Future[Unit] = { + val content = generateGcsLocalizationScript(createPipelineParameters.inputOutputParameters.fileInputParameters, + referenceInputsToMountedPathsOpt + )(gcsTransferConfiguration) + asyncIo.writeAsync(cloudPath, + s"source '$transferLibraryContainerPath'\n\n" + content, + Seq(CloudStorageOptions.withMimeType("text/plain")) + ) } override def uploadGcsDelocalizationScript(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, transferLibraryContainerPath: Path, - gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = { - val content = generateGcsDelocalizationScript(createPipelineParameters.inputOutputParameters.fileOutputParameters)(gcsTransferConfiguration) - asyncIo.writeAsync(cloudPath, s"source '$transferLibraryContainerPath'\n\n" + content, Seq(CloudStorageOptions.withMimeType("text/plain"))) + gcsTransferConfiguration: GcsTransferConfiguration + ): Future[Unit] = { + val content = generateGcsDelocalizationScript(createPipelineParameters.inputOutputParameters.fileOutputParameters)( + gcsTransferConfiguration + ) + asyncIo.writeAsync(cloudPath, + s"source '$transferLibraryContainerPath'\n\n" + content, + Seq(CloudStorageOptions.withMimeType("text/plain")) + ) } // Simply create a PipelinesApiDirectoryOutput in v2 instead of globbing - override protected def generateUnlistedDirectoryOutputs(unlistedDirectory: WomUnlistedDirectory, fileEvaluation: FileEvaluation): List[PipelinesApiOutput] = { + override protected def generateUnlistedDirectoryOutputs(unlistedDirectory: WomUnlistedDirectory, + fileEvaluation: FileEvaluation + ): List[PipelinesApiOutput] = { val destination = callRootPath.resolve(unlistedDirectory.value.stripPrefix("/")) val (relpath, disk) = relativePathAndAttachedDisk(unlistedDirectory.value, runtimeAttributes.disks) - val directoryOutput = PipelinesApiDirectoryOutput(makeSafeReferenceName(unlistedDirectory.value), destination, relpath, disk, fileEvaluation.optional, fileEvaluation.secondary) + val directoryOutput = PipelinesApiDirectoryOutput(makeSafeReferenceName(unlistedDirectory.value), + destination, + relpath, + disk, + fileEvaluation.optional, + fileEvaluation.secondary + ) List(directoryOutput) } @@ -282,13 +335,27 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe // We need both the glob directory and the glob list: List( // The glob directory: - PipelinesApiDirectoryOutput(makeSafeReferenceName(globDirectory), gcsGlobDirectoryDestinationPath, DefaultPathBuilder.get(globDirectory), globDirectoryDisk, optional = false, secondary = false), + PipelinesApiDirectoryOutput( + makeSafeReferenceName(globDirectory), + gcsGlobDirectoryDestinationPath, + DefaultPathBuilder.get(globDirectory), + globDirectoryDisk, + optional = false, + secondary = false + ), // The glob list file: - PipelinesApiFileOutput(makeSafeReferenceName(globListFile), gcsGlobListFileDestinationPath, DefaultPathBuilder.get(globListFile), globDirectoryDisk, optional = false, secondary = false) + PipelinesApiFileOutput( + makeSafeReferenceName(globListFile), + gcsGlobListFileDestinationPath, + DefaultPathBuilder.get(globListFile), + globDirectoryDisk, + optional = false, + secondary = false + ) ) } - override def womFileToGcsPath(jesOutputs: Set[PipelinesApiOutput])(womFile: WomFile): WomFile = { + override def womFileToGcsPath(jesOutputs: Set[PipelinesApiOutput])(womFile: WomFile): WomFile = womFile mapFile { path => jesOutputs collectFirst { case 
jesOutput if jesOutput.name == makeSafeReferenceName(path) => @@ -305,65 +372,98 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe case _: ValidFullGcsPath => path /* - * Strip the prefixes in RuntimeOutputMapping.prefixFilters from the path, one at a time. - * For instance - * file:///cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt will progressively become - * - * /cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt - * bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt - * call-A/file.txt - * - * This code is called as part of a path mapper that will be applied to the WOMified cwl.output.json. - * The cwl.output.json when it's being read by Cromwell from the bucket still contains local paths - * (as they were created by the cwl tool). - * In order to keep things working we need to map those local paths to where they were actually delocalized, - * which is determined in cromwell.backend.google.pipelines.v2alpha1.api.Delocalization. - */ - case _ => (callRootPath / - RuntimeOutputMapping + * Strip the prefixes in RuntimeOutputMapping.prefixFilters from the path, one at a time. + * For instance + * file:///cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt will progressively become + * + * /cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt + * bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt + * call-A/file.txt + * + * This code is called as part of a path mapper that will be applied to the WOMified cwl.output.json. + * The cwl.output.json when it's being read by Cromwell from the bucket still contains local paths + * (as they were created by the cwl tool). + * In order to keep things working we need to map those local paths to where they were actually delocalized, + * which is determined in cromwell.backend.google.pipelines.v2alpha1.api.Delocalization. 
+ */ + case _ => + (callRootPath / + RuntimeOutputMapping .prefixFilters(workflowPaths.workflowRoot) - .foldLeft(path)({ - case (newPath, prefix) => newPath.stripPrefix(prefix) - }) - ).pathAsString + .foldLeft(path) { case (newPath, prefix) => + newPath.stripPrefix(prefix) + }).pathAsString } } } - } - private def maybePopulatedFileToPipelinesParameters(inputName: String, maybePopulatedFile: WomMaybePopulatedFile, localPath: String) = { - val secondaryFiles = maybePopulatedFile.secondaryFiles.flatMap({ secondaryFile => - pipelinesApiInputsFromWomFiles(secondaryFile.valueString, List(secondaryFile), List(relativeLocalizationPath(secondaryFile)), jobDescriptor) - }) + private def maybePopulatedFileToPipelinesParameters(inputName: String, + maybePopulatedFile: WomMaybePopulatedFile, + localPath: String + ) = { + val secondaryFiles = maybePopulatedFile.secondaryFiles.flatMap { secondaryFile => + pipelinesApiInputsFromWomFiles(secondaryFile.valueString, + List(secondaryFile), + List(relativeLocalizationPath(secondaryFile)), + jobDescriptor + ) + } - Seq(PipelinesApiFileInput(inputName, getPath(maybePopulatedFile.valueString).get, DefaultPathBuilder.get(localPath), workingDisk)) ++ secondaryFiles + Seq( + PipelinesApiFileInput(inputName, + getPath(maybePopulatedFile.valueString).get, + DefaultPathBuilder.get(localPath), + workingDisk + ) + ) ++ secondaryFiles } - private def maybeListedDirectoryToPipelinesParameters(inputName: String, womMaybeListedDirectory: WomMaybeListedDirectory, localPath: String) = womMaybeListedDirectory match { + private def maybeListedDirectoryToPipelinesParameters(inputName: String, + womMaybeListedDirectory: WomMaybeListedDirectory, + localPath: String + ) = womMaybeListedDirectory match { // If there is a path, simply localize as a directory case WomMaybeListedDirectory(Some(path), _, _, _) => List(PipelinesApiDirectoryInput(inputName, getPath(path).get, DefaultPathBuilder.get(localPath), workingDisk)) // If there is a listing, recurse and call pipelinesApiInputsFromWomFiles on all the listed files case WomMaybeListedDirectory(_, Some(listing), _, _) if listing.nonEmpty => - listing.flatMap({ + listing.flatMap { case womFile: WomFile if isAdHocFile(womFile) => - pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), List(womFile), List(fileName(womFile)), jobDescriptor) + pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), + List(womFile), + List(fileName(womFile)), + jobDescriptor + ) case womFile: WomFile => - pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), List(womFile), List(relativeLocalizationPath(womFile)), jobDescriptor) - }) + pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), + List(womFile), + List(relativeLocalizationPath(womFile)), + jobDescriptor + ) + } case _ => List.empty } - override def generateSingleFileOutputs(womFile: WomSingleFile, fileEvaluation: FileEvaluation): List[PipelinesApiFileOutput] = { + override def generateSingleFileOutputs(womFile: WomSingleFile, + fileEvaluation: FileEvaluation + ): List[PipelinesApiFileOutput] = { val (relpath, disk) = relativePathAndAttachedDisk(womFile.value, runtimeAttributes.disks) // If the file is on a custom mount point, resolve it so that the full mount path will show up in the cloud path // For the default one (cromwell_root), the expectation is that it does not appear - val mountedPath = if (!disk.mountPoint.isSamePathAs(PipelinesApiWorkingDisk.Default.mountPoint)) disk.mountPoint.resolve(relpath) else 
relpath + val mountedPath = + if (!disk.mountPoint.isSamePathAs(PipelinesApiWorkingDisk.Default.mountPoint)) disk.mountPoint.resolve(relpath) + else relpath // Normalize the local path (to get rid of ".." and "."). Also strip any potential leading / so that it gets appended to the call root val normalizedPath = mountedPath.normalize().pathAsString.stripPrefix("/") val destination = callRootPath.resolve(normalizedPath) - val jesFileOutput = PipelinesApiFileOutput(makeSafeReferenceName(womFile.value), destination, relpath, disk, fileEvaluation.optional, fileEvaluation.secondary) + val jesFileOutput = PipelinesApiFileOutput(makeSafeReferenceName(womFile.value), + destination, + relpath, + disk, + fileEvaluation.optional, + fileEvaluation.secondary + ) List(jesFileOutput) } } @@ -381,10 +481,12 @@ object PipelinesApiAsyncBackendJobExecutionActor { // - There must be at least one '/', followed by some content in the file name. // - Or, then, for directories: // - If we got this far, we already have a valid directory path. Allow it to optionally end with a `/` character. - private val gcsFilePathMatcher = "(?s)^gs://([a-zA-Z0-9][^/]+)(/[^/]+)*/[^/]+$".r + private val gcsFilePathMatcher = "(?s)^gs://([a-zA-Z0-9][^/]+)(/[^/]+)*/[^/]+$".r private val gcsDirectoryPathMatcher = "(?s)^gs://([a-zA-Z0-9][^/]+)(/[^/]+)*/?$".r - private [v2alpha1] def groupParametersByGcsBucket[T <: PipelinesParameter](parameters: List[T]): Map[String, NonEmptyList[T]] = { + private[v2alpha1] def groupParametersByGcsBucket[T <: PipelinesParameter]( + parameters: List[T] + ): Map[String, NonEmptyList[T]] = parameters.map { param => def pathTypeString = if (param.isFileParameter) "File" else "Directory" val regexToUse = if (param.isFileParameter) gcsFilePathMatcher else gcsDirectoryPathMatcher @@ -393,8 +495,9 @@ object PipelinesApiAsyncBackendJobExecutionActor { case regexToUse(bucket) => Map(bucket -> NonEmptyList.of(param)) case regexToUse(bucket, _) => Map(bucket -> NonEmptyList.of(param)) case other => - throw new Exception(s"$pathTypeString path '$other' did not match the expected regex: ${regexToUse.pattern.toString}") with NoStackTrace + throw new Exception( + s"$pathTypeString path '$other' did not match the expected regex: ${regexToUse.pattern.toString}" + ) with NoStackTrace } } combineAll - } } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiLifecycleActorFactory.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiLifecycleActorFactory.scala index aad8c49eec8..95d4965d6f6 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiLifecycleActorFactory.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiLifecycleActorFactory.scala @@ -2,17 +2,21 @@ package cromwell.backend.google.pipelines.v2alpha1 import akka.actor.{ActorRef, Props} import cromwell.backend.BackendConfigurationDescriptor -import cromwell.backend.google.pipelines.common.{PipelinesApiBackendLifecycleActorFactory, PipelinesApiBackendSingletonActor, PipelinesApiConfiguration} +import cromwell.backend.google.pipelines.common.{ + PipelinesApiBackendLifecycleActorFactory, + PipelinesApiBackendSingletonActor, + PipelinesApiConfiguration +} import cromwell.backend.google.pipelines.v2alpha1.api.request.RequestHandler import 
cromwell.backend.standard.StandardAsyncExecutionActor class PipelinesApiLifecycleActorFactory(name: String, configurationDescriptor: BackendConfigurationDescriptor) - extends PipelinesApiBackendLifecycleActorFactory(name, configurationDescriptor) { + extends PipelinesApiBackendLifecycleActorFactory(name, configurationDescriptor) { private val genomicsFactory = GenomicsFactory( googleConfig.applicationName, papiAttributes.auths.genomics, - papiAttributes.endpointUrl, + papiAttributes.endpointUrl )(papiAttributes.gcsTransferConfiguration) override protected val jesConfiguration = new PipelinesApiConfiguration(configurationDescriptor, genomicsFactory, googleConfig, papiAttributes) @@ -21,7 +25,7 @@ class PipelinesApiLifecycleActorFactory(name: String, configurationDescriptor: B new RequestHandler( googleConfig.applicationName, papiAttributes.endpointUrl, - papiAttributes.batchRequestTimeoutConfiguration, + papiAttributes.batchRequestTimeoutConfiguration ) PipelinesApiBackendSingletonActor.props( jesConfiguration.papiAttributes.qps, diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala index 19774a4cc41..dc21f3ea11c 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala @@ -21,14 +21,14 @@ import simulacrum.typeclass trait PipelinesParameterConversions { implicit val fileInputToParameter: ToParameter[PipelinesApiFileInput] = new ToParameter[PipelinesApiFileInput] { - override def toActions(fileInput: PipelinesApiFileInput, mounts: List[Mount]) - (implicit retryPolicy: GcsTransferConfiguration): List[Action] = { + override def toActions(fileInput: PipelinesApiFileInput, mounts: List[Mount])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Action] = { lazy val config = ConfigFactory.load val labels = ActionBuilder.parameterLabels(fileInput) fileInput.cloudPath match { case drsPath: DrsPath => - import scala.jdk.CollectionConverters._ val drsFileSystemProvider = drsPath.drsPath.getFileSystem.provider.asInstanceOf[DrsCloudNioFileSystemProvider] @@ -49,13 +49,12 @@ trait PipelinesParameterConversions { case sraPath: SraPath => val sraConfig = config.getConfig("filesystems.sra") - def getString(key: String): Option[String] = { + def getString(key: String): Option[String] = if (sraConfig.hasPath(key)) { Some(sraConfig.getString(key)) } else { None } - } val image = getString("docker-image") getOrElse "fusera/fusera:alpine" val (createNgc, ngcArgs) = getString("ngc") match { @@ -83,31 +82,33 @@ trait PipelinesParameterConversions { implicit val directoryInputToParameter: ToParameter[PipelinesApiDirectoryInput] = new ToParameter[PipelinesApiDirectoryInput] { - override def toActions(directoryInput: PipelinesApiDirectoryInput, mounts: List[Mount]) - (implicit retryPolicy: GcsTransferConfiguration): List[Action] = { - directoryInput.cloudPath match { - case _: GcsPath => Nil // GCS paths will be localized with a separate localization script. 
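// Illustrative aside (standalone Scala sketch, not part of this patch): the ToParameter
// instances in this file turn each input into zero or more Actions, and GCS directories
// deliberately contribute no per-input Action because a consolidated transfer script
// localizes them in bulk. A sketch of that dispatch shape, with invented stand-in types
// in place of the real Genomics model classes:
object LocalizationDispatchSketch {
  final case class Action(description: String)

  sealed trait CloudPath { def uri: String }
  final case class GcsPath(uri: String) extends CloudPath
  final case class OtherPath(uri: String) extends CloudPath

  final case class DirectoryInput(name: String, cloudPath: CloudPath, containerPath: String)

  def toActions(input: DirectoryInput): List[Action] = input.cloudPath match {
    case _: GcsPath => Nil // localized in bulk by the separate GCS localization script
    case other =>
      List(
        Action(s"Localizing input ${other.uri} -> ${input.containerPath}"), // describe action
        Action(s"copy ${other.uri} into the container")                     // localization action
      )
  }
}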
- case _ => - val labels = ActionBuilder.parameterLabels(directoryInput) - val describeAction = ActionBuilder.describeParameter(directoryInput, labels) - val localizationAction = cloudSdkShellAction( - localizeDirectory(directoryInput.cloudPath, directoryInput.containerPath) - )(mounts = mounts, labels = labels) - List(describeAction, localizationAction) - } + override def toActions(directoryInput: PipelinesApiDirectoryInput, mounts: List[Mount])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Action] = + directoryInput.cloudPath match { + case _: GcsPath => Nil // GCS paths will be localized with a separate localization script. + case _ => + val labels = ActionBuilder.parameterLabels(directoryInput) + val describeAction = ActionBuilder.describeParameter(directoryInput, labels) + val localizationAction = cloudSdkShellAction( + localizeDirectory(directoryInput.cloudPath, directoryInput.containerPath) + )(mounts = mounts, labels = labels) + List(describeAction, localizationAction) + } } - } implicit val fileOutputToParameter: ToParameter[PipelinesApiFileOutput] = new ToParameter[PipelinesApiFileOutput] { - override def toActions(fileOutput: PipelinesApiFileOutput, mounts: List[Mount]) - (implicit retryPolicy: GcsTransferConfiguration): List[Action] = { + override def toActions(fileOutput: PipelinesApiFileOutput, mounts: List[Mount])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Action] = { // If the output is a "secondary file", it actually could be a directory but we won't know before runtime. // The fileOrDirectory method will generate a command that can cover both cases - lazy val copy = if (fileOutput.secondary) - delocalizeFileOrDirectory(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) - else - delocalizeFile(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) + lazy val copy = + if (fileOutput.secondary) + delocalizeFileOrDirectory(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) + else + delocalizeFile(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) lazy val copyOnlyIfExists = ifExist(fileOutput.containerPath) { copy @@ -150,32 +151,34 @@ trait PipelinesParameterConversions { implicit val directoryOutputToParameter: ToParameter[PipelinesApiDirectoryOutput] = new ToParameter[PipelinesApiDirectoryOutput] { - override def toActions(directoryOutput: PipelinesApiDirectoryOutput, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = { - directoryOutput.cloudPath match { - case _: GcsPath => Nil // GCS paths will be delocalized with a separate delocalization script. - case _ => - val labels = ActionBuilder.parameterLabels(directoryOutput) - val describeAction = ActionBuilder.describeParameter(directoryOutput, labels) - val delocalizationAction = cloudSdkShellAction( - delocalizeDirectory(directoryOutput.containerPath, directoryOutput.cloudPath, None) - )(mounts = mounts, flags = List(ActionFlag.AlwaysRun), labels = labels) - List(describeAction, delocalizationAction) - } + override def toActions(directoryOutput: PipelinesApiDirectoryOutput, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = + directoryOutput.cloudPath match { + case _: GcsPath => Nil // GCS paths will be delocalized with a separate delocalization script. 
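// Illustrative aside (standalone Scala sketch): the delocalization Actions built in this
// match carry ActionFlag.AlwaysRun, so outputs are still copied out of the VM when the
// user command fails. A toy model of that scheduling rule, with invented names — the real
// semantics belong to the Pipelines API executor, not this code:
object AlwaysRunSketch {
  final case class Step(name: String, alwaysRun: Boolean = false)

  // After the first failure, only alwaysRun steps still execute.
  def executed(steps: List[Step], failed: Option[String]): List[String] = {
    val failIdx = failed.map(n => steps.indexWhere(_.name == n)).filter(_ >= 0)
    steps.zipWithIndex.collect {
      case (s, i) if failIdx.forall(i <= _) || s.alwaysRun => s.name
    }
  }

  def main(args: Array[String]): Unit =
    // -> List(localize, user-command, delocalize): the output copy survives the failure
    println(
      executed(
        List(Step("localize"), Step("user-command"), Step("delocalize", alwaysRun = true)),
        failed = Some("user-command")
      )
    )
}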
+ case _ => + val labels = ActionBuilder.parameterLabels(directoryOutput) + val describeAction = ActionBuilder.describeParameter(directoryOutput, labels) + val delocalizationAction = cloudSdkShellAction( + delocalizeDirectory(directoryOutput.containerPath, directoryOutput.cloudPath, None) + )(mounts = mounts, flags = List(ActionFlag.AlwaysRun), labels = labels) + List(describeAction, delocalizationAction) + } } - } implicit val inputToParameter: ToParameter[PipelinesApiInput] = new ToParameter[PipelinesApiInput] { - override def toActions(p: PipelinesApiInput, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = p match { + override def toActions(p: PipelinesApiInput, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = p match { case fileInput: PipelinesApiFileInput => fileInputToParameter.toActions(fileInput, mounts) case directoryInput: PipelinesApiDirectoryInput => directoryInputToParameter.toActions(directoryInput, mounts) } } implicit val outputToParameter: ToParameter[PipelinesApiOutput] = new ToParameter[PipelinesApiOutput] { - override def toActions(p: PipelinesApiOutput, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = p match { + override def toActions(p: PipelinesApiOutput, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = p match { case fileOutput: PipelinesApiFileOutput => fileOutputToParameter.toActions(fileOutput, mounts) case directoryOutput: PipelinesApiDirectoryOutput => directoryOutputToParameter.toActions(directoryOutput, mounts) } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesUtilityConversions.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesUtilityConversions.scala index 41ed815eb73..be672df5df2 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesUtilityConversions.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesUtilityConversions.scala @@ -12,14 +12,15 @@ import scala.language.postfixOps import scala.util.Try trait PipelinesUtilityConversions { - def toAccelerator(gpuResource: GpuResource): Accelerator = new Accelerator().setCount(gpuResource.gpuCount.value.toLong).setType(gpuResource.gpuType.toString) + def toAccelerator(gpuResource: GpuResource): Accelerator = + new Accelerator().setCount(gpuResource.gpuCount.value.toLong).setType(gpuResource.gpuType.toString) def toMachineType(jobLogger: JobLogger)(attributes: PipelinesApiRuntimeAttributes): String = MachineConstraints.machineType( memory = attributes.memory, cpu = attributes.cpu, cpuPlatformOption = attributes.cpuPlatform, googleLegacyMachineSelection = attributes.googleLegacyMachineSelection, - jobLogger = jobLogger, + jobLogger = jobLogger ) def toMounts(disks: Seq[PipelinesApiAttachedDisk]): List[Mount] = disks.map(toMount).toList def toDisks(disks: Seq[PipelinesApiAttachedDisk]): List[Disk] = disks.map(toDisk).toList @@ -58,7 +59,9 @@ trait PipelinesUtilityConversions { // There are both "Started pulling" and "Stopped pulling" events but these are confusing for metadata, especially on the // timing diagram. Create a single "Pulling " grouping to absorb these events. 
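// Illustrative aside (standalone Scala sketch): the comment above describes collapsing the
// paired "Started pulling X" / "Stopped pulling X" events into one "Pulling X" label, and
// the definition that follows implements it against Genomics Event objects. The same
// transformation, self-contained for quick experimentation:
object PullGroupingSketch {
  def groupingFromPull(description: String): Option[String] =
    List("Started", "Stopped").flatMap { k =>
      Option(description).collect {
        case d if d.startsWith(s"$k pulling") => "Pulling" + d.substring(s"$k pulling".length)
      }
    }.headOption

  def main(args: Array[String]): Unit = {
    println(groupingFromPull("Started pulling ubuntu:latest")) // Some(Pulling ubuntu:latest)
    println(groupingFromPull("Stopped pulling ubuntu:latest")) // Some(Pulling ubuntu:latest) — same group
    println(groupingFromPull("Worker released"))               // None
  }
}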
def groupingFromPull: Option[String] = List("Started", "Stopped") flatMap { k => - Option(event.getDescription) collect { case d if d.startsWith(s"$k pulling") => "Pulling" + d.substring(s"$k pulling".length)} + Option(event.getDescription) collect { + case d if d.startsWith(s"$k pulling") => "Pulling" + d.substring(s"$k pulling".length) + } } headOption ExecutionEvent( diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala index 0d38d9f2398..82787262cf5 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala @@ -36,7 +36,7 @@ object ActionBuilder { * Useful for any externally provided images that _might_ have entrypoints already set. This is a workaround for * the issue detailed in BA-6406. See underlying google issue in that ticket for more info. */ - def withEntrypointCommand(command: String*): Action = { + def withEntrypointCommand(command: String*): Action = action .setEntrypoint(command.headOption.orNull) .setCommands( @@ -45,7 +45,6 @@ object ActionBuilder { .map(_.asJava) .orNull ) - } def withFlags(flags: List[ActionFlag]): Action = action.setFlags(flags |> javaFlags) def withMounts(mounts: List[Mount]): Action = action.setMounts(mounts.asJava) @@ -55,7 +54,6 @@ object ActionBuilder { case _ => action } - def scalaLabels: Map[String, String] = { val list = for { keyValueList <- Option(action.getLabels).toList @@ -70,8 +68,9 @@ object ActionBuilder { def withImage(image: String): Action = new Action() .setImageUri(image) - def monitoringImageScriptAction(cloudPath: Path, containerPath: Path, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { + def monitoringImageScriptAction(cloudPath: Path, containerPath: Path, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): Action = { val command = ActionCommands.localizeFile(cloudPath, containerPath) val labels = Map(Key.Tag -> Value.Localization) ActionBuilder.cloudSdkShellAction(command)(mounts = mounts, labels = labels) @@ -80,8 +79,8 @@ object ActionBuilder { def backgroundAction(image: String, command: List[String], environment: Map[String, String], - mounts: List[Mount], - ): Action = { + mounts: List[Mount] + ): Action = new Action() .setImageUri(image) .withEntrypointCommand(command: _*) @@ -90,7 +89,6 @@ object ActionBuilder { .setEnvironment(environment.asJava) .withLabels(Map(Key.Tag -> Value.Monitoring)) .setPidNamespace(backgroundActionPidNamespace) - } def terminateBackgroundActionsAction(): Action = cloudSdkShellAction(terminateAllBackgroundActionsCommand)( @@ -109,13 +107,16 @@ object ActionBuilder { mounts: List[Mount], jobShell: String, privateDockerKeyAndToken: Option[CreatePipelineDockerKeyAndToken], - fuseEnabled: Boolean): Action = { + fuseEnabled: Boolean + ): Action = { val dockerImageIdentifier = DockerImageIdentifier.fromString(docker) val secret = for { imageId <- dockerImageIdentifier.toOption - if DockerHub.isValidDockerHubHost(imageId.host) // This token only works for Docker Hub and not other repositories. 
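// Illustrative aside (standalone Scala sketch): the for-comprehension being reformatted
// here yields a docker credentials Secret only when the image is hosted on Docker Hub,
// since, per the comment, the encrypted token is useless against other registries. A
// minimal sketch of that Option-gating, with simplified stand-ins for
// DockerImageIdentifier and the Genomics Secret:
object DockerSecretSketch {
  final case class ImageId(host: Option[String])
  final case class KeyAndToken(key: String, encryptedToken: String)
  final case class Secret(keyName: String, cipherText: String)

  // Illustrative predicate only; the real check lives in DockerHub.isValidDockerHubHost.
  def isDockerHubHost(host: Option[String]): Boolean =
    host.forall(_.endsWith("docker.io")) // no explicit host defaults to Docker Hub

  def secretFor(imageId: Option[ImageId], creds: Option[KeyAndToken]): Option[Secret] =
    for {
      id <- imageId
      if isDockerHubHost(id.host) // otherwise: no Secret attached to the action
      kt <- creds
    } yield Secret(kt.key, kt.encryptedToken)
}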
+ if DockerHub.isValidDockerHubHost( + imageId.host + ) // This token only works for Docker Hub and not other repositories. keyAndToken <- privateDockerKeyAndToken s = new Secret().setKeyName(keyAndToken.key).setCipherText(keyAndToken.encryptedToken) } yield s @@ -130,22 +131,22 @@ object ActionBuilder { .setFlags((if (fuseEnabled) List(ActionFlag.EnableFuse.toString) else List.empty).asJava) } - def checkForMemoryRetryAction(retryLookupKeys: List[String], mounts: List[Mount]): Action = { + def checkForMemoryRetryAction(retryLookupKeys: List[String], mounts: List[Mount]): Action = cloudSdkShellAction(ActionCommands.checkIfStderrContainsRetryKeys(retryLookupKeys))( mounts = mounts, - labels = Map(Key.Tag -> Value.RetryWithMoreMemory), + labels = Map(Key.Tag -> Value.RetryWithMoreMemory) ).withFlags(List(ActionFlag.AlwaysRun)) - } def cloudSdkShellAction(shellCommand: String)(mounts: List[Mount] = List.empty, flags: List[ActionFlag] = List.empty, labels: Map[String, String] = Map.empty, - timeout: Duration = Duration.Inf): Action = + timeout: Duration = Duration.Inf + ): Action = cloudSdkAction .withEntrypointCommand( "/bin/sh", "-c", - if (shellCommand.contains("\n")) shellCommand |> ActionCommands.multiLineCommand else shellCommand, + if (shellCommand.contains("\n")) shellCommand |> ActionCommands.multiLineCommand else shellCommand ) .withFlags(flags) .withMounts(mounts) @@ -158,7 +159,7 @@ object ActionBuilder { * @param pipelinesParameter Input or output parameter to label. * @return The labels. */ - def parameterLabels(pipelinesParameter: PipelinesParameter): Map[String, String] = { + def parameterLabels(pipelinesParameter: PipelinesParameter): Map[String, String] = pipelinesParameter match { case _: PipelinesApiInput => Map( @@ -171,7 +172,6 @@ object ActionBuilder { Key.OutputName -> pipelinesParameter.name ) } - } /** * Surrounds the list of Actions with a pair of starting and done Actions. @@ -182,8 +182,9 @@ object ActionBuilder { * @param actions The list of Actions to surround. * @return The starting Action, the passed in list, and then a done Action. */ - def annotateTimestampedActions(description: String, loggingLabelValue: String, isAlwaysRun: Boolean = false) - (actions: List[Action]): List[Action] = { + def annotateTimestampedActions(description: String, loggingLabelValue: String, isAlwaysRun: Boolean = false)( + actions: List[Action] + ): List[Action] = { val flags = if (isAlwaysRun) List(ActionFlag.AlwaysRun) else List() val labels = Map(Key.Logging -> loggingLabelValue) val starting = List(logTimestampedAction(s"Starting $description.", flags, labels)) @@ -192,32 +193,29 @@ object ActionBuilder { } /** Creates an Action that describes the parameter localization or delocalization. 
*/ - def describeParameter(pipelinesParameter: PipelinesParameter, - actionLabels: Map[String, String]): Action = { + def describeParameter(pipelinesParameter: PipelinesParameter, actionLabels: Map[String, String]): Action = pipelinesParameter match { case _: PipelinesApiInput => val message = "Localizing input %s -> %s".format( shellEscaped(pipelinesParameter.cloudPath), - shellEscaped(pipelinesParameter.containerPath), + shellEscaped(pipelinesParameter.containerPath) ) ActionBuilder.logTimestampedAction(message, List(), actionLabels) case _: PipelinesApiOutput => val message = "Delocalizing output %s -> %s".format( shellEscaped(pipelinesParameter.containerPath), - shellEscaped(pipelinesParameter.cloudPath), + shellEscaped(pipelinesParameter.cloudPath) ) ActionBuilder.logTimestampedAction(message, List(ActionFlag.AlwaysRun), actionLabels) } - } /** Creates an Action that logs the docker command for the passed in action. */ - def describeDocker(description: String, action: Action): Action = { + def describeDocker(description: String, action: Action): Action = ActionBuilder.logTimestampedAction( s"Running $description: ${ActionBuilder.toDockerRun(action)}", Nil, action.scalaLabels ) - } /** * Creates an Action that logs the time as UTC plus prints the message. The original actionLabels will also be @@ -233,7 +231,8 @@ object ActionBuilder { */ private def logTimestampedAction(message: String, actionFlags: List[ActionFlag], - actionLabels: Map[String, String]): Action = { + actionLabels: Map[String, String] + ): Action = // Uses the cloudSdk image as that image will be used for other operations as well. cloudSdkShellAction( timestampedMessage(message) @@ -245,7 +244,6 @@ object ActionBuilder { }, timeout = 300.seconds ) - } /** Converts an Action to a `docker run ...` command runnable in the shell. 
*/ private[api] def toDockerRun(action: Action): String = { @@ -266,7 +264,8 @@ object ActionBuilder { val environmentArgs: String = Option(action.getEnvironment) match { case Some(environment) => environment.asScala map { - case (key, value) if Option(key).isDefined && Option(value).isDefined => s" -e ${shellEscaped(s"$key:$value")}" + case (key, value) if Option(key).isDefined && Option(value).isDefined => + s" -e ${shellEscaped(s"$key:$value")}" case (key, _) if Option(key).isDefined => s" -e ${shellEscaped(key)}" case _ => "" } mkString "" @@ -319,15 +318,15 @@ object ActionBuilder { } Array("docker run", - nameArg, - mountArgs, - environmentArgs, - pidNamespaceArg, - flagsArgs, - portMappingArgs, - entrypointArg, - imageArg, - commandArgs, + nameArg, + mountArgs, + environmentArgs, + pidNamespaceArg, + flagsArgs, + portMappingArgs, + entrypointArg, + imageArg, + commandArgs ).mkString } } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/CheckpointingAction.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/CheckpointingAction.scala index c723fca7800..1688863dc48 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/CheckpointingAction.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/CheckpointingAction.scala @@ -6,19 +6,21 @@ import cromwell.backend.google.pipelines.common.action.ActionUtils import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters trait CheckpointingAction { - def checkpointingSetupActions(createPipelineParameters: CreatePipelineParameters, - mounts: List[Mount] - ): List[Action] = + def checkpointingSetupActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]): List[Action] = createPipelineParameters.runtimeAttributes.checkpointFilename map { checkpointFilename => val checkpointingImage = ActionUtils.CloudSdkImage - val checkpointingCommand = createPipelineParameters.checkpointingConfiguration.checkpointingCommand(checkpointFilename, ActionCommands.multiLineBinBashCommand) + val checkpointingCommand = + createPipelineParameters.checkpointingConfiguration.checkpointingCommand(checkpointFilename, + ActionCommands.multiLineBinBashCommand + ) val checkpointingEnvironment = Map.empty[String, String] // Initial sync from cloud: val initialCheckpointSyncAction = ActionBuilder.cloudSdkShellAction( createPipelineParameters.checkpointingConfiguration.localizePreviousCheckpointCommand(checkpointFilename) )(mounts = mounts) - val describeInitialCheckpointingSyncAction = ActionBuilder.describeDocker("initial checkpointing sync", initialCheckpointSyncAction) + val describeInitialCheckpointingSyncAction = + ActionBuilder.describeDocker("initial checkpointing sync", initialCheckpointSyncAction) // Background upload action: val backgroundCheckpointingAction = ActionBuilder.backgroundAction( @@ -27,19 +29,28 @@ trait CheckpointingAction { environment = checkpointingEnvironment, mounts = mounts ) - val describeBackgroundCheckpointingAction = ActionBuilder.describeDocker("begin checkpointing background action", backgroundCheckpointingAction) + val describeBackgroundCheckpointingAction = + ActionBuilder.describeDocker("begin checkpointing background action", backgroundCheckpointingAction) - List(describeInitialCheckpointingSyncAction, 
initialCheckpointSyncAction, describeBackgroundCheckpointingAction, backgroundCheckpointingAction) - } getOrElse(Nil) + List(describeInitialCheckpointingSyncAction, + initialCheckpointSyncAction, + describeBackgroundCheckpointingAction, + backgroundCheckpointingAction + ) + } getOrElse Nil def checkpointingShutdownActions(createPipelineParameters: CreatePipelineParameters): List[Action] = createPipelineParameters.runtimeAttributes.checkpointFilename map { checkpointFilename => val terminationAction = ActionBuilder.terminateBackgroundActionsAction() val describeTerminationAction = ActionBuilder.describeDocker("terminate checkpointing action", terminationAction) - val deleteCheckpointAction = ActionBuilder.gcsFileDeletionAction(createPipelineParameters.checkpointingConfiguration.checkpointFileCloud(checkpointFilename)) - val deleteTmpCheckpointAction = ActionBuilder.gcsFileDeletionAction(createPipelineParameters.checkpointingConfiguration.tmpCheckpointFileCloud(checkpointFilename)) + val deleteCheckpointAction = ActionBuilder.gcsFileDeletionAction( + createPipelineParameters.checkpointingConfiguration.checkpointFileCloud(checkpointFilename) + ) + val deleteTmpCheckpointAction = ActionBuilder.gcsFileDeletionAction( + createPipelineParameters.checkpointingConfiguration.tmpCheckpointFileCloud(checkpointFilename) + ) List(describeTerminationAction, terminationAction, deleteCheckpointAction, deleteTmpCheckpointAction) - } getOrElse(Nil) + } getOrElse Nil } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala index efb3b017f89..41a5dbbc81c 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala @@ -21,42 +21,54 @@ import wom.runtime.WomOutputRuntimeExtractor import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ -object Delocalization { -} +object Delocalization {} trait Delocalization { - private def delocalizeLogsAction(gcsLogPath: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration) = { + private def delocalizeLogsAction(gcsLogPath: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration) = cloudSdkShellAction( - delocalizeDirectory(DefaultPathBuilder.build(logsRoot).get, gcsLogPath, PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType) + delocalizeDirectory(DefaultPathBuilder.build(logsRoot).get, + gcsLogPath, + PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType + ) )(flags = List(ActionFlag.AlwaysRun), labels = Map(Key.Tag -> Value.Delocalization)) - } // Used for the final copy of the logs to make sure we have the most up to date version before terminating the job - private def copyAggregatedLogToLegacyPath(gcsLegacyLogPath: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { + private def copyAggregatedLogToLegacyPath( + gcsLegacyLogPath: Path + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = cloudSdkShellAction( - delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, gcsLegacyLogPath, PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType) + delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, + gcsLegacyLogPath, + 
PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType + ) )(flags = List(ActionFlag.AlwaysRun), labels = Map(Key.Tag -> Value.Delocalization)) - } // Periodically copies the logs out to GCS - private def copyAggregatedLogToLegacyPathPeriodic(gcsLegacyLogPath: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { + private def copyAggregatedLogToLegacyPathPeriodic( + gcsLegacyLogPath: Path + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = cloudSdkShellAction( - every(30.seconds) { delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, gcsLegacyLogPath, PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType) } + every(30.seconds) { + delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, + gcsLegacyLogPath, + PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType + ) + } )(flags = List(ActionFlag.RunInBackground), labels = Map(Key.Tag -> Value.Background)) - } private def runtimeOutputExtractorAction(containerCallRoot: String, - outputFile: String, - mounts: List[Mount], - womOutputRuntimeExtractor: WomOutputRuntimeExtractor): Action = { + outputFile: String, + mounts: List[Mount], + womOutputRuntimeExtractor: WomOutputRuntimeExtractor + ): Action = { val commands = List( "-c", // Create the directory where the fofn will be written s"mkdir -p $$(dirname $outputFile) && " + - s"cd $containerCallRoot && " + - """echo "Runtime output files to be delocalized:" && """ + - s"${womOutputRuntimeExtractor.command} | tee $outputFile" + s"cd $containerCallRoot && " + + """echo "Runtime output files to be delocalized:" && """ + + s"${womOutputRuntimeExtractor.command} | tee $outputFile" ) ActionBuilder @@ -69,7 +81,9 @@ trait Delocalization { .withLabels(Map(Key.Tag -> Value.Delocalization)) } - private def delocalizeRuntimeOutputsScript(fofnPath: String, workflowRoot: Path, cloudCallRoot: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration) = { + private def delocalizeRuntimeOutputsScript(fofnPath: String, workflowRoot: Path, cloudCallRoot: Path)(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ) = { val gsutilCommand: String => String = { flag => s"""rm -f $$HOME/.config/gcloud/gce && gsutil -m $flag cp -r $$line "${cloudCallRoot.pathAsString.ensureSlashed}$$gcs_path"""" } @@ -100,13 +114,21 @@ trait Delocalization { |fi""".stripMargin } - private def delocalizeRuntimeOutputsAction(cloudCallRoot: Path, inputFile: String, workflowRoot: Path, mounts: List[Mount])(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { + private def delocalizeRuntimeOutputsAction(cloudCallRoot: Path, + inputFile: String, + workflowRoot: Path, + mounts: List[Mount] + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { val command = multiLineCommand(delocalizeRuntimeOutputsScript(inputFile, workflowRoot, cloudCallRoot)) - ActionBuilder.cloudSdkShellAction(command)(mounts, flags = List(ActionFlag.DisableImagePrefetch), labels = Map(Key.Tag -> Value.Delocalization)) + ActionBuilder.cloudSdkShellAction(command)(mounts, + flags = List(ActionFlag.DisableImagePrefetch), + labels = Map(Key.Tag -> Value.Delocalization) + ) } - def deLocalizeActions(createPipelineParameters: CreatePipelineParameters, - mounts: List[Mount])(implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = { + def deLocalizeActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = 
{ val cloudCallRoot = createPipelineParameters.cloudCallRoot val callExecutionContainerRoot = createPipelineParameters.commandScriptContainerPath.parent @@ -117,21 +139,31 @@ trait Delocalization { * Ideally temporaryFofnForRuntimeOutputFiles should be somewhere else than the execution directory (we could mount anther directory) * However because it runs after everything else there's no risk of polluting the task's results and the random ID ensures we don't override anything */ - val temporaryFofnDirectoryForRuntimeOutputFiles = callExecutionContainerRoot.pathAsString.ensureSlashed + UUID.randomUUID().toString.split("-")(0) + val temporaryFofnDirectoryForRuntimeOutputFiles = + callExecutionContainerRoot.pathAsString.ensureSlashed + UUID.randomUUID().toString.split("-")(0) val temporaryFofnForRuntimeOutputFiles = temporaryFofnDirectoryForRuntimeOutputFiles + "/runtime_output_files.txt" val runtimeExtractionActions = createPipelineParameters.womOutputRuntimeExtractor.toList flatMap { extractor => - List ( - runtimeOutputExtractorAction(callExecutionContainerRoot.pathAsString, temporaryFofnForRuntimeOutputFiles, mounts, extractor), - delocalizeRuntimeOutputsAction(cloudCallRoot, temporaryFofnForRuntimeOutputFiles, createPipelineParameters.cloudWorkflowRoot, mounts) + List( + runtimeOutputExtractorAction(callExecutionContainerRoot.pathAsString, + temporaryFofnForRuntimeOutputFiles, + mounts, + extractor + ), + delocalizeRuntimeOutputsAction(cloudCallRoot, + temporaryFofnForRuntimeOutputFiles, + createPipelineParameters.cloudWorkflowRoot, + mounts + ) ) } - val gcsDelocalizationContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) + val gcsDelocalizationContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) val delocalizationLabel = Map(Key.Tag -> Value.Delocalization) - val runGcsDelocalizationScript: Action = cloudSdkShellAction( - s"/bin/bash $gcsDelocalizationContainerPath")(mounts = mounts, labels = delocalizationLabel) + val runGcsDelocalizationScript: Action = + cloudSdkShellAction(s"/bin/bash $gcsDelocalizationContainerPath")(mounts = mounts, labels = delocalizationLabel) ActionBuilder.annotateTimestampedActions("delocalization", Value.Delocalization)( runGcsDelocalizationScript :: diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala index f2883b6b285..57d9f7b60ce 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala @@ -25,17 +25,19 @@ import scala.util.{Failure, Success, Try} * This uses reflection and is therefore not great performance-wise. * However it is only used once, when the job completes, which should limit the performance hit. 
*/ -private [api] object Deserialization { - def findEvent[T <: GenericJson](events: List[Event], - filter: T => Boolean = Function.const(true)(_: T)) - (implicit tag: ClassTag[T]): Option[RequestContextReader[Option[T]]] = - events.to(LazyList) +private[api] object Deserialization { + def findEvent[T <: GenericJson](events: List[Event], filter: T => Boolean = Function.const(true)(_: T))(implicit + tag: ClassTag[T] + ): Option[RequestContextReader[Option[T]]] = + events + .to(LazyList) .map(_.details(tag)) - .collectFirst({ + .collectFirst { case Some(event) if event.map(filter).getOrElse(false) => event.toErrorOr.fallBack - }) + } implicit class EventDeserialization(val event: Event) extends AnyVal { + /** * Attempts to deserialize the details map to T * Returns None if the details are not of type T @@ -62,17 +64,19 @@ private [api] object Deserialization { } implicit class OperationDeserialization(val operation: Operation) extends AnyVal { + /** * Deserializes the events to com.google.api.services.genomics.v2alpha1.model.Event */ def events: ErrorOr[List[Event]] = { val eventsErrorOrOption = for { eventsMap <- metadata.get("events") - eventsErrorOr <- Option(eventsMap - .asInstanceOf[JArrayList[JMap[String, Object]]] - .asScala - .toList - .traverse[ErrorOr, Event](deserializeTo[Event](_).toErrorOr) + eventsErrorOr <- Option( + eventsMap + .asInstanceOf[JArrayList[JMap[String, Object]]] + .asScala + .toList + .traverse[ErrorOr, Event](deserializeTo[Event](_).toErrorOr) ) } yield eventsErrorOr eventsErrorOrOption.getOrElse(Nil.validNel) @@ -81,11 +85,10 @@ private [api] object Deserialization { /** * Deserializes the pipeline to com.google.api.services.genomics.v2alpha1.model.Pipeline */ - def pipeline: Option[Try[Pipeline]] = { + def pipeline: Option[Try[Pipeline]] = metadata .get("pipeline") .map(_.asInstanceOf[JMap[String, Object]] |> deserializeTo[Pipeline]) - } // If there's a WorkerAssignedEvent it means a VM was created - which we consider as the job started // Note that the VM might still be booting @@ -103,19 +106,23 @@ private [api] object Deserialization { /** * Deserializes a java.util.Map[String, Object] to an instance of T */ - private [api] def deserializeTo[T <: GenericJson](attributes: JMap[String, Object])(implicit tag: ClassTag[T]): Try[T] = Try { + private[api] def deserializeTo[T <: GenericJson]( + attributes: JMap[String, Object] + )(implicit tag: ClassTag[T]): Try[T] = Try { // Create a new instance, because it's a GenericJson there's always a 0-arg constructor val newT = tag.runtimeClass.asInstanceOf[Class[T]].getConstructor().newInstance() // Optionally returns the field with the given name def field(name: String) = Option(newT.getClassInfo.getField(name)) - def handleMap(key: String, value: Object) = { + def handleMap(key: String, value: Object) = (field(key), value) match { // If the serialized value is a list, we need to check if its elements need to be deserialized case (Some(f), list: java.util.List[java.util.Map[String, Object]] @unchecked) => // Try to get the generic type of the declared field (the field should have a list type since the value is a list) - Try(f.getGenericType.asInstanceOf[ParameterizedType].getActualTypeArguments.toList.head.asInstanceOf[Class[_]]) match { + Try( + f.getGenericType.asInstanceOf[ParameterizedType].getActualTypeArguments.toList.head.asInstanceOf[Class[_]] + ) match { // If we can get it and its a GenericJson, it means we need to deserialize the elements to their proper type case Success(genericListType) if 
classOf[GenericJson].isAssignableFrom(genericListType) => // The get throws at the first error and hence doesn't aggregate the errors but it seems @@ -134,7 +141,8 @@ private [api] object Deserialization { // If it can't be assigned and the value is a map, it is very likely that the field "key" of T is of some type U // but has been deserialized to a Map[String, Object]. In this case we retrieve the type U from the field and recurse // to deserialize properly - case (Some(f), map: java.util.Map[String, Object] @unchecked) if classOf[GenericJson].isAssignableFrom(f.getType) => + case (Some(f), map: java.util.Map[String, Object] @unchecked) + if classOf[GenericJson].isAssignableFrom(f.getType) => // This whole function is wrapped in a try so just .get to throw val deserializedInnerAttribute = deserializeTo(map)(ClassTag[GenericJson](f.getType)).get newT.set(key, deserializedInnerAttribute) @@ -151,7 +159,6 @@ private [api] object Deserialization { // and losing properly deserialized attributes case _ => } - } // Go over the map entries and use the "set" method of GenericJson to set the attributes. Option(attributes).map(_.asScala).getOrElse(Map.empty).foreach((handleMap _).tupled) diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Localization.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Localization.scala index 1d4666a4cf8..23bb29224b6 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Localization.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Localization.scala @@ -10,32 +10,42 @@ import cromwell.backend.google.pipelines.v2alpha1.PipelinesConversions._ import cromwell.backend.google.pipelines.v2alpha1.ToParameter.ops._ import cromwell.backend.google.pipelines.v2alpha1.api.ActionBuilder.cloudSdkShellAction - trait Localization { - def localizeActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = { + def localizeActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = { val localizationLabel = Map(Key.Tag -> Value.Localization) - val gcsTransferLibraryContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsTransferLibraryName) - val localizeGcsTransferLibrary = cloudSdkShellAction(localizeFile( - cloudPath = createPipelineParameters.cloudCallRoot / GcsTransferLibraryName, - containerPath = gcsTransferLibraryContainerPath))(mounts = mounts, labels = localizationLabel) - - val gcsLocalizationContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsLocalizationScriptName) - val localizeGcsLocalizationScript = cloudSdkShellAction(localizeFile( - cloudPath = createPipelineParameters.cloudCallRoot / GcsLocalizationScriptName, - containerPath = gcsLocalizationContainerPath))(mounts = mounts, labels = localizationLabel) - - val gcsDelocalizationContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) - val localizeGcsDelocalizationScript = cloudSdkShellAction(localizeFile( - cloudPath = createPipelineParameters.cloudCallRoot / GcsDelocalizationScriptName, - containerPath = gcsDelocalizationContainerPath))(mounts = mounts, 
labels = localizationLabel) - - val runGcsLocalizationScript = cloudSdkShellAction( - s"/bin/bash $gcsLocalizationContainerPath")(mounts = mounts, labels = localizationLabel) + val gcsTransferLibraryContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(GcsTransferLibraryName) + val localizeGcsTransferLibrary = cloudSdkShellAction( + localizeFile(cloudPath = createPipelineParameters.cloudCallRoot / GcsTransferLibraryName, + containerPath = gcsTransferLibraryContainerPath + ) + )(mounts = mounts, labels = localizationLabel) + + val gcsLocalizationContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(GcsLocalizationScriptName) + val localizeGcsLocalizationScript = cloudSdkShellAction( + localizeFile(cloudPath = createPipelineParameters.cloudCallRoot / GcsLocalizationScriptName, + containerPath = gcsLocalizationContainerPath + ) + )(mounts = mounts, labels = localizationLabel) + + val gcsDelocalizationContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) + val localizeGcsDelocalizationScript = cloudSdkShellAction( + localizeFile(cloudPath = createPipelineParameters.cloudCallRoot / GcsDelocalizationScriptName, + containerPath = gcsDelocalizationContainerPath + ) + )(mounts = mounts, labels = localizationLabel) + + val runGcsLocalizationScript = + cloudSdkShellAction(s"/bin/bash $gcsLocalizationContainerPath")(mounts = mounts, labels = localizationLabel) // Any "classic" PAPI v2 one-at-a-time localizations for non-GCS inputs. - val singletonLocalizations = createPipelineParameters.inputOutputParameters.fileInputParameters.flatMap(_.toActions(mounts).toList) + val singletonLocalizations = + createPipelineParameters.inputOutputParameters.fileInputParameters.flatMap(_.toActions(mounts).toList) val localizations = localizeGcsTransferLibrary :: diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MemoryRetryCheckAction.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MemoryRetryCheckAction.scala index 46e3fc9dbb9..099ba635c8c 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MemoryRetryCheckAction.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MemoryRetryCheckAction.scala @@ -5,10 +5,11 @@ import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.C trait MemoryRetryCheckAction { - def checkForMemoryRetryActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]): List[Action] = { + def checkForMemoryRetryActions(createPipelineParameters: CreatePipelineParameters, + mounts: List[Mount] + ): List[Action] = createPipelineParameters.retryWithMoreMemoryKeys match { case Some(keys) => List(ActionBuilder.checkForMemoryRetryAction(keys, mounts)) case None => List.empty[Action] } - } } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MonitoringAction.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MonitoringAction.scala index b65177480a8..d0edefbb24a 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MonitoringAction.scala +++ 
b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/MonitoringAction.scala @@ -5,9 +5,9 @@ import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttribu import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters trait MonitoringAction { - def monitoringSetupActions(createPipelineParameters: CreatePipelineParameters, - mounts: List[Mount] - )(implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = { + def monitoringSetupActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = { val monitoringImageScriptActions = createPipelineParameters.monitoringImage.monitoringImageScriptOption match { @@ -16,12 +16,12 @@ trait MonitoringAction { ActionBuilder.monitoringImageScriptAction( script, createPipelineParameters.monitoringImage.monitoringImageScriptContainerPath, - mounts, + mounts ) val describeLocalizeScriptAction = ActionBuilder.describeDocker( "localizing monitoring image script action", - localizeScriptAction, + localizeScriptAction ) List(describeLocalizeScriptAction, localizeScriptAction) case None => Nil @@ -30,7 +30,6 @@ trait MonitoringAction { val monitoringImageActions = createPipelineParameters.monitoringImage.monitoringImageOption match { case Some(image) => - val monitoringImage = image val monitoringImageCommand = createPipelineParameters.monitoringImage.monitoringImageCommand val monitoringImageEnvironment = createPipelineParameters.monitoringImage.monitoringImageEnvironment @@ -39,7 +38,7 @@ trait MonitoringAction { monitoringImage, monitoringImageCommand, monitoringImageEnvironment(mounts.map(_.getPath)), - mounts, + mounts ) val describeMonitoringAction = ActionBuilder.describeDocker("monitoring action", monitoringAction) @@ -52,7 +51,7 @@ trait MonitoringAction { monitoringImageScriptActions ++ monitoringImageActions } - def monitoringShutdownActions(createPipelineParameters: CreatePipelineParameters): List[Action] = { + def monitoringShutdownActions(createPipelineParameters: CreatePipelineParameters): List[Action] = createPipelineParameters.monitoringImage.monitoringImageOption match { case Some(_) => val terminationAction = ActionBuilder.terminateBackgroundActionsAction() @@ -62,5 +61,4 @@ trait MonitoringAction { List(describeTerminationAction, terminationAction) case None => Nil } - } } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala index a1fd56bf1d1..feab74eb2a3 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala @@ -8,16 +8,16 @@ import scala.jdk.CollectionConverters._ trait SSHAccessAction { - def sshAccessActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]) : List[Action] = { + def sshAccessActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]): List[Action] = if (createPipelineParameters.enableSshAccess) { sshAccessAction(mounts) } else { Nil } - } private def sshAccessAction(mounts: List[Mount]): List[Action] = { - val sshAction = 
ActionBuilder.withImage(ActionUtils.sshImage) + val sshAction = ActionBuilder + .withImage(ActionUtils.sshImage) .setEntrypoint(ActionUtils.sshEntryPoint) .setPortMappings(ActionUtils.sshPortMappings.asJava) .setFlags(List(ActionFlag.RunInBackground.toString).asJava) diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/AbortRequestHandler.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/AbortRequestHandler.scala index 8806e88cd94..fce24460ee7 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/AbortRequestHandler.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/AbortRequestHandler.scala @@ -6,7 +6,10 @@ import com.google.api.client.googleapis.json.GoogleJsonError import com.google.api.client.http.HttpHeaders import com.typesafe.scalalogging.LazyLogging import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager._ -import cromwell.backend.google.pipelines.common.api.clients.PipelinesApiAbortClient.{PAPIAbortRequestSuccessful, PAPIOperationIsAlreadyTerminal} +import cromwell.backend.google.pipelines.common.api.clients.PipelinesApiAbortClient.{ + PAPIAbortRequestSuccessful, + PAPIOperationIsAlreadyTerminal +} import cromwell.cloudsupport.gcp.auth.GoogleAuthMode import org.apache.commons.lang3.StringUtils @@ -14,7 +17,11 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} trait AbortRequestHandler extends LazyLogging { this: RequestHandler => - protected def handleGoogleError(abortQuery: PAPIAbortRequest, pollingManager: ActorRef, e: GoogleJsonError, responseHeaders: HttpHeaders): Try[Unit] = { + protected def handleGoogleError(abortQuery: PAPIAbortRequest, + pollingManager: ActorRef, + e: GoogleJsonError, + responseHeaders: HttpHeaders + ): Try[Unit] = // This condition is telling us that the job we tried to cancel is already in a terminal state. Technically PAPI // was not able to cancel the job because the job could not be transitioned from 'Running' to 'Cancelled'. But from // Cromwell's perspective a job cancellation is really just a request for the job to be in a terminal state, so @@ -23,30 +30,34 @@ trait AbortRequestHandler extends LazyLogging { this: RequestHandler => // If/when Google implements https://partnerissuetracker.corp.google.com/issues/171993833 we could break these cases // out and make our logging more specific if we wanted to. if (Option(e.getCode).contains(400) || StringUtils.contains(e.getMessage, "Precondition check failed")) { - logger.info(s"PAPI declined to abort job ${abortQuery.jobId.jobId} in workflow ${abortQuery.workflowId}, most likely because it is no longer running. Marking as finished. Message: ${e.getMessage}") + logger.info( + s"PAPI declined to abort job ${abortQuery.jobId.jobId} in workflow ${abortQuery.workflowId}, most likely because it is no longer running. Marking as finished. Message: ${e.getMessage}" + ) abortQuery.requester ! PAPIOperationIsAlreadyTerminal(abortQuery.jobId.jobId) Success(()) } else { - pollingManager ! PipelinesApiAbortQueryFailed(abortQuery, new SystemPAPIApiException(GoogleJsonException(e, responseHeaders))) + pollingManager ! 
PipelinesApiAbortQueryFailed(abortQuery, + new SystemPAPIApiException(GoogleJsonException(e, responseHeaders)) + ) Failure(new Exception(mkErrorString(e))) } - } // The Genomics batch endpoint doesn't seem to be able to handle abort requests on V2 operations at the moment // For now, don't batch the request and execute it on its own - def handleRequest(abortQuery: PAPIAbortRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] = { + def handleRequest(abortQuery: PAPIAbortRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit + ec: ExecutionContext + ): Future[Try[Unit]] = Future(abortQuery.httpRequest.execute()) map { case response if response.isSuccessStatusCode => abortQuery.requester ! PAPIAbortRequestSuccessful(abortQuery.jobId.jobId) Success(()) - case response => for { - asGoogleError <- Try(GoogleJsonError.parse(GoogleAuthMode.jsonFactory, response)) - handled <- handleGoogleError(abortQuery, pollingManager, asGoogleError, response.getHeaders) - } yield handled - } recover { - case e => - pollingManager ! PipelinesApiAbortQueryFailed(abortQuery, new SystemPAPIApiException(e)) - Failure(e) + case response => + for { + asGoogleError <- Try(GoogleJsonError.parse(GoogleAuthMode.jsonFactory, response)) + handled <- handleGoogleError(abortQuery, pollingManager, asGoogleError, response.getHeaders) + } yield handled + } recover { case e => + pollingManager ! PipelinesApiAbortQueryFailed(abortQuery, new SystemPAPIApiException(e)) + Failure(e) } - } } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala index 9b88a2f54bd..70cadeb229f 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala @@ -22,28 +22,27 @@ object ErrorReporter { // This can be used to log non-critical deserialization failures and not fail the task implicit class ErrorOrLogger[A](val t: ErrorOr[A]) extends AnyVal { - private def logErrors(errors: NonEmptyList[String], workflowId: WorkflowId, operation: Operation): Unit = { - logger.error(s"[$workflowId] Failed to parse PAPI response. Operation Id: ${operation.getName}" + s"${errors.toList.mkString(", ")}") + private def logErrors(errors: NonEmptyList[String], workflowId: WorkflowId, operation: Operation): Unit = + logger.error( + s"[$workflowId] Failed to parse PAPI response. 
Operation Id: ${operation.getName}" + s"${errors.toList.mkString(", ")}" + ) + + def fallBack: RequestContextReader[Option[A]] = Reader { case (workflowId, operation) => + t match { + case Valid(s) => Option(s) + case Invalid(f) => + logErrors(f, workflowId, operation) + None + } } - def fallBack: RequestContextReader[Option[A]] = Reader { - case (workflowId, operation) => - t match { - case Valid(s) => Option(s) - case Invalid(f) => - logErrors(f, workflowId, operation) - None - } - } - - def fallBackTo(to: A): RequestContextReader[A] = Reader { - case (workflowId, operation) => - t match { - case Valid(s) => s - case Invalid(f) => - logErrors(f, workflowId, operation) - to - } + def fallBackTo(to: A): RequestContextReader[A] = Reader { case (workflowId, operation) => + t match { + case Valid(s) => s + case Invalid(f) => + logErrors(f, workflowId, operation) + to + } } } } @@ -55,7 +54,8 @@ class ErrorReporter(machineType: Option[String], instanceName: Option[String], actions: List[Action], operation: Operation, - workflowId: WorkflowId) { + workflowId: WorkflowId +) { import ErrorReporter._ def toUnsuccessfulRunStatus(error: Status, events: List[Event]): UnsuccessfulRunStatus = { @@ -67,7 +67,11 @@ class ErrorReporter(machineType: Option[String], val status = statusOption.getOrElse(GStatus.UNAVAILABLE) val builder = status match { case GStatus.UNAVAILABLE if wasPreemptible => Preempted.apply _ - case GStatus.ABORTED if wasPreemptible && Option(error.getMessage).exists(_.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style)) => Preempted.apply _ + case GStatus.ABORTED + if wasPreemptible && Option(error.getMessage).exists( + _.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style) + ) => + Preempted.apply _ case GStatus.CANCELLED => Cancelled.apply _ case _ => Failed.apply _ } @@ -80,7 +84,7 @@ class ErrorReporter(machineType: Option[String], } // There's maybe one FailedEvent per operation with a summary error message - private def unexpectedExitStatusErrorStrings(events: List[Event], actions: List[Action]): List[String] = { + private def unexpectedExitStatusErrorStrings(events: List[Event], actions: List[Action]): List[String] = for { event <- events detail <- event.details[UnexpectedExitStatusEvent].flatMap(_.toErrorOr.fallBack(workflowId -> operation)) @@ -90,33 +94,32 @@ class ErrorReporter(machineType: Option[String], labelTag = action.flatMap(actionLabelTag) inputNameTag = action.flatMap(actionLabelInputName) } yield unexpectedStatusErrorString(event, stderr, labelTag, inputNameTag) - } // It would probably be good to define a richer error structure than String, but right now that's what the backend interface expects - private def unexpectedStatusErrorString(event: Event, stderr: Option[String], labelTag: Option[String], inputNameTag: Option[String]) = { + private def unexpectedStatusErrorString(event: Event, + stderr: Option[String], + labelTag: Option[String], + inputNameTag: Option[String] + ) = labelTag.map("[" + _ + "] ").getOrElse("") + - inputNameTag.map("Input name: " + _ + " - ").getOrElse("") + + inputNameTag.map("Input name: " + _ + " - ").getOrElse("") + Option(event).flatMap(eventValue => Option(eventValue.getDescription)).getOrElse("") + stderr.map(": " + _).getOrElse("") - } // There may be one FailedEvent per operation with a summary error message - private def summaryFailure(events: List[Event]): Option[String] = { + private def summaryFailure(events: List[Event]): Option[String] = findEvent[FailedEvent](events) .flatMap(_(workflowId -> 
operation)) .map(_.getCause) - } // Try to find the stderr for the given action ID - private def stderrForAction(events: List[Event])(actionId: Integer) = { + private def stderrForAction(events: List[Event])(actionId: Integer) = findEvent[ContainerStoppedEvent](events, _.getActionId == actionId) .flatMap(_(workflowId -> operation)) .map(_.getStderr) - } - private def actionLabelValue(action: Action, k: String): Option[String] = { + private def actionLabelValue(action: Action, k: String): Option[String] = Option(action).flatMap(actionValue => Option(actionValue.getLabels)).map(_.asScala).flatMap(_.get(k)) - } private def actionLabelTag(action: Action) = actionLabelValue(action, Key.Tag) private def actionLabelInputName(action: Action) = actionLabelValue(action, Key.InputName) diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala index 9a1c239fbbe..15fefc02910 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala @@ -22,37 +22,46 @@ import org.apache.commons.lang3.exception.ExceptionUtils import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.language.postfixOps -import scala.util.{Failure, Try, Success => TrySuccess} +import scala.util.{Failure, Success => TrySuccess, Try} trait GetRequestHandler { this: RequestHandler => // the Genomics batch endpoint doesn't seem to be able to handle get requests on V2 operations at the moment // For now, don't batch the request and execute it on its own - def handleRequest(pollingRequest: PAPIStatusPollRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] = Future(pollingRequest.httpRequest.execute()) map { + def handleRequest(pollingRequest: PAPIStatusPollRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit + ec: ExecutionContext + ): Future[Try[Unit]] = Future(pollingRequest.httpRequest.execute()) map { case response if response.isSuccessStatusCode => val operation = response.parseAs(classOf[Operation]) pollingRequest.requester ! interpretOperationStatus(operation, pollingRequest) TrySuccess(()) case response => val failure = Try(GoogleJsonError.parse(GoogleAuthMode.jsonFactory, response)) match { - case TrySuccess(googleError) => new SystemPAPIApiException(GoogleJsonException(googleError, response.getHeaders)) - case Failure(_) => new SystemPAPIApiException(new RuntimeException(s"Failed to get status for operation ${pollingRequest.jobId.jobId}: HTTP Status Code: ${response.getStatusCode}")) + case TrySuccess(googleError) => + new SystemPAPIApiException(GoogleJsonException(googleError, response.getHeaders)) + case Failure(_) => + new SystemPAPIApiException( + new RuntimeException( + s"Failed to get status for operation ${pollingRequest.jobId.jobId}: HTTP Status Code: ${response.getStatusCode}" + ) + ) } pollingManager ! PipelinesApiStatusQueryFailed(pollingRequest, failure) Failure(failure) - } recover { - case e => - pollingManager ! PipelinesApiStatusQueryFailed(pollingRequest, new SystemPAPIApiException(e)) - Failure(e) + } recover { case e => + pollingManager ! 
PipelinesApiStatusQueryFailed(pollingRequest, new SystemPAPIApiException(e)) + Failure(e) } - private [request] def interpretOperationStatus(operation: Operation, pollingRequest: PAPIStatusPollRequest): RunStatus = { + private[request] def interpretOperationStatus(operation: Operation, + pollingRequest: PAPIStatusPollRequest + ): RunStatus = if (Option(operation).isEmpty) { // It is possible to receive a null via an HTTP 200 with no response. If that happens, handle it and don't crash. // https://github.com/googleapis/google-http-java-client/blob/v1.28.0/google-http-client/src/main/java/com/google/api/client/http/HttpResponse.java#L456-L458 val errorMessage = "Operation returned as empty" UnsuccessfulRunStatus(Status.UNKNOWN, Option(errorMessage), Nil, None, None, None, wasPreemptible = false) } else { - try { + try if (operation.getDone) { val metadata = operation.metadata // Deserialize the response @@ -66,7 +75,7 @@ trait GetRequestHandler { this: RequestHandler => .toList .flatten val workerEvent: Option[WorkerAssignedEvent] = - findEvent[WorkerAssignedEvent](events).flatMap(_ (pollingRequest.workflowId -> operation)) + findEvent[WorkerAssignedEvent](events).flatMap(_(pollingRequest.workflowId -> operation)) val executionEvents = getEventList(metadata, events, actions) val virtualMachineOption = for { pipelineValue <- pipeline @@ -111,7 +120,7 @@ trait GetRequestHandler { this: RequestHandler => } else { Initializing } - } catch { + catch { case nullPointerException: NullPointerException => throw new RuntimeException( s"Caught NPE while interpreting operation ${operation.getName}: " + @@ -121,20 +130,25 @@ trait GetRequestHandler { this: RequestHandler => ) } } - } - private def getEventList(metadata: Map[String, AnyRef], events: List[Event], actions: List[Action]): List[ExecutionEvent] = { - val starterEvent: Option[ExecutionEvent] = { - metadata.get("createTime") map { time => ExecutionEvent("waiting for quota", OffsetDateTime.parse(time.toString)) } - } + private def getEventList(metadata: Map[String, AnyRef], + events: List[Event], + actions: List[Action] + ): List[ExecutionEvent] = { + val starterEvent: Option[ExecutionEvent] = + metadata.get("createTime") map { time => + ExecutionEvent("waiting for quota", OffsetDateTime.parse(time.toString)) + } - val completionEvent: Option[ExecutionEvent] = { - metadata.get("endTime") map { time => ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse(time.toString)) } - } + val completionEvent: Option[ExecutionEvent] = + metadata.get("endTime") map { time => + ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse(time.toString)) + } // Map action indexes to event types. Action indexes are 1-based for some reason. val actionIndexToEventType: Map[Int, String] = List(Key.Logging, Key.Tag).flatMap { k => - actions.zipWithIndex collect { case (a, i) if a.getLabels.containsKey(k) => (i + 1) -> a.getLabels.get(k) } } toMap + actions.zipWithIndex collect { case (a, i) if a.getLabels.containsKey(k) => (i + 1) -> a.getLabels.get(k) } + } toMap val executionEvents = events.map(toExecutionEvent(actionIndexToEventType)) // The Docker image used for CWL output parsing causes some complications for the timing diagram. Docker image @@ -152,7 +166,10 @@ trait GetRequestHandler { this: RequestHandler => val filteredExecutionEvents = startDelocalization match { case None => executionEvents // Can't do filtering without a start time for Delocalization. 
case Some(start) => - executionEvents filterNot { e => (e.name.startsWith("Started pulling ") || e.name.startsWith("Stopped pulling ")) && e.offsetDateTime.compareTo(start.offsetDateTime) > 0 } + executionEvents filterNot { e => + (e.name.startsWith("Started pulling ") || e.name.startsWith("Stopped pulling ")) && e.offsetDateTime + .compareTo(start.offsetDateTime) > 0 + } } starterEvent.toList ++ filteredExecutionEvents ++ completionEvent diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala index 6cb2b148a58..50ec0fd239e 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala @@ -5,7 +5,11 @@ import com.google.api.client.googleapis.batch.BatchRequest import com.google.api.client.googleapis.json.GoogleJsonError import com.google.api.services.genomics.v2alpha1.Genomics import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes.BatchRequestTimeoutConfiguration -import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{PAPIAbortRequest, PAPIRunCreationRequest, PAPIStatusPollRequest} +import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{ + PAPIAbortRequest, + PAPIRunCreationRequest, + PAPIStatusPollRequest +} import cromwell.backend.google.pipelines.common.api.{PipelinesApiRequestHandler, PipelinesApiRequestManager} import cromwell.cloudsupport.gcp.auth.GoogleAuthMode import org.slf4j.{Logger, LoggerFactory} @@ -21,18 +25,18 @@ object RequestHandler { class RequestHandler(applicationName: String, endpointUrl: URL, - batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration) - extends PipelinesApiRequestHandler - with RunRequestHandler - with GetRequestHandler - with AbortRequestHandler { + batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration +) extends PipelinesApiRequestHandler + with RunRequestHandler + with GetRequestHandler + with AbortRequestHandler { override def makeBatchRequest: BatchRequest = { val builder = new Genomics.Builder( GoogleAuthMode.httpTransport, GoogleAuthMode.jsonFactory, - initializeHttpRequest(batchRequestTimeoutConfiguration) _, + initializeHttpRequest(batchRequestTimeoutConfiguration) _ ) .setApplicationName(applicationName) .setRootUrl(endpointUrl.toString) @@ -42,9 +46,8 @@ class RequestHandler(applicationName: String, override def enqueue[T <: PipelinesApiRequestManager.PAPIApiRequest](papiApiRequest: T, batchRequest: BatchRequest, - pollingManager: ActorRef) - (implicit ec: ExecutionContext) - : Future[Try[Unit]] = papiApiRequest match { + pollingManager: ActorRef + )(implicit ec: ExecutionContext): Future[Try[Unit]] = papiApiRequest match { case create: PAPIRunCreationRequest => handleRequest(create, batchRequest, pollingManager) case status: PAPIStatusPollRequest => handleRequest(status, batchRequest, pollingManager) case abort: PAPIAbortRequest => handleRequest(abort, batchRequest, pollingManager) diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RunRequestHandler.scala 
b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RunRequestHandler.scala index 1b655b41edb..8b791125335 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RunRequestHandler.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RunRequestHandler.scala @@ -14,7 +14,10 @@ import scala.concurrent.{Future, Promise} import scala.util.{Failure, Success, Try} trait RunRequestHandler { this: RequestHandler => - private def runCreationResultHandler(originalRequest: PAPIApiRequest, completionPromise: Promise[Try[Unit]], pollingManager: ActorRef) = new JsonBatchCallback[Operation] { + private def runCreationResultHandler(originalRequest: PAPIApiRequest, + completionPromise: Promise[Try[Unit]], + pollingManager: ActorRef + ) = new JsonBatchCallback[Operation] { override def onSuccess(operation: Operation, responseHeaders: HttpHeaders): Unit = { originalRequest.requester ! getJob(operation) completionPromise.trySuccess(Success(())) @@ -41,17 +44,23 @@ trait RunRequestHandler { this: RequestHandler => } } - def handleRequest(runCreationQuery: PAPIRunCreationRequest, batch: BatchRequest, pollingManager: ActorRef): Future[Try[Unit]] = { + def handleRequest(runCreationQuery: PAPIRunCreationRequest, + batch: BatchRequest, + pollingManager: ActorRef + ): Future[Try[Unit]] = { val completionPromise = Promise[Try[Unit]]() val resultHandler = runCreationResultHandler(runCreationQuery, completionPromise, pollingManager) addRunCreationToBatch(runCreationQuery.httpRequest, batch, resultHandler) completionPromise.future } - private def addRunCreationToBatch(request: HttpRequest, batch: BatchRequest, resultHandler: JsonBatchCallback[Operation]): Unit = { + private def addRunCreationToBatch(request: HttpRequest, + batch: BatchRequest, + resultHandler: JsonBatchCallback[Operation] + ): Unit = { /* - * Manually enqueue the request instead of doing it through the RunPipelineRequest - * as it would unnecessarily rebuild the request (which we already have) + * Manually enqueue the request instead of doing it through the RunPipelineRequest + * as it would unnecessarily rebuild the request (which we already have) */ batch.queue(request, classOf[Operation], classOf[GoogleJsonErrorContainer], resultHandler) () diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala index 510f4a80742..3a119c1cf77 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala @@ -10,8 +10,11 @@ import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers - with MockSugar { +class PipelinesApiAsyncBackendJobExecutionActorSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with MockSugar { behavior of 
"PipelinesParameterConversions" it should "group files by bucket" in { @@ -39,8 +42,8 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Cro val expected = Map("foo" -> (NonEmptyList.of(0, 1) map inputs.apply)) ++ - Map("bar" -> (NonEmptyList.of(2, 3, 4) map inputs.apply)) ++ - Map("baz" -> NonEmptyList.of(inputs(5))) + Map("bar" -> (NonEmptyList.of(2, 3, 4) map inputs.apply)) ++ + Map("baz" -> NonEmptyList.of(inputs(5))) PipelinesApiAsyncBackendJobExecutionActor.groupParametersByGcsBucket(inputs) shouldEqual expected } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala index 18c84ee66eb..a325c8c0a01 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala @@ -34,13 +34,18 @@ class PipelinesConversionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with private lazy val fakeCredentials = NoCredentials.getInstance private val drsReadInterpreter: DrsReadInterpreter = (_, _) => - throw new UnsupportedOperationException("Currently PipelinesConversionsSpec doesn't need to use drs read interpreter.") + throw new UnsupportedOperationException( + "Currently PipelinesConversionsSpec doesn't need to use drs read interpreter." + ) it should "create a DRS input parameter" in { val drsPathBuilder = DrsPathBuilder( - new DrsCloudNioFileSystemProvider(drsResolverConfig, GoogleOauthDrsCredentials(fakeCredentials, 1.minutes), drsReadInterpreter), - None, + new DrsCloudNioFileSystemProvider(drsResolverConfig, + GoogleOauthDrsCredentials(fakeCredentials, 1.minutes), + drsReadInterpreter + ), + None ) val drsPath = drsPathBuilder.build("drs://drs.example.org/aaaabbbb-cccc-dddd-eeee-abcd0000dcba").get val containerRelativePath = DefaultPathBuilder.get("path/to/file.bai") @@ -62,10 +67,10 @@ class PipelinesConversionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with ) logging.get("flags") should be(a[java.util.List[_]]) - logging.get("flags").asInstanceOf[java.util.List[_]] should be (empty) + logging.get("flags").asInstanceOf[java.util.List[_]] should be(empty) logging.get("mounts") should be(a[java.util.List[_]]) - logging.get("mounts").asInstanceOf[java.util.List[_]] should be (empty) + logging.get("mounts").asInstanceOf[java.util.List[_]] should be(empty) logging.get("imageUri") should be(ActionUtils.CloudSdkImage) @@ -86,7 +91,7 @@ class PipelinesConversionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with ) action.get("mounts") should be(a[java.util.List[_]]) - action.get("mounts").asInstanceOf[java.util.List[_]] should be (empty) + action.get("mounts").asInstanceOf[java.util.List[_]] should be(empty) action.get("imageUri") should be("somerepo/drs-downloader:tagged") diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala index 83d1be04e74..4cd37c0cc60 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala +++ 
b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala @@ -19,35 +19,37 @@ class ActionBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ("description", "action", "command"), ("a cloud sdk action", ActionBuilder.cloudSdkAction, s"docker run ${ActionUtils.CloudSdkImage}"), ("a cloud sdk action with args", - ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello").asJava), - s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello" + ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello").asJava), + s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello" ), ("a cloud sdk action with quotes in the args", - ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello m'lord").asJava), - s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\ m\\'lord" + ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello m'lord").asJava), + s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\ m\\'lord" ), ("a cloud sdk action with a newline in the args", - ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello\\\nworld").asJava), - s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\\\world" + ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello\\\nworld").asJava), + s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\\\world" ), ("an action with multiple args", - new Action() - .setImageUri("ubuntu") - .setEnvironment(Map("ENV" -> "dev").asJava) - .setEntrypoint("") - .setCommands(List("bash", "-c", "echo hello").asJava) - .setFlags(List(ActionFlag.PublishExposedPorts, ActionFlag.AlwaysRun).map(_.toString).asJava) - .setMounts(List( - new Mount().setDisk("read-only-disk").setPath("/read/only/container").setReadOnly(true), - new Mount().setDisk("read-write-disk").setPath("/read/write/container"), - ).asJava) - .setName("my_container_name") - .setPidNamespace("host") - .setPortMappings(Map("8008" -> Int.box(8000)).asJava), - "docker run --name my_container_name" + - " -v /mnt/read-only-disk:/read/only/container:ro -v /mnt/read-write-disk:/read/write/container" + - " -e ENV:dev --pid=host -P -p 8008:8000 --entrypoint= ubuntu bash -c echo\\ hello" - ), + new Action() + .setImageUri("ubuntu") + .setEnvironment(Map("ENV" -> "dev").asJava) + .setEntrypoint("") + .setCommands(List("bash", "-c", "echo hello").asJava) + .setFlags(List(ActionFlag.PublishExposedPorts, ActionFlag.AlwaysRun).map(_.toString).asJava) + .setMounts( + List( + new Mount().setDisk("read-only-disk").setPath("/read/only/container").setReadOnly(true), + new Mount().setDisk("read-write-disk").setPath("/read/write/container") + ).asJava + ) + .setName("my_container_name") + .setPidNamespace("host") + .setPortMappings(Map("8008" -> Int.box(8000)).asJava), + "docker run --name my_container_name" + + " -v /mnt/read-only-disk:/read/only/container:ro -v /mnt/read-write-disk:/read/write/container" + + " -e ENV:dev --pid=host -P -p 8008:8000 --entrypoint= ubuntu bash -c echo\\ hello" + ) ) forAll(dockerRunActions) { (description, action, command) => @@ -56,15 +58,13 @@ class ActionBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche } } - private val memoryRetryExpectedEntrypoint = "/bin/sh" - def memoryRetryExpectedCommand(lookupString: String): util.List[String] = { + def memoryRetryExpectedCommand(lookupString: String): util.List[String] = List( "-c", s"grep -E -q '$lookupString' 
/cromwell_root/stderr ; echo $$? > /cromwell_root/memory_retry_rc" ).asJava - } val mounts = List(new Mount().setDisk("read-only-disk").setPath("/read/only/container")) private val memoryRetryActionExpectedFlags = List(ActionFlag.AlwaysRun.toString).asJava diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala index 31d1300d4db..20c9e1fe6e3 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala @@ -19,7 +19,7 @@ class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match mock[Path], mock[com.google.api.services.storage.Storage], mock[com.google.cloud.storage.Storage], - "my-project", + "my-project" ) val recovered = recoverRequesterPaysError(path) { flag => s"flag is $flag" @@ -46,21 +46,23 @@ class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } it should "use GcsTransferConfiguration to set the number of localization retries" in { - implicit val gcsTransferConfiguration: GcsTransferConfiguration = GcsTransferConfiguration( - transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") - retry("I'm very flaky") shouldBe """for i in $(seq 31380); do - | ( - | I'm very flaky - | ) - | RC=$? - | if [ "$RC" = "0" ]; then - | break - | fi - | if [ $i -lt 31380 ]; then - | printf '%s %s\n' "$(date -u '+%Y/%m/%d %H:%M:%S')" Waiting\ 5\ seconds\ and\ retrying - | sleep 5 - | fi - |done - |exit "$RC"""".stripMargin + implicit val gcsTransferConfiguration: GcsTransferConfiguration = + GcsTransferConfiguration(transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") + retry( + "I'm very flaky" + ) shouldBe """for i in $(seq 31380); do + | ( + | I'm very flaky + | ) + | RC=$? 
+ | if [ "$RC" = "0" ]; then + | break + | fi + | if [ $i -lt 31380 ]; then + | printf '%s %s\n' "$(date -u '+%Y/%m/%d %H:%M:%S')" Waiting\ 5\ seconds\ and\ retrying + | sleep 5 + | fi + |done + |exit "$RC"""".stripMargin } } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala index 7f10580b700..f78f2dce9f4 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala @@ -120,7 +120,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc "resources" -> Map[String, Object]( "projectId" -> "project", "virtualMachine" -> Map( - "machineType" -> "custom-1-1024", + "machineType" -> "custom-1-1024" ).asJava ).asJava ).asJava @@ -153,15 +153,19 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ) ).asJava - val metadataMapStarted = makeMetadata(Map[String, Object]( - "@type" -> "WorkerAssignedEvent", - "zone" -> "event 1 Zone", - "instance" -> "event 1 Instance" - )) + val metadataMapStarted = makeMetadata( + Map[String, Object]( + "@type" -> "WorkerAssignedEvent", + "zone" -> "event 1 Zone", + "instance" -> "event 1 Instance" + ) + ) val metadataMapNotStarted = makeMetadata(Map.empty) - val metadataMapNotStarted2 = makeMetadata(Map[String, Object]( - "@type" -> "ContainerStartedEvent" - )) + val metadataMapNotStarted2 = makeMetadata( + Map[String, Object]( + "@type" -> "ContainerStartedEvent" + ) + ) operation.setMetadata(metadataMapStarted) operation.hasStarted shouldBe true @@ -174,8 +178,8 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc it should "deserialize big decimals correctly" in { val valueMap = Map[String, Object]( "integerValue" -> BigDecimal(5), - "doubleValue" -> BigDecimal.decimal(6D), - "floatValue" -> BigDecimal.decimal(7F), + "doubleValue" -> BigDecimal.decimal(6d), + "floatValue" -> BigDecimal.decimal(7f), "longValue" -> BigDecimal.decimal(8L) ).asJava @@ -183,8 +187,8 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc deserialized.isSuccess shouldBe true val deserializedSuccess = deserialized.get deserializedSuccess.integerValue shouldBe 5 - deserializedSuccess.doubleValue shouldBe 6D - deserializedSuccess.floatValue shouldBe 7F + deserializedSuccess.doubleValue shouldBe 6d + deserializedSuccess.floatValue shouldBe 7f deserializedSuccess.longValue shouldBe 8L } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandlerSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandlerSpec.scala index 6efca44f5a5..5bc340f22ed 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandlerSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandlerSpec.scala @@ -25,7 +25,7 @@ class GetRequestHandlerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma private val requestHandler: 
GetRequestHandler = new RequestHandler( "GetRequestHandlerSpec", new URL("file:///getrequesthandlerspec"), - BatchRequestTimeoutConfiguration(None, None), + BatchRequestTimeoutConfiguration(None, None) ) private val workflowId = WorkflowId.randomId() @@ -37,260 +37,270 @@ class GetRequestHandlerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma private val interpretedStatus = Table( ("description", "json", "status"), - ("parse null operation json", null, UnsuccessfulRunStatus( - Status.UNKNOWN, - Option("Operation returned as empty"), - Nil, - None, - None, - None, - wasPreemptible = false - )), + ("parse null operation json", + null, + UnsuccessfulRunStatus( + Status.UNKNOWN, + Option("Operation returned as empty"), + Nil, + None, + None, + None, + wasPreemptible = false + ) + ), ("parse empty operation json", "{}", Initializing), ("parse error operation json without resources", - """|{ - | "done": true, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json without virtualMachine", - """|{ - | "done": true, - | "resources": { - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "resources": { + | }, + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json without preemptible", - """|{ - | "done": true, - | "resources": { - | "virtualMachine": { - | } - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "resources": { + | "virtualMachine": { + | } + | }, + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json with preemptible true", - """|{ - | "done": true, - | "resources": { - | "virtualMachine": { - | "preemptible": true - | } - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "resources": { + | "virtualMachine": { + | "preemptible": true + | } + | }, + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json with preemptible false", - """|{ - | "done": true, - | "resources": { - | "virtualMachine": { - | "preemptible": false - | } - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) - ), - ("check that we classify error code 10 as a preemption on a preemptible VM", - """{ + """|{ | "done": true, - | "error": { - | "code": 10, - | "message": "The assigned worker has failed to complete the operation" + | "resources": { + | "virtualMachine": { + | "preemptible": false + | } | }, - | "metadata": { - | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", - | "createTime": "2019-08-18T12:04:38.082650Z", - | "endTime": "2019-08-18T15:58:26.659602622Z", - | "events": [], - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": "validatesamfile" - | }, - | "pipeline": { - | "actions": [], - | "environment": {}, - | "resources": { - | "projectId": "", - | "regions": [], - | "virtualMachine": { - | "accelerators": 
[], - | "bootDiskSizeGb": 11, - | "bootImage": "asdfasdf", - | "cpuPlatform": "", - | "disks": [ - | { - | "name": "local-disk", - | "sizeGb": 41, - | "sourceImage": "", - | "type": "pd-standard" - | } - | ], - | "enableStackdriverMonitoring": false, - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "goog-pipelines-worker": "true", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": "validatesamfile" - | }, - | "machineType": "custom-2-7168", - | "network": { - | "name": "", - | "subnetwork": "", - | "usePrivateAddress": false - | }, - | "nvidiaDriverVersion": "", - | "preemptible": true, - | "serviceAccount": { - | "email": "default", - | "scopes": [ - | "https://www.googleapis.com/auth/genomics", - | "https://www.googleapis.com/auth/compute", - | "https://www.googleapis.com/auth/devstorage.full_control", - | "https://www.googleapis.com/auth/cloudkms", - | "https://www.googleapis.com/auth/userinfo.email", - | "https://www.googleapis.com/auth/userinfo.profile", - | "https://www.googleapis.com/auth/monitoring.write", - | "https://www.googleapis.com/auth/cloud-platform" - | ] - | } - | }, - | "zones": [ - | "us-central1-a", - | "us-central1-b", - | "us-east1-d", - | "us-central1-c", - | "us-central1-f", - | "us-east1-c" - | ] - | }, - | "timeout": "604800s" - | }, - | "startTime": "2019-08-18T12:04:39.192909594Z" - | }, - | "name": "asdfasdf" - |}""".stripMargin, - Preempted( - Status.ABORTED, - None, - Nil, - List( - ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"),None), - ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"),None), - ), - Some("custom-2-7168"), - None, - None) + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + ), + ("check that we classify error code 10 as a preemption on a preemptible VM", + """{ + | "done": true, + | "error": { + | "code": 10, + | "message": "The assigned worker has failed to complete the operation" + | }, + | "metadata": { + | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", + | "createTime": "2019-08-18T12:04:38.082650Z", + | "endTime": "2019-08-18T15:58:26.659602622Z", + | "events": [], + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "pipeline": { + | "actions": [], + | "environment": {}, + | "resources": { + | "projectId": "", + | "regions": [], + | "virtualMachine": { + | "accelerators": [], + | "bootDiskSizeGb": 11, + | "bootImage": "asdfasdf", + | "cpuPlatform": "", + | "disks": [ + | { + | "name": "local-disk", + | "sizeGb": 41, + | "sourceImage": "", + | "type": "pd-standard" + | } + | ], + | "enableStackdriverMonitoring": false, + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "goog-pipelines-worker": "true", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "machineType": "custom-2-7168", + | "network": { + | "name": "", + | "subnetwork": "", + | "usePrivateAddress": false + | }, + | "nvidiaDriverVersion": "", + | "preemptible": true, + | "serviceAccount": { + | "email": "default", + | "scopes": [ + | "https://www.googleapis.com/auth/genomics", + | "https://www.googleapis.com/auth/compute", + | "https://www.googleapis.com/auth/devstorage.full_control", + | 
"https://www.googleapis.com/auth/cloudkms", + | "https://www.googleapis.com/auth/userinfo.email", + | "https://www.googleapis.com/auth/userinfo.profile", + | "https://www.googleapis.com/auth/monitoring.write", + | "https://www.googleapis.com/auth/cloud-platform" + | ] + | } + | }, + | "zones": [ + | "us-central1-a", + | "us-central1-b", + | "us-east1-d", + | "us-central1-c", + | "us-central1-f", + | "us-east1-c" + | ] + | }, + | "timeout": "604800s" + | }, + | "startTime": "2019-08-18T12:04:39.192909594Z" + | }, + | "name": "asdfasdf" + |}""".stripMargin, + Preempted( + Status.ABORTED, + None, + Nil, + List( + ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"), None), + ExecutionEvent("Complete in GCE / Cromwell Poll Interval", + OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"), + None + ) + ), + Some("custom-2-7168"), + None, + None + ) ), ("check that we classify error code 10 as a failure on a non-preemptible VM", - """{ - | "done": true, - | "error": { - | "code": 10, - | "message": "The assigned worker has failed to complete the operation" - | }, - | "metadata": { - | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", - | "createTime": "2019-08-18T12:04:38.082650Z", - | "endTime": "2019-08-18T15:58:26.659602622Z", - | "events": [], - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": "validatesamfile" - | }, - | "pipeline": { - | "actions": [], - | "environment": {}, - | "resources": { - | "projectId": "", - | "regions": [], - | "virtualMachine": { - | "accelerators": [], - | "bootDiskSizeGb": 11, - | "bootImage": "asdfasdf", - | "cpuPlatform": "", - | "disks": [ - | { - | "name": "local-disk", - | "sizeGb": 41, - | "sourceImage": "", - | "type": "pd-standard" - | } - | ], - | "enableStackdriverMonitoring": false, - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "goog-pipelines-worker": "true", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": "validatesamfile" - | }, - | "machineType": "custom-2-7168", - | "network": { - | "name": "", - | "subnetwork": "", - | "usePrivateAddress": false - | }, - | "nvidiaDriverVersion": "", - | "preemptible": false, - | "serviceAccount": { - | "email": "default", - | "scopes": [ - | "https://www.googleapis.com/auth/genomics", - | "https://www.googleapis.com/auth/compute", - | "https://www.googleapis.com/auth/devstorage.full_control", - | "https://www.googleapis.com/auth/cloudkms", - | "https://www.googleapis.com/auth/userinfo.email", - | "https://www.googleapis.com/auth/userinfo.profile", - | "https://www.googleapis.com/auth/monitoring.write", - | "https://www.googleapis.com/auth/cloud-platform" - | ] - | } - | }, - | "zones": [ - | "us-central1-a", - | "us-central1-b", - | "us-east1-d", - | "us-central1-c", - | "us-central1-f", - | "us-east1-c" - | ] - | }, - | "timeout": "604800s" - | }, - | "startTime": "2019-08-18T12:04:39.192909594Z" - | }, - | "name": "asdfasdf" - |}""".stripMargin, - Failed( - Status.ABORTED, - None, - Nil, - List( - ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"),None), - ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"),None), - ), - Some("custom-2-7168"), - None, - None - ) + """{ + | "done": true, + | "error": { + | "code": 10, + | "message": "The assigned worker has failed to complete the 
operation" + | }, + | "metadata": { + | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", + | "createTime": "2019-08-18T12:04:38.082650Z", + | "endTime": "2019-08-18T15:58:26.659602622Z", + | "events": [], + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "pipeline": { + | "actions": [], + | "environment": {}, + | "resources": { + | "projectId": "", + | "regions": [], + | "virtualMachine": { + | "accelerators": [], + | "bootDiskSizeGb": 11, + | "bootImage": "asdfasdf", + | "cpuPlatform": "", + | "disks": [ + | { + | "name": "local-disk", + | "sizeGb": 41, + | "sourceImage": "", + | "type": "pd-standard" + | } + | ], + | "enableStackdriverMonitoring": false, + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "goog-pipelines-worker": "true", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "machineType": "custom-2-7168", + | "network": { + | "name": "", + | "subnetwork": "", + | "usePrivateAddress": false + | }, + | "nvidiaDriverVersion": "", + | "preemptible": false, + | "serviceAccount": { + | "email": "default", + | "scopes": [ + | "https://www.googleapis.com/auth/genomics", + | "https://www.googleapis.com/auth/compute", + | "https://www.googleapis.com/auth/devstorage.full_control", + | "https://www.googleapis.com/auth/cloudkms", + | "https://www.googleapis.com/auth/userinfo.email", + | "https://www.googleapis.com/auth/userinfo.profile", + | "https://www.googleapis.com/auth/monitoring.write", + | "https://www.googleapis.com/auth/cloud-platform" + | ] + | } + | }, + | "zones": [ + | "us-central1-a", + | "us-central1-b", + | "us-east1-d", + | "us-central1-c", + | "us-central1-f", + | "us-east1-c" + | ] + | }, + | "timeout": "604800s" + | }, + | "startTime": "2019-08-18T12:04:39.192909594Z" + | }, + | "name": "asdfasdf" + |}""".stripMargin, + Failed( + Status.ABORTED, + None, + Nil, + List( + ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"), None), + ExecutionEvent("Complete in GCE / Cromwell Poll Interval", + OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"), + None + ) + ), + Some("custom-2-7168"), + None, + None + ) ) ) @@ -299,7 +309,7 @@ class GetRequestHandlerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma // Operation responses could come back as null. Handle it and don't crash. 
// https://github.com/googleapis/google-http-java-client/blob/v1.28.0/google-http-client/src/main/java/com/google/api/client/http/HttpResponse.java#L456-L458 val operation = - Option(json).map(GoogleAuthMode.jsonFactory.createJsonParser).map(_.parse(classOf[Operation])).orNull + Option(json).map(GoogleAuthMode.jsonFactory.createJsonParser).map(_.parse(classOf[Operation])).orNull val runStatus = requestHandler.interpretOperationStatus(operation, pollingRequest) runStatus should be(expectedStatus) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala index 8d8f52d3773..2c601007e2e 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala @@ -26,32 +26,35 @@ import wom.format.MemorySize import scala.jdk.CollectionConverters._ -case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode, endpointUrl: URL, location: String)(implicit gcsTransferConfiguration: GcsTransferConfiguration) extends PipelinesApiFactoryInterface - with ContainerSetup - with MonitoringAction - with CheckpointingAction - with Localization - with UserAction - with Delocalization - with MemoryRetryCheckAction - with SSHAccessAction { +case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode, endpointUrl: URL, location: String)( + implicit gcsTransferConfiguration: GcsTransferConfiguration +) extends PipelinesApiFactoryInterface + with ContainerSetup + with MonitoringAction + with CheckpointingAction + with Localization + with UserAction + with Delocalization + with MemoryRetryCheckAction + with SSHAccessAction { override def build(initializer: HttpRequestInitializer): PipelinesApiRequestFactory = new PipelinesApiRequestFactory { - val lifeSciences: CloudLifeSciences = new CloudLifeSciences.Builder( - GoogleAuthMode.httpTransport, - GoogleAuthMode.jsonFactory, - initializer) - .setApplicationName(applicationName) - .setRootUrl(endpointUrl.toString) - .build - - override def cancelRequest(job: StandardAsyncJob): HttpRequest = { - lifeSciences.projects().locations().operations().cancel(job.jobId, new CancelOperationRequest()).buildHttpRequest() - } - - override def getRequest(job: StandardAsyncJob): HttpRequest = { + val lifeSciences: CloudLifeSciences = + new CloudLifeSciences.Builder(GoogleAuthMode.httpTransport, GoogleAuthMode.jsonFactory, initializer) + .setApplicationName(applicationName) + .setRootUrl(endpointUrl.toString) + .build + + override def cancelRequest(job: StandardAsyncJob): HttpRequest = + lifeSciences + .projects() + .locations() + .operations() + .cancel(job.jobId, new CancelOperationRequest()) + .buildHttpRequest() + + override def getRequest(job: StandardAsyncJob): HttpRequest = lifeSciences.projects().locations().operations().get(job.jobId).buildHttpRequest() - } override def runRequest(createPipelineParameters: CreatePipelineParameters, jobLogger: JobLogger): HttpRequest = { def createNetworkWithVPC(vpcAndSubnetworkProjectLabelValues: VpcAndSubnetworkProjectLabelValues): Network = { @@ -66,12 +69,11 @@ case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode network } - def createNetwork(): Network = { + def createNetwork(): 
Network = createPipelineParameters.vpcNetworkAndSubnetworkProjectLabels match { case Some(vpcAndSubnetworkProjectLabelValues) => createNetworkWithVPC(vpcAndSubnetworkProjectLabelValues) case _ => new Network().setUsePrivateAddress(createPipelineParameters.runtimeAttributes.noAddress) } - } val allDisksToBeMounted = createPipelineParameters.adjustedSizeDisks ++ createPipelineParameters.referenceDisksForLocalizationOpt.getOrElse(List.empty) @@ -95,7 +97,8 @@ case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode // adding memory as environment variables makes it easy for a user to retrieve the new value of memory // on the machine to utilize in their command blocks if needed val runtimeMemory = createPipelineParameters.runtimeAttributes.memory - val environment = Map("MEM_UNIT" -> runtimeMemory.unit.toString, "MEM_SIZE" -> runtimeMemory.amount.toString).asJava + val environment = + Map("MEM_UNIT" -> runtimeMemory.unit.toString, "MEM_SIZE" -> runtimeMemory.amount.toString).asJava val sortedActions: List[Action] = ActionUtils.sortActions[Action]( @@ -109,7 +112,7 @@ case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode checkpointingStart = checkpointingStart, checkpointingShutdown = checkpointingShutdown, sshAccess = sshAccess, - isBackground = _.getRunInBackground, + isBackground = _.getRunInBackground ) val serviceAccount = new ServiceAccount() @@ -131,8 +134,7 @@ case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode val network: Network = createNetwork() - val accelerators = createPipelineParameters.runtimeAttributes - .gpuResource.map(toAccelerator).toList.asJava + val accelerators = createPipelineParameters.runtimeAttributes.gpuResource.map(toAccelerator).toList.asJava val virtualMachine = new VirtualMachine() .setDisks(disks.asJava) @@ -153,18 +155,24 @@ case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode val adjustedBootDiskSize = { val fromRuntimeAttributes = createPipelineParameters.runtimeAttributes.bootDiskSize // Compute the decompressed size based on the information available - val userCommandImageSizeInBytes = createPipelineParameters.jobDescriptor.dockerSize.map(_.toFullSize(DockerConfiguration.instance.sizeCompressionFactor)).getOrElse(0L) - val userCommandImageSizeInGB = MemorySize(userCommandImageSizeInBytes.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB).amount + val userCommandImageSizeInBytes = createPipelineParameters.jobDescriptor.dockerSize + .map(_.toFullSize(DockerConfiguration.instance.sizeCompressionFactor)) + .getOrElse(0L) + val userCommandImageSizeInGB = + MemorySize(userCommandImageSizeInBytes.toDouble, MemoryUnit.Bytes).to(MemoryUnit.GB).amount val userCommandImageSizeRoundedUpInGB = userCommandImageSizeInGB.ceil.toInt val totalSize = fromRuntimeAttributes + - createPipelineParameters - .dockerImageCacheDiskOpt - .map(_ => 0) // if we are using docker image cache then we don't need this additional volume for the boot disk + createPipelineParameters.dockerImageCacheDiskOpt + .map(_ => + 0 + ) // if we are using docker image cache then we don't need this additional volume for the boot disk .getOrElse(userCommandImageSizeRoundedUpInGB + ActionUtils.cromwellImagesSizeRoundedUpInGB) if (totalSize != fromRuntimeAttributes) { - jobLogger.info(s"Adjusting boot disk size to $totalSize GB: $fromRuntimeAttributes GB (runtime attributes) + $userCommandImageSizeRoundedUpInGB GB (user command image) + ${ActionUtils.cromwellImagesSizeRoundedUpInGB} GB (Cromwell support 
images)") + jobLogger.info( + s"Adjusting boot disk size to $totalSize GB: $fromRuntimeAttributes GB (runtime attributes) + $userCommandImageSizeRoundedUpInGB GB (user command image) + ${ActionUtils.cromwellImagesSizeRoundedUpInGB} GB (Cromwell support images)" + ) } totalSize diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActor.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActor.scala index 3c0f5ca63f9..c664aadf066 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActor.scala @@ -20,7 +20,15 @@ import org.apache.commons.csv.{CSVFormat, CSVPrinter} import org.apache.commons.io.output.ByteArrayOutputStream import wom.core.FullyQualifiedName import wom.expression.FileEvaluation -import wom.values.{GlobFunctions, WomFile, WomGlobFile, WomMaybeListedDirectory, WomMaybePopulatedFile, WomSingleFile, WomUnlistedDirectory} +import wom.values.{ + GlobFunctions, + WomFile, + WomGlobFile, + WomMaybeListedDirectory, + WomMaybePopulatedFile, + WomSingleFile, + WomUnlistedDirectory +} import java.nio.charset.Charset @@ -31,7 +39,7 @@ import scala.language.postfixOps import scala.util.control.NoStackTrace class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExecutionActorParams) - extends cromwell.backend.google.pipelines.common.PipelinesApiAsyncBackendJobExecutionActor(standardParams) + extends cromwell.backend.google.pipelines.common.PipelinesApiAsyncBackendJobExecutionActor(standardParams) with PipelinesApiReferenceFilesMappingOperations { // The original implementation assumes the WomFiles are all WomMaybePopulatedFiles and wraps everything in a PipelinesApiFileInput @@ -39,31 +47,45 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe override protected def pipelinesApiInputsFromWomFiles(inputName: String, remotePathArray: Seq[WomFile], localPathArray: Seq[WomFile], - jobDescriptor: BackendJobDescriptor): Iterable[PipelinesApiInput] = { + jobDescriptor: BackendJobDescriptor + ): Iterable[PipelinesApiInput] = (remotePathArray zip localPathArray) flatMap { case (remotePath: WomMaybeListedDirectory, localPath) => maybeListedDirectoryToPipelinesParameters(inputName, remotePath, localPath.valueString) case (remotePath: WomUnlistedDirectory, localPath) => - Seq(PipelinesApiDirectoryInput(inputName, getPath(remotePath.valueString).get, DefaultPathBuilder.get(localPath.valueString), workingDisk)) + Seq( + PipelinesApiDirectoryInput(inputName, + getPath(remotePath.valueString).get, + DefaultPathBuilder.get(localPath.valueString), + workingDisk + ) + ) case (remotePath: WomMaybePopulatedFile, localPath) => maybePopulatedFileToPipelinesParameters(inputName, remotePath, localPath.valueString) case (remotePath, localPath) => - Seq(PipelinesApiFileInput(inputName, getPath(remotePath.valueString).get, DefaultPathBuilder.get(localPath.valueString), workingDisk)) + Seq( + PipelinesApiFileInput(inputName, + getPath(remotePath.valueString).get, + DefaultPathBuilder.get(localPath.valueString), + workingDisk + ) + ) } - } // The original implementation recursively finds all non directory files, in V2 we can keep directory as is 
override protected lazy val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = jobDescriptor.localInputs map { case (key, womFile) => - key -> womFile.collectAsSeq({ + key -> womFile.collectAsSeq { case womFile: WomFile if !inputsToNotLocalize.contains(womFile) => womFile - }) + } } private lazy val gcsTransferLibrary = Source.fromInputStream(Thread.currentThread.getContextClassLoader.getResourceAsStream("gcs_transfer.sh")).mkString - private def gcsLocalizationTransferBundle[T <: PipelinesApiInput](gcsTransferConfiguration: GcsTransferConfiguration)(bucket: String, inputs: NonEmptyList[T]): String = { + private def gcsLocalizationTransferBundle[T <: PipelinesApiInput]( + gcsTransferConfiguration: GcsTransferConfiguration + )(bucket: String, inputs: NonEmptyList[T]): String = { val project = inputs.head.cloudPath.asInstanceOf[GcsPath].projectId val maxAttempts = gcsTransferConfiguration.transferAttempts @@ -78,27 +100,29 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe val filesByContainerParentDirectory = filesWithSameNames.groupBy(_.containerPath.parent.toString) // Deduplicate any inputs since parallel localization can't deal with this. - val uniqueFilesByContainerParentDirectory = filesByContainerParentDirectory map { case (p, fs) => p -> fs.toSet } + val uniqueFilesByContainerParentDirectory = filesByContainerParentDirectory map { case (p, fs) => p -> fs.toSet } - val filesWithSameNamesTransferBundles: List[String] = uniqueFilesByContainerParentDirectory.toList map { case (containerParent, filesWithSameParent) => - val arrayIdentifier = s"files_to_localize_" + DigestUtils.md5Hex(bucket + containerParent) - val entries = filesWithSameParent.map(_.cloudPath) mkString("\"", "\"\n| \"", "\"") + val filesWithSameNamesTransferBundles: List[String] = uniqueFilesByContainerParentDirectory.toList map { + case (containerParent, filesWithSameParent) => + val arrayIdentifier = s"files_to_localize_" + DigestUtils.md5Hex(bucket + containerParent) + val entries = filesWithSameParent.map(_.cloudPath) mkString ("\"", "\"\n| \"", "\"") - s""" - |# Localize files from source bucket '$bucket' to container parent directory '$containerParent'. - |$arrayIdentifier=( - | "$project" # project to use if requester pays - | "$maxAttempts" # max transfer attempts - | "${containerParent.ensureSlashed}" # container parent directory - | $entries - |) - | - |localize_files "$${$arrayIdentifier[@]}" + s""" + |# Localize files from source bucket '$bucket' to container parent directory '$containerParent'. + |$arrayIdentifier=( + | "$project" # project to use if requester pays + | "$maxAttempts" # max transfer attempts + | "${containerParent.ensureSlashed}" # container parent directory + | $entries + |) + | + |localize_files "$${$arrayIdentifier[@]}" """.stripMargin } val filesWithDifferentNamesTransferBundles = filesWithDifferentNames map { f => - val arrayIdentifier = s"singleton_file_to_localize_" + DigestUtils.md5Hex(f.cloudPath.pathAsString + f.containerPath.pathAsString) + val arrayIdentifier = + s"singleton_file_to_localize_" + DigestUtils.md5Hex(f.cloudPath.pathAsString + f.containerPath.pathAsString) s""" |# Localize singleton file '${f.cloudPath.pathAsString}' to '${f.containerPath.pathAsString}'. |$arrayIdentifier=( @@ -114,27 +138,31 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe // Only write a transfer bundle for directories if there are directories to be localized. 
Emptiness isn't a concern // for files since there is always at least the command script to be localized. - val directoryTransferBundle = if (directories.isEmpty) "" else { - val entries = directories flatMap { i => List(i.cloudPath, i.containerPath) } mkString("\"", "\"\n| \"", "\"") + val directoryTransferBundle = + if (directories.isEmpty) "" + else { + val entries = directories flatMap { i => List(i.cloudPath, i.containerPath) } mkString ("\"", "\"\n| \"", "\"") - val arrayIdentifier = s"directories_to_localize_" + DigestUtils.md5Hex(bucket) + val arrayIdentifier = s"directories_to_localize_" + DigestUtils.md5Hex(bucket) - s""" - |# Directories from source bucket '$bucket'. - |$arrayIdentifier=( - | "$project" # project to use if requester pays - | "$maxAttempts" # max transfer attempts - | $entries - |) - | - |localize_directories "$${$arrayIdentifier[@]}" + s""" + |# Directories from source bucket '$bucket'. + |$arrayIdentifier=( + | "$project" # project to use if requester pays + | "$maxAttempts" # max transfer attempts + | $entries + |) + | + |localize_directories "$${$arrayIdentifier[@]}" """.stripMargin - } + } (directoryTransferBundle :: (filesWithSameNamesTransferBundles ++ filesWithDifferentNamesTransferBundles)) mkString "\n\n" } - private def gcsDelocalizationTransferBundle[T <: PipelinesApiOutput](transferConfiguration: GcsTransferConfiguration)(bucket: String, outputs: NonEmptyList[T]): String = { + private def gcsDelocalizationTransferBundle[T <: PipelinesApiOutput]( + transferConfiguration: GcsTransferConfiguration + )(bucket: String, outputs: NonEmptyList[T]): String = { val project = outputs.head.cloudPath.asInstanceOf[GcsPath].projectId val maxAttempts = transferConfiguration.transferAttempts @@ -145,14 +173,16 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe case _: PipelinesApiDirectoryOutput => "directory" // a primary directory } - val optional = Option(output) collectFirst { case o: PipelinesApiFileOutput if o.secondary || o.optional => "optional" } getOrElse "required" + val optional = Option(output) collectFirst { + case o: PipelinesApiFileOutput if o.secondary || o.optional => "optional" + } getOrElse "required" val contentType = output.contentType.map(_.toString).getOrElse("") List(kind, output.cloudPath.toString, output.containerPath.toString, optional, contentType) - } mkString("\"", "\"\n| \"", "\"") + } mkString ("\"", "\"\n| \"", "\"") - val parallelCompositeUploadThreshold = jobDescriptor.workflowDescriptor.workflowOptions.getOrElse( - "parallel_composite_upload_threshold", transferConfiguration.parallelCompositeUploadThreshold) + val parallelCompositeUploadThreshold = jobDescriptor.workflowDescriptor.workflowOptions + .getOrElse("parallel_composite_upload_threshold", transferConfiguration.parallelCompositeUploadThreshold) // Use a digest as bucket names can contain characters that are not legal in bash identifiers. 
val arrayIdentifier = s"delocalize_" + DigestUtils.md5Hex(bucket) @@ -169,17 +199,18 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe """.stripMargin } - private def bracketTransfersWithMessages(activity: String)(transferBody: String): String = { + private def bracketTransfersWithMessages(activity: String)(transferBody: String): String = List( s"timestamped_message '$activity script execution started...'", transferBody, s"timestamped_message '$activity script execution complete.'" ) mkString "\n" - } import mouse.all._ - override def uploadDrsLocalizationManifest(createPipelineParameters: CreatePipelineParameters, cloudPath: Path): Future[Unit] = { + override def uploadDrsLocalizationManifest(createPipelineParameters: CreatePipelineParameters, + cloudPath: Path + ): Future[Unit] = { val content = generateDrsLocalizerManifest(createPipelineParameters.inputOutputParameters.fileInputParameters) if (content.nonEmpty) asyncIo.writeAsync(cloudPath, content, Seq(CloudStorageOptions.withMimeType("text/plain"))) @@ -188,16 +219,17 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe } private def generateGcsLocalizationScript(inputs: List[PipelinesApiInput], - referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]] + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { // Generate a mapping of reference inputs to their mounted paths and a section of the localization script to // "faux localize" these reference inputs with symlinks to their locations on mounted reference disks. import cromwell.backend.google.pipelines.common.action.ActionUtils.shellEscaped val referenceFilesLocalizationScript = { val symlinkCreationCommandsOpt = referenceInputsToMountedPathsOpt map { referenceInputsToMountedPaths => - referenceInputsToMountedPaths map { - case (input, absolutePathOnRefDisk) => - s"mkdir -p ${shellEscaped(input.containerPath.parent.pathAsString)} && ln -s ${shellEscaped(absolutePathOnRefDisk)} ${shellEscaped(input.containerPath.pathAsString)}" + referenceInputsToMountedPaths map { case (input, absolutePathOnRefDisk) => + s"mkdir -p ${shellEscaped(input.containerPath.parent.pathAsString)} && ln -s ${shellEscaped( + absolutePathOnRefDisk + )} ${shellEscaped(input.containerPath.pathAsString)}" } } @@ -219,9 +251,9 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe } val regularFilesLocalizationScript = { - val regularFiles = referenceInputsToMountedPathsOpt.map(maybeReferenceInputsToMountedPaths => - inputs diff maybeReferenceInputsToMountedPaths.keySet.toList - ).getOrElse(inputs) + val regularFiles = referenceInputsToMountedPathsOpt + .map(maybeReferenceInputsToMountedPaths => inputs diff maybeReferenceInputsToMountedPaths.keySet.toList) + .getOrElse(inputs) if (regularFiles.nonEmpty) { val bundleFunction = (gcsLocalizationTransferBundle(gcsTransferConfiguration) _).tupled generateGcsTransferScript(regularFiles, bundleFunction) @@ -240,45 +272,68 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe combinedLocalizationScript |> bracketTransfersWithMessages("Localization") } - private def generateGcsDelocalizationScript(outputs: List[PipelinesApiOutput])(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { + private def generateGcsDelocalizationScript( + outputs: 
List[PipelinesApiOutput] + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): String = { val bundleFunction = (gcsDelocalizationTransferBundle(gcsTransferConfiguration) _).tupled generateGcsTransferScript(outputs, bundleFunction) |> bracketTransfersWithMessages("Delocalization") } - private def generateGcsTransferScript[T <: PipelinesParameter](items: List[T], bundleFunction: ((String, NonEmptyList[T])) => String): String = { + private def generateGcsTransferScript[T <: PipelinesParameter](items: List[T], + bundleFunction: ((String, NonEmptyList[T])) => String + ): String = { val gcsItems = items collect { case i if i.cloudPath.isInstanceOf[GcsPath] => i } groupParametersByGcsBucket(gcsItems) map bundleFunction mkString "\n" } override protected def uploadGcsTransferLibrary(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, - gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = { - + gcsTransferConfiguration: GcsTransferConfiguration + ): Future[Unit] = asyncIo.writeAsync(cloudPath, gcsTransferLibrary, Seq(CloudStorageOptions.withMimeType("text/plain"))) - } override def uploadGcsLocalizationScript(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, transferLibraryContainerPath: Path, gcsTransferConfiguration: GcsTransferConfiguration, - referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]]): Future[Unit] = { - val content = generateGcsLocalizationScript(createPipelineParameters.inputOutputParameters.fileInputParameters, referenceInputsToMountedPathsOpt)(gcsTransferConfiguration) - asyncIo.writeAsync(cloudPath, s"source '$transferLibraryContainerPath'\n\n" + content, Seq(CloudStorageOptions.withMimeType("text/plain"))) + referenceInputsToMountedPathsOpt: Option[Map[PipelinesApiInput, String]] + ): Future[Unit] = { + val content = generateGcsLocalizationScript(createPipelineParameters.inputOutputParameters.fileInputParameters, + referenceInputsToMountedPathsOpt + )(gcsTransferConfiguration) + asyncIo.writeAsync(cloudPath, + s"source '$transferLibraryContainerPath'\n\n" + content, + Seq(CloudStorageOptions.withMimeType("text/plain")) + ) } override def uploadGcsDelocalizationScript(createPipelineParameters: CreatePipelineParameters, cloudPath: Path, transferLibraryContainerPath: Path, - gcsTransferConfiguration: GcsTransferConfiguration): Future[Unit] = { - val content = generateGcsDelocalizationScript(createPipelineParameters.inputOutputParameters.fileOutputParameters)(gcsTransferConfiguration) - asyncIo.writeAsync(cloudPath, s"source '$transferLibraryContainerPath'\n\n" + content, Seq(CloudStorageOptions.withMimeType("text/plain"))) + gcsTransferConfiguration: GcsTransferConfiguration + ): Future[Unit] = { + val content = generateGcsDelocalizationScript(createPipelineParameters.inputOutputParameters.fileOutputParameters)( + gcsTransferConfiguration + ) + asyncIo.writeAsync(cloudPath, + s"source '$transferLibraryContainerPath'\n\n" + content, + Seq(CloudStorageOptions.withMimeType("text/plain")) + ) } // Simply create a PipelinesApiDirectoryOutput in v2 instead of globbing - override protected def generateUnlistedDirectoryOutputs(unlistedDirectory: WomUnlistedDirectory, fileEvaluation: FileEvaluation): List[PipelinesApiOutput] = { + override protected def generateUnlistedDirectoryOutputs(unlistedDirectory: WomUnlistedDirectory, + fileEvaluation: FileEvaluation + ): List[PipelinesApiOutput] = { val destination = callRootPath.resolve(unlistedDirectory.value.stripPrefix("/")) val (relpath, disk) = 
relativePathAndAttachedDisk(unlistedDirectory.value, runtimeAttributes.disks) - val directoryOutput = PipelinesApiDirectoryOutput(makeSafeReferenceName(unlistedDirectory.value), destination, relpath, disk, fileEvaluation.optional, fileEvaluation.secondary) + val directoryOutput = PipelinesApiDirectoryOutput(makeSafeReferenceName(unlistedDirectory.value), + destination, + relpath, + disk, + fileEvaluation.optional, + fileEvaluation.secondary + ) List(directoryOutput) } @@ -295,13 +350,27 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe // We need both the glob directory and the glob list: List( // The glob directory: - PipelinesApiDirectoryOutput(makeSafeReferenceName(globDirectory), gcsGlobDirectoryDestinationPath, DefaultPathBuilder.get(globDirectory), globDirectoryDisk, optional = false, secondary = false), + PipelinesApiDirectoryOutput( + makeSafeReferenceName(globDirectory), + gcsGlobDirectoryDestinationPath, + DefaultPathBuilder.get(globDirectory), + globDirectoryDisk, + optional = false, + secondary = false + ), // The glob list file: - PipelinesApiFileOutput(makeSafeReferenceName(globListFile), gcsGlobListFileDestinationPath, DefaultPathBuilder.get(globListFile), globDirectoryDisk, optional = false, secondary = false) + PipelinesApiFileOutput( + makeSafeReferenceName(globListFile), + gcsGlobListFileDestinationPath, + DefaultPathBuilder.get(globListFile), + globDirectoryDisk, + optional = false, + secondary = false + ) ) } - override def womFileToGcsPath(jesOutputs: Set[PipelinesApiOutput])(womFile: WomFile): WomFile = { + override def womFileToGcsPath(jesOutputs: Set[PipelinesApiOutput])(womFile: WomFile): WomFile = womFile mapFile { path => jesOutputs collectFirst { case jesOutput if jesOutput.name == makeSafeReferenceName(path) => @@ -318,65 +387,98 @@ class PipelinesApiAsyncBackendJobExecutionActor(standardParams: StandardAsyncExe case _: ValidFullGcsPath => path /* - * Strip the prefixes in RuntimeOutputMapping.prefixFilters from the path, one at a time. - * For instance - * file:///cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt will progressively become - * - * /cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt - * bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt - * call-A/file.txt - * - * This code is called as part of a path mapper that will be applied to the WOMified cwl.output.json. - * The cwl.output.json when it's being read by Cromwell from the bucket still contains local paths - * (as they were created by the cwl tool). - * In order to keep things working we need to map those local paths to where they were actually delocalized, - * which is determined in cromwell.backend.google.pipelines.v2beta.api.Delocalization. - */ - case _ => (callRootPath / - RuntimeOutputMapping + * Strip the prefixes in RuntimeOutputMapping.prefixFilters from the path, one at a time. + * For instance + * file:///cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt will progressively become + * + * /cromwell_root/bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt + * bucket/workflow_name/6d777414-5ee7-4c60-8b9e-a02ec44c398e/call-A/file.txt + * call-A/file.txt + * + * This code is called as part of a path mapper that will be applied to the WOMified cwl.output.json. 
+ * The cwl.output.json when it's being read by Cromwell from the bucket still contains local paths + * (as they were created by the cwl tool). + * In order to keep things working we need to map those local paths to where they were actually delocalized, + * which is determined in cromwell.backend.google.pipelines.v2beta.api.Delocalization. + */ + case _ => + (callRootPath / + RuntimeOutputMapping .prefixFilters(workflowPaths.workflowRoot) - .foldLeft(path)({ - case (newPath, prefix) => newPath.stripPrefix(prefix) - }) - ).pathAsString + .foldLeft(path) { case (newPath, prefix) => + newPath.stripPrefix(prefix) + }).pathAsString } } } - } - private def maybePopulatedFileToPipelinesParameters(inputName: String, maybePopulatedFile: WomMaybePopulatedFile, localPath: String) = { - val secondaryFiles = maybePopulatedFile.secondaryFiles.flatMap({ secondaryFile => - pipelinesApiInputsFromWomFiles(secondaryFile.valueString, List(secondaryFile), List(relativeLocalizationPath(secondaryFile)), jobDescriptor) - }) + private def maybePopulatedFileToPipelinesParameters(inputName: String, + maybePopulatedFile: WomMaybePopulatedFile, + localPath: String + ) = { + val secondaryFiles = maybePopulatedFile.secondaryFiles.flatMap { secondaryFile => + pipelinesApiInputsFromWomFiles(secondaryFile.valueString, + List(secondaryFile), + List(relativeLocalizationPath(secondaryFile)), + jobDescriptor + ) + } - Seq(PipelinesApiFileInput(inputName, getPath(maybePopulatedFile.valueString).get, DefaultPathBuilder.get(localPath), workingDisk)) ++ secondaryFiles + Seq( + PipelinesApiFileInput(inputName, + getPath(maybePopulatedFile.valueString).get, + DefaultPathBuilder.get(localPath), + workingDisk + ) + ) ++ secondaryFiles } - private def maybeListedDirectoryToPipelinesParameters(inputName: String, womMaybeListedDirectory: WomMaybeListedDirectory, localPath: String) = womMaybeListedDirectory match { + private def maybeListedDirectoryToPipelinesParameters(inputName: String, + womMaybeListedDirectory: WomMaybeListedDirectory, + localPath: String + ) = womMaybeListedDirectory match { // If there is a path, simply localize as a directory case WomMaybeListedDirectory(Some(path), _, _, _) => List(PipelinesApiDirectoryInput(inputName, getPath(path).get, DefaultPathBuilder.get(localPath), workingDisk)) // If there is a listing, recurse and call pipelinesApiInputsFromWomFiles on all the listed files case WomMaybeListedDirectory(_, Some(listing), _, _) if listing.nonEmpty => - listing.flatMap({ + listing.flatMap { case womFile: WomFile if isAdHocFile(womFile) => - pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), List(womFile), List(fileName(womFile)), jobDescriptor) + pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), + List(womFile), + List(fileName(womFile)), + jobDescriptor + ) case womFile: WomFile => - pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), List(womFile), List(relativeLocalizationPath(womFile)), jobDescriptor) - }) + pipelinesApiInputsFromWomFiles(makeSafeReferenceName(womFile.valueString), + List(womFile), + List(relativeLocalizationPath(womFile)), + jobDescriptor + ) + } case _ => List.empty } - override def generateSingleFileOutputs(womFile: WomSingleFile, fileEvaluation: FileEvaluation): List[PipelinesApiFileOutput] = { + override def generateSingleFileOutputs(womFile: WomSingleFile, + fileEvaluation: FileEvaluation + ): List[PipelinesApiFileOutput] = { val (relpath, disk) = relativePathAndAttachedDisk(womFile.value, 
runtimeAttributes.disks) // If the file is on a custom mount point, resolve it so that the full mount path will show up in the cloud path // For the default one (cromwell_root), the expectation is that it does not appear - val mountedPath = if (!disk.mountPoint.isSamePathAs(PipelinesApiWorkingDisk.Default.mountPoint)) disk.mountPoint.resolve(relpath) else relpath + val mountedPath = + if (!disk.mountPoint.isSamePathAs(PipelinesApiWorkingDisk.Default.mountPoint)) disk.mountPoint.resolve(relpath) + else relpath // Normalize the local path (to get rid of ".." and "."). Also strip any potential leading / so that it gets appended to the call root val normalizedPath = mountedPath.normalize().pathAsString.stripPrefix("/") val destination = callRootPath.resolve(normalizedPath) - val jesFileOutput = PipelinesApiFileOutput(makeSafeReferenceName(womFile.value), destination, relpath, disk, fileEvaluation.optional, fileEvaluation.secondary) + val jesFileOutput = PipelinesApiFileOutput(makeSafeReferenceName(womFile.value), + destination, + relpath, + disk, + fileEvaluation.optional, + fileEvaluation.secondary + ) List(jesFileOutput) } } @@ -394,10 +496,12 @@ object PipelinesApiAsyncBackendJobExecutionActor { // - There must be at least one '/', followed by some content in the file name. // - Or, then, for directories: // - If we got this far, we already have a valid directory path. Allow it to optionally end with a `/` character. - private val gcsFilePathMatcher = "(?s)^gs://([a-zA-Z0-9][^/]+)(/[^/]+)*/[^/]+$".r + private val gcsFilePathMatcher = "(?s)^gs://([a-zA-Z0-9][^/]+)(/[^/]+)*/[^/]+$".r private val gcsDirectoryPathMatcher = "(?s)^gs://([a-zA-Z0-9][^/]+)(/[^/]+)*/?$".r - private [v2beta] def groupParametersByGcsBucket[T <: PipelinesParameter](parameters: List[T]): Map[String, NonEmptyList[T]] = { + private[v2beta] def groupParametersByGcsBucket[T <: PipelinesParameter]( + parameters: List[T] + ): Map[String, NonEmptyList[T]] = parameters.map { param => def pathTypeString = if (param.isFileParameter) "File" else "Directory" val regexToUse = if (param.isFileParameter) gcsFilePathMatcher else gcsDirectoryPathMatcher @@ -406,16 +510,17 @@ object PipelinesApiAsyncBackendJobExecutionActor { case regexToUse(bucket) => Map(bucket -> NonEmptyList.of(param)) case regexToUse(bucket, _) => Map(bucket -> NonEmptyList.of(param)) case other => - throw new Exception(s"$pathTypeString path '$other' did not match the expected regex: ${regexToUse.pattern.toString}") with NoStackTrace + throw new Exception( + s"$pathTypeString path '$other' did not match the expected regex: ${regexToUse.pattern.toString}" + ) with NoStackTrace } } combineAll - } - private [v2beta] def generateDrsLocalizerManifest(inputs: List[PipelinesApiInput]): String = { + private[v2beta] def generateDrsLocalizerManifest(inputs: List[PipelinesApiInput]): String = { val outputStream = new ByteArrayOutputStream() val csvPrinter = new CSVPrinter(new OutputStreamWriter(outputStream), CSVFormat.DEFAULT) - val drsFileInputs = inputs collect { - case drsInput@PipelinesApiFileInput(_, drsPath: DrsPath, _, _) => (drsInput, drsPath) + val drsFileInputs = inputs collect { case drsInput @ PipelinesApiFileInput(_, drsPath: DrsPath, _, _) => + (drsInput, drsPath) } drsFileInputs foreach { case (drsInput, drsPath) => csvPrinter.printRecord(drsPath.pathAsString, drsInput.containerPath.pathAsString) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiLifecycleActorFactory.scala 
b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiLifecycleActorFactory.scala index 9a0f3b7334e..ff43845ccd1 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiLifecycleActorFactory.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiLifecycleActorFactory.scala @@ -2,18 +2,22 @@ package cromwell.backend.google.pipelines.v2beta import akka.actor.{ActorRef, Props} import cromwell.backend.BackendConfigurationDescriptor -import cromwell.backend.google.pipelines.common.{PipelinesApiBackendLifecycleActorFactory, PipelinesApiBackendSingletonActor, PipelinesApiConfiguration} +import cromwell.backend.google.pipelines.common.{ + PipelinesApiBackendLifecycleActorFactory, + PipelinesApiBackendSingletonActor, + PipelinesApiConfiguration +} import cromwell.backend.google.pipelines.v2beta.api.request.RequestHandler import cromwell.backend.standard.StandardAsyncExecutionActor class PipelinesApiLifecycleActorFactory(name: String, configurationDescriptor: BackendConfigurationDescriptor) - extends PipelinesApiBackendLifecycleActorFactory(name, configurationDescriptor) { + extends PipelinesApiBackendLifecycleActorFactory(name, configurationDescriptor) { private val genomicsFactory = LifeSciencesFactory( googleConfig.applicationName, papiAttributes.auths.genomics, papiAttributes.endpointUrl, - papiAttributes.location, + papiAttributes.location )(papiAttributes.gcsTransferConfiguration) override protected val jesConfiguration = new PipelinesApiConfiguration(configurationDescriptor, genomicsFactory, googleConfig, papiAttributes) @@ -22,7 +26,7 @@ class PipelinesApiLifecycleActorFactory(name: String, configurationDescriptor: B new RequestHandler( googleConfig.applicationName, papiAttributes.endpointUrl, - papiAttributes.batchRequestTimeoutConfiguration, + papiAttributes.batchRequestTimeoutConfiguration ) PipelinesApiBackendSingletonActor.props( jesConfiguration.papiAttributes.qps, diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala index 64365fe404f..0ce16f1e4a8 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala @@ -20,8 +20,9 @@ import simulacrum.typeclass trait PipelinesParameterConversions { implicit val fileInputToParameter: ToParameter[PipelinesApiFileInput] = new ToParameter[PipelinesApiFileInput] { - override def toActions(fileInput: PipelinesApiFileInput, mounts: List[Mount]) - (implicit retryPolicy: GcsTransferConfiguration): List[Action] = { + override def toActions(fileInput: PipelinesApiFileInput, mounts: List[Mount])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Action] = { lazy val config = ConfigFactory.load val labels = ActionBuilder.parameterLabels(fileInput) @@ -29,13 +30,12 @@ trait PipelinesParameterConversions { case sraPath: SraPath => val sraConfig = config.getConfig("filesystems.sra") - def getString(key: String): Option[String] = { + def getString(key: String): Option[String] = if (sraConfig.hasPath(key)) { 
Some(sraConfig.getString(key)) } else { None } - } val image = getString("docker-image") getOrElse "fusera/fusera:alpine" val (createNgc, ngcArgs) = getString("ngc") match { @@ -67,31 +67,33 @@ trait PipelinesParameterConversions { implicit val directoryInputToParameter: ToParameter[PipelinesApiDirectoryInput] = new ToParameter[PipelinesApiDirectoryInput] { - override def toActions(directoryInput: PipelinesApiDirectoryInput, mounts: List[Mount]) - (implicit retryPolicy: GcsTransferConfiguration): List[Action] = { - directoryInput.cloudPath match { - case _: GcsPath => Nil // GCS paths will be localized with a separate localization script. - case _ => - val labels = ActionBuilder.parameterLabels(directoryInput) - val describeAction = ActionBuilder.describeParameter(directoryInput, labels) - val localizationAction = cloudSdkShellAction( - localizeDirectory(directoryInput.cloudPath, directoryInput.containerPath) - )(mounts = mounts, labels = labels) - List(describeAction, localizationAction) - } + override def toActions(directoryInput: PipelinesApiDirectoryInput, mounts: List[Mount])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Action] = + directoryInput.cloudPath match { + case _: GcsPath => Nil // GCS paths will be localized with a separate localization script. + case _ => + val labels = ActionBuilder.parameterLabels(directoryInput) + val describeAction = ActionBuilder.describeParameter(directoryInput, labels) + val localizationAction = cloudSdkShellAction( + localizeDirectory(directoryInput.cloudPath, directoryInput.containerPath) + )(mounts = mounts, labels = labels) + List(describeAction, localizationAction) + } } - } implicit val fileOutputToParameter: ToParameter[PipelinesApiFileOutput] = new ToParameter[PipelinesApiFileOutput] { - override def toActions(fileOutput: PipelinesApiFileOutput, mounts: List[Mount]) - (implicit retryPolicy: GcsTransferConfiguration): List[Action] = { + override def toActions(fileOutput: PipelinesApiFileOutput, mounts: List[Mount])(implicit + retryPolicy: GcsTransferConfiguration + ): List[Action] = { // If the output is a "secondary file", it actually could be a directory but we won't know before runtime. 
// The fileOrDirectory method will generate a command that can cover both cases - lazy val copy = if (fileOutput.secondary) - delocalizeFileOrDirectory(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) - else - delocalizeFile(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) + lazy val copy = + if (fileOutput.secondary) + delocalizeFileOrDirectory(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) + else + delocalizeFile(fileOutput.containerPath, fileOutput.cloudPath, fileOutput.contentType) lazy val copyOnlyIfExists = ifExist(fileOutput.containerPath) { copy @@ -106,7 +108,8 @@ trait PipelinesParameterConversions { case _ => val describeAction = ActionBuilder.describeParameter(fileOutput, labels) - val delocalizationAction = cloudSdkShellAction(copyCommand)(mounts = mounts, labels = labels).withAlwaysRun(true) + val delocalizationAction = + cloudSdkShellAction(copyCommand)(mounts = mounts, labels = labels).withAlwaysRun(true) List(describeAction, delocalizationAction) } @@ -132,32 +135,34 @@ trait PipelinesParameterConversions { implicit val directoryOutputToParameter: ToParameter[PipelinesApiDirectoryOutput] = new ToParameter[PipelinesApiDirectoryOutput] { - override def toActions(directoryOutput: PipelinesApiDirectoryOutput, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = { - directoryOutput.cloudPath match { - case _: GcsPath => Nil // GCS paths will be delocalized with a separate delocalization script. - case _ => - val labels = ActionBuilder.parameterLabels(directoryOutput) - val describeAction = ActionBuilder.describeParameter(directoryOutput, labels) - val delocalizationAction = cloudSdkShellAction( - delocalizeDirectory(directoryOutput.containerPath, directoryOutput.cloudPath, None) - )(mounts = mounts, labels = labels).withAlwaysRun(true) - List(describeAction, delocalizationAction) - } + override def toActions(directoryOutput: PipelinesApiDirectoryOutput, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = + directoryOutput.cloudPath match { + case _: GcsPath => Nil // GCS paths will be delocalized with a separate delocalization script. 
+ case _ => + val labels = ActionBuilder.parameterLabels(directoryOutput) + val describeAction = ActionBuilder.describeParameter(directoryOutput, labels) + val delocalizationAction = cloudSdkShellAction( + delocalizeDirectory(directoryOutput.containerPath, directoryOutput.cloudPath, None) + )(mounts = mounts, labels = labels).withAlwaysRun(true) + List(describeAction, delocalizationAction) + } } - } implicit val inputToParameter: ToParameter[PipelinesApiInput] = new ToParameter[PipelinesApiInput] { - override def toActions(p: PipelinesApiInput, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = p match { + override def toActions(p: PipelinesApiInput, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = p match { case fileInput: PipelinesApiFileInput => fileInputToParameter.toActions(fileInput, mounts) case directoryInput: PipelinesApiDirectoryInput => directoryInputToParameter.toActions(directoryInput, mounts) } } implicit val outputToParameter: ToParameter[PipelinesApiOutput] = new ToParameter[PipelinesApiOutput] { - override def toActions(p: PipelinesApiOutput, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = p match { + override def toActions(p: PipelinesApiOutput, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = p match { case fileOutput: PipelinesApiFileOutput => fileOutputToParameter.toActions(fileOutput, mounts) case directoryOutput: PipelinesApiDirectoryOutput => directoryOutputToParameter.toActions(directoryOutput, mounts) } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesUtilityConversions.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesUtilityConversions.scala index 09359428d01..a1e08007b05 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesUtilityConversions.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesUtilityConversions.scala @@ -12,14 +12,15 @@ import PipelinesUtilityConversions._ import scala.language.postfixOps trait PipelinesUtilityConversions { - def toAccelerator(gpuResource: GpuResource): Accelerator = new Accelerator().setCount(gpuResource.gpuCount.value.toLong).setType(gpuResource.gpuType.toString) + def toAccelerator(gpuResource: GpuResource): Accelerator = + new Accelerator().setCount(gpuResource.gpuCount.value.toLong).setType(gpuResource.gpuType.toString) def toMachineType(jobLogger: JobLogger)(attributes: PipelinesApiRuntimeAttributes): String = MachineConstraints.machineType( memory = attributes.memory, cpu = attributes.cpu, cpuPlatformOption = attributes.cpuPlatform, googleLegacyMachineSelection = attributes.googleLegacyMachineSelection, - jobLogger = jobLogger, + jobLogger = jobLogger ) def toMounts(disks: Seq[PipelinesApiAttachedDisk]): List[Mount] = disks.map(toMount).toList def toDisks(disks: Seq[PipelinesApiAttachedDisk]): List[Disk] = disks.map(toDisk).toList @@ -57,7 +58,9 @@ trait PipelinesUtilityConversions { // There are both "Started pulling" and "Stopped pulling" events but these are confusing for metadata, especially on the // timing diagram. Create a single "Pulling " grouping to absorb these events. 
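// For example (illustrative description strings, not taken from a real operation):
//   "Started pulling ubuntu:latest" -> "Pulling ubuntu:latest"
//   "Stopped pulling ubuntu:latest" -> "Pulling ubuntu:latest"
// Both events map to the same grouping, so the pair collapses into one timing entry.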
def groupingFromPull: Option[String] = List("Started", "Stopped") flatMap { k => - Option(event.getDescription) collect { case d if d.startsWith(s"$k pulling") => "Pulling" + d.substring(s"$k pulling".length)} + Option(event.getDescription) collect { + case d if d.startsWith(s"$k pulling") => "Pulling" + d.substring(s"$k pulling".length) + } } headOption // See WX-1137. ContainerStoppedEvent descriptions contain the stderr, which may include 4-byte unicode @@ -85,7 +88,7 @@ trait PipelinesUtilityConversions { object PipelinesUtilityConversions { implicit class EnhancedEvent(val event: Event) extends AnyVal { - def getActionId: Option[Integer] = { + def getActionId: Option[Integer] = if (event.getContainerKilled != null) { Option(event.getContainerKilled.getActionId) } else if (event.getContainerStarted != null) { @@ -97,12 +100,10 @@ object PipelinesUtilityConversions { } else { None } - } } lazy val utf8mb4Regex = "[\\x{10000}-\\x{FFFFF}]" - lazy val utf8mb3Replacement = "\uFFFD" // This is the standard char for replacing invalid/unknown unicode chars - def cleanUtf8mb4(in: String): String = { + lazy val utf8mb3Replacement = "\uFFFD" // This is the standard char for replacing invalid/unknown unicode chars + def cleanUtf8mb4(in: String): String = in.replaceAll(utf8mb4Regex, utf8mb3Replacement) - } } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala index 8a1a3bfc97e..a2cbb69980f 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala @@ -20,6 +20,7 @@ import scala.concurrent.duration._ */ object ActionBuilder { implicit class EnhancedAction(val action: Action) extends AnyVal { + /** * Only for use with docker images KNOWN to not have entrypoints already set, * or used with accompanying call to setEntrypoint("non-empty-string"). @@ -33,7 +34,7 @@ object ActionBuilder { * Useful for any externally provided images that _might_ have entrypoints already set. This is a workaround for * the issue detailed in BA-6406. See underlying google issue in that ticket for more info. 
*/ - def withEntrypointCommand(command: String*): Action = { + def withEntrypointCommand(command: String*): Action = action .setEntrypoint(command.headOption.orNull) .setCommands( @@ -42,7 +43,6 @@ object ActionBuilder { .map(_.asJava) .orNull ) - } def withMounts(mounts: List[Mount]): Action = action.setMounts(mounts.asJava) def withLabels(labels: Map[String, String]): Action = action.setLabels(labels.asJava) @@ -54,8 +54,10 @@ object ActionBuilder { def withRunInBackground(runInBackground: Boolean): Action = action.setRunInBackground(runInBackground) def withAlwaysRun(alwaysRun: Boolean): Action = action.setAlwaysRun(alwaysRun) def withEnableFuse(enableFuse: Boolean): Action = action.setEnableFuse(enableFuse) - def withPublishExposedPorts(publishExposedPorts: Boolean): Action = action.setPublishExposedPorts(publishExposedPorts) - def withDisableImagePrefetch(disableImagePrefetch: Boolean): Action = action.setDisableImagePrefetch(disableImagePrefetch) + def withPublishExposedPorts(publishExposedPorts: Boolean): Action = + action.setPublishExposedPorts(publishExposedPorts) + def withDisableImagePrefetch(disableImagePrefetch: Boolean): Action = + action.setDisableImagePrefetch(disableImagePrefetch) def scalaLabels: Map[String, String] = { val list = for { @@ -71,8 +73,9 @@ object ActionBuilder { def withImage(image: String): Action = new Action() .setImageUri(image) - def monitoringImageScriptAction(cloudPath: Path, containerPath: Path, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { + def monitoringImageScriptAction(cloudPath: Path, containerPath: Path, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): Action = { val command = ActionCommands.localizeFile(cloudPath, containerPath) val labels = Map(Key.Tag -> Value.Localization) ActionBuilder.cloudSdkShellAction(command)(mounts = mounts, labels = labels) @@ -81,8 +84,8 @@ object ActionBuilder { def backgroundAction(image: String, command: List[String], environment: Map[String, String], - mounts: List[Mount], - ): Action = { + mounts: List[Mount] + ): Action = new Action() .setImageUri(image) .withEntrypointCommand(command: _*) @@ -92,7 +95,6 @@ object ActionBuilder { .setEnvironment(environment.asJava) .withLabels(Map(Key.Tag -> Value.Monitoring)) .setPidNamespace(backgroundActionPidNamespace) - } def terminateBackgroundActionsAction(): Action = cloudSdkShellAction(terminateAllBackgroundActionsCommand)(labels = Map(Key.Tag -> Value.Monitoring)) @@ -110,13 +112,16 @@ object ActionBuilder { mounts: List[Mount], jobShell: String, privateDockerKeyAndToken: Option[CreatePipelineDockerKeyAndToken], - fuseEnabled: Boolean): Action = { + fuseEnabled: Boolean + ): Action = { val dockerImageIdentifier = DockerImageIdentifier.fromString(docker) val secret = for { imageId <- dockerImageIdentifier.toOption - if DockerHub.isValidDockerHubHost(imageId.host) // This token only works for Docker Hub and not other repositories. + if DockerHub.isValidDockerHubHost( + imageId.host + ) // This token only works for Docker Hub and not other repositories. 
keyAndToken <- privateDockerKeyAndToken s = new Secret().setKeyName(keyAndToken.key).setCipherText(keyAndToken.encryptedToken) } yield s @@ -131,16 +136,16 @@ object ActionBuilder { .setEnableFuse(fuseEnabled) } - def checkForMemoryRetryAction(retryLookupKeys: List[String], mounts: List[Mount]): Action = { + def checkForMemoryRetryAction(retryLookupKeys: List[String], mounts: List[Mount]): Action = cloudSdkShellAction(ActionCommands.checkIfStderrContainsRetryKeys(retryLookupKeys))( mounts = mounts, labels = Map(Key.Tag -> Value.RetryWithMoreMemory) ).withAlwaysRun(true) - } def cloudSdkShellAction(shellCommand: String)(mounts: List[Mount] = List.empty, labels: Map[String, String] = Map.empty, - timeout: Duration = Duration.Inf): Action = + timeout: Duration = Duration.Inf + ): Action = cloudSdkAction .withEntrypointCommand( "/bin/sh", @@ -157,7 +162,7 @@ object ActionBuilder { * @param pipelinesParameter Input or output parameter to label. * @return The labels. */ - def parameterLabels(pipelinesParameter: PipelinesParameter): Map[String, String] = { + def parameterLabels(pipelinesParameter: PipelinesParameter): Map[String, String] = pipelinesParameter match { case _: PipelinesApiInput => Map( @@ -170,7 +175,6 @@ object ActionBuilder { Key.OutputName -> pipelinesParameter.name ) } - } /** * Surrounds the list of Actions with a pair of starting and done Actions. @@ -181,8 +185,9 @@ object ActionBuilder { * @param actions The list of Actions to surround. * @return The starting Action, the passed in list, and then a done Action. */ - def annotateTimestampedActions(description: String, loggingLabelValue: String, isAlwaysRun: Boolean = false) - (actions: List[Action]): List[Action] = { + def annotateTimestampedActions(description: String, loggingLabelValue: String, isAlwaysRun: Boolean = false)( + actions: List[Action] + ): List[Action] = { val labels = Map(Key.Logging -> loggingLabelValue) val starting = List(logTimestampedAction(s"Starting $description.", labels).withAlwaysRun(isAlwaysRun)) val done = List(logTimestampedAction(s"Done $description.", labels).withAlwaysRun(isAlwaysRun)) @@ -190,31 +195,28 @@ object ActionBuilder { } /** Creates an Action that describes the parameter localization or delocalization. */ - def describeParameter(pipelinesParameter: PipelinesParameter, - actionLabels: Map[String, String]): Action = { + def describeParameter(pipelinesParameter: PipelinesParameter, actionLabels: Map[String, String]): Action = pipelinesParameter match { case _: PipelinesApiInput => val message = "Localizing input %s -> %s".format( shellEscaped(pipelinesParameter.cloudPath), - shellEscaped(pipelinesParameter.containerPath), + shellEscaped(pipelinesParameter.containerPath) ) ActionBuilder.logTimestampedAction(message, actionLabels) case _: PipelinesApiOutput => val message = "Delocalizing output %s -> %s".format( shellEscaped(pipelinesParameter.containerPath), - shellEscaped(pipelinesParameter.cloudPath), + shellEscaped(pipelinesParameter.cloudPath) ) ActionBuilder.logTimestampedAction(message, actionLabels).withAlwaysRun(true) } - } /** Creates an Action that logs the docker command for the passed in action. 
*/ - def describeDocker(description: String, action: Action): Action = { + def describeDocker(description: String, action: Action): Action = ActionBuilder.logTimestampedAction( s"Running $description: ${ActionBuilder.toDockerRun(action)}", action.scalaLabels ) - } def timestampedMessage(message: String): String = s"""printf '%s %s\\n' "$$(date -u '+%Y/%m/%d %H:%M:%S')" ${shellEscaped(message)}""" @@ -230,8 +232,7 @@ object ActionBuilder { * @param actionLabels Labels from the original Action to modify and apply to the logging Action. * @return A new Action that will log the time and print the message. */ - private def logTimestampedAction(message: String, - actionLabels: Map[String, String]): Action = { + private def logTimestampedAction(message: String, actionLabels: Map[String, String]): Action = // Uses the cloudSdk image as that image will be used for other operations as well. cloudSdkShellAction( timestampedMessage(message) @@ -242,7 +243,6 @@ object ActionBuilder { }, timeout = 300.seconds ) - } /** Converts an Action to a `docker run ...` command runnable in the shell. */ private[api] def toDockerRun(action: Action): String = { @@ -263,7 +263,8 @@ object ActionBuilder { val environmentArgs: String = Option(action.getEnvironment) match { case Some(environment) => environment.asScala map { - case (key, value) if Option(key).isDefined && Option(value).isDefined => s" -e ${shellEscaped(s"$key:$value")}" + case (key, value) if Option(key).isDefined && Option(value).isDefined => + s" -e ${shellEscaped(s"$key:$value")}" case (key, _) if Option(key).isDefined => s" -e ${shellEscaped(key)}" case _ => "" } mkString "" @@ -307,15 +308,15 @@ object ActionBuilder { val publishExposedPortsFlag: String = if (action.getPublishExposedPorts) " -P" else "" Array("docker run", - nameArg, - mountArgs, - environmentArgs, - pidNamespaceArg, - publishExposedPortsFlag, - portMappingArgs, - entrypointArg, - imageArg, - commandArgs, + nameArg, + mountArgs, + environmentArgs, + pidNamespaceArg, + publishExposedPortsFlag, + portMappingArgs, + entrypointArg, + imageArg, + commandArgs ).mkString } } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/CheckpointingAction.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/CheckpointingAction.scala index 658ea88ffff..fca9b0c6f9b 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/CheckpointingAction.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/CheckpointingAction.scala @@ -6,19 +6,21 @@ import cromwell.backend.google.pipelines.common.action.ActionUtils import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters trait CheckpointingAction { - def checkpointingSetupActions(createPipelineParameters: CreatePipelineParameters, - mounts: List[Mount] - ): List[Action] = + def checkpointingSetupActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]): List[Action] = createPipelineParameters.runtimeAttributes.checkpointFilename map { checkpointFilename => val checkpointingImage = ActionUtils.CloudSdkImage - val checkpointingCommand = createPipelineParameters.checkpointingConfiguration.checkpointingCommand(checkpointFilename, ActionCommands.multiLineBinBashCommand) + val checkpointingCommand = + 
createPipelineParameters.checkpointingConfiguration.checkpointingCommand(checkpointFilename, + ActionCommands.multiLineBinBashCommand + ) val checkpointingEnvironment = Map.empty[String, String] // Initial sync from cloud: val initialCheckpointSyncAction = ActionBuilder.cloudSdkShellAction( createPipelineParameters.checkpointingConfiguration.localizePreviousCheckpointCommand(checkpointFilename) )(mounts = mounts) - val describeInitialCheckpointingSyncAction = ActionBuilder.describeDocker("initial checkpointing sync", initialCheckpointSyncAction) + val describeInitialCheckpointingSyncAction = + ActionBuilder.describeDocker("initial checkpointing sync", initialCheckpointSyncAction) // Background upload action: val backgroundCheckpointingAction = ActionBuilder.backgroundAction( @@ -27,20 +29,28 @@ trait CheckpointingAction { environment = checkpointingEnvironment, mounts = mounts ) - val describeBackgroundCheckpointingAction = ActionBuilder.describeDocker("begin checkpointing background action", backgroundCheckpointingAction) + val describeBackgroundCheckpointingAction = + ActionBuilder.describeDocker("begin checkpointing background action", backgroundCheckpointingAction) - List(describeInitialCheckpointingSyncAction, initialCheckpointSyncAction, describeBackgroundCheckpointingAction, backgroundCheckpointingAction) - } getOrElse(Nil) + List(describeInitialCheckpointingSyncAction, + initialCheckpointSyncAction, + describeBackgroundCheckpointingAction, + backgroundCheckpointingAction + ) + } getOrElse Nil - def checkpointingShutdownActions(createPipelineParameters: CreatePipelineParameters): List[Action] = { + def checkpointingShutdownActions(createPipelineParameters: CreatePipelineParameters): List[Action] = createPipelineParameters.runtimeAttributes.checkpointFilename map { checkpointFilename => val terminationAction = ActionBuilder.terminateBackgroundActionsAction() val describeTerminationAction = ActionBuilder.describeDocker("terminate checkpointing action", terminationAction) - val deleteCheckpointAction = ActionBuilder.gcsFileDeletionAction(createPipelineParameters.checkpointingConfiguration.checkpointFileCloud(checkpointFilename)) - val deleteTmpCheckpointAction = ActionBuilder.gcsFileDeletionAction(createPipelineParameters.checkpointingConfiguration.tmpCheckpointFileCloud(checkpointFilename)) + val deleteCheckpointAction = ActionBuilder.gcsFileDeletionAction( + createPipelineParameters.checkpointingConfiguration.checkpointFileCloud(checkpointFilename) + ) + val deleteTmpCheckpointAction = ActionBuilder.gcsFileDeletionAction( + createPipelineParameters.checkpointingConfiguration.tmpCheckpointFileCloud(checkpointFilename) + ) List(describeTerminationAction, terminationAction, deleteCheckpointAction, deleteTmpCheckpointAction) - } getOrElse(Nil) - } + } getOrElse Nil } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala index 8fc0d81cff1..4d8416c46af 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala @@ -23,39 +23,50 @@ import scala.concurrent.duration._ trait Delocalization { - private def delocalizeLogsAction(gcsLogPath: Path)(implicit gcsTransferConfiguration: 
GcsTransferConfiguration) = { + private def delocalizeLogsAction(gcsLogPath: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration) = cloudSdkShellAction( - delocalizeDirectory(DefaultPathBuilder.build(logsRoot).get, gcsLogPath, PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType) + delocalizeDirectory(DefaultPathBuilder.build(logsRoot).get, + gcsLogPath, + PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType + ) )(labels = Map(Key.Tag -> Value.Delocalization)).withAlwaysRun(true) - } // Used for the final copy of the logs to make sure we have the most up to date version before terminating the job - private def copyAggregatedLogToLegacyPath(gcsLegacyLogPath: Path) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { + private def copyAggregatedLogToLegacyPath( + gcsLegacyLogPath: Path + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = cloudSdkShellAction( - delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, gcsLegacyLogPath, PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType) + delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, + gcsLegacyLogPath, + PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType + ) )(labels = Map(Key.Tag -> Value.Delocalization)).withAlwaysRun(true) - } // Periodically copies the logs out to GCS - private def copyAggregatedLogToLegacyPathPeriodic(gcsLegacyLogPath: Path) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = { + private def copyAggregatedLogToLegacyPathPeriodic( + gcsLegacyLogPath: Path + )(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = cloudSdkShellAction( - every(30.seconds) { delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, gcsLegacyLogPath, PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType) } + every(30.seconds) { + delocalizeFileTo(DefaultPathBuilder.build(aggregatedLog).get, + gcsLegacyLogPath, + PipelinesApiAsyncBackendJobExecutionActor.plainTextContentType + ) + } )(labels = Map(Key.Tag -> Value.Background)).withRunInBackground(true) - } private def runtimeOutputExtractorAction(containerCallRoot: String, - outputFile: String, - mounts: List[Mount], - womOutputRuntimeExtractor: WomOutputRuntimeExtractor): Action = { + outputFile: String, + mounts: List[Mount], + womOutputRuntimeExtractor: WomOutputRuntimeExtractor + ): Action = { val commands = List( "-c", // Create the directory where the fofn will be written s"mkdir -p $$(dirname $outputFile) && " + - s"cd $containerCallRoot && " + - """echo "Runtime output files to be delocalized:" && """ + - s"${womOutputRuntimeExtractor.command} | tee $outputFile" + s"cd $containerCallRoot && " + + """echo "Runtime output files to be delocalized:" && """ + + s"${womOutputRuntimeExtractor.command} | tee $outputFile" ) ActionBuilder @@ -68,7 +79,9 @@ trait Delocalization { .withLabels(Map(Key.Tag -> Value.Delocalization)) } - private def delocalizeRuntimeOutputsScript(fofnPath: String, workflowRoot: Path, cloudCallRoot: Path)(implicit gcsTransferConfiguration: GcsTransferConfiguration) = { + private def delocalizeRuntimeOutputsScript(fofnPath: String, workflowRoot: Path, cloudCallRoot: Path)(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ) = { val gsutilCommand: String => String = { flag => s"""rm -f $$HOME/.config/gcloud/gce && gsutil -m $flag cp -r $$line "${cloudCallRoot.pathAsString.ensureSlashed}$$gcs_path"""" } @@ -99,13 +112,20 @@ trait Delocalization { |fi""".stripMargin } - private def 
delocalizeRuntimeOutputsAction(cloudCallRoot: Path, inputFile: String, workflowRoot: Path, mounts: List[Mount])(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = {
+  private def delocalizeRuntimeOutputsAction(cloudCallRoot: Path,
+                                             inputFile: String,
+                                             workflowRoot: Path,
+                                             mounts: List[Mount]
+  )(implicit gcsTransferConfiguration: GcsTransferConfiguration): Action = {
     val command = multiLineCommand(delocalizeRuntimeOutputsScript(inputFile, workflowRoot, cloudCallRoot))
-    ActionBuilder.cloudSdkShellAction(command)(mounts, labels = Map(Key.Tag -> Value.Delocalization)).withDisableImagePrefetch(true)
+    ActionBuilder
+      .cloudSdkShellAction(command)(mounts, labels = Map(Key.Tag -> Value.Delocalization))
+      .withDisableImagePrefetch(true)
   }

-  def deLocalizeActions(createPipelineParameters: CreatePipelineParameters,
-                        mounts: List[Mount])(implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = {
+  def deLocalizeActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount])(implicit
+    gcsTransferConfiguration: GcsTransferConfiguration
+  ): List[Action] = {
     val cloudCallRoot = createPipelineParameters.cloudCallRoot
     val callExecutionContainerRoot = createPipelineParameters.commandScriptContainerPath.parent

@@ -116,21 +136,31 @@ trait Delocalization {
      * Ideally temporaryFofnForRuntimeOutputFiles should be somewhere other than the execution directory (we could mount another directory)
      * However because it runs after everything else there's no risk of polluting the task's results and the random ID ensures we don't overwrite anything
      */
-    val temporaryFofnDirectoryForRuntimeOutputFiles = callExecutionContainerRoot.pathAsString.ensureSlashed + UUID.randomUUID().toString.split("-")(0)
+    val temporaryFofnDirectoryForRuntimeOutputFiles =
+      callExecutionContainerRoot.pathAsString.ensureSlashed + UUID.randomUUID().toString.split("-")(0)
     val temporaryFofnForRuntimeOutputFiles = temporaryFofnDirectoryForRuntimeOutputFiles + "/runtime_output_files.txt"

     val runtimeExtractionActions = createPipelineParameters.womOutputRuntimeExtractor.toList flatMap { extractor =>
-      List (
-        runtimeOutputExtractorAction(callExecutionContainerRoot.pathAsString, temporaryFofnForRuntimeOutputFiles, mounts, extractor),
-        delocalizeRuntimeOutputsAction(cloudCallRoot, temporaryFofnForRuntimeOutputFiles, createPipelineParameters.cloudWorkflowRoot, mounts)
+      List(
+        runtimeOutputExtractorAction(callExecutionContainerRoot.pathAsString,
+                                     temporaryFofnForRuntimeOutputFiles,
+                                     mounts,
+                                     extractor
+        ),
+        delocalizeRuntimeOutputsAction(cloudCallRoot,
+                                       temporaryFofnForRuntimeOutputFiles,
+                                       createPipelineParameters.cloudWorkflowRoot,
+                                       mounts
+        )
       )
     }

-    val gcsDelocalizationContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName)
+    val gcsDelocalizationContainerPath =
+      createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName)
     val delocalizationLabel = Map(Key.Tag -> Value.Delocalization)

-    val runGcsDelocalizationScript: Action = cloudSdkShellAction(
-      s"/bin/bash $gcsDelocalizationContainerPath")(mounts = mounts, labels = delocalizationLabel)
+    val runGcsDelocalizationScript: Action =
+      cloudSdkShellAction(s"/bin/bash $gcsDelocalizationContainerPath")(mounts = mounts, labels = delocalizationLabel)

     ActionBuilder.annotateTimestampedActions("delocalization", Value.Delocalization)(
       runGcsDelocalizationScript ::
diff --git
a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala
index 54de52e74be..8699983ca63 100644
--- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala
+++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala
@@ -23,9 +23,10 @@ import scala.util.{Failure, Success, Try}
  * for which there's an existing class.
  * This class provides implicit functions to deserialize those maps to their proper type.
  */
-private [api] object Deserialization {
+private[api] object Deserialization {

   implicit class OperationDeserialization(val operation: Operation) extends AnyVal {
+
     /**
      * Deserializes the events to com.google.api.services.genomics.v2beta.model.Event
      *
@@ -35,11 +36,12 @@ private [api] object Deserialization {
     def events: ErrorOr[List[Event]] = {
       val eventsErrorOrOption = for {
         eventsMap <- metadata.get("events")
-        eventsErrorOr <- Option(eventsMap
-          .asInstanceOf[JArrayList[JMap[String, Object]]]
-          .asScala
-          .toList
-          .traverse[ErrorOr, Event](deserializeTo[Event](_).toErrorOr)
+        eventsErrorOr <- Option(
+          eventsMap
+            .asInstanceOf[JArrayList[JMap[String, Object]]]
+            .asScala
+            .toList
+            .traverse[ErrorOr, Event](deserializeTo[Event](_).toErrorOr)
         )
       } yield eventsErrorOr
       eventsErrorOrOption.getOrElse(Nil.validNel)
@@ -48,11 +50,10 @@ private [api] object Deserialization {
     /**
      * Deserializes the pipeline to com.google.api.services.genomics.v2beta.model.Pipeline
      */
-    def pipeline: Option[Try[Pipeline]] = {
+    def pipeline: Option[Try[Pipeline]] =
       metadata
         .get("pipeline")
         .map(_.asInstanceOf[JMap[String, Object]] |> deserializeTo[Pipeline])
-    }

     // If there's a WorkerAssignedEvent it means a VM was created - which we consider as the job started
     // Note that the VM might still be booting
@@ -70,19 +71,23 @@ private [api] object Deserialization {
   /**
    * Deserializes a java.util.Map[String, Object] to an instance of T
    */
-  private [api] def deserializeTo[T <: GenericJson](attributes: JMap[String, Object])(implicit tag: ClassTag[T]): Try[T] = Try {
+  private[api] def deserializeTo[T <: GenericJson](
+    attributes: JMap[String, Object]
+  )(implicit tag: ClassTag[T]): Try[T] = Try {
     // Create a new instance, because it's a GenericJson there's always a 0-arg constructor
     val newT = tag.runtimeClass.asInstanceOf[Class[T]].getConstructor().newInstance()

     // Optionally returns the field with the given name
     def field(name: String) = Option(newT.getClassInfo.getField(name))

-    def handleMap(key: String, value: Object) = {
+    def handleMap(key: String, value: Object) =
       (field(key), value) match {
         // If the serialized value is a list, we need to check if its elements need to be deserialized
         case (Some(f), list: java.util.List[java.util.Map[String, Object]] @unchecked) =>
           // Try to get the generic type of the declared field (the field should have a list type since the value is a list)
-          Try(f.getGenericType.asInstanceOf[ParameterizedType].getActualTypeArguments.toList.head.asInstanceOf[Class[_]]) match {
+          Try(
+            f.getGenericType.asInstanceOf[ParameterizedType].getActualTypeArguments.toList.head.asInstanceOf[Class[_]]
+          ) match {
             // If we can get it and it's a GenericJson, it means we need to deserialize the elements to their proper type
             case Success(genericListType) if
classOf[GenericJson].isAssignableFrom(genericListType) => // The get throws at the first error and hence doesn't aggregate the errors but it seems @@ -101,7 +106,8 @@ private [api] object Deserialization { // If it can't be assigned and the value is a map, it is very likely that the field "key" of T is of some type U // but has been deserialized to a Map[String, Object]. In this case we retrieve the type U from the field and recurse // to deserialize properly - case (Some(f), map: java.util.Map[String, Object] @unchecked) if classOf[GenericJson].isAssignableFrom(f.getType) => + case (Some(f), map: java.util.Map[String, Object] @unchecked) + if classOf[GenericJson].isAssignableFrom(f.getType) => // This whole function is wrapped in a try so just .get to throw val deserializedInnerAttribute = deserializeTo(map)(ClassTag[GenericJson](f.getType)).get newT.set(key, deserializedInnerAttribute) @@ -118,7 +124,6 @@ private [api] object Deserialization { // and losing properly deserialized attributes case _ => } - } // Go over the map entries and use the "set" method of GenericJson to set the attributes. Option(attributes).map(_.asScala).getOrElse(Map.empty).foreach((handleMap _).tupled) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Localization.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Localization.scala index 3b470b9096b..5779c42cbeb 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Localization.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Localization.scala @@ -11,55 +11,69 @@ import cromwell.backend.google.pipelines.common.PipelinesApiJobPaths._ import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters import cromwell.backend.google.pipelines.v2beta.PipelinesConversions._ import cromwell.backend.google.pipelines.v2beta.ToParameter.ops._ -import cromwell.backend.google.pipelines.v2beta.api.ActionBuilder.{EnhancedAction, cloudSdkShellAction} +import cromwell.backend.google.pipelines.v2beta.api.ActionBuilder.{cloudSdkShellAction, EnhancedAction} import cromwell.core.path.Path import cromwell.filesystems.drs.DrsPath import scala.jdk.CollectionConverters._ - trait Localization { - def localizeActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]) - (implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = { + def localizeActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = { val localizationLabel = Map(Key.Tag -> Value.Localization) - val gcsTransferLibraryContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsTransferLibraryName) - val localizeGcsTransferLibrary = cloudSdkShellAction(localizeFile( - cloudPath = createPipelineParameters.cloudCallRoot / GcsTransferLibraryName, - containerPath = gcsTransferLibraryContainerPath))(mounts = mounts, labels = localizationLabel) - - val gcsLocalizationContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsLocalizationScriptName) - val localizeGcsLocalizationScript = cloudSdkShellAction(localizeFile( - cloudPath = createPipelineParameters.cloudCallRoot / GcsLocalizationScriptName, - containerPath = gcsLocalizationContainerPath))(mounts = mounts, 
labels = localizationLabel) - - val gcsDelocalizationContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) - val localizeGcsDelocalizationScript = cloudSdkShellAction(localizeFile( - cloudPath = createPipelineParameters.cloudCallRoot / GcsDelocalizationScriptName, - containerPath = gcsDelocalizationContainerPath))(mounts = mounts, labels = localizationLabel) - - val runGcsLocalizationScript = cloudSdkShellAction( - s"/bin/bash $gcsLocalizationContainerPath")(mounts = mounts, labels = localizationLabel) + val gcsTransferLibraryContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(GcsTransferLibraryName) + val localizeGcsTransferLibrary = cloudSdkShellAction( + localizeFile(cloudPath = createPipelineParameters.cloudCallRoot / GcsTransferLibraryName, + containerPath = gcsTransferLibraryContainerPath + ) + )(mounts = mounts, labels = localizationLabel) + + val gcsLocalizationContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(GcsLocalizationScriptName) + val localizeGcsLocalizationScript = cloudSdkShellAction( + localizeFile(cloudPath = createPipelineParameters.cloudCallRoot / GcsLocalizationScriptName, + containerPath = gcsLocalizationContainerPath + ) + )(mounts = mounts, labels = localizationLabel) + + val gcsDelocalizationContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(GcsDelocalizationScriptName) + val localizeGcsDelocalizationScript = cloudSdkShellAction( + localizeFile(cloudPath = createPipelineParameters.cloudCallRoot / GcsDelocalizationScriptName, + containerPath = gcsDelocalizationContainerPath + ) + )(mounts = mounts, labels = localizationLabel) + + val runGcsLocalizationScript = + cloudSdkShellAction(s"/bin/bash $gcsLocalizationContainerPath")(mounts = mounts, labels = localizationLabel) val drsInputs: List[DrsPath] = createPipelineParameters.inputOutputParameters.fileInputParameters.collect { case PipelinesApiFileInput(_, drsPath: DrsPath, _, _) => drsPath } val drsLocalizationActions = if (drsInputs.nonEmpty) { - val drsLocalizationManifestContainerPath = createPipelineParameters.commandScriptContainerPath.sibling(DrsLocalizationManifestName) - val localizeDrsLocalizationManifest = cloudSdkShellAction(localizeFile( - cloudPath = createPipelineParameters.cloudCallRoot / DrsLocalizationManifestName, - containerPath = drsLocalizationManifestContainerPath))(mounts = mounts, labels = localizationLabel) + val drsLocalizationManifestContainerPath = + createPipelineParameters.commandScriptContainerPath.sibling(DrsLocalizationManifestName) + val localizeDrsLocalizationManifest = cloudSdkShellAction( + localizeFile(cloudPath = createPipelineParameters.cloudCallRoot / DrsLocalizationManifestName, + containerPath = drsLocalizationManifestContainerPath + ) + )(mounts = mounts, labels = localizationLabel) // Requester pays project id is stored on each DrsPath, but will be the same for all DRS inputs to a // particular workflow because it's determined by the Google project set in workflow options. 
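// A tiny sketch of the extraction below (hypothetical project id): because each
// DrsPath carries the same optional id, flattening and taking the head is deterministic:
//   List(Some("my-project"), Some("my-project")).flatten.headOption == Some("my-project")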
val requesterPaysProjectId: Option[String] = drsInputs.flatMap(_.requesterPaysProjectIdOption).headOption - val runDrsLocalization = Localization.drsAction(drsLocalizationManifestContainerPath, mounts, localizationLabel, requesterPaysProjectId) + val runDrsLocalization = + Localization.drsAction(drsLocalizationManifestContainerPath, mounts, localizationLabel, requesterPaysProjectId) List(localizeDrsLocalizationManifest, runDrsLocalization) } else List[Action]() // Any "classic" PAPI v2 one-at-a-time localizations for non-GCS inputs. - val singletonLocalizations = createPipelineParameters.inputOutputParameters.fileInputParameters.flatMap(_.toActions(mounts).toList) + val singletonLocalizations = + createPipelineParameters.inputOutputParameters.fileInputParameters.flatMap(_.toActions(mounts).toList) val localizations = localizeGcsTransferLibrary :: @@ -78,7 +92,7 @@ object Localization { mounts: List[Mount], labels: Map[String, String], requesterPaysProjectId: Option[String] - ): Action = { + ): Action = { val config = ConfigFactory.load val drsResolverConfig = config.getConfig("filesystems.drs.global.config.resolver") val drsConfig = DrsConfig.fromConfig(drsResolverConfig) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MemoryRetryCheckAction.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MemoryRetryCheckAction.scala index 07bc213d1bd..ed223394376 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MemoryRetryCheckAction.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MemoryRetryCheckAction.scala @@ -5,10 +5,11 @@ import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.C trait MemoryRetryCheckAction { - def checkForMemoryRetryActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]): List[Action] = { + def checkForMemoryRetryActions(createPipelineParameters: CreatePipelineParameters, + mounts: List[Mount] + ): List[Action] = createPipelineParameters.retryWithMoreMemoryKeys match { case Some(keys) => List(ActionBuilder.checkForMemoryRetryAction(keys, mounts)) case None => List.empty[Action] } - } } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MonitoringAction.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MonitoringAction.scala index 5b86d75cbcf..b51b6577de3 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MonitoringAction.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/MonitoringAction.scala @@ -5,9 +5,9 @@ import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttribu import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters trait MonitoringAction { - def monitoringSetupActions(createPipelineParameters: CreatePipelineParameters, - mounts: List[Mount] - )(implicit gcsTransferConfiguration: GcsTransferConfiguration): List[Action] = { + def monitoringSetupActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount])(implicit + gcsTransferConfiguration: GcsTransferConfiguration + ): List[Action] = { val monitoringImageScriptActions = 
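MemoryRetryCheckAction and MonitoringAction both reduce to the same idiom: optional configuration either contributes actions or contributes an empty list, never null. A dependency-free sketch of that shape (Action here is a stub, not the Life Sciences API type):

// Stub standing in for the Life Sciences Action type.
final case class Action(description: String)

// Optional config yields its actions or List.empty, so callers can
// concatenate every optional group with ++ unconditionally.
def memoryRetryActions(retryKeys: Option[List[String]]): List[Action] =
  retryKeys match {
    case Some(keys) => List(Action(s"scan stderr for: ${keys.mkString("|")}"))
    case None => List.empty[Action]
  }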
createPipelineParameters.monitoringImage.monitoringImageScriptOption match { @@ -16,12 +16,12 @@ trait MonitoringAction { ActionBuilder.monitoringImageScriptAction( script, createPipelineParameters.monitoringImage.monitoringImageScriptContainerPath, - mounts, + mounts ) val describeLocalizeScriptAction = ActionBuilder.describeDocker( "localizing monitoring image script action", - localizeScriptAction, + localizeScriptAction ) List(describeLocalizeScriptAction, localizeScriptAction) case None => Nil @@ -30,7 +30,6 @@ trait MonitoringAction { val monitoringImageActions = createPipelineParameters.monitoringImage.monitoringImageOption match { case Some(image) => - val monitoringImage = image val monitoringImageCommand = createPipelineParameters.monitoringImage.monitoringImageCommand val monitoringImageEnvironment = createPipelineParameters.monitoringImage.monitoringImageEnvironment @@ -39,7 +38,7 @@ trait MonitoringAction { monitoringImage, monitoringImageCommand, monitoringImageEnvironment(mounts.map(_.getPath)), - mounts, + mounts ) val describeMonitoringAction = ActionBuilder.describeDocker("monitoring action", monitoringAction) @@ -52,7 +51,7 @@ trait MonitoringAction { monitoringImageScriptActions ++ monitoringImageActions } - def monitoringShutdownActions(createPipelineParameters: CreatePipelineParameters): List[Action] = { + def monitoringShutdownActions(createPipelineParameters: CreatePipelineParameters): List[Action] = createPipelineParameters.monitoringImage.monitoringImageOption match { case Some(_) => val terminationAction = ActionBuilder.terminateBackgroundActionsAction() @@ -62,5 +61,4 @@ trait MonitoringAction { List(describeTerminationAction, terminationAction) case None => Nil } - } } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala index 342b5701847..a2b91178c1b 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala @@ -9,16 +9,16 @@ import scala.jdk.CollectionConverters._ trait SSHAccessAction { - def sshAccessActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]) : List[Action] = { + def sshAccessActions(createPipelineParameters: CreatePipelineParameters, mounts: List[Mount]): List[Action] = if (createPipelineParameters.enableSshAccess) { sshAccessAction(mounts) } else { Nil } - } private def sshAccessAction(mounts: List[Mount]): List[Action] = { - val sshAction = ActionBuilder.withImage(ActionUtils.sshImage) + val sshAction = ActionBuilder + .withImage(ActionUtils.sshImage) .withEntrypointCommand(ActionUtils.sshEntryPoint) .setPortMappings(ActionUtils.sshPortMappings.asJava) .setRunInBackground(true) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala index b4d9421b451..26905f51d2f 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala +++ 
b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/AbortRequestHandler.scala @@ -6,7 +6,10 @@ import com.google.api.client.googleapis.json.GoogleJsonError import com.google.api.client.http.HttpHeaders import com.typesafe.scalalogging.LazyLogging import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager._ -import cromwell.backend.google.pipelines.common.api.clients.PipelinesApiAbortClient.{PAPIAbortRequestSuccessful, PAPIOperationIsAlreadyTerminal} +import cromwell.backend.google.pipelines.common.api.clients.PipelinesApiAbortClient.{ + PAPIAbortRequestSuccessful, + PAPIOperationIsAlreadyTerminal +} import cromwell.cloudsupport.gcp.auth.GoogleAuthMode import org.apache.commons.lang3.StringUtils @@ -14,7 +17,11 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} trait AbortRequestHandler extends LazyLogging { this: RequestHandler => - protected def handleGoogleError(abortQuery: PAPIAbortRequest, pollingManager: ActorRef, e: GoogleJsonError, responseHeaders: HttpHeaders): Try[Unit] = { + protected def handleGoogleError(abortQuery: PAPIAbortRequest, + pollingManager: ActorRef, + e: GoogleJsonError, + responseHeaders: HttpHeaders + ): Try[Unit] = // This condition is telling us that the job we tried to cancel is already in a terminal state. Technically PAPI // was not able to cancel the job because the job could not be transitioned from 'Running' to 'Cancelled'. But from // Cromwell's perspective a job cancellation is really just a request for the job to be in a terminal state, so @@ -23,30 +30,34 @@ trait AbortRequestHandler extends LazyLogging { this: RequestHandler => // If/when Google implements https://partnerissuetracker.corp.google.com/issues/171993833 we could break these cases // out and make our logging more specific if we wanted to. if (Option(e.getCode).contains(400) || StringUtils.contains(e.getMessage, "Precondition check failed")) { - logger.info(s"PAPI declined to abort job ${abortQuery.jobId.jobId} in workflow ${abortQuery.workflowId}, most likely because it is no longer running. Marking as finished. Message: ${e.getMessage}") + logger.info( + s"PAPI declined to abort job ${abortQuery.jobId.jobId} in workflow ${abortQuery.workflowId}, most likely because it is no longer running. Marking as finished. Message: ${e.getMessage}" + ) abortQuery.requester ! PAPIOperationIsAlreadyTerminal(abortQuery.jobId.jobId) Success(()) } else { - pollingManager ! PipelinesApiAbortQueryFailed(abortQuery, new SystemPAPIApiException(GoogleJsonException(e, responseHeaders))) + pollingManager ! PipelinesApiAbortQueryFailed(abortQuery, + new SystemPAPIApiException(GoogleJsonException(e, responseHeaders)) + ) Failure(new Exception(mkErrorString(e))) } - } // The Genomics batch endpoint doesn't seem to be able to handle abort requests on V2 operations at the moment // For now, don't batch the request and execute it on its own - def handleRequest(abortQuery: PAPIAbortRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] = { + def handleRequest(abortQuery: PAPIAbortRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit + ec: ExecutionContext + ): Future[Try[Unit]] = Future(abortQuery.httpRequest.setThrowExceptionOnExecuteError(false).execute()) map { case response if response.isSuccessStatusCode => abortQuery.requester ! 
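The comment above reduces to a small total function: an HTTP 400 or a "Precondition check failed" message means the operation already reached a terminal state, which is all an abort was asking for. A sketch of that decision with stand-in types (ApiError is illustrative, not the real GoogleJsonError):

final case class ApiError(code: Option[Int], message: String)

sealed trait AbortOutcome
case object OperationAlreadyTerminal extends AbortOutcome
final case class AbortRequestFailed(reason: String) extends AbortOutcome

// PAPI refuses to cancel jobs that have already left 'Running'; from the
// requester's perspective that refusal still satisfies the abort.
def classifyAbortError(e: ApiError): AbortOutcome =
  if (e.code.contains(400) || e.message.contains("Precondition check failed"))
    OperationAlreadyTerminal
  else
    AbortRequestFailed(e.message)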
PAPIAbortRequestSuccessful(abortQuery.jobId.jobId) Success(()) - case response => for { - asGoogleError <- Try(GoogleJsonError.parse(GoogleAuthMode.jsonFactory, response)) - handled <- handleGoogleError(abortQuery, pollingManager, asGoogleError, response.getHeaders) - } yield handled - } recover { - case e => - pollingManager ! PipelinesApiAbortQueryFailed(abortQuery, new SystemPAPIApiException(e)) - Failure(e) + case response => + for { + asGoogleError <- Try(GoogleJsonError.parse(GoogleAuthMode.jsonFactory, response)) + handled <- handleGoogleError(abortQuery, pollingManager, asGoogleError, response.getHeaders) + } yield handled + } recover { case e => + pollingManager ! PipelinesApiAbortQueryFailed(abortQuery, new SystemPAPIApiException(e)) + Failure(e) } - } } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala index 1c57c3ba2ce..2329af494c0 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala @@ -20,28 +20,27 @@ object ErrorReporter { // This can be used to log non-critical deserialization failures and not fail the task implicit class ErrorOrLogger[A](val t: ErrorOr[A]) extends AnyVal { - private def logErrors(errors: NonEmptyList[String], workflowId: WorkflowId, operation: Operation): Unit = { - logger.error(s"[$workflowId] Failed to parse PAPI response. Operation Id: ${operation.getName}" + s"${errors.toList.mkString(", ")}") + private def logErrors(errors: NonEmptyList[String], workflowId: WorkflowId, operation: Operation): Unit = + logger.error( + s"[$workflowId] Failed to parse PAPI response. 
Operation Id: ${operation.getName}" + s"${errors.toList.mkString(", ")}" + ) + + def fallBack: RequestContextReader[Option[A]] = Reader { case (workflowId, operation) => + t match { + case Valid(s) => Option(s) + case Invalid(f) => + logErrors(f, workflowId, operation) + None + } } - def fallBack: RequestContextReader[Option[A]] = Reader { - case (workflowId, operation) => - t match { - case Valid(s) => Option(s) - case Invalid(f) => - logErrors(f, workflowId, operation) - None - } - } - - def fallBackTo(to: A): RequestContextReader[A] = Reader { - case (workflowId, operation) => - t match { - case Valid(s) => s - case Invalid(f) => - logErrors(f, workflowId, operation) - to - } + def fallBackTo(to: A): RequestContextReader[A] = Reader { case (workflowId, operation) => + t match { + case Valid(s) => s + case Invalid(f) => + logErrors(f, workflowId, operation) + to + } } } } @@ -53,7 +52,8 @@ class ErrorReporter(machineType: Option[String], instanceName: Option[String], actions: List[Action], operation: Operation, - workflowId: WorkflowId) { + workflowId: WorkflowId +) { def toUnsuccessfulRunStatus(error: Status, events: List[Event]): UnsuccessfulRunStatus = { // If for some reason the status is null, set it as UNAVAILABLE @@ -64,7 +64,11 @@ class ErrorReporter(machineType: Option[String], val status = statusOption.getOrElse(GStatus.UNAVAILABLE) val builder = status match { case GStatus.UNAVAILABLE if wasPreemptible => Preempted.apply _ - case GStatus.ABORTED if wasPreemptible && Option(error.getMessage).exists(_.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style)) => Preempted.apply _ + case GStatus.ABORTED + if wasPreemptible && Option(error.getMessage).exists( + _.contains(PipelinesApiAsyncBackendJobExecutionActor.FailedV2Style) + ) => + Preempted.apply _ case GStatus.CANCELLED => Cancelled.apply _ case _ => Failed.apply _ } @@ -77,7 +81,7 @@ class ErrorReporter(machineType: Option[String], } // There's maybe one FailedEvent per operation with a summary error message - private def unexpectedExitStatusErrorStrings(events: List[Event], actions: List[Action]): List[String] = { + private def unexpectedExitStatusErrorStrings(events: List[Event], actions: List[Action]): List[String] = for { event <- events unexpectedStatusEvent <- Option(event.getUnexpectedExitStatus) @@ -87,33 +91,33 @@ class ErrorReporter(machineType: Option[String], labelTag = action.flatMap(actionLabelTag) inputNameTag = action.flatMap(actionLabelInputName) } yield unexpectedStatusErrorString(event, stderr, labelTag, inputNameTag) - } // It would probably be good to define a richer error structure than String, but right now that's what the backend interface expects - private def unexpectedStatusErrorString(event: Event, stderr: Option[String], labelTag: Option[String], inputNameTag: Option[String]) = { + private def unexpectedStatusErrorString(event: Event, + stderr: Option[String], + labelTag: Option[String], + inputNameTag: Option[String] + ) = labelTag.map("[" + _ + "] ").getOrElse("") + - inputNameTag.map("Input name: " + _ + " - ").getOrElse("") + + inputNameTag.map("Input name: " + _ + " - ").getOrElse("") + Option(event).flatMap(eventValue => Option(eventValue.getDescription)).getOrElse("") + stderr.map(": " + _).getOrElse("") - } // There may be one FailedEvent per operation with a summary error message - private def summaryFailure(events: List[Event]): Option[String] = { + private def summaryFailure(events: List[Event]): Option[String] = events.collectFirst { case event if event.getFailed != null => 
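The fallBack and fallBackTo readers above implement "log and degrade" for non-critical parse failures: record the problem, keep the task alive. Modeled with Either standing in for cats' ErrorOr to keep the sketch dependency-free:

// Either[List[String], A] plays the role of ErrorOr[A].
def fallBackTo[A](parsed: Either[List[String], A], default: A)(log: String => Unit): A =
  parsed match {
    case Right(a) => a
    case Left(errors) =>
      // Non-critical: surface the parse failure in the log, then degrade.
      log(s"Failed to parse PAPI response: ${errors.mkString(", ")}")
      default
  }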
event.getFailed.getCause } - } // Try to find the stderr for the given action ID - private def stderrForAction(events: List[Event])(actionId: Integer) = { + private def stderrForAction(events: List[Event])(actionId: Integer) = events.collectFirst { - case event if event.getContainerStopped != null && event.getContainerStopped.getActionId == actionId => event.getContainerStopped.getStderr + case event if event.getContainerStopped != null && event.getContainerStopped.getActionId == actionId => + event.getContainerStopped.getStderr } - } - private def actionLabelValue(action: Action, k: String): Option[String] = { + private def actionLabelValue(action: Action, k: String): Option[String] = Option(action).flatMap(actionValue => Option(actionValue.getLabels)).map(_.asScala).flatMap(_.get(k)) - } private def actionLabelTag(action: Action) = actionLabelValue(action, Key.Tag) private def actionLabelInputName(action: Action) = actionLabelValue(action, Key.InputName) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala index 5bcb9c812f4..0473aecaa36 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala @@ -10,7 +10,13 @@ import common.validation.Validation._ import cromwell.backend.google.pipelines.common.action.ActionLabels._ import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager._ import cromwell.backend.google.pipelines.common.api.RunStatus -import cromwell.backend.google.pipelines.common.api.RunStatus.{AwaitingCloudQuota, Initializing, Running, Success, UnsuccessfulRunStatus} +import cromwell.backend.google.pipelines.common.api.RunStatus.{ + AwaitingCloudQuota, + Initializing, + Running, + Success, + UnsuccessfulRunStatus +} import cromwell.backend.google.pipelines.v2beta.PipelinesConversions._ import cromwell.backend.google.pipelines.v2beta.api.Deserialization._ import cromwell.backend.google.pipelines.v2beta.api.request.ErrorReporter._ @@ -22,30 +28,39 @@ import org.apache.commons.lang3.exception.ExceptionUtils import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.language.postfixOps -import scala.util.{Failure, Try, Success => TrySuccess} +import scala.util.{Failure, Success => TrySuccess, Try} trait GetRequestHandler { this: RequestHandler => // the Genomics batch endpoint doesn't seem to be able to handle get requests on V2 operations at the moment // For now, don't batch the request and execute it on its own - def handleRequest(pollingRequest: PAPIStatusPollRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] = Future(pollingRequest.httpRequest.execute()) map { + def handleRequest(pollingRequest: PAPIStatusPollRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit + ec: ExecutionContext + ): Future[Try[Unit]] = Future(pollingRequest.httpRequest.execute()) map { case response if response.isSuccessStatusCode => val operation = response.parseAs(classOf[Operation]) pollingRequest.requester ! 
interpretOperationStatus(operation, pollingRequest) TrySuccess(()) case response => val failure = Try(GoogleJsonError.parse(GoogleAuthMode.jsonFactory, response)) match { - case TrySuccess(googleError) => new SystemPAPIApiException(GoogleJsonException(googleError, response.getHeaders)) - case Failure(_) => new SystemPAPIApiException(new RuntimeException(s"Failed to get status for operation ${pollingRequest.jobId.jobId}: HTTP Status Code: ${response.getStatusCode}")) + case TrySuccess(googleError) => + new SystemPAPIApiException(GoogleJsonException(googleError, response.getHeaders)) + case Failure(_) => + new SystemPAPIApiException( + new RuntimeException( + s"Failed to get status for operation ${pollingRequest.jobId.jobId}: HTTP Status Code: ${response.getStatusCode}" + ) + ) } pollingManager ! PipelinesApiStatusQueryFailed(pollingRequest, failure) Failure(failure) - } recover { - case e => - pollingManager ! PipelinesApiStatusQueryFailed(pollingRequest, new SystemPAPIApiException(e)) - Failure(e) + } recover { case e => + pollingManager ! PipelinesApiStatusQueryFailed(pollingRequest, new SystemPAPIApiException(e)) + Failure(e) } - private [request] def interpretOperationStatus(operation: Operation, pollingRequest: PAPIStatusPollRequest): RunStatus = { + private[request] def interpretOperationStatus(operation: Operation, + pollingRequest: PAPIStatusPollRequest + ): RunStatus = if (Option(operation).isEmpty) { // It is possible to receive a null via an HTTP 200 with no response. If that happens, handle it and don't crash. // https://github.com/googleapis/google-http-java-client/blob/v1.28.0/google-http-client/src/main/java/com/google/api/client/http/HttpResponse.java#L456-L458 @@ -125,16 +140,20 @@ trait GetRequestHandler { this: RequestHandler => ) } } - } - private def getEventList(metadata: Map[String, AnyRef], events: List[Event], actions: List[Action]): List[ExecutionEvent] = { - val starterEvent: Option[ExecutionEvent] = { - metadata.get("createTime") map { time => ExecutionEvent("waiting for quota", OffsetDateTime.parse(time.toString)) } - } + private def getEventList(metadata: Map[String, AnyRef], + events: List[Event], + actions: List[Action] + ): List[ExecutionEvent] = { + val starterEvent: Option[ExecutionEvent] = + metadata.get("createTime") map { time => + ExecutionEvent("waiting for quota", OffsetDateTime.parse(time.toString)) + } - val completionEvent: Option[ExecutionEvent] = { - metadata.get("endTime") map { time => ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse(time.toString)) } - } + val completionEvent: Option[ExecutionEvent] = + metadata.get("endTime") map { time => + ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse(time.toString)) + } // Map action indexes to event types. Action indexes are 1-based for some reason. // BA-6455: since v2beta version of Life Sciences API, `a.getLabels` would return `null` for empty labels, unlike @@ -161,7 +180,10 @@ trait GetRequestHandler { this: RequestHandler => val filteredExecutionEvents = startDelocalization match { case None => executionEvents // Can't do filtering without a start time for Delocalization. 
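The branch that follows drops image-pull events stamped after delocalization began, since those are reporting artifacts rather than real work. The predicate in isolation, with a pared-down event type:

import java.time.OffsetDateTime

final case class TimelineEvent(name: String, offsetDateTime: OffsetDateTime)

def dropLatePullEvents(events: List[TimelineEvent], start: TimelineEvent): List[TimelineEvent] =
  events.filterNot { e =>
    // Pull events dated after delocalization started only add noise.
    (e.name.startsWith("Started pulling ") || e.name.startsWith("Stopped pulling ")) &&
      e.offsetDateTime.isAfter(start.offsetDateTime)
  }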
case Some(start) => - executionEvents filterNot { e => (e.name.startsWith("Started pulling ") || e.name.startsWith("Stopped pulling ")) && e.offsetDateTime.compareTo(start.offsetDateTime) > 0 } + executionEvents filterNot { e => + (e.name.startsWith("Started pulling ") || e.name.startsWith("Stopped pulling ")) && e.offsetDateTime + .compareTo(start.offsetDateTime) > 0 + } } starterEvent.toList ++ filteredExecutionEvents ++ completionEvent @@ -172,7 +194,7 @@ trait GetRequestHandler { this: RequestHandler => // "metrics": [ // "CPUS" // ] - private def isQuotaDelayed(events: List[Event]): Boolean = { + private def isQuotaDelayed(events: List[Event]): Boolean = events.sortBy(_.getTimestamp).reverse.headOption match { case Some(event) => quotaMessages.exists(event.getDescription.contains) @@ -180,7 +202,6 @@ trait GetRequestHandler { this: RequestHandler => // If the events list is empty, we're not waiting for quota yet false } - } private val quotaMessages = List( "A resource limit has delayed the operation", diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala index 523556e378d..a8eccb027f7 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala @@ -5,7 +5,11 @@ import com.google.api.client.googleapis.batch.BatchRequest import com.google.api.client.googleapis.json.GoogleJsonError import com.google.api.services.lifesciences.v2beta.CloudLifeSciences import cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes.BatchRequestTimeoutConfiguration -import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{PAPIAbortRequest, PAPIRunCreationRequest, PAPIStatusPollRequest} +import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestManager.{ + PAPIAbortRequest, + PAPIRunCreationRequest, + PAPIStatusPollRequest +} import cromwell.backend.google.pipelines.common.api.{PipelinesApiRequestHandler, PipelinesApiRequestManager} import cromwell.cloudsupport.gcp.auth.GoogleAuthMode import org.slf4j.{Logger, LoggerFactory} @@ -21,18 +25,18 @@ object RequestHandler { class RequestHandler(applicationName: String, endpointUrl: URL, - batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration) - extends PipelinesApiRequestHandler - with RunRequestHandler - with GetRequestHandler - with AbortRequestHandler { + batchRequestTimeoutConfiguration: BatchRequestTimeoutConfiguration +) extends PipelinesApiRequestHandler + with RunRequestHandler + with GetRequestHandler + with AbortRequestHandler { override def makeBatchRequest: BatchRequest = { val builder = new CloudLifeSciences.Builder( GoogleAuthMode.httpTransport, GoogleAuthMode.jsonFactory, - initializeHttpRequest(batchRequestTimeoutConfiguration) _, + initializeHttpRequest(batchRequestTimeoutConfiguration) _ ) .setApplicationName(applicationName) .setRootUrl(endpointUrl.toString) @@ -42,9 +46,8 @@ class RequestHandler(applicationName: String, override def enqueue[T <: PipelinesApiRequestManager.PAPIApiRequest](papiApiRequest: T, batchRequest: BatchRequest, - pollingManager: ActorRef) - (implicit ec: ExecutionContext) - : Future[Try[Unit]] = papiApiRequest match { + 
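isQuotaDelayed above keys off the most recent event only, so AwaitingCloudQuota clears as soon as the operation emits anything newer. The same ordering logic in isolation (the message fragments are representative, not the full PAPI list):

final case class OpEvent(timestamp: Long, description: String)

val quotaMessages = List(
  "A resource limit has delayed the operation",
  "usage too high"
)

// Only the newest event counts: an old quota message is stale once any
// later event has been recorded.
def isQuotaDelayed(events: List[OpEvent]): Boolean =
  events.sortBy(_.timestamp).lastOption.exists { latest =>
    quotaMessages.exists(latest.description.contains)
  }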
pollingManager: ActorRef + )(implicit ec: ExecutionContext): Future[Try[Unit]] = papiApiRequest match { case create: PAPIRunCreationRequest => handleRequest(create, batchRequest, pollingManager) case status: PAPIStatusPollRequest => handleRequest(status, batchRequest, pollingManager) case abort: PAPIAbortRequest => handleRequest(abort, batchRequest, pollingManager) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RunRequestHandler.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RunRequestHandler.scala index ca815944d77..9de5a91d85c 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RunRequestHandler.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RunRequestHandler.scala @@ -14,7 +14,10 @@ import scala.concurrent.{Future, Promise} import scala.util.{Failure, Success, Try} trait RunRequestHandler { this: RequestHandler => - private def runCreationResultHandler(originalRequest: PAPIApiRequest, completionPromise: Promise[Try[Unit]], pollingManager: ActorRef) = new JsonBatchCallback[Operation] { + private def runCreationResultHandler(originalRequest: PAPIApiRequest, + completionPromise: Promise[Try[Unit]], + pollingManager: ActorRef + ) = new JsonBatchCallback[Operation] { override def onSuccess(operation: Operation, responseHeaders: HttpHeaders): Unit = { originalRequest.requester ! getJob(operation) completionPromise.trySuccess(Success(())) @@ -41,17 +44,23 @@ trait RunRequestHandler { this: RequestHandler => } } - def handleRequest(runCreationQuery: PAPIRunCreationRequest, batch: BatchRequest, pollingManager: ActorRef): Future[Try[Unit]] = { + def handleRequest(runCreationQuery: PAPIRunCreationRequest, + batch: BatchRequest, + pollingManager: ActorRef + ): Future[Try[Unit]] = { val completionPromise = Promise[Try[Unit]]() val resultHandler = runCreationResultHandler(runCreationQuery, completionPromise, pollingManager) addRunCreationToBatch(runCreationQuery.httpRequest, batch, resultHandler) completionPromise.future } - private def addRunCreationToBatch(request: HttpRequest, batch: BatchRequest, resultHandler: JsonBatchCallback[Operation]): Unit = { + private def addRunCreationToBatch(request: HttpRequest, + batch: BatchRequest, + resultHandler: JsonBatchCallback[Operation] + ): Unit = { /* - * Manually enqueue the request instead of doing it through the RunPipelineRequest - * as it would unnecessarily rebuild the request (which we already have) + * Manually enqueue the request instead of doing it through the RunPipelineRequest + * as it would unnecessarily rebuild the request (which we already have) */ batch.queue(request, classOf[Operation], classOf[GoogleJsonErrorContainer], resultHandler) () diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala index e7189d453ad..97a06617721 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala +++ 
b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala @@ -18,8 +18,11 @@ import org.scalatest.matchers.should.Matchers import scala.concurrent.duration.DurationInt -class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers - with MockSugar { +class PipelinesApiAsyncBackendJobExecutionActorSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with MockSugar { behavior of "PipelinesParameterConversions" it should "group files by bucket" in { @@ -47,8 +50,8 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Cro val expected = Map("foo" -> (NonEmptyList.of(0, 1) map inputs.apply)) ++ - Map("bar" -> (NonEmptyList.of(2, 3, 4) map inputs.apply)) ++ - Map("baz" -> NonEmptyList.of(inputs(5))) + Map("bar" -> (NonEmptyList.of(2, 3, 4) map inputs.apply)) ++ + Map("baz" -> NonEmptyList.of(inputs(5))) PipelinesApiAsyncBackendJobExecutionActor.groupParametersByGcsBucket(inputs) shouldEqual expected } @@ -65,11 +68,16 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Cro val fakeCredentials = NoCredentials.getInstance val drsReadInterpreter: DrsReadInterpreter = (_, _) => - throw new UnsupportedOperationException("PipelinesApiAsyncBackendJobExecutionActorSpec doesn't need to use drs read interpreter.") + throw new UnsupportedOperationException( + "PipelinesApiAsyncBackendJobExecutionActorSpec doesn't need to use drs read interpreter." + ) DrsPathBuilder( - new DrsCloudNioFileSystemProvider(drsResolverConfig, GoogleOauthDrsCredentials(fakeCredentials, 1.minutes), drsReadInterpreter), - None, + new DrsCloudNioFileSystemProvider(drsResolverConfig, + GoogleOauthDrsCredentials(fakeCredentials, 1.minutes), + drsReadInterpreter + ), + None ) } @@ -82,7 +90,10 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Cro } val nonDrsInput: PipelinesApiFileInput = PipelinesApiFileInput("nnn", - DefaultPathBuilder.get("/local/nnn.bai"), DefaultPathBuilder.get("/path/to/nnn.bai"), mount) + DefaultPathBuilder.get("/local/nnn.bai"), + DefaultPathBuilder.get("/path/to/nnn.bai"), + mount + ) val inputs = List( makeDrsInput("aaa", "drs://drs.example.org/aaa", "path/to/aaa.bai"), diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala index 44abde7862c..233f168a257 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala @@ -19,36 +19,38 @@ class ActionBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ("description", "action", "command"), ("a cloud sdk action", ActionBuilder.cloudSdkAction, s"docker run ${ActionUtils.CloudSdkImage}"), ("a cloud sdk action with args", - ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello").asJava), - s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello" + ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello").asJava), + s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello" ), ("a cloud sdk action with quotes in the args", - 
ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello m'lord").asJava), - s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\ m\\'lord" + ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello m'lord").asJava), + s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\ m\\'lord" ), ("a cloud sdk action with a newline in the args", - ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello\\\nworld").asJava), - s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\\\world" + ActionBuilder.cloudSdkAction.setCommands(List("bash", "-c", "echo hello\\\nworld").asJava), + s"docker run ${ActionUtils.CloudSdkImage} bash -c echo\\ hello\\\\world" ), ("an action with multiple args", - new Action() - .setImageUri("ubuntu") - .setEnvironment(Map("ENV" -> "dev").asJava) - .setEntrypoint("") - .setCommands(List("bash", "-c", "echo hello").asJava) - .setPublishExposedPorts(true) - .setAlwaysRun(true) - .setMounts(List( - new Mount().setDisk("read-only-disk").setPath("/read/only/container").setReadOnly(true), - new Mount().setDisk("read-write-disk").setPath("/read/write/container"), - ).asJava) - .setContainerName("my_container_name") - .setPidNamespace("host") - .setPortMappings(Map("8008" -> Int.box(8000)).asJava), - "docker run --name my_container_name" + - " -v /mnt/read-only-disk:/read/only/container:ro -v /mnt/read-write-disk:/read/write/container" + - " -e ENV:dev --pid=host -P -p 8008:8000 --entrypoint= ubuntu bash -c echo\\ hello" - ), + new Action() + .setImageUri("ubuntu") + .setEnvironment(Map("ENV" -> "dev").asJava) + .setEntrypoint("") + .setCommands(List("bash", "-c", "echo hello").asJava) + .setPublishExposedPorts(true) + .setAlwaysRun(true) + .setMounts( + List( + new Mount().setDisk("read-only-disk").setPath("/read/only/container").setReadOnly(true), + new Mount().setDisk("read-write-disk").setPath("/read/write/container") + ).asJava + ) + .setContainerName("my_container_name") + .setPidNamespace("host") + .setPortMappings(Map("8008" -> Int.box(8000)).asJava), + "docker run --name my_container_name" + + " -v /mnt/read-only-disk:/read/only/container:ro -v /mnt/read-write-disk:/read/write/container" + + " -e ENV:dev --pid=host -P -p 8008:8000 --entrypoint= ubuntu bash -c echo\\ hello" + ) ) forAll(dockerRunActions) { (description, action, command) => @@ -59,12 +61,11 @@ class ActionBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche private val memoryRetryExpectedEntrypoint = "/bin/sh" - def memoryRetryExpectedCommand(lookupString: String): util.List[String] = { + def memoryRetryExpectedCommand(lookupString: String): util.List[String] = List( "-c", s"grep -E -q '$lookupString' /cromwell_root/stderr ; echo $$? 
> /cromwell_root/memory_retry_rc" ).asJava - } val mounts = List(new Mount().setDisk("read-only-disk").setPath("/read/only/container")) private val memoryRetryActionExpectedLabels = Map(Key.Tag -> Value.RetryWithMoreMemory).asJava diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala index 257fb094b0d..4356c5f0418 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala @@ -19,7 +19,7 @@ class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match mock[Path], mock[com.google.api.services.storage.Storage], mock[com.google.cloud.storage.Storage], - "my-project", + "my-project" ) val recovered = recoverRequesterPaysError(path) { flag => s"flag is $flag" @@ -46,21 +46,23 @@ class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } it should "use GcsTransferConfiguration to set the number of localization retries" in { - implicit val gcsTransferConfiguration: GcsTransferConfiguration = GcsTransferConfiguration( - transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") - retry("I'm very flaky") shouldBe """for i in $(seq 31380); do - | ( - | I'm very flaky - | ) - | RC=$? - | if [ "$RC" = "0" ]; then - | break - | fi - | if [ $i -lt 31380 ]; then - | printf '%s %s\n' "$(date -u '+%Y/%m/%d %H:%M:%S')" Waiting\ 5\ seconds\ and\ retrying - | sleep 5 - | fi - |done - |exit "$RC"""".stripMargin + implicit val gcsTransferConfiguration: GcsTransferConfiguration = + GcsTransferConfiguration(transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") + retry( + "I'm very flaky" + ) shouldBe """for i in $(seq 31380); do + | ( + | I'm very flaky + | ) + | RC=$? 
+ | if [ "$RC" = "0" ]; then + | break + | fi + | if [ $i -lt 31380 ]; then + | printf '%s %s\n' "$(date -u '+%Y/%m/%d %H:%M:%S')" Waiting\ 5\ seconds\ and\ retrying + | sleep 5 + | fi + |done + |exit "$RC"""".stripMargin } } diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala index 6afd75adbd6..c2e8920301b 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala @@ -37,7 +37,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc "portMappings" -> Map( "8000" -> Integer.valueOf(8008) ).asJava - ).asJava, + ).asJava ).asJava ).asJava ) @@ -118,7 +118,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc "resources" -> Map[String, Object]( "projectId" -> "project", "virtualMachine" -> Map( - "machineType" -> "custom-1-1024", + "machineType" -> "custom-1-1024" ).asJava ).asJava ).asJava @@ -149,15 +149,20 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ) ).asJava - val metadataMapStarted = makeMetadata(Map[String, Object]( - "workerAssigned" -> Map( - "zone" -> "event 1 Zone", - "instance" -> "event 1 Instance" - ).asJava)) + val metadataMapStarted = makeMetadata( + Map[String, Object]( + "workerAssigned" -> Map( + "zone" -> "event 1 Zone", + "instance" -> "event 1 Instance" + ).asJava + ) + ) val metadataMapNotStarted = makeMetadata(Map.empty) - val metadataMapNotStarted2 = makeMetadata(Map[String, Object]( - "containerStarted" -> Map().asJava - )) + val metadataMapNotStarted2 = makeMetadata( + Map[String, Object]( + "containerStarted" -> Map().asJava + ) + ) operation.setMetadata(metadataMapStarted) operation.hasStarted shouldBe true @@ -170,8 +175,8 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc it should "deserialize big decimals correctly" in { val valueMap = Map[String, Object]( "integerValue" -> BigDecimal(5), - "doubleValue" -> BigDecimal.decimal(6D), - "floatValue" -> BigDecimal.decimal(7F), + "doubleValue" -> BigDecimal.decimal(6d), + "floatValue" -> BigDecimal.decimal(7f), "longValue" -> BigDecimal.decimal(8L) ).asJava @@ -179,8 +184,8 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc deserialized match { case Success(deserializedSuccess) => deserializedSuccess.integerValue shouldBe 5 - deserializedSuccess.doubleValue shouldBe 6D - deserializedSuccess.floatValue shouldBe 7F + deserializedSuccess.doubleValue shouldBe 6d + deserializedSuccess.floatValue shouldBe 7f deserializedSuccess.longValue shouldBe 8L case Failure(f) => fail("Bad deserialization", f) diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/LocalizationSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/LocalizationSpec.scala index f815ceebd14..2ddf991d0f7 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/LocalizationSpec.scala +++ 
b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/LocalizationSpec.scala @@ -25,11 +25,12 @@ class LocalizationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher action.get("commands") should be(a[java.util.List[_]]) action.get("commands").asInstanceOf[java.util.List[_]] should contain theSameElementsAs List( - "-m", manifestPathString + "-m", + manifestPathString ) action.get("mounts") should be(a[java.util.List[_]]) - action.get("mounts").asInstanceOf[java.util.List[_]] should be (empty) + action.get("mounts").asInstanceOf[java.util.List[_]] should be(empty) action.get("imageUri") should be("somerepo/drs-downloader:tagged") @@ -52,11 +53,14 @@ class LocalizationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher action.get("commands") should be(a[java.util.List[_]]) action.get("commands").asInstanceOf[java.util.List[_]] should contain theSameElementsAs List( - "-m", manifestPathString, "-r", requesterPaysProjectId + "-m", + manifestPathString, + "-r", + requesterPaysProjectId ) action.get("mounts") should be(a[java.util.List[_]]) - action.get("mounts").asInstanceOf[java.util.List[_]] should be (empty) + action.get("mounts").asInstanceOf[java.util.List[_]] should be(empty) action.get("imageUri") should be("somerepo/drs-downloader:tagged") diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandlerSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandlerSpec.scala index 802a7b68106..7f0ae47135f 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandlerSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandlerSpec.scala @@ -25,7 +25,7 @@ class GetRequestHandlerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma private val requestHandler: GetRequestHandler = new RequestHandler( "GetRequestHandlerSpec", new URL("file:///getrequesthandlerspec"), - BatchRequestTimeoutConfiguration(None, None), + BatchRequestTimeoutConfiguration(None, None) ) private val workflowId = WorkflowId.randomId() @@ -37,623 +37,656 @@ class GetRequestHandlerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma private val interpretedStatus = Table( ("description", "json", "status"), - ("parse null operation json", null, UnsuccessfulRunStatus( - Status.UNKNOWN, - Option("Operation returned as empty"), - Nil, - None, - None, - None, - wasPreemptible = false - )), + ("parse null operation json", + null, + UnsuccessfulRunStatus( + Status.UNKNOWN, + Option("Operation returned as empty"), + Nil, + None, + None, + None, + wasPreemptible = false + ) + ), ("parse empty operation json", "{}", Initializing), ("parse error operation json without resources", - """|{ - | "done": true, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json without virtualMachine", - """|{ - | "done": true, - | "resources": { - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "resources": { + | }, + | "error": {} + |} + |""".stripMargin, + 
Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json without preemptible", - """|{ - | "done": true, - | "resources": { - | "virtualMachine": { - | } - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "resources": { + | "virtualMachine": { + | } + | }, + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json with preemptible true", - """|{ - | "done": true, - | "resources": { - | "virtualMachine": { - | "preemptible": true - | } - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + """|{ + | "done": true, + | "resources": { + | "virtualMachine": { + | "preemptible": true + | } + | }, + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) ), ("parse error operation json with preemptible false", - """|{ - | "done": true, - | "resources": { - | "virtualMachine": { - | "preemptible": false - | } - | }, - | "error": {} - |} - |""".stripMargin, - Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) - ), - ("check that we classify error code 10 as a preemption on a preemptible VM", - """{ + """|{ | "done": true, - | "error": { - | "code": 10, - | "message": "The assigned worker has failed to complete the operation" - | }, - | "metadata": { - | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", - | "createTime": "2019-08-18T12:04:38.082650Z", - | "endTime": "2019-08-18T15:58:26.659602622Z", - | "events": [], - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": "validatesamfile" - | }, - | "pipeline": { - | "actions": [], - | "environment": {}, - | "resources": { - | "projectId": "", - | "regions": [], - | "virtualMachine": { - | "accelerators": [], - | "bootDiskSizeGb": 11, - | "bootImage": "asdfasdf", - | "cpuPlatform": "", - | "disks": [ - | { - | "name": "local-disk", - | "sizeGb": 41, - | "sourceImage": "", - | "type": "pd-standard" - | } - | ], - | "enableStackdriverMonitoring": false, - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "goog-pipelines-worker": "true", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": "validatesamfile" - | }, - | "machineType": "custom-2-7168", - | "network": { - | "name": "", - | "subnetwork": "", - | "usePrivateAddress": false - | }, - | "nvidiaDriverVersion": "", - | "preemptible": true, - | "serviceAccount": { - | "email": "default", - | "scopes": [ - | "https://www.googleapis.com/auth/genomics", - | "https://www.googleapis.com/auth/compute", - | "https://www.googleapis.com/auth/devstorage.full_control", - | "https://www.googleapis.com/auth/cloudkms", - | "https://www.googleapis.com/auth/userinfo.email", - | "https://www.googleapis.com/auth/userinfo.profile", - | "https://www.googleapis.com/auth/monitoring.write", - | "https://www.googleapis.com/auth/cloud-platform" - | ] - | } - | }, - | "zones": [ - | "us-central1-a", - | "us-central1-b", - | "us-east1-d", - | "us-central1-c", - | "us-central1-f", - | "us-east1-c" - | ] - | }, - | "timeout": "604800s" - | }, - | "startTime": "2019-08-18T12:04:39.192909594Z" + | "resources": { + | "virtualMachine": { + | "preemptible": false + | } | }, - | "name": "asdfasdf" - |}""".stripMargin, - Preempted( - 
Status.ABORTED, - None, - Nil, - List( - ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"),None), - ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"),None), - ), - Some("custom-2-7168"), - None, - None) + | "error": {} + |} + |""".stripMargin, + Failed(Status.UNAVAILABLE, None, Nil, Nil, None, None, None) + ), + ("check that we classify error code 10 as a preemption on a preemptible VM", + """{ + | "done": true, + | "error": { + | "code": 10, + | "message": "The assigned worker has failed to complete the operation" + | }, + | "metadata": { + | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", + | "createTime": "2019-08-18T12:04:38.082650Z", + | "endTime": "2019-08-18T15:58:26.659602622Z", + | "events": [], + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "pipeline": { + | "actions": [], + | "environment": {}, + | "resources": { + | "projectId": "", + | "regions": [], + | "virtualMachine": { + | "accelerators": [], + | "bootDiskSizeGb": 11, + | "bootImage": "asdfasdf", + | "cpuPlatform": "", + | "disks": [ + | { + | "name": "local-disk", + | "sizeGb": 41, + | "sourceImage": "", + | "type": "pd-standard" + | } + | ], + | "enableStackdriverMonitoring": false, + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "goog-pipelines-worker": "true", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "machineType": "custom-2-7168", + | "network": { + | "name": "", + | "subnetwork": "", + | "usePrivateAddress": false + | }, + | "nvidiaDriverVersion": "", + | "preemptible": true, + | "serviceAccount": { + | "email": "default", + | "scopes": [ + | "https://www.googleapis.com/auth/genomics", + | "https://www.googleapis.com/auth/compute", + | "https://www.googleapis.com/auth/devstorage.full_control", + | "https://www.googleapis.com/auth/cloudkms", + | "https://www.googleapis.com/auth/userinfo.email", + | "https://www.googleapis.com/auth/userinfo.profile", + | "https://www.googleapis.com/auth/monitoring.write", + | "https://www.googleapis.com/auth/cloud-platform" + | ] + | } + | }, + | "zones": [ + | "us-central1-a", + | "us-central1-b", + | "us-east1-d", + | "us-central1-c", + | "us-central1-f", + | "us-east1-c" + | ] + | }, + | "timeout": "604800s" + | }, + | "startTime": "2019-08-18T12:04:39.192909594Z" + | }, + | "name": "asdfasdf" + |}""".stripMargin, + Preempted( + Status.ABORTED, + None, + Nil, + List( + ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"), None), + ExecutionEvent("Complete in GCE / Cromwell Poll Interval", + OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"), + None + ) + ), + Some("custom-2-7168"), + None, + None + ) ), ("check that we classify error code 10 as a failure on a non-preemptible VM", - """{ - | "done": true, - | "error": { - | "code": 10, - | "message": "The assigned worker has failed to complete the operation" - | }, - | "metadata": { - | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", - | "createTime": "2019-08-18T12:04:38.082650Z", - | "endTime": "2019-08-18T15:58:26.659602622Z", - | "events": [], - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": 
"validatesamfile" - | }, - | "pipeline": { - | "actions": [], - | "environment": {}, - | "resources": { - | "projectId": "", - | "regions": [], - | "virtualMachine": { - | "accelerators": [], - | "bootDiskSizeGb": 11, - | "bootImage": "asdfasdf", - | "cpuPlatform": "", - | "disks": [ - | { - | "name": "local-disk", - | "sizeGb": 41, - | "sourceImage": "", - | "type": "pd-standard" - | } - | ], - | "enableStackdriverMonitoring": false, - | "labels": { - | "cromwell-sub-workflow-name": "bamtocram", - | "cromwell-workflow-id": "asdfasdf", - | "goog-pipelines-worker": "true", - | "wdl-call-alias": "validatecram", - | "wdl-task-name": "validatesamfile" - | }, - | "machineType": "custom-2-7168", - | "network": { - | "name": "", - | "subnetwork": "", - | "usePrivateAddress": false - | }, - | "nvidiaDriverVersion": "", - | "preemptible": false, - | "serviceAccount": { - | "email": "default", - | "scopes": [ - | "https://www.googleapis.com/auth/genomics", - | "https://www.googleapis.com/auth/compute", - | "https://www.googleapis.com/auth/devstorage.full_control", - | "https://www.googleapis.com/auth/cloudkms", - | "https://www.googleapis.com/auth/userinfo.email", - | "https://www.googleapis.com/auth/userinfo.profile", - | "https://www.googleapis.com/auth/monitoring.write", - | "https://www.googleapis.com/auth/cloud-platform" - | ] - | } - | }, - | "zones": [ - | "us-central1-a", - | "us-central1-b", - | "us-east1-d", - | "us-central1-c", - | "us-central1-f", - | "us-east1-c" - | ] - | }, - | "timeout": "604800s" - | }, - | "startTime": "2019-08-18T12:04:39.192909594Z" - | }, - | "name": "asdfasdf" - |}""".stripMargin, - Failed( - Status.ABORTED, - None, - Nil, - List( - ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"),None), - ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"),None), - ), - Some("custom-2-7168"), - None, - None - ) + """{ + | "done": true, + | "error": { + | "code": 10, + | "message": "The assigned worker has failed to complete the operation" + | }, + | "metadata": { + | "@type": "type.googleapis.com/google.genomics.v2alpha1.Metadata", + | "createTime": "2019-08-18T12:04:38.082650Z", + | "endTime": "2019-08-18T15:58:26.659602622Z", + | "events": [], + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "pipeline": { + | "actions": [], + | "environment": {}, + | "resources": { + | "projectId": "", + | "regions": [], + | "virtualMachine": { + | "accelerators": [], + | "bootDiskSizeGb": 11, + | "bootImage": "asdfasdf", + | "cpuPlatform": "", + | "disks": [ + | { + | "name": "local-disk", + | "sizeGb": 41, + | "sourceImage": "", + | "type": "pd-standard" + | } + | ], + | "enableStackdriverMonitoring": false, + | "labels": { + | "cromwell-sub-workflow-name": "bamtocram", + | "cromwell-workflow-id": "asdfasdf", + | "goog-pipelines-worker": "true", + | "wdl-call-alias": "validatecram", + | "wdl-task-name": "validatesamfile" + | }, + | "machineType": "custom-2-7168", + | "network": { + | "name": "", + | "subnetwork": "", + | "usePrivateAddress": false + | }, + | "nvidiaDriverVersion": "", + | "preemptible": false, + | "serviceAccount": { + | "email": "default", + | "scopes": [ + | "https://www.googleapis.com/auth/genomics", + | "https://www.googleapis.com/auth/compute", + | "https://www.googleapis.com/auth/devstorage.full_control", + | 
"https://www.googleapis.com/auth/cloudkms", + | "https://www.googleapis.com/auth/userinfo.email", + | "https://www.googleapis.com/auth/userinfo.profile", + | "https://www.googleapis.com/auth/monitoring.write", + | "https://www.googleapis.com/auth/cloud-platform" + | ] + | } + | }, + | "zones": [ + | "us-central1-a", + | "us-central1-b", + | "us-east1-d", + | "us-central1-c", + | "us-central1-f", + | "us-east1-c" + | ] + | }, + | "timeout": "604800s" + | }, + | "startTime": "2019-08-18T12:04:39.192909594Z" + | }, + | "name": "asdfasdf" + |}""".stripMargin, + Failed( + Status.ABORTED, + None, + Nil, + List( + ExecutionEvent("waiting for quota", OffsetDateTime.parse("2019-08-18T12:04:38.082650Z"), None), + ExecutionEvent("Complete in GCE / Cromwell Poll Interval", + OffsetDateTime.parse("2019-08-18T15:58:26.659602622Z"), + None + ) + ), + Some("custom-2-7168"), + None, + None + ) ), // As of 2022-01 the zone `us-west3` in `broad-dsde-cromwell-dev` has its CPU quota purposely de-rated to 1 for testing ("check that a job is AwaitingCloudQuota if its most recent event is quota exhaustion", - """{ - | "metadata": { - | "@type": "type.googleapis.com/google.cloud.lifesciences.v2beta.Metadata", - | "createTime": "2022-01-19T21:53:55.138960Z", - | "events": [ - | { - | "delayed": { - | "cause": "generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", - | "metrics": [ - | "CPUS" - | ] - | }, - | "description": "A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", - | "timestamp": "2022-01-19T21:54:07.717679160Z" - | } - | ], - | "labels": { - | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", - | "wdl-task-name": "sleep" - | }, - | "pipeline": { - | "actions": [ - | { - | "commands": [ - | "-c", - | "printf '%s %s\\n' \"$(date -u '+%Y/%m/%d %H:%M:%S')\" Starting\\ container\\ setup." 
- | ], - | "entrypoint": "/bin/sh", - | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", - | "labels": { - | "logging": "ContainerSetup" - | }, - | "timeout": "300s" - | } - | ], - | "environment": { - | "MEM_SIZE": "2.0", - | "MEM_UNIT": "GB" - | }, - | "resources": { - | "virtualMachine": { - | "bootDiskSizeGb": 12, - | "bootImage": "projects/cos-cloud/global/images/family/cos-stable", - | "disks": [ - | { - | "name": "local-disk", - | "sizeGb": 10, - | "type": "pd-ssd" - | } - | ], - | "labels": { - | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", - | "goog-pipelines-worker": "true", - | "wdl-task-name": "sleep" - | }, - | "machineType": "custom-1-2048", - | "network": {}, - | "nvidiaDriverVersion": "450.51.06", - | "serviceAccount": { - | "email": "centaur@broad-dsde-cromwell-dev.iam.gserviceaccount.com", - | "scopes": [ - | "https://www.googleapis.com/auth/compute", - | "https://www.googleapis.com/auth/devstorage.full_control", - | "https://www.googleapis.com/auth/cloudkms", - | "https://www.googleapis.com/auth/userinfo.email", - | "https://www.googleapis.com/auth/userinfo.profile", - | "https://www.googleapis.com/auth/monitoring.write", - | "https://www.googleapis.com/auth/bigquery", - | "https://www.googleapis.com/auth/cloud-platform" - | ] - | }, - | "volumes": [ - | { - | "persistentDisk": { - | "sizeGb": 10, - | "type": "pd-ssd" - | }, - | "volume": "local-disk" - | } - | ] - | }, - | "zones": [ - | "us-west3-a", - | "us-west3-b", - | "us-west3-c" - | ] - | }, - | "timeout": "604800s" - | } - | }, - | "name": "projects/1005074806481/locations/us-central1/operations/3874882033889365536" - |}""".stripMargin, - AwaitingCloudQuota + """{ + | "metadata": { + | "@type": "type.googleapis.com/google.cloud.lifesciences.v2beta.Metadata", + | "createTime": "2022-01-19T21:53:55.138960Z", + | "events": [ + | { + | "delayed": { + | "cause": "generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", + | "metrics": [ + | "CPUS" + | ] + | }, + | "description": "A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", + | "timestamp": "2022-01-19T21:54:07.717679160Z" + | } + | ], + | "labels": { + | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", + | "wdl-task-name": "sleep" + | }, + | "pipeline": { + | "actions": [ + | { + | "commands": [ + | "-c", + | "printf '%s %s\\n' \"$(date -u '+%Y/%m/%d %H:%M:%S')\" Starting\\ container\\ setup." 
+ | ], + | "entrypoint": "/bin/sh", + | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", + | "labels": { + | "logging": "ContainerSetup" + | }, + | "timeout": "300s" + | } + | ], + | "environment": { + | "MEM_SIZE": "2.0", + | "MEM_UNIT": "GB" + | }, + | "resources": { + | "virtualMachine": { + | "bootDiskSizeGb": 12, + | "bootImage": "projects/cos-cloud/global/images/family/cos-stable", + | "disks": [ + | { + | "name": "local-disk", + | "sizeGb": 10, + | "type": "pd-ssd" + | } + | ], + | "labels": { + | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", + | "goog-pipelines-worker": "true", + | "wdl-task-name": "sleep" + | }, + | "machineType": "custom-1-2048", + | "network": {}, + | "nvidiaDriverVersion": "450.51.06", + | "serviceAccount": { + | "email": "centaur@broad-dsde-cromwell-dev.iam.gserviceaccount.com", + | "scopes": [ + | "https://www.googleapis.com/auth/compute", + | "https://www.googleapis.com/auth/devstorage.full_control", + | "https://www.googleapis.com/auth/cloudkms", + | "https://www.googleapis.com/auth/userinfo.email", + | "https://www.googleapis.com/auth/userinfo.profile", + | "https://www.googleapis.com/auth/monitoring.write", + | "https://www.googleapis.com/auth/bigquery", + | "https://www.googleapis.com/auth/cloud-platform" + | ] + | }, + | "volumes": [ + | { + | "persistentDisk": { + | "sizeGb": 10, + | "type": "pd-ssd" + | }, + | "volume": "local-disk" + | } + | ] + | }, + | "zones": [ + | "us-west3-a", + | "us-west3-b", + | "us-west3-c" + | ] + | }, + | "timeout": "604800s" + | } + | }, + | "name": "projects/1005074806481/locations/us-central1/operations/3874882033889365536" + |}""".stripMargin, + AwaitingCloudQuota ), ("check that a job is Running and no longer AwaitingCloudQuota once a worker assigns", - """{ - | "metadata": { - | "@type": "type.googleapis.com/google.cloud.lifesciences.v2beta.Metadata", - | "createTime": "2022-01-19T21:53:55.138960Z", - | "events": [ - | { - | "description": "Started pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"", - | "pullStarted": { - | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine" - | }, - | "timestamp": "2022-01-19T22:09:55.410251187Z" - | }, - | { - | "description": "Worker \"google-pipelines-worker-e6c8bf8035860b2cd69488497bd602d8\" assigned in \"us-west3-c\" on a \"custom-1-2048\" machine", - | "timestamp": "2022-01-19T22:09:20.363771714Z", - | "workerAssigned": { - | "instance": "google-pipelines-worker-e6c8bf8035860b2cd69488497bd602d8", - | "machineType": "custom-1-2048", - | "zone": "us-west3-c" - | } - | }, - | { - | "delayed": { - | "cause": "generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", - | "metrics": [ - | "CPUS" - | ] - | }, - | "description": "A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", - | "timestamp": "2022-01-19T21:54:07.717679160Z" - | } - | ], - | "labels": { - | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", - | "wdl-task-name": "sleep" - | }, - | "pipeline": { - | "actions": [ - | { - | "commands": [ - | "-c", - | "printf '%s %s\\n' \"$(date -u '+%Y/%m/%d %H:%M:%S')\" Starting\\ container\\ setup." 
- | ], - | "entrypoint": "/bin/sh", - | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", - | "labels": { - | "logging": "ContainerSetup" - | }, - | "timeout": "300s" - | } - | ], - | "environment": { - | "MEM_SIZE": "2.0", - | "MEM_UNIT": "GB" - | }, - | "resources": { - | "virtualMachine": { - | "bootDiskSizeGb": 12, - | "bootImage": "projects/cos-cloud/global/images/family/cos-stable", - | "disks": [ - | { - | "name": "local-disk", - | "sizeGb": 10, - | "type": "pd-ssd" - | } - | ], - | "labels": { - | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", - | "goog-pipelines-worker": "true", - | "wdl-task-name": "sleep" - | }, - | "machineType": "custom-1-2048", - | "network": {}, - | "nvidiaDriverVersion": "450.51.06", - | "serviceAccount": { - | "email": "centaur@broad-dsde-cromwell-dev.iam.gserviceaccount.com", - | "scopes": [ - | "https://www.googleapis.com/auth/compute", - | "https://www.googleapis.com/auth/devstorage.full_control", - | "https://www.googleapis.com/auth/cloudkms", - | "https://www.googleapis.com/auth/userinfo.email", - | "https://www.googleapis.com/auth/userinfo.profile", - | "https://www.googleapis.com/auth/monitoring.write", - | "https://www.googleapis.com/auth/bigquery", - | "https://www.googleapis.com/auth/cloud-platform" - | ] - | }, - | "volumes": [ - | { - | "persistentDisk": { - | "sizeGb": 10, - | "type": "pd-ssd" - | }, - | "volume": "local-disk" - | } - | ] - | }, - | "zones": [ - | "us-west3-a", - | "us-west3-b", - | "us-west3-c" - | ] - | }, - | "timeout": "604800s" - | }, - | "startTime": "2022-01-19T22:09:20.363771714Z" - | }, - | "name": "projects/1005074806481/locations/us-central1/operations/3874882033889365536" - |} - | - | - |""".stripMargin, - Running + """{ + | "metadata": { + | "@type": "type.googleapis.com/google.cloud.lifesciences.v2beta.Metadata", + | "createTime": "2022-01-19T21:53:55.138960Z", + | "events": [ + | { + | "description": "Started pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"", + | "pullStarted": { + | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine" + | }, + | "timestamp": "2022-01-19T22:09:55.410251187Z" + | }, + | { + | "description": "Worker \"google-pipelines-worker-e6c8bf8035860b2cd69488497bd602d8\" assigned in \"us-west3-c\" on a \"custom-1-2048\" machine", + | "timestamp": "2022-01-19T22:09:20.363771714Z", + | "workerAssigned": { + | "instance": "google-pipelines-worker-e6c8bf8035860b2cd69488497bd602d8", + | "machineType": "custom-1-2048", + | "zone": "us-west3-c" + | } + | }, + | { + | "delayed": { + | "cause": "generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", + | "metrics": [ + | "CPUS" + | ] + | }, + | "description": "A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", + | "timestamp": "2022-01-19T21:54:07.717679160Z" + | } + | ], + | "labels": { + | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", + | "wdl-task-name": "sleep" + | }, + | "pipeline": { + | "actions": [ + | { + | "commands": [ + | "-c", + | "printf '%s %s\\n' \"$(date -u '+%Y/%m/%d %H:%M:%S')\" Starting\\ container\\ setup." 
+ | ], + | "entrypoint": "/bin/sh", + | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", + | "labels": { + | "logging": "ContainerSetup" + | }, + | "timeout": "300s" + | } + | ], + | "environment": { + | "MEM_SIZE": "2.0", + | "MEM_UNIT": "GB" + | }, + | "resources": { + | "virtualMachine": { + | "bootDiskSizeGb": 12, + | "bootImage": "projects/cos-cloud/global/images/family/cos-stable", + | "disks": [ + | { + | "name": "local-disk", + | "sizeGb": 10, + | "type": "pd-ssd" + | } + | ], + | "labels": { + | "cromwell-workflow-id": "cromwell-ac888b4e-2e6b-4dcc-a537-3c6db7764037", + | "goog-pipelines-worker": "true", + | "wdl-task-name": "sleep" + | }, + | "machineType": "custom-1-2048", + | "network": {}, + | "nvidiaDriverVersion": "450.51.06", + | "serviceAccount": { + | "email": "centaur@broad-dsde-cromwell-dev.iam.gserviceaccount.com", + | "scopes": [ + | "https://www.googleapis.com/auth/compute", + | "https://www.googleapis.com/auth/devstorage.full_control", + | "https://www.googleapis.com/auth/cloudkms", + | "https://www.googleapis.com/auth/userinfo.email", + | "https://www.googleapis.com/auth/userinfo.profile", + | "https://www.googleapis.com/auth/monitoring.write", + | "https://www.googleapis.com/auth/bigquery", + | "https://www.googleapis.com/auth/cloud-platform" + | ] + | }, + | "volumes": [ + | { + | "persistentDisk": { + | "sizeGb": 10, + | "type": "pd-ssd" + | }, + | "volume": "local-disk" + | } + | ] + | }, + | "zones": [ + | "us-west3-a", + | "us-west3-b", + | "us-west3-c" + | ] + | }, + | "timeout": "604800s" + | }, + | "startTime": "2022-01-19T22:09:20.363771714Z" + | }, + | "name": "projects/1005074806481/locations/us-central1/operations/3874882033889365536" + |} + | + | + |""".stripMargin, + Running ), ("check that a job is no longer AwaitingCloudQuota once it finishes", - """{ - | "done": true, - | "metadata": { - | "@type": "type.googleapis.com/google.cloud.lifesciences.v2beta.Metadata", - | "createTime": "2022-01-19T19:17:13.175579Z", - | "endTime": "2022-01-19T19:37:22.764120036Z", - | "events": [ - | { - | "description": "Worker released", - | "timestamp": "2022-01-19T19:37:22.764120036Z", - | "workerReleased": { - | "instance": "google-pipelines-worker-8eff543e6858c204c8f67520aee75432", - | "zone": "us-west3-c" - | } - | }, - | { - | "containerStopped": { - | "actionId": 19 - | }, - | "description": "Stopped running shortened for test", - | "timestamp": "2022-01-19T19:37:19.822873814Z" - | }, - | { - | "description": "Started pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"", - | "pullStarted": { - | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine" - | }, - | "timestamp": "2022-01-19T19:32:55.709674372Z" - | }, - | { - | "description": "Worker \"google-pipelines-worker-8eff543e6858c204c8f67520aee75432\" assigned in \"us-west3-c\" on a \"custom-1-2048\" machine", - | "timestamp": "2022-01-19T19:32:19.204055448Z", - | "workerAssigned": { - | "instance": "google-pipelines-worker-8eff543e6858c204c8f67520aee75432", - | "machineType": "custom-1-2048", - | "zone": "us-west3-c" - | } - | }, - | { - | "delayed": { - | "cause": "generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", - | "metrics": [ - | "CPUS" - | ] - | }, - | "description": "A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 
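
Taken together, the two quota fixtures above pin down an ordering rule: a job reads as AwaitingCloudQuota only while its most recent event is a quota delay, and flips back to Running as soon as a later workerAssigned event arrives. A self-contained sketch of that rule; the event and status types here are simplified stand-ins, not the handler's actual model:

    import java.time.OffsetDateTime

    sealed trait Ev { def time: OffsetDateTime }
    final case class QuotaDelayed(time: OffsetDateTime) extends Ev
    final case class WorkerAssigned(time: OffsetDateTime) extends Ev
    final case class OtherEvent(time: OffsetDateTime) extends Ev

    sealed trait NonTerminal
    case object AwaitingQuota extends NonTerminal
    case object StillRunning extends NonTerminal

    // Only the newest event decides between the two non-terminal states.
    def nonTerminalStatus(events: List[Ev]): NonTerminal =
      events.sortBy(_.time).lastOption match {
        case Some(_: QuotaDelayed) => AwaitingQuota
        case _                     => StillRunning
      }
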
available) usage too high", - | "timestamp": "2022-01-19T19:17:14.948193837Z" - | } - | ], - | "labels": { - | "cromwell-workflow-id": "cromwell-058bff35-4a55-4c0f-9113-0885f4119cd9", - | "wdl-task-name": "sleep" - | }, - | "pipeline": { - | "actions": [ - | { - | "commands": [ - | "-c", - | "printf '%s %s\\n' \"$(date -u '+%Y/%m/%d %H:%M:%S')\" Starting\\ container\\ setup." - | ], - | "entrypoint": "/bin/sh", - | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", - | "labels": { - | "logging": "ContainerSetup" - | }, - | "timeout": "300s" - | }, - | { - | "alwaysRun": true, - | "commands": [ - | "-c", - | "python3 -c 'import base64; shortened for test" - | ], - | "entrypoint": "/bin/sh", - | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", - | "labels": { - | "tag": "Delocalization" - | } - | } - | ], - | "environment": { - | "MEM_SIZE": "2.0", - | "MEM_UNIT": "GB" - | }, - | "resources": { - | "virtualMachine": { - | "bootDiskSizeGb": 12, - | "bootImage": "projects/cos-cloud/global/images/family/cos-stable", - | "disks": [ - | { - | "name": "local-disk", - | "sizeGb": 10, - | "type": "pd-ssd" - | } - | ], - | "labels": { - | "cromwell-workflow-id": "cromwell-058bff35-4a55-4c0f-9113-0885f4119cd9", - | "goog-pipelines-worker": "true", - | "wdl-task-name": "sleep" - | }, - | "machineType": "custom-1-2048", - | "network": {}, - | "nvidiaDriverVersion": "450.51.06", - | "serviceAccount": { - | "email": "centaur@broad-dsde-cromwell-dev.iam.gserviceaccount.com", - | "scopes": [ - | "https://www.googleapis.com/auth/compute", - | "https://www.googleapis.com/auth/devstorage.full_control", - | "https://www.googleapis.com/auth/cloudkms", - | "https://www.googleapis.com/auth/userinfo.email", - | "https://www.googleapis.com/auth/userinfo.profile", - | "https://www.googleapis.com/auth/monitoring.write", - | "https://www.googleapis.com/auth/bigquery", - | "https://www.googleapis.com/auth/cloud-platform" - | ] - | }, - | "volumes": [ - | { - | "persistentDisk": { - | "sizeGb": 10, - | "type": "pd-ssd" - | }, - | "volume": "local-disk" - | } - | ] - | }, - | "zones": [ - | "us-west3-a", - | "us-west3-b", - | "us-west3-c" - | ] - | }, - | "timeout": "604800s" - | }, - | "startTime": "2022-01-19T19:32:19.204055448Z" - | }, - | "name": "projects/1005074806481/locations/us-central1/operations/5001350794958839237", - | "response": { - | "@type": "type.googleapis.com/cloud.lifesciences.pipelines.RunPipelineResponse" - | } - |}""".stripMargin, - Success(List( - new ExecutionEvent("waiting for quota", OffsetDateTime.parse("2022-01-19T19:17:13.175579Z"), None), - new ExecutionEvent("Worker released", OffsetDateTime.parse("2022-01-19T19:37:22.764120036Z"), None), - new ExecutionEvent("Stopped running shortened for test", OffsetDateTime.parse("2022-01-19T19:37:19.822873814Z"), None), - new ExecutionEvent("Started pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"", OffsetDateTime.parse("2022-01-19T19:32:55.709674372Z"), Option("Pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"")), - new ExecutionEvent("Worker \"google-pipelines-worker-8eff543e6858c204c8f67520aee75432\" assigned in \"us-west3-c\" on a \"custom-1-2048\" machine", OffsetDateTime.parse("2022-01-19T19:32:19.204055448Z"), None), - new ExecutionEvent("A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", 
OffsetDateTime.parse("2022-01-19T19:17:14.948193837Z"), None), - new ExecutionEvent("Complete in GCE / Cromwell Poll Interval", OffsetDateTime.parse("2022-01-19T19:37:22.764120036Z"), None) - ), Option("custom-1-2048"), Option("us-west3-c"), Option("google-pipelines-worker-8eff543e6858c204c8f67520aee75432")) + """{ + | "done": true, + | "metadata": { + | "@type": "type.googleapis.com/google.cloud.lifesciences.v2beta.Metadata", + | "createTime": "2022-01-19T19:17:13.175579Z", + | "endTime": "2022-01-19T19:37:22.764120036Z", + | "events": [ + | { + | "description": "Worker released", + | "timestamp": "2022-01-19T19:37:22.764120036Z", + | "workerReleased": { + | "instance": "google-pipelines-worker-8eff543e6858c204c8f67520aee75432", + | "zone": "us-west3-c" + | } + | }, + | { + | "containerStopped": { + | "actionId": 19 + | }, + | "description": "Stopped running shortened for test", + | "timestamp": "2022-01-19T19:37:19.822873814Z" + | }, + | { + | "description": "Started pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"", + | "pullStarted": { + | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine" + | }, + | "timestamp": "2022-01-19T19:32:55.709674372Z" + | }, + | { + | "description": "Worker \"google-pipelines-worker-8eff543e6858c204c8f67520aee75432\" assigned in \"us-west3-c\" on a \"custom-1-2048\" machine", + | "timestamp": "2022-01-19T19:32:19.204055448Z", + | "workerAssigned": { + | "instance": "google-pipelines-worker-8eff543e6858c204c8f67520aee75432", + | "machineType": "custom-1-2048", + | "zone": "us-west3-c" + | } + | }, + | { + | "delayed": { + | "cause": "generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", + | "metrics": [ + | "CPUS" + | ] + | }, + | "description": "A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", + | "timestamp": "2022-01-19T19:17:14.948193837Z" + | } + | ], + | "labels": { + | "cromwell-workflow-id": "cromwell-058bff35-4a55-4c0f-9113-0885f4119cd9", + | "wdl-task-name": "sleep" + | }, + | "pipeline": { + | "actions": [ + | { + | "commands": [ + | "-c", + | "printf '%s %s\\n' \"$(date -u '+%Y/%m/%d %H:%M:%S')\" Starting\\ container\\ setup." 
+ | ], + | "entrypoint": "/bin/sh", + | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", + | "labels": { + | "logging": "ContainerSetup" + | }, + | "timeout": "300s" + | }, + | { + | "alwaysRun": true, + | "commands": [ + | "-c", + | "python3 -c 'import base64; shortened for test" + | ], + | "entrypoint": "/bin/sh", + | "imageUri": "gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine", + | "labels": { + | "tag": "Delocalization" + | } + | } + | ], + | "environment": { + | "MEM_SIZE": "2.0", + | "MEM_UNIT": "GB" + | }, + | "resources": { + | "virtualMachine": { + | "bootDiskSizeGb": 12, + | "bootImage": "projects/cos-cloud/global/images/family/cos-stable", + | "disks": [ + | { + | "name": "local-disk", + | "sizeGb": 10, + | "type": "pd-ssd" + | } + | ], + | "labels": { + | "cromwell-workflow-id": "cromwell-058bff35-4a55-4c0f-9113-0885f4119cd9", + | "goog-pipelines-worker": "true", + | "wdl-task-name": "sleep" + | }, + | "machineType": "custom-1-2048", + | "network": {}, + | "nvidiaDriverVersion": "450.51.06", + | "serviceAccount": { + | "email": "centaur@broad-dsde-cromwell-dev.iam.gserviceaccount.com", + | "scopes": [ + | "https://www.googleapis.com/auth/compute", + | "https://www.googleapis.com/auth/devstorage.full_control", + | "https://www.googleapis.com/auth/cloudkms", + | "https://www.googleapis.com/auth/userinfo.email", + | "https://www.googleapis.com/auth/userinfo.profile", + | "https://www.googleapis.com/auth/monitoring.write", + | "https://www.googleapis.com/auth/bigquery", + | "https://www.googleapis.com/auth/cloud-platform" + | ] + | }, + | "volumes": [ + | { + | "persistentDisk": { + | "sizeGb": 10, + | "type": "pd-ssd" + | }, + | "volume": "local-disk" + | } + | ] + | }, + | "zones": [ + | "us-west3-a", + | "us-west3-b", + | "us-west3-c" + | ] + | }, + | "timeout": "604800s" + | }, + | "startTime": "2022-01-19T19:32:19.204055448Z" + | }, + | "name": "projects/1005074806481/locations/us-central1/operations/5001350794958839237", + | "response": { + | "@type": "type.googleapis.com/cloud.lifesciences.pipelines.RunPipelineResponse" + | } + |}""".stripMargin, + Success( + List( + new ExecutionEvent("waiting for quota", OffsetDateTime.parse("2022-01-19T19:17:13.175579Z"), None), + new ExecutionEvent("Worker released", OffsetDateTime.parse("2022-01-19T19:37:22.764120036Z"), None), + new ExecutionEvent("Stopped running shortened for test", + OffsetDateTime.parse("2022-01-19T19:37:19.822873814Z"), + None + ), + new ExecutionEvent( + "Started pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"", + OffsetDateTime.parse("2022-01-19T19:32:55.709674372Z"), + Option("Pulling \"gcr.io/google.com/cloudsdktool/cloud-sdk:354.0.0-alpine\"") + ), + new ExecutionEvent( + "Worker \"google-pipelines-worker-8eff543e6858c204c8f67520aee75432\" assigned in \"us-west3-c\" on a \"custom-1-2048\" machine", + OffsetDateTime.parse("2022-01-19T19:32:19.204055448Z"), + None + ), + new ExecutionEvent( + "A resource limit has delayed the operation: generic::resource_exhausted: allocating: selecting resources: selecting region and zone: no available zones: us-west3: 1 CPUS (0/1 available) usage too high", + OffsetDateTime.parse("2022-01-19T19:17:14.948193837Z"), + None + ), + new ExecutionEvent("Complete in GCE / Cromwell Poll Interval", + OffsetDateTime.parse("2022-01-19T19:37:22.764120036Z"), + None + ) + ), + Option("custom-1-2048"), + Option("us-west3-c"), + Option("google-pipelines-worker-8eff543e6858c204c8f67520aee75432") + ) ) ) @@ -662,7 +695,7 @@ class 
GetRequestHandlerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma // Operation responses could come back as null. Handle it and don't crash. // https://github.com/googleapis/google-http-java-client/blob/v1.28.0/google-http-client/src/main/java/com/google/api/client/http/HttpResponse.java#L456-L458 val operation = - Option(json).map(GoogleAuthMode.jsonFactory.createJsonParser).map(_.parse(classOf[Operation])).orNull + Option(json).map(GoogleAuthMode.jsonFactory.createJsonParser).map(_.parse(classOf[Operation])).orNull val runStatus = requestHandler.interpretOperationStatus(operation, pollingRequest) runStatus should be(expectedStatus) diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala index 0cf90bf9f1d..6e8bfba564e 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala @@ -57,8 +57,9 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut * @param inputs The customized inputs to this task. */ def writeTaskScript(script: Path, taskName: String, inputs: WorkflowCoercedInputs): Unit = { - val task = configInitializationData.wdlNamespace.findTask(taskName). - getOrElse(throw new RuntimeException(s"Unable to find task $taskName")) + val task = configInitializationData.wdlNamespace + .findTask(taskName) + .getOrElse(throw new RuntimeException(s"Unable to find task $taskName")) val taskDefinition = task.toWomTaskDefinition.toTry.get @@ -103,8 +104,8 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut if !inputs.contains(optional.localName.value) } yield optional -> WomOptionalValue.none(optional.womType.memberType) - - val runtimeEnvironment = RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes, jobPaths)(standardParams.minimumRuntimeSettings) + val runtimeEnvironment = + RuntimeEnvironmentBuilder(jobDescriptor.runtimeAttributes, jobPaths)(standardParams.minimumRuntimeSettings) val allInputs = providedWomInputs ++ optionalsForciblyInitializedToNone val womInstantiation = taskDefinition.instantiateCommand(allInputs, NoIoFunctionSet, identity, runtimeEnvironment) @@ -122,14 +123,15 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut * The inputs that are not specified by the config, that will be passed into a command for both submit and * submit-docker. 
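
One detail from the GetRequestHandlerSpec hunk above is worth a gloss: the Option(json).map(GoogleAuthMode.jsonFactory.createJsonParser).map(_.parse(classOf[Operation])).orNull chain exists because, per the google-http-client source linked in the comment, a response body can legitimately be null. Stripped to its shape, the idiom is a null-tolerant parse; parseOrNull below is a hypothetical helper, not Cromwell code:

    // A null body short-circuits through Option instead of throwing.
    def parseOrNull[A >: Null](body: String)(parse: String => A): A =
      Option(body).map(parse).orNull
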
*/ - private lazy val standardInputs: WorkflowCoercedInputs = { + private lazy val standardInputs: WorkflowCoercedInputs = Map( JobNameInput -> WomString(jobName), CwdInput -> WomString(jobPaths.callRoot.pathAsString) ) - } - private [config] def dockerCidInputValue: WomString = WomString(jobPaths.callExecutionRoot.resolve(jobPaths.dockerCid).pathAsString) + private[config] def dockerCidInputValue: WomString = WomString( + jobPaths.callExecutionRoot.resolve(jobPaths.dockerCid).pathAsString + ) /** * The inputs that are not specified by the config, that will be passed into a command for either submit or @@ -142,7 +144,7 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut DockerStdoutInput -> WomString(jobPathsWithDocker.toDockerPath(standardPaths.output).pathAsString), DockerStderrInput -> WomString(jobPathsWithDocker.toDockerPath(standardPaths.error).pathAsString), DockerScriptInput -> WomString(jobPathsWithDocker.toDockerPath(jobPaths.script).pathAsString), - DockerCidInput -> dockerCidInputValue, + DockerCidInput -> dockerCidInputValue ), Map.empty ) @@ -151,7 +153,7 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut StdoutInput -> WomString(standardPaths.output.pathAsString), StderrInput -> WomString(standardPaths.error.pathAsString), ScriptInput -> WomString(jobPaths.script.pathAsString), - JobShellInput -> WomString(jobShell), + JobShellInput -> WomString(jobShell) )).toMap } @@ -163,7 +165,8 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut val declarationValidations = configInitializationData.declarationValidations val inputOptions = declarationValidations map { // Is it always the right thing to pass the Docker hash to a config backend? What if it can't use hashes? - case declarationValidation if declarationValidation.key == DockerValidation.instance.key && jobDescriptor.maybeCallCachingEligible.dockerHash.isDefined => + case declarationValidation + if declarationValidation.key == DockerValidation.instance.key && jobDescriptor.maybeCallCachingEligible.dockerHash.isDefined => val dockerHash = jobDescriptor.maybeCallCachingEligible.dockerHash.get Option(declarationValidation.key -> WomString(dockerHash)) case declarationValidation => @@ -175,7 +178,8 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut } // `runtimeAttributeInputs` has already adjusted for the case of a `JobDescriptor` with `DockerWithHash`. - override lazy val dockerImageUsed: Option[String] = runtimeAttributeInputs.get(DockerValidation.instance.key).map(_.valueString) + override lazy val dockerImageUsed: Option[String] = + runtimeAttributeInputs.get(DockerValidation.instance.key).map(_.valueString) /** * Generates a command for a job id, using a config task. @@ -185,7 +189,11 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut * @param task The config task that defines the command. * @return A runnable command. 
*/ - protected def jobScriptArgs(job: StandardAsyncJob, suffix: String, task: String, extraInputs: Map[String, WomValue] = Map.empty): SharedFileSystemCommand = { + protected def jobScriptArgs(job: StandardAsyncJob, + suffix: String, + task: String, + extraInputs: Map[String, WomValue] = Map.empty + ): SharedFileSystemCommand = { val script = jobPaths.script.plusExt(suffix) writeTaskScript(script, task, Map(JobIdInput -> WomString(job.jobId)) ++ extraInputs) SharedFileSystemCommand("/bin/bash", script) @@ -198,12 +206,12 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut * @param standardParams Params for running a shared file system job. */ class BackgroundConfigAsyncJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends ConfigAsyncJobExecutionActor with BackgroundAsyncJobExecutionActor { + extends ConfigAsyncJobExecutionActor + with BackgroundAsyncJobExecutionActor { - override def killArgs(job: StandardAsyncJob): SharedFileSystemCommand = { + override def killArgs(job: StandardAsyncJob): SharedFileSystemCommand = if (isDockerRun) jobScriptArgs(job, "kill", KillDockerTask, Map(DockerCidInput -> dockerCidInputValue)) else super[BackgroundAsyncJobExecutionActor].killArgs(job) - } } /** @@ -213,7 +221,7 @@ class BackgroundConfigAsyncJobExecutionActor(override val standardParams: Standa * @param standardParams Params for running a shared file system job. */ class DispatchedConfigAsyncJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends ConfigAsyncJobExecutionActor { + extends ConfigAsyncJobExecutionActor { lazy val jobIdRegexString = configurationDescriptor.backendConfig.getString(JobIdRegexConfig) @@ -234,9 +242,8 @@ class DispatchedConfigAsyncJobExecutionActor(override val standardParams: Standa * @param job The job to check. * @return A command that checks if the job is alive. */ - override def checkAliveArgs(job: StandardAsyncJob): SharedFileSystemCommand = { + override def checkAliveArgs(job: StandardAsyncJob): SharedFileSystemCommand = jobScriptArgs(job, "check", CheckAliveTask) - } /** * Kills the job using the kill command from the config. @@ -244,19 +251,23 @@ class DispatchedConfigAsyncJobExecutionActor(override val standardParams: Standa * @param job The job id to kill. * @return A command that may be used to kill the job. 
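
For context on the config keys these actors read (SubmitConfig, KillConfig, CheckAliveConfig, JobIdRegexConfig and friends), here is a hypothetical stanza in the shape described by Cromwell's HOCON backend documentation; the key names and the SGE-flavoured commands are illustrative, not taken from this patch:

    import com.typesafe.config.ConfigFactory

    // ${...} placeholders stay literal: HOCON does not substitute inside
    // quoted strings, and Cromwell fills them in when the task is generated.
    val backendConfig = ConfigFactory.parseString(
      """
        |run-in-background = false
        |job-id-regex = "(\\d+)"
        |submit = "qsub -terse -N ${job_name} -wd ${cwd} ${script}"
        |kill = "qdel ${job_id}"
        |check-alive = "qstat -j ${job_id}"
      """.stripMargin
    )

With run-in-background = false, stdout from the submit script is matched against job-id-regex to recover the external job id, which is what DispatchedConfigAsyncJobExecutionActor.getJob does further down.
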
*/ - override def killArgs(job: StandardAsyncJob): SharedFileSystemCommand = { + override def killArgs(job: StandardAsyncJob): SharedFileSystemCommand = if (isDockerRun) jobScriptArgs(job, "kill", KillDockerTask, Map(DockerCidInput -> dockerCidInputValue)) else jobScriptArgs(job, "kill", KillTask) - } protected lazy val exitCodeTimeout: Option[Long] = { val timeout = configurationDescriptor.backendConfig.as[Option[Long]](ExitCodeTimeoutConfig) timeout match { case Some(x) => - jobLogger.info("Cromwell will watch for an rc file *and* double-check every {} seconds to make sure this job is still alive", x) + jobLogger.info( + "Cromwell will watch for an rc file *and* double-check every {} seconds to make sure this job is still alive", + x + ) if (x < 0) throw new IllegalArgumentException(s"config value '$ExitCodeTimeoutConfig' must be 0 or higher") case None => - jobLogger.info("Cromwell will watch for an rc file but will *not* double-check whether this job is actually alive (unless Cromwell restarts)") + jobLogger.info( + "Cromwell will watch for an rc file but will *not* double-check whether this job is actually alive (unless Cromwell restarts)" + ) } timeout } @@ -286,7 +297,10 @@ class DispatchedConfigAsyncJobExecutionActor(override val standardParams: Standa // The job has stopped but we don't have an RC yet. We'll wait one more 'timeout' for the RC to arrive: SharedFileSystemJobWaitingForReturnCode(nextTimeout) case Failure(e) => - log.error(e, s"Failed to check status for ${handle.jobDescriptor.key.tag} using command: ${checkAliveArgs(handle.pendingJob)}") + log.error( + e, + s"Failed to check status for ${handle.jobDescriptor.key.tag} using command: ${checkAliveArgs(handle.pendingJob)}" + ) SharedFileSystemJobRunning(nextTimeout) } } else { @@ -300,8 +314,11 @@ class DispatchedConfigAsyncJobExecutionActor(override val standardParams: Standa // `isAlive` is not called anymore from this point // If exit-code-timeout is set in the config cromwell will create a fake exitcode file - val backupError = "??? (!! Programmer Error: It should be impossible to give up on 'waiting' without having set a maximum wait timeout. Please report this as a bug in the Cromwell Github repository !!)" - jobLogger.error(s"Return file not found after ${exitCodeTimeout.getOrElse(backupError)} seconds, assuming external kill") + val backupError = + "??? (!! Programmer Error: It should be impossible to give up on 'waiting' without having set a maximum wait timeout. Please report this as a bug in the Cromwell Github repository !!)" + jobLogger.error( + s"Return file not found after ${exitCodeTimeout.getOrElse(backupError)} seconds, assuming external kill" + ) val returnCodeTemp = jobPaths.returnCode.plusExt("kill") @@ -323,7 +340,9 @@ class DispatchedConfigAsyncJobExecutionActor(override val standardParams: Standa // Essentially, this covers the rare race condition whereby the task completes between starting to write the // fake RC, and trying to copy it: - log.error(s"An RC file appeared at ${jobPaths.returnCode} whilst trying to copy a fake exitcode file from ${returnCodeTemp}. Not to worry: the real file should now be picked up on the next poll.") + log.error( + s"An RC file appeared at ${jobPaths.returnCode} whilst trying to copy a fake exitcode file from ${returnCodeTemp}. Not to worry: the real file should now be picked up on the next poll." 
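
The exit-code-timeout branching above is easier to follow stripped of actor plumbing. A sketch of the per-poll decision, under the behaviour the code and log messages describe: an rc file always wins; a live process keeps polling; a vanished process gets one more timeout for the rc to land before an external kill is assumed and a fake rc is written:

    sealed trait PollDecision
    case object JobDone extends PollDecision            // rc file present
    case object KeepPolling extends PollDecision        // process still alive
    case object AwaitReturnCode extends PollDecision    // process gone; rc may still land
    case object AssumeExternalKill extends PollDecision // grace period expired; fake an rc

    def decide(rcExists: Boolean, processAlive: Boolean, graceExpired: Boolean): PollDecision =
      if (rcExists) JobDone
      else if (processAlive) KeepPolling
      else if (!graceExpired) AwaitReturnCode
      else AssumeExternalKill
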
+ ) // Delete the fake file since it's not needed: returnCodeTemp.delete(true) @@ -347,7 +366,6 @@ class DispatchedConfigAsyncJobExecutionActor(override val standardParams: Standa override def isTerminal(runStatus: SharedFileSystemRunState): Boolean = runStatus.terminal } - object DispatchedConfigAsyncJobExecutionActor { def getJob(stdoutContent: String, stderr: Path, jobIdRegexString: String): StandardAsyncJob = { val jobIdRegex = jobIdRegexString.r @@ -355,9 +373,10 @@ object DispatchedConfigAsyncJobExecutionActor { jobIdRegex findFirstIn output match { case Some(jobIdRegex(jobId)) => StandardAsyncJob(jobId) case _ => - throw new RuntimeException("Could not find job ID from stdout file." + - s"Check the stderr file for possible errors: $stderr") + throw new RuntimeException( + "Could not find job ID from stdout file." + + s"Check the stderr file for possible errors: $stderr" + ) } } } - diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashing.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashing.scala index 8676479883a..e3a62b549c6 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashing.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashing.scala @@ -9,9 +9,9 @@ import scala.language.postfixOps import scala.util.Try private[config] object ConfigBackendFileHashing { - def getMd5Result(request: SingleFileHashRequest, log: LoggingAdapter): Try[String] ={ - val path = DefaultPathBuilder.build(request.file.valueString) recover { - case failure => throw new RuntimeException("Failed to construct path to hash", failure) + def getMd5Result(request: SingleFileHashRequest, log: LoggingAdapter): Try[String] = { + val path = DefaultPathBuilder.build(request.file.valueString) recover { case failure => + throw new RuntimeException("Failed to construct path to hash", failure) } get tryWithResource(() => path.newInputStream) { inputStream => diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashingActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashingActor.scala index a406730afa6..d18d828eed8 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashingActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendFileHashingActor.scala @@ -13,14 +13,16 @@ object ConfigBackendFileHashingActor { def props(standardParams: StandardFileHashingActorParams) = Props(new ConfigBackendFileHashingActor(standardParams)) } -class ConfigBackendFileHashingActor(standardParams: StandardFileHashingActorParams) extends StandardFileHashingActor(standardParams) { +class ConfigBackendFileHashingActor(standardParams: StandardFileHashingActorParams) + extends StandardFileHashingActor(standardParams) { override val ioCommandBuilder = GcsBatchCommandBuilder - - lazy val hashingStrategy: ConfigHashingStrategy = { - configurationDescriptor.backendConfig.as[Option[Config]]("filesystems.local.caching") map ConfigHashingStrategy.apply getOrElse ConfigHashingStrategy.defaultStrategy - } - + + lazy val hashingStrategy: ConfigHashingStrategy = + configurationDescriptor.backendConfig.as[Option[Config]]( + "filesystems.local.caching" + ) map ConfigHashingStrategy.apply getOrElse ConfigHashingStrategy.defaultStrategy + override def 
customHashStrategy(fileRequest: SingleFileHashRequest): Option[Try[String]] = { log.debug(hashingStrategy.toString) Option(hashingStrategy.getHash(fileRequest, log)) diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendLifecycleActorFactory.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendLifecycleActorFactory.scala index 9ce1538cb00..1211e39fc80 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendLifecycleActorFactory.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigBackendLifecycleActorFactory.scala @@ -14,22 +14,26 @@ import org.slf4j.{Logger, LoggerFactory} * @param configurationDescriptor The config information. */ class ConfigBackendLifecycleActorFactory(val name: String, val configurationDescriptor: BackendConfigurationDescriptor) - extends SharedFileSystemBackendLifecycleActorFactory { + extends SharedFileSystemBackendLifecycleActorFactory { lazy val logger: Logger = LoggerFactory.getLogger(getClass) - lazy val hashingStrategy: ConfigHashingStrategy = { - configurationDescriptor.backendConfig.as[Option[Config]]("filesystems.local.caching") map ConfigHashingStrategy.apply getOrElse ConfigHashingStrategy.defaultStrategy - } + lazy val hashingStrategy: ConfigHashingStrategy = + configurationDescriptor.backendConfig.as[Option[Config]]( + "filesystems.local.caching" + ) map ConfigHashingStrategy.apply getOrElse ConfigHashingStrategy.defaultStrategy override lazy val initializationActorClass: Class[ConfigInitializationActor] = classOf[ConfigInitializationActor] override lazy val asyncExecutionActorClass: Class[_ <: ConfigAsyncJobExecutionActor] = { - val runInBackground = configurationDescriptor.backendConfig.as[Option[Boolean]](RunInBackgroundConfig).getOrElse(false) + val runInBackground = + configurationDescriptor.backendConfig.as[Option[Boolean]](RunInBackgroundConfig).getOrElse(false) if (runInBackground) classOf[BackgroundConfigAsyncJobExecutionActor] else classOf[DispatchedConfigAsyncJobExecutionActor] } - override lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option(classOf[ConfigBackendFileHashingActor]) + override lazy val fileHashingActorClassOption: Option[Class[_ <: StandardFileHashingActor]] = Option( + classOf[ConfigBackendFileHashingActor] + ) } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategy.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategy.scala index 1478819f72a..2c70739af57 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategy.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategy.scala @@ -20,24 +20,24 @@ object ConfigHashingStrategy { val defaultStrategy = HashFileMd5Strategy(checkSiblingMd5 = false) def apply(hashingConfig: Config): ConfigHashingStrategy = { - val checkSiblingMd5 = hashingConfig.as[Option[Boolean]]("check-sibling-md5").getOrElse(false) - - // Fingerprint strategy by default checks the first 10 MiB (10485760 bytes) for performance reasons. - // 100 MB will take to much time on network file systems. 1 MB might not be unique enough. - // The value is user configurable. 
- lazy val fingerprintSize = hashingConfig.as[Option[Long]]("fingerprint-size").getOrElse(10L * 1024 * 1024) - - hashingConfig.as[Option[String]]("hashing-strategy").getOrElse("file") match { - case "path" => HashPathStrategy(checkSiblingMd5) - case "file" => HashFileMd5Strategy(checkSiblingMd5) - case "md5" => HashFileMd5Strategy(checkSiblingMd5) - case "path+modtime" => HashPathModTimeStrategy(checkSiblingMd5) - case "xxh64" => HashFileXxH64Strategy(checkSiblingMd5) - case "fingerprint" => FingerprintStrategy(checkSiblingMd5, fingerprintSize) - case what => - logger.warn(s"Unrecognized hashing strategy $what.") - HashPathStrategy(checkSiblingMd5) - } + val checkSiblingMd5 = hashingConfig.as[Option[Boolean]]("check-sibling-md5").getOrElse(false) + + // Fingerprint strategy by default checks the first 10 MiB (10485760 bytes) for performance reasons. + // 100 MB will take to much time on network file systems. 1 MB might not be unique enough. + // The value is user configurable. + lazy val fingerprintSize = hashingConfig.as[Option[Long]]("fingerprint-size").getOrElse(10L * 1024 * 1024) + + hashingConfig.as[Option[String]]("hashing-strategy").getOrElse("file") match { + case "path" => HashPathStrategy(checkSiblingMd5) + case "file" => HashFileMd5Strategy(checkSiblingMd5) + case "md5" => HashFileMd5Strategy(checkSiblingMd5) + case "path+modtime" => HashPathModTimeStrategy(checkSiblingMd5) + case "xxh64" => HashFileXxH64Strategy(checkSiblingMd5) + case "fingerprint" => FingerprintStrategy(checkSiblingMd5, fingerprintSize) + case what => + logger.warn(s"Unrecognized hashing strategy $what.") + HashPathStrategy(checkSiblingMd5) + } } } @@ -53,7 +53,8 @@ abstract class ConfigHashingStrategy { def usingStandardInitData(initData: StandardInitializationData) = { val pathBuilders = initData.workflowPaths.pathBuilders val file = PathFactory.buildPath(request.file.valueString, pathBuilders).followSymbolicLinks - if (!file.exists) Failure(new FileNotFoundException(s"Cannot hash file $file because it can't be found")) else { + if (!file.exists) Failure(new FileNotFoundException(s"Cannot hash file $file because it can't be found")) + else { if (checkSiblingMd5) { precomputedMd5(file) match { case Some(md5) => Try(md5.contentAsString.trim) @@ -65,7 +66,8 @@ abstract class ConfigHashingStrategy { request.initializationData match { case Some(initData: StandardInitializationData) => usingStandardInitData(initData) - case _ => Failure(new IllegalArgumentException("Need SharedFileSystemBackendInitializationData to calculate hash.")) + case _ => + Failure(new IllegalArgumentException("Need SharedFileSystemBackendInitializationData to calculate hash.")) } } @@ -74,54 +76,51 @@ abstract class ConfigHashingStrategy { if (md5.exists) Option(md5) else None } - override def toString: String = { + override def toString: String = s"Call caching hashing strategy: $checkSiblingMessage$description." - } } final case class HashPathStrategy(checkSiblingMd5: Boolean) extends ConfigHashingStrategy { - override def hash(file: Path): Try[String] = { + override def hash(file: Path): Try[String] = Try(DigestUtils.md5Hex(file.toAbsolutePath.pathAsString)) - } override val description = "hash file path" } final case class HashPathModTimeStrategy(checkSiblingMd5: Boolean) extends ConfigHashingStrategy { - override def hash(file: Path): Try[String] = { + override def hash(file: Path): Try[String] = // Add the last modified date here to make sure these are the files we are looking for. 
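
Since ConfigHashingStrategy.apply reads hashing-strategy, check-sibling-md5 and fingerprint-size straight off the supplied Config, a short usage sketch follows; the stanza normally lives at filesystems.local.caching, as ConfigBackendFileHashingActor above shows:

    import com.typesafe.config.ConfigFactory

    val caching = ConfigFactory.parseString(
      """
        |hashing-strategy = "fingerprint"
        |fingerprint-size = 1048576 // 1 MiB instead of the 10 MiB default
        |check-sibling-md5 = false
      """.stripMargin
    )

    // Yields FingerprintStrategy(checkSiblingMd5 = false, fingerprintSize = 1048576).
    val strategy = ConfigHashingStrategy(caching)
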
Try(DigestUtils.md5Hex(file.toAbsolutePath.pathAsString + file.lastModifiedTime.toString)) - } override val description = "hash file path and last modified time" } final case class HashFileMd5Strategy(checkSiblingMd5: Boolean) extends ConfigHashingStrategy { - override protected def hash(file: Path): Try[String] = { - tryWithResource(() => file.newInputStream) { DigestUtils.md5Hex } - } + override protected def hash(file: Path): Try[String] = + tryWithResource(() => file.newInputStream)(DigestUtils.md5Hex) override val description = "hash file content with md5" } final case class HashFileXxH64Strategy(checkSiblingMd5: Boolean) extends ConfigHashingStrategy { - override protected def hash(file: Path): Try[String] = { - tryWithResource(() => file.newInputStream) {HashFileXxH64StrategyMethods.xxh64sum(_)} - } + override protected def hash(file: Path): Try[String] = + tryWithResource(() => file.newInputStream)(HashFileXxH64StrategyMethods.xxh64sum(_)) override val description = "hash file content with xxh64" } final case class FingerprintStrategy(checkSiblingMd5: Boolean, fingerprintSize: Long) extends ConfigHashingStrategy { - override protected def hash(file: Path): Try[String] = { + override protected def hash(file: Path): Try[String] = Try { // Calculate the xxh64 hash of last modified time and filesize. These are NOT added, as it will lead to loss of // information. Instead their hexstrings are concatenated and then hashed. - HashFileXxH64StrategyMethods.xxh64sumString(file.lastModifiedTime.toEpochMilli.toHexString + - file.size.toHexString) + - HashFileXxH64StrategyMethods.xxh64sum(file.newInputStream, maxSize = fingerprintSize) - } + HashFileXxH64StrategyMethods.xxh64sumString( + file.lastModifiedTime.toEpochMilli.toHexString + + file.size.toHexString + ) + + HashFileXxH64StrategyMethods.xxh64sum(file.newInputStream, maxSize = fingerprintSize) } - override val description = "fingerprint the file with last modified time, size and a xxh64 hash of the first part of the file" + override val description = + "fingerprint the file with last modified time, size and a xxh64 hash of the first part of the file" } object HashFileXxH64StrategyMethods { @@ -139,17 +138,17 @@ object HashFileXxH64StrategyMethods { def xxh64sum(inputStream: InputStream, bufferSize: Int = defaultBufferSize, maxSize: Long = Long.MaxValue, - seed: Long = 0L): String = { + seed: Long = 0L + ): String = { val hasher = xxhashFactory.newStreamingHash64(seed) val buffer: Array[Byte] = new Array[Byte](bufferSize) var byteCounter: Long = 0 - try { + try while (inputStream.available() > 0 && byteCounter < maxSize) { val length: Int = inputStream.read(buffer) hasher.update(buffer, 0, length) byteCounter += length } - } finally inputStream.close() // Long.toHexString does not add leading zero's f"%%16s".format(hasher.getValue.toHexString).replace(" ", "0") @@ -165,4 +164,3 @@ object HashFileXxH64StrategyMethods { f"%%16s".format(hash.toHexString).replace(" ", "0") } } - diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActor.scala index ea34521c8a8..b1e30ab7506 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActor.scala @@ -2,7 +2,11 @@ package cromwell.backend.impl.sfs.config import cromwell.backend.io.WorkflowPaths 
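
The padding expression in the two xxh64 helpers above deserves a gloss: f"%%16s" is simply the literal format string "%16s", so the idiom right-justifies the hex digits in a 16-character field and then turns the padding spaces into zeros, compensating for Long.toHexString dropping leading zeros. A tiny worked example:

    val h = 0x2aL.toHexString                        // "2a"
    val padded = "%16s".format(h).replace(" ", "0")  // "000000000000002a"

The replace is safe here because hex digits never contain spaces.
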
import cromwell.backend.sfs._ -import cromwell.backend.standard.{StandardInitializationActorParams, StandardInitializationData, StandardValidatedRuntimeAttributesBuilder} +import cromwell.backend.standard.{ + StandardInitializationActorParams, + StandardInitializationData, + StandardValidatedRuntimeAttributesBuilder +} import wdl.draft2.model.WdlNamespace import scala.concurrent.Future @@ -17,14 +21,14 @@ import scala.concurrent.Future * @param declarationValidations A collection of validations for each declaration. * @param wdlNamespace A collection of WDL tasks for submitting, killing, etc. */ -class ConfigInitializationData -( - workflowPaths: WorkflowPaths, - runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, - val declarationValidations: Seq[DeclarationValidation], - val wdlNamespace: WdlNamespace) - extends StandardInitializationData(workflowPaths, runtimeAttributesBuilder, - classOf[SharedFileSystemExpressionFunctions]) +class ConfigInitializationData(workflowPaths: WorkflowPaths, + runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, + val declarationValidations: Seq[DeclarationValidation], + val wdlNamespace: WdlNamespace +) extends StandardInitializationData(workflowPaths, + runtimeAttributesBuilder, + classOf[SharedFileSystemExpressionFunctions] + ) /** * Extends the SharedFileSystemInitializationActor to create an instance of the ConfigInitializationData. @@ -34,13 +38,14 @@ class ConfigInitializationData * @param params Parameters to create an initialization actor. */ class ConfigInitializationActor(params: StandardInitializationActorParams) - extends SharedFileSystemInitializationActor(params) { + extends SharedFileSystemInitializationActor(params) { private lazy val configWdlNamespace = new ConfigWdlNamespace(params.configurationDescriptor.backendConfig) - lazy val declarationValidations: Seq[DeclarationValidation] = { - DeclarationValidation.fromDeclarations(configWdlNamespace.runtimeDeclarations, configWdlNamespace.callCachedRuntimeAttributes) - } + lazy val declarationValidations: Seq[DeclarationValidation] = + DeclarationValidation.fromDeclarations(configWdlNamespace.runtimeDeclarations, + configWdlNamespace.callCachedRuntimeAttributes + ) override lazy val initializationData: Future[ConfigInitializationData] = { val wdlNamespace = configWdlNamespace.wdlNamespace diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigWdlNamespace.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigWdlNamespace.scala index e5d84f38331..e06ebf66e84 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigWdlNamespace.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigWdlNamespace.scala @@ -20,22 +20,27 @@ class ConfigWdlNamespace(backendConfig: Config) { private val configRuntimeAttributes = backendConfig.as[Option[String]](RuntimeAttributesConfig).getOrElse("") private val submitCommandOption = backendConfig.as[Option[String]](SubmitConfig) - private val submitSourceOption = submitCommandOption.map(makeWdlSource( - SubmitTask, _, submitRuntimeAttributes + configRuntimeAttributes)) + private val submitSourceOption = + submitCommandOption.map(makeWdlSource(SubmitTask, _, submitRuntimeAttributes + configRuntimeAttributes)) private val submitDockerCommandOption = backendConfig.as[Option[String]](SubmitDockerConfig) - private val submitDockerSourceOption = submitDockerCommandOption.map(makeWdlSource( - SubmitDockerTask, _, 
submitRuntimeAttributes + submitDockerRuntimeAttributes + configRuntimeAttributes)) + private val submitDockerSourceOption = submitDockerCommandOption.map( + makeWdlSource(SubmitDockerTask, + _, + submitRuntimeAttributes + submitDockerRuntimeAttributes + configRuntimeAttributes + ) + ) private val killCommandOption = backendConfig.as[Option[String]](KillConfig) private val killSourceOption = killCommandOption.map(makeWdlSource(KillTask, _, jobIdRuntimeAttributes)) private val killDockerCommandOption = backendConfig.as[Option[String]](KillDockerConfig) - private val killDockerSourceOption = killDockerCommandOption.map(makeWdlSource(KillDockerTask, _, jobIdRuntimeAttributes)) + private val killDockerSourceOption = + killDockerCommandOption.map(makeWdlSource(KillDockerTask, _, jobIdRuntimeAttributes)) private val checkAliveCommandOption = backendConfig.as[Option[String]](CheckAliveConfig) - private val checkAliveSourceOption = checkAliveCommandOption.map(makeWdlSource( - CheckAliveTask, _, jobIdRuntimeAttributes)) + private val checkAliveSourceOption = + checkAliveCommandOption.map(makeWdlSource(CheckAliveTask, _, jobIdRuntimeAttributes)) private val workflowSource = s""" @@ -49,12 +54,11 @@ class ConfigWdlNamespace(backendConfig: Config) { /** * The wdl namespace containing the submit, kill, and check alive tasks. */ - val wdlNamespace = { + val wdlNamespace = WdlNamespace.loadUsingSource(workflowSource, None, None) match { case Success(ns) => ns case Failure(f) => throw new RuntimeException(s"Error parsing generated wdl:\n$workflowSource".stripMargin, f) } - } private val runtimeAttributesTask = makeTask(RuntimeAttributesTask, "", configRuntimeAttributes) @@ -62,12 +66,13 @@ class ConfigWdlNamespace(backendConfig: Config) { * The declarations of runtime attributes. 
*/ val runtimeDeclarations = runtimeAttributesTask.declarations - val callCachedRuntimeAttributes = backendConfig.as[Option[Map[String, Boolean]]](RuntimeAttributesCachingConfig).getOrElse(Map.empty) + val callCachedRuntimeAttributes = + backendConfig.as[Option[Map[String, Boolean]]](RuntimeAttributesCachingConfig).getOrElse(Map.empty) } object ConfigWdlNamespace { - private def makeWdlSource(taskName: String, command: String, declarations: String): WorkflowSource = { + private def makeWdlSource(taskName: String, command: String, declarations: String): WorkflowSource = s""" |task $taskName { |$declarations @@ -76,7 +81,6 @@ object ConfigWdlNamespace { |} |} |""".stripMargin - } private def makeTask(taskName: String, command: String, declarations: String): WdlTask = { val workflowSource = makeWdlSource(taskName, command, declarations) diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/CpuDeclarationValidation.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/CpuDeclarationValidation.scala index 455cf2bec81..19721aeef7c 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/CpuDeclarationValidation.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/CpuDeclarationValidation.scala @@ -6,12 +6,12 @@ import eu.timepit.refined.numeric.Positive import wdl.draft2.model.Declaration import wom.values.WomValue -class CpuDeclarationValidation(declaration: Declaration, instanceValidation: RuntimeAttributesValidation[Int Refined Positive]) - extends DeclarationValidation(declaration, instanceValidation, usedInCallCachingOverride = Option(false)) { +class CpuDeclarationValidation(declaration: Declaration, + instanceValidation: RuntimeAttributesValidation[Int Refined Positive] +) extends DeclarationValidation(declaration, instanceValidation, usedInCallCachingOverride = Option(false)) { - override def extractWdlValueOption(validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[WomValue] = { + override def extractWdlValueOption(validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[WomValue] = RuntimeAttributesValidation.extractOption(instanceValidation, validatedRuntimeAttributes) map { refined => declaration.womType.coerceRawValue(refined.value).get } - } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/DeclarationValidation.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/DeclarationValidation.scala index 221f4d16fb7..7b5bcd4e7fc 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/DeclarationValidation.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/DeclarationValidation.scala @@ -14,9 +14,10 @@ import scala.annotation.tailrec * Creates instances of runtime attribute validations from WDL declarations. */ object DeclarationValidation { - def fromDeclarations(declarations: Seq[Declaration], callCachedRuntimeAttributes: Map[String, Boolean]): Seq[DeclarationValidation] = { + def fromDeclarations(declarations: Seq[Declaration], + callCachedRuntimeAttributes: Map[String, Boolean] + ): Seq[DeclarationValidation] = declarations map fromDeclaration(callCachedRuntimeAttributesMap = callCachedRuntimeAttributes) _ - } /** * Create a runtime attribute validation from a WDL declaration. @@ -24,7 +25,9 @@ object DeclarationValidation { * @param declaration The declaration. * @return The DeclarationValidation object for the declaration. 
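
To make the code generation in ConfigWdlNamespace concrete: for a hypothetical kill = "qdel ${job_id}" entry, makeWdlSource wraps the command and the job-id declaration into a single-task draft-2 WDL source, roughly the string below (task and declaration names are illustrative; the real ones come from the config constants), which is then loaded via WdlNamespace.loadUsingSource as shown above:

    val generatedKillTask =
      """
        |task kill {
        |String job_id
        |command {
        |qdel ${job_id}
        |}
        |}
      """.stripMargin
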
*/ - def fromDeclaration(callCachedRuntimeAttributesMap: Map[String, Boolean])(declaration: Declaration): DeclarationValidation = { + def fromDeclaration( + callCachedRuntimeAttributesMap: Map[String, Boolean] + )(declaration: Declaration): DeclarationValidation = declaration.unqualifiedName match { // Docker and CPU are special keys understood by cromwell. case name if name == DockerValidation.instance.key => @@ -33,13 +36,26 @@ object DeclarationValidation { case RuntimeAttributesKeys.CpuMinKey => new CpuDeclarationValidation(declaration, CpuValidation.instanceMin) case RuntimeAttributesKeys.CpuMaxKey => new CpuDeclarationValidation(declaration, CpuValidation.instanceMax) // See MemoryDeclarationValidation for more info - case name if MemoryDeclarationValidation.isMemoryDeclaration(name, MemoryRuntimeAttribute, MemoryRuntimeAttributePrefix) => + case name + if MemoryDeclarationValidation.isMemoryDeclaration(name, + MemoryRuntimeAttribute, + MemoryRuntimeAttributePrefix + ) => new MemoryDeclarationValidation(declaration, MemoryRuntimeAttribute, MemoryRuntimeAttributePrefix) - case name if MemoryDeclarationValidation.isMemoryDeclaration(name, MemoryMinRuntimeAttribute, MemoryRuntimeAttributePrefix) => + case name + if MemoryDeclarationValidation.isMemoryDeclaration(name, + MemoryMinRuntimeAttribute, + MemoryRuntimeAttributePrefix + ) => new MemoryDeclarationValidation(declaration, MemoryMinRuntimeAttribute, MemoryMinRuntimeAttributePrefix) - case name if MemoryDeclarationValidation.isMemoryDeclaration(name, MemoryMaxRuntimeAttribute, MemoryRuntimeAttributePrefix) => + case name + if MemoryDeclarationValidation.isMemoryDeclaration(name, + MemoryMaxRuntimeAttribute, + MemoryRuntimeAttributePrefix + ) => new MemoryDeclarationValidation(declaration, MemoryMaxRuntimeAttribute, MemoryMaxRuntimeAttributePrefix) - case name if MemoryDeclarationValidation.isMemoryDeclaration(name, DiskRuntimeAttribute, DiskRuntimeAttributePrefix) => + case name + if MemoryDeclarationValidation.isMemoryDeclaration(name, DiskRuntimeAttribute, DiskRuntimeAttributePrefix) => new MemoryDeclarationValidation(declaration, DiskRuntimeAttribute, DiskRuntimeAttributePrefix) // All other declarations must be a Boolean, Float, Integer, or String. case _ => @@ -50,10 +66,9 @@ object DeclarationValidation { usedInCallCachingOverride = callCachedRuntimeAttributesMap.get(declaration.unqualifiedName) ) } - } @tailrec - private def validator(womType: WomType, unqualifiedName: String): PrimitiveRuntimeAttributesValidation[_, _] = { + private def validator(womType: WomType, unqualifiedName: String): PrimitiveRuntimeAttributesValidation[_, _] = womType match { case WomBooleanType => new BooleanRuntimeAttributesValidation(unqualifiedName) case WomFloatType => new FloatRuntimeAttributesValidation(unqualifiedName) @@ -62,7 +77,6 @@ object DeclarationValidation { case WomOptionalType(x) => validator(x, unqualifiedName) case other => throw new RuntimeException(s"Unsupported config runtime attribute $other $unqualifiedName") } - } } /** @@ -71,7 +85,10 @@ object DeclarationValidation { * @param declaration The declaration from the config "runtime-attributes". * @param instanceValidation A basic instance validation for the declaration. 
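
Reading the dispatch above as a table may help: each declaration in a backend's runtime-attributes block becomes one validation, with docker, cpu and the memory/disk names special-cased and everything else routed through the primitive validator match. A hypothetical block and the mapping it would plausibly receive (the docker and plain-cpu case bodies are elided in this hunk, so those two rows are inferred):

    Int cpu = 1       ->  CpuDeclarationValidation (special-cased key)
    String? docker    ->  the docker special case, hash-aware at submission time
    Float memory_gb   ->  MemoryDeclarationValidation (matched via the memory prefix helpers)
    Boolean debug     ->  new BooleanRuntimeAttributesValidation("debug")
    Float? min_ram    ->  FloatRuntimeAttributesValidation after unwrapping WomOptionalType

makeValidation, further down, then layers on the declaration's default expression, optionality, and any call-caching override.
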
*/ -class DeclarationValidation(declaration: Declaration, instanceValidation: RuntimeAttributesValidation[_], usedInCallCachingOverride: Option[Boolean]) { +class DeclarationValidation(declaration: Declaration, + instanceValidation: RuntimeAttributesValidation[_], + usedInCallCachingOverride: Option[Boolean] +) { val key: String = declaration.unqualifiedName /** @@ -97,12 +114,16 @@ class DeclarationValidation(declaration: Declaration, instanceValidation: Runtim // As a first approximation, think "caseClass.copy, but for validation functions" // In this case, we might (or might not) want to make our validations: // 1. have defaults: - val validationWithDefault = if (declaration.expression.isDefined) default(instanceValidation, declaration.expression.get) else instanceValidation + val validationWithDefault = + if (declaration.expression.isDefined) default(instanceValidation, declaration.expression.get) + else instanceValidation // 2. be optional: - val validationWithDefaultAndOptionality = if (declaration.womType.isInstanceOf[WomOptionalType]) validationWithDefault.optional else validationWithDefault + val validationWithDefaultAndOptionality = + if (declaration.womType.isInstanceOf[WomOptionalType]) validationWithDefault.optional else validationWithDefault // Or 3. have customized call caching properties: val validationWithDefaultAndOptionalityAndCallCaching = usedInCallCachingOverride match { - case Some(usedInCallCachingValue) => RuntimeAttributesValidation.withUsedInCallCaching(validationWithDefaultAndOptionality, usedInCallCachingValue) + case Some(usedInCallCachingValue) => + RuntimeAttributesValidation.withUsedInCallCaching(validationWithDefaultAndOptionality, usedInCallCachingValue) case None => validationWithDefaultAndOptionality } @@ -117,9 +138,9 @@ class DeclarationValidation(declaration: Declaration, instanceValidation: Runtim * @return A new copy of the validation with the default value. */ protected def default(validation: RuntimeAttributesValidation[_], - wdlExpression: WdlExpression): RuntimeAttributesValidation[_] = { + wdlExpression: WdlExpression + ): RuntimeAttributesValidation[_] = validation.withDefault(wdlExpression.evaluate(NoLookup, NoFunctions).get) - } /** * Utility to get the value of this declaration from a collection of validated runtime attributes. @@ -127,9 +148,8 @@ class DeclarationValidation(declaration: Declaration, instanceValidation: Runtim * @param validatedRuntimeAttributes The validated attributes. * @return The value from the collection wrapped in `Some`, or `None` if the value wasn't found. */ - def extractWdlValueOption(validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[WomValue] = { + def extractWdlValueOption(validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[WomValue] = RuntimeAttributesValidation.extractOption(instanceValidation, validatedRuntimeAttributes) map { declaration.womType.coerceRawValue(_).get } - } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidation.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidation.scala index 7eab2f58b78..df712cd0fd6 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidation.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidation.scala @@ -31,7 +31,10 @@ import wom.values._ * @param declaration The declaration used to create this memory validation. 
*/ class MemoryDeclarationValidation(declaration: Declaration, attributeName: String, attributeNamePrefix: String) - extends DeclarationValidation(declaration, MemoryValidation.instance(attributeName), usedInCallCachingOverride = Option(false)) { + extends DeclarationValidation(declaration, + MemoryValidation.instance(attributeName), + usedInCallCachingOverride = Option(false) + ) { import MemoryDeclarationValidation._ @@ -46,14 +49,15 @@ class MemoryDeclarationValidation(declaration: Declaration, attributeName: Strin * @return The new validation. */ override protected def default(validation: RuntimeAttributesValidation[_], - wdlExpression: WdlExpression): RuntimeAttributesValidation[_] = { + wdlExpression: WdlExpression + ): RuntimeAttributesValidation[_] = { val womValue = declaration.expression.get.evaluate(NoLookup, NoFunctions).get val amount: Double = defaultAmount(womValue) val memorySize = MemorySize(amount, declarationMemoryUnit) validation.withDefault(WomLong(memorySize.bytes.toLong)) } - private def defaultAmount(womValue: WomValue): Double = { + private def defaultAmount(womValue: WomValue): Double = womValue match { case WomInteger(value) => value.toDouble case WomLong(value) => value.toDouble @@ -61,7 +65,6 @@ class MemoryDeclarationValidation(declaration: Declaration, attributeName: Strin case WomOptionalValue(_, Some(optionalWdlValue)) => defaultAmount(optionalWdlValue) case other => throw new RuntimeException(s"Unsupported memory default: $other") } - } private lazy val declarationMemoryUnit: MemoryUnit = { val suffix = memoryUnitSuffix(declaration.unqualifiedName, attributeName, attributeNamePrefix) @@ -78,10 +81,9 @@ class MemoryDeclarationValidation(declaration: Declaration, attributeName: Strin * @param validatedRuntimeAttributes The validated attributes. * @return The value from the collection wrapped in `Some`, or `None` if the value wasn't found. 
*/ - override def extractWdlValueOption(validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[WomValue] = { + override def extractWdlValueOption(validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[WomValue] = RuntimeAttributesValidation.extractOption(MemoryValidation.instance(attributeName), validatedRuntimeAttributes) map coerceMemorySize(declaration.womType) - } private def coerceMemorySize(womType: WomType)(value: MemorySize): WomValue = { val amount = value.to(declarationMemoryUnit).amount @@ -95,7 +97,7 @@ class MemoryDeclarationValidation(declaration: Declaration, attributeName: Strin } object MemoryDeclarationValidation { - def isMemoryDeclaration(name: String, attributeName: String, attributeNamePrefix: String): Boolean = { + def isMemoryDeclaration(name: String, attributeName: String, attributeNamePrefix: String): Boolean = name match { case `attributeName` => true case prefixed if prefixed.startsWith(attributeNamePrefix) => @@ -105,12 +107,10 @@ object MemoryDeclarationValidation { } case _ => false } - } - private def memoryUnitSuffix(name: String, attributeName: String, attributeNamePrefix: String) = { + private def memoryUnitSuffix(name: String, attributeName: String, attributeNamePrefix: String) = if (name == attributeName) MemoryUnit.Bytes.suffixes.head else name.substring(attributeNamePrefix.length) - } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala index 199c08787a0..9648d7ea411 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala @@ -8,9 +8,8 @@ trait BackgroundAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecution lazy val backgroundScript = jobPaths.script.plusExt("background") - override def writeScriptContents(): Either[ExecutionHandle, Unit] = { + override def writeScriptContents(): Either[ExecutionHandle, Unit] = super.writeScriptContents().flatMap(_ => writeBackgroundScriptContents()) - } /** * Run the command via bash in the background, and echo the PID. @@ -18,11 +17,10 @@ trait BackgroundAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecution private def writeBackgroundScriptContents(): Either[ExecutionHandle, Unit] = { val backgroundCommand = redirectOutputs(processArgs.argv.mkString("'", "' '", "'")) // $! contains the previous background command's process id (PID) - backgroundScript.write( - s"""|#!/bin/bash - |BACKGROUND_COMMAND & - |echo $$! - |""".stripMargin.replace("BACKGROUND_COMMAND", backgroundCommand)) + backgroundScript.write(s"""|#!/bin/bash + |BACKGROUND_COMMAND & + |echo $$! 
+ |""".stripMargin.replace("BACKGROUND_COMMAND", backgroundCommand)) Right(()) } @@ -38,9 +36,8 @@ trait BackgroundAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecution StandardAsyncJob(pid) } - override def checkAliveArgs(job: StandardAsyncJob): SharedFileSystemCommand = { + override def checkAliveArgs(job: StandardAsyncJob): SharedFileSystemCommand = SharedFileSystemCommand("ps", job.jobId) - } override def killArgs(job: StandardAsyncJob): SharedFileSystemCommand = { val killScript = jobPaths.script.plusExt("kill") @@ -52,21 +49,20 @@ trait BackgroundAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecution /* Use pgrep to find the children of a process, and recursively kill the children before killing the parent. */ - killScript.write( - s"""|#!/bin/bash - |kill_tree() { - | local pid - | local cpid - | pid=$$1 - | for cpid in $$(pgrep -P "$$pid"); do - | kill_tree "$$cpid" - | done - | echo killing "$$pid" - | kill "$$pid" - |} - | - |kill_tree "${job.jobId}" - |""".stripMargin) + killScript.write(s"""|#!/bin/bash + |kill_tree() { + | local pid + | local cpid + | pid=$$1 + | for cpid in $$(pgrep -P "$$pid"); do + | kill_tree "$$cpid" + | done + | echo killing "$$pid" + | kill "$$pid" + |} + | + |kill_tree "${job.jobId}" + |""".stripMargin) () } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala index eaf087f50c7..f1ab13d9234 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala @@ -33,8 +33,8 @@ object SharedFileSystem extends StrictLogging { object AttemptedLookupResult { implicit class AugmentedAttemptedLookupSequence(s: Seq[AttemptedLookupResult]) { - def toLookupMap: Map[String, WomValue] = s collect { - case AttemptedLookupResult(name, Success(value)) => (name, value) + def toLookupMap: Map[String, WomValue] = s collect { case AttemptedLookupResult(name, Success(value)) => + (name, value) } toMap } } @@ -42,10 +42,9 @@ object SharedFileSystem extends StrictLogging { case class PairOfFiles(src: Path, dst: Path) type DuplicationStrategy = (Path, Path, Boolean) => Try[Unit] - private def createParentDirectory(executionPath: Path, docker: Boolean) = { + private def createParentDirectory(executionPath: Path, docker: Boolean) = if (docker) executionPath.parent.createPermissionedDirectories() else executionPath.parent.createDirectories() - } /** * Return a `Success` result if the file has already been localized, otherwise `Failure`. 
@@ -71,24 +70,29 @@ object SharedFileSystem extends StrictLogging { logOnFailure(action, "hard link") } - private def localizePathViaSymbolicLink(originalPath: Path, executionPath: Path, docker: Boolean): Try[Unit] = { - if (originalPath.isDirectory) Failure(new UnsupportedOperationException("Cannot localize directory with symbolic links")) - else if (!originalPath.exists) Failure(new FileNotFoundException(originalPath.pathAsString)) - else { - val action = Try { - createParentDirectory(executionPath, docker) - executionPath.linkTo(originalPath, symbolic = true) - }.void - logOnFailure(action, "symbolic link") - } - } + private def localizePathViaSymbolicLink(originalPath: Path, executionPath: Path, docker: Boolean): Try[Unit] = + if (originalPath.isDirectory) + Failure(new UnsupportedOperationException("Cannot localize directory with symbolic links")) + else if (!originalPath.exists) Failure(new FileNotFoundException(originalPath.pathAsString)) + else { + val action = Try { + createParentDirectory(executionPath, docker) + executionPath.linkTo(originalPath, symbolic = true) + }.void + logOnFailure(action, "symbolic link") + } private def logOnFailure(action: Try[Unit], actionLabel: String): Try[Unit] = { if (action.isFailure) logger.warn(s"Localization via $actionLabel has failed: ${action.failed.get.getMessage}") action } - private def duplicate(description: String, source: Path, dest: Path, strategies: LazyList[DuplicationStrategy], docker: Boolean): Try[Unit] = { + private def duplicate(description: String, + source: Path, + dest: Path, + strategies: LazyList[DuplicationStrategy], + docker: Boolean + ): Try[Unit] = { val attempts: LazyList[Try[Unit]] = strategies.map(_.apply(source.followSymbolicLinks, dest, docker)) attempts.find(_.isSuccess) getOrElse { TryUtil.sequence(attempts, s"Could not $description $source -> $dest").void @@ -97,22 +101,20 @@ object SharedFileSystem extends StrictLogging { private lazy val beingCopied: mutable.Map[Path, Boolean] = mutable.Map[Path, Boolean]() - private def waitOnCopy(path: Path, lockFile: Path): Unit = { - while (beingCopied.getOrElse(path, false) || lockFile.exists) { + private def waitOnCopy(path: Path, lockFile: Path): Unit = + while (beingCopied.getOrElse(path, false) || lockFile.exists) Thread.sleep(1) - } - } - private def countLinks(path: Path): Int = { + private def countLinks(path: Path): Int = path.getAttribute("unix:nlink").asInstanceOf[Int] - } } trait SharedFileSystem extends PathFactory { import SharedFileSystem._ def sharedFileSystemConfig: Config - lazy val maxHardLinks: Int = sharedFileSystemConfig.getOrElse[Int]("max-hardlinks",950) // Windows limit 1024. Keep a safe margin. + lazy val maxHardLinks: Int = + sharedFileSystemConfig.getOrElse[Int]("max-hardlinks", 950) // Windows limit 1024. Keep a safe margin. lazy val cachedCopyDir: Option[Path] = None private def localizePathViaCachedCopy(originalPath: Path, executionPath: Path, docker: Boolean): Try[Unit] = { @@ -121,7 +123,8 @@ trait SharedFileSystem extends PathFactory { // Hash the parent. This will make sure bamfiles and their indexes stay in the same dir. This is not ideal. But should work. // There should be no file collisions because two files with the same name cannot exist in the same parent dir. // use .get . 
This strategy should not be used when there is no cachedCopyDir - val cachedCopySubDir: Path = cachedCopyDir.get.createChild(originalPath.toAbsolutePath.parent.hashCode.toString, asDirectory = true) + val cachedCopySubDir: Path = + cachedCopyDir.get.createChild(originalPath.toAbsolutePath.parent.hashCode.toString, asDirectory = true) // By prepending the modtime we prevent collisions in the cache from files that have changed in between. // Md5 is safer but much much slower and way too CPU intensive for big files. @@ -142,9 +145,11 @@ trait SharedFileSystem extends PathFactory { // The copying may have been started while waiting on the lock. // If it is not there or the maxHardLinks are exceeded, is it already being copied by another thread? // if not copied by another thread, is it copied by another cromwell process? (Lock file present) - if ((!cachedCopyPath.exists || countLinks(cachedCopyPath) >= maxHardLinks) && + if ( + (!cachedCopyPath.exists || countLinks(cachedCopyPath) >= maxHardLinks) && !SharedFileSystem.beingCopied.getOrElse(cachedCopyPath, false) && - !cachedCopyPathLockFile.exists) { + !cachedCopyPathLockFile.exists + ) { // Create a lock file so other cromwell processes know copying has started try { cachedCopyPathLockFile.touch() @@ -174,8 +179,7 @@ trait SharedFileSystem extends PathFactory { originalPath.copyTo(cachedCopyTmpPath, overwrite = true).moveTo(cachedCopyPath, overwrite = true) } catch { case e: Exception => throw e - } - finally { + } finally { // Always remove the locks after copying. Even if there is an exception. // We remove the key! Not set it to false. We don't want this map being flooded with // keys if the cromwell process is active for months in server mode. (Memory leak!) @@ -197,19 +201,20 @@ trait SharedFileSystem extends PathFactory { lazy val DefaultStrategies = Seq("hard-link", "soft-link", "copy") lazy val LocalizationStrategyNames: Seq[String] = getConfigStrategies("localization") - lazy val LocalizationStrategies: Seq[DuplicationStrategy] = createStrategies(LocalizationStrategyNames, docker = false) - lazy val DockerLocalizationStrategies: Seq[DuplicationStrategy] = createStrategies(LocalizationStrategyNames, docker = true) + lazy val LocalizationStrategies: Seq[DuplicationStrategy] = + createStrategies(LocalizationStrategyNames, docker = false) + lazy val DockerLocalizationStrategies: Seq[DuplicationStrategy] = + createStrategies(LocalizationStrategyNames, docker = true) lazy val CachingStrategies: Seq[String] = getConfigStrategies("caching.duplication-strategy") lazy val Cachers: Seq[DuplicationStrategy] = createStrategies(CachingStrategies, docker = false) - private def getConfigStrategies(configPath: String): Seq[String] = { + private def getConfigStrategies(configPath: String): Seq[String] = if (sharedFileSystemConfig.hasPath(configPath)) { sharedFileSystemConfig.getStringList(configPath).asScala.toList } else { DefaultStrategies } - } private def createStrategies(configStrategies: Seq[String], docker: Boolean): Seq[DuplicationStrategy] = { // If localizing for a docker job, remove soft-link as an option @@ -238,42 +243,41 @@ trait SharedFileSystem extends PathFactory { val path = PathFactory.buildPath(pathString, pathBuilders) path match { case _: DefaultPath if !path.isAbsolute => jobPaths.callExecutionRoot.resolve(path).toAbsolutePath - case _: DefaultPath if jobPaths.isInExecution(path.pathAsString) => jobPaths.hostPathFromContainerPath(path.pathAsString) + case _: DefaultPath if jobPaths.isInExecution(path.pathAsString) => + 
jobPaths.hostPathFromContainerPath(path.pathAsString) case _: DefaultPath => jobPaths.hostPathFromContainerInputs(path.pathAsString) } } - def outputMapper(job: JobPaths)(womValue: WomValue): Try[WomValue] = { + def outputMapper(job: JobPaths)(womValue: WomValue): Try[WomValue] = WomFileMapper.mapWomFiles(mapJobWomFile(job))(womValue) - } def mapJobWomFile(jobPaths: JobPaths)(womFile: WomFile): WomFile = { val hostPath = hostAbsoluteFilePath(jobPaths, womFile.valueString) def hostAbsolute(pathString: String): String = hostAbsoluteFilePath(jobPaths, pathString).pathAsString - if (!hostPath.exists) throw new FileNotFoundException(s"Could not process output, file not found: ${hostAbsolute(womFile.valueString)}") + if (!hostPath.exists) + throw new FileNotFoundException(s"Could not process output, file not found: ${hostAbsolute(womFile.valueString)}") // There are composite WomFile types like WomMaybeListedDirectoryType that need to make the paths of contained // WomFiles host absolute, so don't just pass in a `const` of the function call result above. womFile mapFile hostAbsolute } - def cacheCopy(sourceFilePath: Path, destinationFilePath: Path): Try[Unit] = { + def cacheCopy(sourceFilePath: Path, destinationFilePath: Path): Try[Unit] = duplicate("cache", sourceFilePath, destinationFilePath, Cachers.to(LazyList), docker = false) - } /** * Return a possibly altered copy of inputs reflecting any localization of input file paths that might have * been performed for this `Backend` implementation. */ - def localizeInputs(inputsRoot: Path, docker: Boolean)(inputs: WomEvaluatedCallInputs): Try[WomEvaluatedCallInputs] = { + def localizeInputs(inputsRoot: Path, docker: Boolean)(inputs: WomEvaluatedCallInputs): Try[WomEvaluatedCallInputs] = TryUtil.sequenceMap( inputs safeMapValues WomFileMapper.mapWomFiles(localizeWomFile(inputsRoot, docker)), "Failures during localization" - ) recoverWith { - case e => Failure(new IOException(e.getMessage) with CromwellFatalExceptionMarker) + ) recoverWith { case e => + Failure(new IOException(e.getMessage) with CromwellFatalExceptionMarker) } - } def localizeWomFile(inputsRoot: Path, docker: Boolean)(value: WomFile): WomFile = { val strategies = if (docker) DockerLocalizationStrategies else LocalizationStrategies @@ -282,23 +286,27 @@ trait SharedFileSystem extends PathFactory { def stripProtocolScheme(path: Path): Path = DefaultPathBuilder.get(path.pathWithoutScheme) /* - * Transform an original input path to a path in the call directory. - * The new path matches the original path, it only "moves" the root to be the call directory. - */ + * Transform an original input path to a path in the call directory. + * The new path matches the original path, it only "moves" the root to be the call directory. 
+ */ def toCallPath(womFile: WomFile)(path: String): Try[PairOfFiles] = Try { val src = buildPath(path) // Strip out potential prefix protocol val localInputPath = stripProtocolScheme(src) - val dest = if (inputsRoot.isParentOf(localInputPath)) localInputPath - else { - val nameOverride = womFile match { - case directory: WomMaybeListedDirectory => directory.basename - case _ => None + val dest = + if (inputsRoot.isParentOf(localInputPath)) localInputPath + else { + val nameOverride = womFile match { + case directory: WomMaybeListedDirectory => directory.basename + case _ => None + } + // Concatenate call directory with absolute input path + DefaultPathBuilder.get(inputsRoot.pathAsString, + localInputPath.parent.pathAsString.hashCode.toString, + nameOverride.getOrElse(localInputPath.name) + ) } - // Concatenate call directory with absolute input path - DefaultPathBuilder.get(inputsRoot.pathAsString, localInputPath.parent.pathAsString.hashCode.toString, nameOverride.getOrElse(localInputPath.name)) - } PairOfFiles(src, dest) } @@ -306,12 +314,12 @@ trait SharedFileSystem extends PathFactory { // A possibly staged version of the input file suitable for downstream processing, or just the original input // file if no staging was required. val staged: WomFile = value.mapFile { input => - pathBuilders.collectFirst({ case h: HttpPathBuilder if HttpPathBuilder.accepts(input) => h }) match { + pathBuilders.collectFirst { case h: HttpPathBuilder if HttpPathBuilder.accepts(input) => h } match { case Some(httpPathBuilder) => implicit val materializer = ActorMaterializer() implicit val ec: ExecutionContext = actorContext.dispatcher - Await.result(httpPathBuilder.content(input).map { _.toString }, Duration.Inf) + Await.result(httpPathBuilder.content(input).map(_.toString), Duration.Inf) case _ => input } } @@ -328,16 +336,18 @@ trait SharedFileSystem extends PathFactory { * @param womFile WomFile to localize * @return localized WomFile */ - private def localizeWomFile(toDestPath: WomFile => String => Try[PairOfFiles], strategies: LazyList[DuplicationStrategy], docker: Boolean) - (womFile: WomFile): WomFile = { + private def localizeWomFile(toDestPath: WomFile => String => Try[PairOfFiles], + strategies: LazyList[DuplicationStrategy], + docker: Boolean + )(womFile: WomFile): WomFile = { val localized = womFile mapWomFile { file => - val result = toDestPath(file)(file.value) flatMap { - case PairOfFiles(src, dst) => duplicate("localize", src, dst, strategies, docker).map(_ => dst.pathAsString) + val result = toDestPath(file)(file.value) flatMap { case PairOfFiles(src, dst) => + duplicate("localize", src, dst, strategies, docker).map(_ => dst.pathAsString) } result.get } val sized = localized collect { - case womMaybePopulatedFile@WomMaybePopulatedFile(Some(path), _, None, _, _, _, _) => + case womMaybePopulatedFile @ WomMaybePopulatedFile(Some(path), _, None, _, _, _, _) => val pair = toDestPath(womMaybePopulatedFile)(path).get val srcSize = pair.src.size womMaybePopulatedFile.copy(sizeOption = Option(srcSize)) diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemAsyncJobExecutionActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemAsyncJobExecutionActor.scala index 1d55ead7860..24eb4d97a70 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemAsyncJobExecutionActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemAsyncJobExecutionActor.scala @@ -76,7 +76,9 @@ object 
SharedFileSystemAsyncJobExecutionActor { * messages. */ trait SharedFileSystemAsyncJobExecutionActor - extends BackendJobLifecycleActor with StandardAsyncExecutionActor with SharedFileSystemJobCachingActorHelper { + extends BackendJobLifecycleActor + with StandardAsyncExecutionActor + with SharedFileSystemJobCachingActorHelper { override type StandardAsyncRunInfo = Any @@ -85,7 +87,8 @@ trait SharedFileSystemAsyncJobExecutionActor /** True if the status contained in `thiz` is equivalent to `that`, delta any other data that might be carried around * in the state type. */ - override def statusEquivalentTo(thiz: StandardAsyncRunState)(that: StandardAsyncRunState): Boolean = thiz.status == that.status + override def statusEquivalentTo(thiz: StandardAsyncRunState)(that: StandardAsyncRunState): Boolean = + thiz.status == that.status override lazy val pollBackOff = SimpleExponentialBackoff(1.second, 5.minutes, 1.1) @@ -129,28 +132,26 @@ trait SharedFileSystemAsyncJobExecutionActor lazy val jobPathsWithDocker: JobPathsWithDocker = jobPaths.asInstanceOf[JobPathsWithDocker] - def jobName: String = s"cromwell_${jobDescriptor.workflowDescriptor.id.shortString}_${jobDescriptor.taskCall.localName}" + def jobName: String = + s"cromwell_${jobDescriptor.workflowDescriptor.id.shortString}_${jobDescriptor.taskCall.localName}" /** * Localizes the file, run outside of docker. */ - override def preProcessWomFile(womFile: WomFile): WomFile = { + override def preProcessWomFile(womFile: WomFile): WomFile = sharedFileSystem.localizeWomFile(jobPathsWithDocker.callInputsRoot, isDockerRun)(womFile) - } /** * Returns the paths to the file, inside of docker. */ - override def mapCommandLineWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineWomFile(womFile: WomFile): WomFile = womFile mapFile { path => val cleanPath = DefaultPathBuilder.build(path).get if (isDockerRun) jobPathsWithDocker.toDockerPath(cleanPath).pathAsString else cleanPath.pathAsString } - } - override lazy val commandDirectory: Path = { + override lazy val commandDirectory: Path = if (isDockerRun) jobPathsWithDocker.callExecutionDockerRoot else jobPaths.callExecutionRoot - } override def execute(): ExecutionHandle = { if (isDockerRun) jobPaths.callExecutionRoot.createPermissionedDirectories() @@ -161,22 +162,34 @@ trait SharedFileSystemAsyncJobExecutionActor val runner = makeProcessRunner() val exitValue = runner.run() if (exitValue != 0) { - FailedNonRetryableExecutionHandle(new RuntimeException("Unable to start job. " + - s"Check the stderr file for possible errors: ${runner.stderrPath}"), kvPairsToSave = None) + FailedNonRetryableExecutionHandle(new RuntimeException( + "Unable to start job. 
" + + s"Check the stderr file for possible errors: ${runner.stderrPath}" + ), + kvPairsToSave = None + ) } else { val runningJob = getJob(exitValue, runner.stdoutPath, runner.stderrPath) PendingExecutionHandle(jobDescriptor, runningJob, None, None) } - } + } ) } def writeScriptContents(): Either[ExecutionHandle, Unit] = commandScriptContents.fold( - errors => Left(FailedNonRetryableExecutionHandle(new RuntimeException("Unable to start job due to: " + errors.toList.mkString(", ")), kvPairsToSave = None)), - {script => jobPaths.script.write(script); Right(())} ) + errors => + Left( + FailedNonRetryableExecutionHandle( + new RuntimeException("Unable to start job due to: " + errors.toList.mkString(", ")), + kvPairsToSave = None + ) + ), + { script => jobPaths.script.write(script); Right(()) } + ) lazy val standardPaths = jobPaths.standardPaths + /** * Creates a script to submit the script for asynchronous processing. The default implementation assumes the * processArgs already runs the script asynchronously. If not, mix in the `BackgroundAsyncJobExecutionActor` that @@ -192,15 +205,13 @@ trait SharedFileSystemAsyncJobExecutionActor override def recover(job: StandardAsyncJob): ExecutionHandle = reconnectToExistingJob(job) - override def reconnectAsync(job: StandardAsyncJob): Future[ExecutionHandle] = { + override def reconnectAsync(job: StandardAsyncJob): Future[ExecutionHandle] = Future.successful(reconnectToExistingJob(job)) - } - override def reconnectToAbortAsync(job: StandardAsyncJob): Future[ExecutionHandle] = { + override def reconnectToAbortAsync(job: StandardAsyncJob): Future[ExecutionHandle] = Future.successful(reconnectToExistingJob(job, forceAbort = true)) - } - private def reconnectToExistingJob(job: StandardAsyncJob, forceAbort: Boolean = false): ExecutionHandle = { + private def reconnectToExistingJob(job: StandardAsyncJob, forceAbort: Boolean = false): ExecutionHandle = // To avoid race conditions, check for the rc file after checking if the job is alive. isAlive(job) match { case Success(true) => @@ -215,12 +226,16 @@ trait SharedFileSystemAsyncJobExecutionActor PendingExecutionHandle(jobDescriptor, job, None, None) } else { // Could start executeScript(), but for now fail because we shouldn't be in this state. - FailedNonRetryableExecutionHandle(new RuntimeException( - s"Unable to determine that ${job.jobId} is alive, and ${jobPaths.returnCode} does not exist."), None, kvPairsToSave = None) + FailedNonRetryableExecutionHandle( + new RuntimeException( + s"Unable to determine that ${job.jobId} is alive, and ${jobPaths.returnCode} does not exist." + ), + None, + kvPairsToSave = None + ) } case Failure(f) => FailedNonRetryableExecutionHandle(f, None, kvPairsToSave = None) } - } def isAlive(job: StandardAsyncJob): Try[Boolean] = Try { val argv = checkAliveArgs(job).argv @@ -235,18 +250,18 @@ trait SharedFileSystemAsyncJobExecutionActor override def tryAbort(job: StandardAsyncJob): Unit = { val returnCodeTmp = jobPaths.returnCode.plusExt("kill") returnCodeTmp.write(s"$SIGTERM\n") - try { + try returnCodeTmp.moveTo(jobPaths.returnCode) - } catch { + catch { case _: FileAlreadyExistsException => // If the process has already completed, there will be an existing rc file. returnCodeTmp.delete(true) } val stderrTmp = standardPaths.error.plusExt("kill") stderrTmp.touch() - try { + try stderrTmp.moveTo(standardPaths.error) - } catch { + catch { case _: FileAlreadyExistsException => // If the process has already started, there will be an existing stderr file. 
stderrTmp.delete(true) @@ -259,16 +274,13 @@ trait SharedFileSystemAsyncJobExecutionActor () } - override def pollStatus(handle: StandardAsyncPendingExecutionHandle): SharedFileSystemRunState = { + override def pollStatus(handle: StandardAsyncPendingExecutionHandle): SharedFileSystemRunState = if (jobPaths.returnCode.exists) SharedFileSystemJobDone else SharedFileSystemJobWaitingForReturnCode(waitUntil = None) - } - override def isTerminal(runStatus: StandardAsyncRunState): Boolean = { + override def isTerminal(runStatus: StandardAsyncRunState): Boolean = runStatus.terminal - } - override def mapOutputWomFile(womFile: WomFile): WomFile = { + override def mapOutputWomFile(womFile: WomFile): WomFile = sharedFileSystem.mapJobWomFile(jobPaths)(womFile) - } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemBackendLifecycleActorFactory.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemBackendLifecycleActorFactory.scala index 7e991a6ec5f..bf9f10a4da6 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemBackendLifecycleActorFactory.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemBackendLifecycleActorFactory.scala @@ -12,7 +12,6 @@ trait SharedFileSystemBackendLifecycleActorFactory extends StandardLifecycleActo override def jobIdKey: String = SharedFileSystemAsyncJobExecutionActor.JobIdKey - override lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = { + override lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = Option(classOf[SharedFileSystemCacheHitCopyingActor]) - } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemCacheHitCopyingActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemCacheHitCopyingActor.scala index 042cc8759cd..2ad334002f3 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemCacheHitCopyingActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemCacheHitCopyingActor.scala @@ -9,24 +9,23 @@ import cromwell.backend.standard.callcaching.{StandardCacheHitCopyingActor, Stan import scala.util.{Failure, Try} class SharedFileSystemCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) - extends StandardCacheHitCopyingActor(standardParams) with SharedFileSystemJobCachingActorHelper { + extends StandardCacheHitCopyingActor(standardParams) + with SharedFileSystemJobCachingActorHelper { override protected def duplicate(copyPairs: Set[PathPair]): Option[Try[Unit]] = Option { - val copies = copyPairs map { - case (source, destination) => - sharedFileSystem.cacheCopy(source, destination) + val copies = copyPairs map { case (source, destination) => + sharedFileSystem.cacheCopy(source, destination) } - TryUtil.sequence(copies.toList).void recoverWith { - case failure => - // If one or more of the copies failed, we want to delete all the files that were successfully copied - // before that. 
Especially if they've been symlinked, leaving them could lead to rewriting the original - // files when the job gets re-run - // TODO: this could be done more generally in the StandardCacheHitCopyingActor - copyPairs foreach { - case (_, dst) => dst.delete(swallowIOExceptions = true) - } - Failure(failure) + TryUtil.sequence(copies.toList).void recoverWith { case failure => + // If one or more of the copies failed, we want to delete all the files that were successfully copied + // before that. Especially if they've been symlinked, leaving them could lead to rewriting the original + // files when the job gets re-run + // TODO: this could be done more generally in the StandardCacheHitCopyingActor + copyPairs foreach { case (_, dst) => + dst.delete(swallowIOExceptions = true) + } + Failure(failure) } } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala index 303e8d621e0..3b3bc978878 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala @@ -2,7 +2,11 @@ package cromwell.backend.sfs import akka.actor.ActorRef import cromwell.backend.io._ -import cromwell.backend.standard.{DefaultStandardExpressionFunctionsParams, StandardExpressionFunctions, StandardExpressionFunctionsParams} +import cromwell.backend.standard.{ + DefaultStandardExpressionFunctionsParams, + StandardExpressionFunctions, + StandardExpressionFunctionsParams +} import cromwell.core.CallContext import cromwell.core.path.{DefaultPath, DefaultPathBuilder, Path, PathBuilder} import wom.expression.IoFunctionSet @@ -13,43 +17,39 @@ object SharedFileSystemExpressionFunctions { def apply(jobPaths: JobPaths, pathBuilders: List[PathBuilder], ioActorProxy: ActorRef, - ec: ExecutionContext): SharedFileSystemExpressionFunctions = { + ec: ExecutionContext + ): SharedFileSystemExpressionFunctions = new SharedFileSystemExpressionFunctions(pathBuilders, jobPaths.callContext, ioActorProxy, ec) - } } class SharedFileSystemExpressionFunctions(standardParams: StandardExpressionFunctionsParams) - extends StandardExpressionFunctions(standardParams) { + extends StandardExpressionFunctions(standardParams) { - def this(pathBuilders: List[PathBuilder], - callContext: CallContext, - ioActorProxy: ActorRef, - ec: ExecutionContext) = { + def this(pathBuilders: List[PathBuilder], callContext: CallContext, ioActorProxy: ActorRef, ec: ExecutionContext) = this(DefaultStandardExpressionFunctionsParams(pathBuilders, callContext, ioActorProxy, ec)) - } - override def makeInputSpecificFunctions(): IoFunctionSet = new SharedFileSystemExpressionFunctionsForInput(standardParams) + override def makeInputSpecificFunctions(): IoFunctionSet = new SharedFileSystemExpressionFunctionsForInput( + standardParams + ) - override def postMapping(path: Path) = { + override def postMapping(path: Path) = path match { case _: DefaultPath if !path.isAbsolute => callContext.root.resolve(path) case _ => path } - } } class SharedFileSystemExpressionFunctionsForInput(standardParams: StandardExpressionFunctionsParams) - extends SharedFileSystemExpressionFunctions(standardParams) { + extends SharedFileSystemExpressionFunctions(standardParams) { // override needed to prevent class self-reference override def makeInputSpecificFunctions(): IoFunctionSet = this lazy val 
cromwellCwd: Path = DefaultPathBuilder.build(sys.props("user.dir")).get - override def postMapping(path: Path) = { + override def postMapping(path: Path) = path match { case _: DefaultPath if !path.isAbsolute => cromwellCwd.resolve(path) case _ => path } - } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemInitializationActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemInitializationActor.scala index 74392610465..75b5a09a8a3 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemInitializationActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemInitializationActor.scala @@ -2,17 +2,20 @@ package cromwell.backend.sfs import cromwell.backend.BackendInitializationData import cromwell.backend.io.WorkflowPaths -import cromwell.backend.standard.{StandardExpressionFunctions, StandardInitializationActor, StandardInitializationActorParams} +import cromwell.backend.standard.{ + StandardExpressionFunctions, + StandardInitializationActor, + StandardInitializationActorParams +} import cromwell.backend.wfs.WorkflowPathBuilder import scala.concurrent.Future class SharedFileSystemInitializationActor(standardParams: StandardInitializationActorParams) - extends StandardInitializationActor(standardParams) { + extends StandardInitializationActor(standardParams) { - override lazy val pathBuilders = { + override lazy val pathBuilders = standardParams.configurationDescriptor.pathBuildersWithDefault(workflowDescriptor.workflowOptions) - } override lazy val workflowPaths: Future[WorkflowPaths] = pathBuilders map { WorkflowPathBuilder.workflowPaths(configurationDescriptor, workflowDescriptor, _) @@ -21,11 +24,10 @@ class SharedFileSystemInitializationActor(standardParams: StandardInitialization override lazy val expressionFunctions: Class[_ <: StandardExpressionFunctions] = classOf[SharedFileSystemExpressionFunctions] - override def beforeAll(): Future[Option[BackendInitializationData]] = { + override def beforeAll(): Future[Option[BackendInitializationData]] = initializationData map { data => publishWorkflowRoot(data.workflowPaths.workflowRoot.pathAsString) data.workflowPaths.workflowRoot.createDirectories() Option(data) } - } } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemJobCachingActorHelper.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemJobCachingActorHelper.scala index 2897d8a1ac1..699c9ab534d 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemJobCachingActorHelper.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemJobCachingActorHelper.scala @@ -12,18 +12,18 @@ trait SharedFileSystemJobCachingActorHelper extends StandardCachingActorHelper { lazy val sharedFileSystem = new SharedFileSystem { - override implicit def actorContext: ActorContext = context + implicit override def actorContext: ActorContext = context override lazy val pathBuilders: List[PathBuilder] = standardInitializationData.workflowPaths.pathBuilders - override lazy val sharedFileSystemConfig: Config = { + override lazy val sharedFileSystemConfig: Config = configurationDescriptor.backendConfig.as[Option[Config]]("filesystems.local").getOrElse(ConfigFactory.empty()) - } // cachedCopyDir should be on the same physical filesystem as the execution root. 
// WDL workflow names may not contain '-' so using 'cached-inputs' will certainly // not collide with any workflows in the root directory. override lazy val cachedCopyDir: Option[Path] = Option( - workflowPaths.executionRoot.createChild("cached-inputs", asDirectory = true)) + workflowPaths.executionRoot.createChild("cached-inputs", asDirectory = true) + ) } } diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActorSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActorSpec.scala index f20267053e9..fce1e05a60b 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActorSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActorSpec.scala @@ -4,18 +4,18 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class ConfigAsyncJobExecutionActorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "getJobRegex" val successfulTestCases = Map( ("Job <03957>... blah blah blah", "Job <(\\d+)>.*") -> "03957", - ("""mxq_group_id=... - |mxq_group_name=... - |mxq_job_id=16988030 - |""".stripMargin, "mxq_job_id=(\\d+)") -> "16988030" + |mxq_group_name=... + |mxq_job_id=16988030 + |""".stripMargin, + "mxq_job_id=(\\d+)" + ) -> "16988030" ) successfulTestCases foreach { case ((fileContent, jobIdRegex), expectedJobId) => diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala index 0cc6267374e..2814eb821a1 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala @@ -19,8 +19,13 @@ import wom.values.WomSingleFile import scala.util.Success -class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers - with TableDrivenPropertyChecks with MockSugar with BeforeAndAfterAll { +class ConfigHashingStrategySpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks + with MockSugar + with BeforeAndAfterAll { behavior of "ConfigHashingStrategy" @@ -60,7 +65,9 @@ class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec wit private def makeStrategy(strategy: String, checkSibling: Option[Boolean] = None) = { val conf = ConfigFactory.parseString(s"""hashing-strategy: "$strategy"""") ConfigHashingStrategy( - checkSibling map { check => conf.withValue("check-sibling-md5", ConfigValueFactory.fromAnyRef(check)) } getOrElse conf + checkSibling map { check => + conf.withValue("check-sibling-md5", ConfigValueFactory.fromAnyRef(check)) + } getOrElse conf ) } @@ -233,15 +240,14 @@ class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec wit } it should "create a fingerprint strategy from config" in { - val defaultFingerprint: FingerprintStrategy = makeStrategy("fingerprint").asInstanceOf[FingerprintStrategy] + val defaultFingerprint: FingerprintStrategy = makeStrategy("fingerprint").asInstanceOf[FingerprintStrategy] defaultFingerprint.isInstanceOf[FingerprintStrategy] shouldBe true defaultFingerprint.checkSiblingMd5 shouldBe false defaultFingerprint.fingerprintSize shouldBe 10 * 
1024 * 1024 - val config = ConfigFactory.parseString( - """|hashing-strategy: "fingerprint" - |fingerprint-size: 123456789 - |""".stripMargin) + val config = ConfigFactory.parseString("""|hashing-strategy: "fingerprint" + |fingerprint-size: 123456789 + |""".stripMargin) val otherFingerprint: FingerprintStrategy = ConfigHashingStrategy.apply(config).asInstanceOf[FingerprintStrategy] otherFingerprint.fingerprintSize shouldBe 123456789 otherFingerprint.isInstanceOf[FingerprintStrategy] shouldBe true @@ -261,8 +267,10 @@ class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec wit } it should "have a fingerprint strategy and use md5 sibling file when appropriate" in { - val fingerPrintHash = HashFileXxH64StrategyMethods.xxh64sumString(file.lastModifiedTime.toEpochMilli.toHexString + - file.size.toHexString) + steakXxh64 + val fingerPrintHash = HashFileXxH64StrategyMethods.xxh64sumString( + file.lastModifiedTime.toEpochMilli.toHexString + + file.size.toHexString + ) + steakXxh64 val table = Table( ("check", "withMd5", "expected"), (true, true, md5FileHash), diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActorSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActorSpec.scala index fe2f320c562..085ac5cd974 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActorSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigInitializationActorSpec.scala @@ -11,34 +11,38 @@ class ConfigInitializationActorSpec extends AnyFlatSpec with CromwellTimeoutSpec val tripleQuote = "\"\"\"" val singularityBackendConfigString = s"""run-in-background = true - | runtime-attributes = ${tripleQuote} - | String singularity - | Int uncacheworthy1 - | Int uncacheworthy2 - | ${tripleQuote} - | runtime-attributes-for-caching = { - | "singularity": true - | "uncacheworthy1": false - | # NB: no specific entry for uncacheworthy2 - the default should already be 'false'. - | } - | submit-docker = ${tripleQuote} - | singularity exec --bind $${cwd}:$${docker_cwd} docker://$${singularity} $${job_shell} $${script} - | ${tripleQuote} - |""".stripMargin + | runtime-attributes = ${tripleQuote} + | String singularity + | Int uncacheworthy1 + | Int uncacheworthy2 + | ${tripleQuote} + | runtime-attributes-for-caching = { + | "singularity": true + | "uncacheworthy1": false + | # NB: no specific entry for uncacheworthy2 - the default should already be 'false'. 
+ | } + | submit-docker = ${tripleQuote} + | singularity exec --bind $${cwd}:$${docker_cwd} docker://$${singularity} $${job_shell} $${script} + | ${tripleQuote} + |""".stripMargin val singularityBackendConfig = ConfigFactory.parseString(singularityBackendConfigString) // Mirroring how the declarations are made in ConfigInitializationActor: lazy val configWdlNamespace = new ConfigWdlNamespace(singularityBackendConfig) - lazy val declarationValidations: Seq[DeclarationValidation] = { - DeclarationValidation.fromDeclarations(configWdlNamespace.runtimeDeclarations, configWdlNamespace.callCachedRuntimeAttributes) - } + lazy val declarationValidations: Seq[DeclarationValidation] = + DeclarationValidation.fromDeclarations(configWdlNamespace.runtimeDeclarations, + configWdlNamespace.callCachedRuntimeAttributes + ) declarationValidations.exists(p => p.key == "singularity" && p.makeValidation().usedInCallCaching) should be(true) - declarationValidations.exists(p => p.key == "uncacheworthy1" && p.makeValidation().usedInCallCaching) should be(false) - declarationValidations.exists(p => p.key == "uncacheworthy2" && p.makeValidation().usedInCallCaching) should be(false) + declarationValidations.exists(p => p.key == "uncacheworthy1" && p.makeValidation().usedInCallCaching) should be( + false + ) + declarationValidations.exists(p => p.key == "uncacheworthy2" && p.makeValidation().usedInCallCaching) should be( + false + ) } } - diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidationSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidationSpec.scala index bd00982a49a..48722a0dc66 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidationSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/MemoryDeclarationValidationSpec.scala @@ -11,7 +11,11 @@ import wdl4s.parser.MemoryUnit import wom.format.MemorySize import wom.values.{WomFloat, WomLong} -class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { +class MemoryDeclarationValidationSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { behavior of "MemoryDeclarationValidation" val validDeclaredAmounts = Table( @@ -24,7 +28,7 @@ class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSp ("Int? memory_gb", Option(2), None, Option(WomLong(2))), ("Int? memory_gb = 3", None, Option(3), None), ("Int? memory_gb = 3", Option(2), Option(3), Option(WomLong(2))), - ("Float memory", Option(2), None, Option(WomFloat(2F * 1024F * 1024F * 1024F))), + ("Float memory", Option(2), None, Option(WomFloat(2f * 1024f * 1024f * 1024f))), ("Float memory_gb", Option(2), None, Option(WomFloat(2))), ("Float memory_gb = 3.0", None, Option(3), None), ("Float memory_gb = 3.0", Option(2), Option(3), Option(WomFloat(2))), @@ -44,7 +48,7 @@ class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSp ("Int? memoryMin_gb", Option(2), None, Option(WomLong(2))), ("Int? memoryMin_gb = 3", None, Option(3), None), ("Int? 
memoryMin_gb = 3", Option(2), Option(3), Option(WomLong(2))), - ("Float memoryMin", Option(2), None, Option(WomFloat(2F * 1024F * 1024F * 1024F))), + ("Float memoryMin", Option(2), None, Option(WomFloat(2f * 1024f * 1024f * 1024f))), ("Float memoryMin_gb", Option(2), None, Option(WomFloat(2))), ("Float memoryMin_gb = 3.0", None, Option(3), None), ("Float memoryMin_gb = 3.0", Option(2), Option(3), Option(WomFloat(2))), @@ -60,7 +64,7 @@ class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSp ("Int? memoryMax_gb", Option(2), None, Option(WomLong(2))), ("Int? memoryMax_gb = 3", None, Option(3), None), ("Int? memoryMax_gb = 3", Option(2), Option(3), Option(WomLong(2))), - ("Float memoryMax", Option(2), None, Option(WomFloat(2F * 1024F * 1024F * 1024F))), + ("Float memoryMax", Option(2), None, Option(WomFloat(2f * 1024f * 1024f * 1024f))), ("Float memoryMax_gb", Option(2), None, Option(WomFloat(2))), ("Float memoryMax_gb = 3.0", None, Option(3), None), ("Float memoryMax_gb = 3.0", Option(2), Option(3), Option(WomFloat(2))), @@ -72,23 +76,23 @@ class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSp forAll(validDeclaredAmounts) { (declaration, runtimeAmount, expectedDefaultAmount, expectedExtracted) => it should s"extract memory from declared $declaration with memory set to ${runtimeAmount.getOrElse("none")}" in { - val memoryKey = if (declaration.contains("Min")) MemoryMinRuntimeAttribute - else if (declaration.contains("Max")) MemoryMaxRuntimeAttribute - else MemoryRuntimeAttribute - - val memoryPrefix = if (declaration.contains("Min")) MemoryMinRuntimeAttributePrefix - else if (declaration.contains("Max")) MemoryMaxRuntimeAttributePrefix - else MemoryRuntimeAttributePrefix - - val config = ConfigFactory.parseString( - s"""|submit = "anything" - |${ConfigConstants.RuntimeAttributesConfig} = "$declaration" - |""".stripMargin) + val memoryKey = + if (declaration.contains("Min")) MemoryMinRuntimeAttribute + else if (declaration.contains("Max")) MemoryMaxRuntimeAttribute + else MemoryRuntimeAttribute + + val memoryPrefix = + if (declaration.contains("Min")) MemoryMinRuntimeAttributePrefix + else if (declaration.contains("Max")) MemoryMaxRuntimeAttributePrefix + else MemoryRuntimeAttributePrefix + + val config = ConfigFactory.parseString(s"""|submit = "anything" + |${ConfigConstants.RuntimeAttributesConfig} = "$declaration" + |""".stripMargin) val configWdlNamespace = new ConfigWdlNamespace(config) val runtimeDeclaration = configWdlNamespace.runtimeDeclarations.head - val memoryDeclarationValidation = new MemoryDeclarationValidation(runtimeDeclaration, - memoryKey, memoryPrefix) + val memoryDeclarationValidation = new MemoryDeclarationValidation(runtimeDeclaration, memoryKey, memoryPrefix) val attributes = runtimeAmount .map(amount => memoryKey -> MemorySize(amount.toDouble, MemoryUnit.GB)) .toMap @@ -101,7 +105,9 @@ class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSp .map(amount => WomLong(MemorySize(amount.toDouble, MemoryUnit.GB).bytes.toLong)) MemoryDeclarationValidation.isMemoryDeclaration(runtimeDeclaration.unqualifiedName, - memoryKey, memoryPrefix) should be(true) + memoryKey, + memoryPrefix + ) should be(true) default should be(expectedDefault) extracted should be(expectedExtracted) } @@ -114,10 +120,9 @@ class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSp forAll(badSyntaxDeclarations) { declaration => it should s"throw a syntax error for memory declaration $declaration" in { - val 
config = ConfigFactory.parseString( - s"""|submit = "anything" - |${ConfigConstants.RuntimeAttributesConfig} = "$declaration" - |""".stripMargin) + val config = ConfigFactory.parseString(s"""|submit = "anything" + |${ConfigConstants.RuntimeAttributesConfig} = "$declaration" + |""".stripMargin) val expectedException = intercept[RuntimeException](new ConfigWdlNamespace(config)) expectedException.getMessage should startWith("Error parsing generated wdl:\n") @@ -133,15 +138,16 @@ class MemoryDeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSp forAll(invalidDeclarations) { declaration => it should s"not identify $declaration as a memory declaration" in { - val config = ConfigFactory.parseString( - s"""|submit = "anything" - |${ConfigConstants.RuntimeAttributesConfig} = "$declaration" - |""".stripMargin) + val config = ConfigFactory.parseString(s"""|submit = "anything" + |${ConfigConstants.RuntimeAttributesConfig} = "$declaration" + |""".stripMargin) val configWdlNamespace = new ConfigWdlNamespace(config) val runtimeDeclaration = configWdlNamespace.runtimeDeclarations.head MemoryDeclarationValidation.isMemoryDeclaration(runtimeDeclaration.unqualifiedName, - MemoryRuntimeAttribute, MemoryRuntimeAttributePrefix) should be(false) + MemoryRuntimeAttribute, + MemoryRuntimeAttributePrefix + ) should be(false) } } } diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemInitializationActorSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemInitializationActorSpec.scala index cd74a247b8c..ee10f41ba48 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemInitializationActorSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemInitializationActorSpec.scala @@ -18,32 +18,38 @@ import wom.graph.CommandCallNode import scala.concurrent.duration._ -class SharedFileSystemInitializationActorSpec extends TestKitSuite - with AnyWordSpecLike with Matchers with ImplicitSender { +class SharedFileSystemInitializationActorSpec + extends TestKitSuite + with AnyWordSpecLike + with Matchers + with ImplicitSender { val Timeout: FiniteDuration = 10.second.dilated val HelloWorld: String = s""" - |task hello { - | String addressee = "you" - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | - | RUNTIME - |} - | - |workflow wf_hello { - | call hello - |} + |task hello { + | String addressee = "you" + | command { + | echo "Hello $${addressee}!" 
+ | } + | output { + | String salutation = read_string(stdout()) + | } + | + | RUNTIME + |} + | + |workflow wf_hello { + | call hello + |} """.stripMargin - private def getActorRef(workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], - conf: BackendConfigurationDescriptor) = { - val params = DefaultInitializationActorParams(workflowDescriptor, emptyActor, calls, emptyActor, conf, restarting = false) + private def getActorRef(workflowDescriptor: BackendWorkflowDescriptor, + calls: Set[CommandCallNode], + conf: BackendConfigurationDescriptor + ) = { + val params = + DefaultInitializationActorParams(workflowDescriptor, emptyActor, calls, emptyActor, conf, restarting = false) val props = Props(new SharedFileSystemInitializationActor(params)) system.actorOf(props, "SharedFileSystemInitializationActor") } diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala index 848b5e56c6b..5d1b1e02c12 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala @@ -4,7 +4,12 @@ import _root_.wdl.draft2.model.LocallyQualifiedName import akka.testkit.{TestDuration, TestProbe} import com.typesafe.config.ConfigFactory import common.collections.EnhancedCollections._ -import cromwell.backend.BackendJobExecutionActor.{JobAbortedResponse, JobFailedNonRetryableResponse, JobSucceededResponse, RunOnBackend} +import cromwell.backend.BackendJobExecutionActor.{ + JobAbortedResponse, + JobFailedNonRetryableResponse, + JobSucceededResponse, + RunOnBackend +} import cromwell.backend.BackendLifecycleActor.AbortJobCommand import cromwell.backend._ import cromwell.backend.async.WrongReturnCode @@ -33,8 +38,12 @@ import java.io.FileNotFoundException import scala.concurrent.duration._ import scala.sys.process._ -class SharedFileSystemJobExecutionActorSpec extends TestKitSuite - with AnyFlatSpecLike with BackendSpec with TableDrivenPropertyChecks with OptionValues { +class SharedFileSystemJobExecutionActorSpec + extends TestKitSuite + with AnyFlatSpecLike + with BackendSpec + with TableDrivenPropertyChecks + with OptionValues { behavior of "SharedFileSystemJobExecutionActor" @@ -45,13 +54,27 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite private val mockBackendJobDescriptorKey = BackendJobDescriptorKey(call, None, 1) def executeSpec(docker: Boolean): Any = { - val expectedOutputs: CallOutputs = WomMocks.mockOutputExpectations(Map("hello.salutation" -> WomString("Hello you !"))) - - val expectedResponse = JobSucceededResponse(mockBackendJobDescriptorKey, Some(0), expectedOutputs, None, Seq.empty, None, resultGenerationMode = RunOnBackend) + val expectedOutputs: CallOutputs = + WomMocks.mockOutputExpectations(Map("hello.salutation" -> WomString("Hello you !"))) + + val expectedResponse = JobSucceededResponse(mockBackendJobDescriptorKey, + Some(0), + expectedOutputs, + None, + Seq.empty, + None, + resultGenerationMode = RunOnBackend + ) val runtime = if (docker) s"""runtime { docker: "$dockerImageUbuntu" }""" else "" val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = runtime) val workflow = TestWorkflow(workflowDescriptor, TestConfig.backendRuntimeConfigDescriptor, expectedResponse) - val backend = 
createBackend(jobDescriptorFromSingleCallWorkflow(workflow.workflowDescriptor, Map.empty, WorkflowOptions.empty, runtimeAttributeDefinitions), workflow.config) + val backend = createBackend(jobDescriptorFromSingleCallWorkflow(workflow.workflowDescriptor, + Map.empty, + WorkflowOptions.empty, + runtimeAttributeDefinitions + ), + workflow.config + ) testWorkflow(workflow, backend) } @@ -76,37 +99,53 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite it should "send back an execution failure if the task fails" in { val expectedResponse = - JobFailedNonRetryableResponse(mockBackendJobDescriptorKey, WrongReturnCode("wf_goodbye.goodbye:NA:1", 1, None), Option(1)) - val workflow = TestWorkflow(buildWdlWorkflowDescriptor(GoodbyeWorld), TestConfig.backendRuntimeConfigDescriptor, expectedResponse) - val backend = createBackend(jobDescriptorFromSingleCallWorkflow(workflow.workflowDescriptor, Map.empty, WorkflowOptions.empty, runtimeAttributeDefinitions), workflow.config) + JobFailedNonRetryableResponse(mockBackendJobDescriptorKey, + WrongReturnCode("wf_goodbye.goodbye:NA:1", 1, None), + Option(1) + ) + val workflow = TestWorkflow(buildWdlWorkflowDescriptor(GoodbyeWorld), + TestConfig.backendRuntimeConfigDescriptor, + expectedResponse + ) + val backend = createBackend(jobDescriptorFromSingleCallWorkflow(workflow.workflowDescriptor, + Map.empty, + WorkflowOptions.empty, + runtimeAttributeDefinitions + ), + workflow.config + ) testWorkflow(workflow, backend) } def localizationSpec(docker: Boolean): Assertion = { - def templateConf(localizers: String) = BackendConfigurationDescriptor(ConfigFactory.parseString( - s"""|{ - | root = "local-cromwell-executions" - | filesystems { - | local { - | localization = [ - | $localizers - | ] - | } - | } - | default-runtime-attributes { - | cpu: 1 - | failOnStderr: false - | continueOnReturnCode: 0 - | } - |} - |""".stripMargin), ConfigFactory.parseString("{}")) + def templateConf(localizers: String) = BackendConfigurationDescriptor( + ConfigFactory.parseString(s"""|{ + | root = "local-cromwell-executions" + | filesystems { + | local { + | localization = [ + | $localizers + | ] + | } + | } + | default-runtime-attributes { + | cpu: 1 + | failOnStderr: false + | continueOnReturnCode: 0 + | } + |} + |""".stripMargin), + ConfigFactory.parseString("{}") + ) val hardConf = templateConf("hard-link") val symConf = templateConf("soft-link") val copyConf = templateConf("copy") - val jsonInputFile = createCannedFile("localize", "content from json inputs", Option(DefaultPathBuilder.build(".").get)) - val callInputFile = createCannedFile("localize", "content from call inputs", Option(DefaultPathBuilder.build(".").get)) + val jsonInputFile = + createCannedFile("localize", "content from json inputs", Option(DefaultPathBuilder.build(".").get)) + val callInputFile = + createCannedFile("localize", "content from call inputs", Option(DefaultPathBuilder.build(".").get)) val inputs = Option(s"""{ "wf_localize.workflowFile": "${callInputFile.pathAsString}", "wf_localize.localize.inputFileFromJson": "${jsonInputFile.pathAsString}" @@ -115,9 +154,8 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite val expectedOutputs: CallOutputs = WomMocks.mockOutputExpectations( Map( "localize.out" -> WomArray(WomArrayType(WomStringType), - List( - WomString("content from json inputs"), - WomString("content from call inputs"))) + List(WomString("content from json inputs"), WomString("content from call inputs")) + ) ) ) @@ -135,24 +173,50 @@ class 
SharedFileSystemJobExecutionActorSpec extends TestKitSuite val runtime = if (docker) s"""runtime { docker: "$dockerImageUbuntu" } """ else "" val workflowDescriptor = buildWdlWorkflowDescriptor(InputFiles, inputs, runtime = runtime) val callInputs = Map( - "inputFileFromCallInputs" -> workflowDescriptor.knownValues.collectFirst({ - case (outputPort, resolvedValue) if outputPort.fullyQualifiedName == "wf_localize.workflowFile" => resolvedValue - }).get, - "inputFileFromJson" -> workflowDescriptor.knownValues.collectFirst({ - case (outputPort, resolvedValue) if outputPort.fullyQualifiedName == "wf_localize.localize.inputFileFromJson" => resolvedValue - }).get + "inputFileFromCallInputs" -> workflowDescriptor.knownValues.collectFirst { + case (outputPort, resolvedValue) if outputPort.fullyQualifiedName == "wf_localize.workflowFile" => + resolvedValue + }.get, + "inputFileFromJson" -> workflowDescriptor.knownValues.collectFirst { + case (outputPort, resolvedValue) + if outputPort.fullyQualifiedName == "wf_localize.localize.inputFileFromJson" => + resolvedValue + }.get ) - val backend = createBackend(jobDescriptorFromSingleCallWorkflow(workflowDescriptor, callInputs, WorkflowOptions.empty, runtimeAttributeDefinitions), conf) - val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, callInputs, WorkflowOptions.empty, runtimeAttributeDefinitions) - val expectedResponse = JobSucceededResponse(jobDescriptor.key, Some(0), expectedOutputs, None, Seq.empty, None, resultGenerationMode = RunOnBackend) + val backend = createBackend(jobDescriptorFromSingleCallWorkflow(workflowDescriptor, + callInputs, + WorkflowOptions.empty, + runtimeAttributeDefinitions + ), + conf + ) + val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, + callInputs, + WorkflowOptions.empty, + runtimeAttributeDefinitions + ) + val expectedResponse = JobSucceededResponse(jobDescriptor.key, + Some(0), + expectedOutputs, + None, + Seq.empty, + None, + resultGenerationMode = RunOnBackend + ) val jobPaths = JobPathsWithDocker(jobDescriptor.key, workflowDescriptor, conf.backendConfig) whenReady(backend.execute) { executionResponse => assertResponse(executionResponse, expectedResponse) - val localizedJsonInputFile = DefaultPathBuilder.get(jobPaths.callInputsRoot.pathAsString, jsonInputFile.parent.pathAsString.hashCode.toString + "/" + jsonInputFile.name) - val localizedCallInputFile = DefaultPathBuilder.get(jobPaths.callInputsRoot.pathAsString, callInputFile.parent.pathAsString.hashCode.toString + "/" + callInputFile.name) + val localizedJsonInputFile = + DefaultPathBuilder.get(jobPaths.callInputsRoot.pathAsString, + jsonInputFile.parent.pathAsString.hashCode.toString + "/" + jsonInputFile.name + ) + val localizedCallInputFile = + DefaultPathBuilder.get(jobPaths.callInputsRoot.pathAsString, + callInputFile.parent.pathAsString.hashCode.toString + "/" + callInputFile.name + ) localizedJsonInputFile.isSymbolicLink shouldBe isSymlink val realJsonInputFile = @@ -177,7 +241,11 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite it should "abort a job and kill a process" in { val workflowDescriptor = buildWdlWorkflowDescriptor(Sleep20) - val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, Map.empty, WorkflowOptions.empty, runtimeAttributeDefinitions) + val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, + Map.empty, + WorkflowOptions.empty, + 
runtimeAttributeDefinitions + ) val backendRef = createBackendRef(jobDescriptor, TestConfig.backendRuntimeConfigDescriptor) val backend = backendRef.underlyingActor @@ -191,7 +259,11 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite def recoverSpec(completed: Boolean, writeReturnCode: Boolean = true): Assertion = { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld) - val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, Map.empty, WorkflowOptions.empty, runtimeAttributeDefinitions) + val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, + Map.empty, + WorkflowOptions.empty, + runtimeAttributeDefinitions + ) val backendRef = createBackendRef(jobDescriptor, TestConfig.backendRuntimeConfigDescriptor) val backend = backendRef.underlyingActor @@ -267,14 +339,33 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite // If this is not the case, more context/logic will need to be moved to the backend so it can figure it out by itself val symbolMaps: Map[LocallyQualifiedName, WomInteger] = Map("intNumber" -> WomInteger(shard)) - val evaluatedAttributes = call.callable.runtimeAttributes.attributes.safeMapValues(_.evaluateValue(Map.empty, NoIoFunctionSet).getOrElse(fail("Can't evaluate runtime attribute"))) - val runtimeAttributes: Map[LocallyQualifiedName, WomValue] = RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, WorkflowOptions.empty)(evaluatedAttributes) + val evaluatedAttributes = call.callable.runtimeAttributes.attributes + .safeMapValues(_.evaluateValue(Map.empty, NoIoFunctionSet).getOrElse(fail("Can't evaluate runtime attribute"))) + val runtimeAttributes: Map[LocallyQualifiedName, WomValue] = + RuntimeAttributeDefinition.addDefaultsToAttributes(runtimeAttributeDefinitions, WorkflowOptions.empty)( + evaluatedAttributes + ) val jobDescriptor: BackendJobDescriptor = - BackendJobDescriptor(workflowDescriptor, BackendJobDescriptorKey(call, Option(shard), 1), runtimeAttributes, fqnWdlMapToDeclarationMap(symbolMaps), NoDocker, None, Map.empty) + BackendJobDescriptor(workflowDescriptor, + BackendJobDescriptorKey(call, Option(shard), 1), + runtimeAttributes, + fqnWdlMapToDeclarationMap(symbolMaps), + NoDocker, + None, + Map.empty + ) val backend = createBackend(jobDescriptor, TestConfig.backendRuntimeConfigDescriptor) val response = - JobSucceededResponse(mockBackendJobDescriptorKey, Some(0), WomMocks.mockOutputExpectations(Map("scattering.out" -> WomInteger(shard))), None, Seq.empty, None, resultGenerationMode = RunOnBackend) + JobSucceededResponse( + mockBackendJobDescriptorKey, + Some(0), + WomMocks.mockOutputExpectations(Map("scattering.out" -> WomInteger(shard))), + None, + Seq.empty, + None, + resultGenerationMode = RunOnBackend + ) executeJobAndAssertOutputs(backend, response) } } @@ -285,30 +376,55 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite "wf_localize.localize.inputFile": "$inputFile" }""") val workflowDescriptor = buildWdlWorkflowDescriptor(OutputProcess, inputs) - val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, Map.empty, WorkflowOptions.empty, runtimeAttributeDefinitions) + val jobDescriptor: BackendJobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, + Map.empty, + WorkflowOptions.empty, + runtimeAttributeDefinitions + ) val backend = createBackend(jobDescriptor, TestConfig.backendRuntimeConfigDescriptor) - val jobPaths = 
JobPathsWithDocker(jobDescriptor.key, workflowDescriptor, TestConfig.backendRuntimeConfigDescriptor.backendConfig) + val jobPaths = + JobPathsWithDocker(jobDescriptor.key, workflowDescriptor, TestConfig.backendRuntimeConfigDescriptor.backendConfig) val expectedA = WomSingleFile(jobPaths.callExecutionRoot.resolve("a").toAbsolutePath.pathAsString) val expectedB = WomSingleFile(jobPaths.callExecutionRoot.resolve("dir").toAbsolutePath.resolve("b").pathAsString) - val expectedOutputs = WomMocks.mockOutputExpectations(Map( - "localize.o1" -> expectedA, - "localize.o2" -> WomArray(WomArrayType(WomSingleFileType), Seq(expectedA, expectedB)), - "localize.o3" -> WomSingleFile(inputFile) - )) - val expectedResponse = JobSucceededResponse(jobDescriptor.key, Some(0), expectedOutputs, None, Seq.empty, None, resultGenerationMode = RunOnBackend) + val expectedOutputs = WomMocks.mockOutputExpectations( + Map( + "localize.o1" -> expectedA, + "localize.o2" -> WomArray(WomArrayType(WomSingleFileType), Seq(expectedA, expectedB)), + "localize.o3" -> WomSingleFile(inputFile) + ) + ) + val expectedResponse = JobSucceededResponse(jobDescriptor.key, + Some(0), + expectedOutputs, + None, + Seq.empty, + None, + resultGenerationMode = RunOnBackend + ) executeJobAndAssertOutputs(backend, expectedResponse) } it should "fail post processing if an output file is not found" in { - val expectedResponse = JobFailedNonRetryableResponse(mockBackendJobDescriptorKey, - new FileNotFoundException("Could not process output, file not found:"), Option(0)) - val workflow = TestWorkflow(buildWdlWorkflowDescriptor(MissingOutputProcess), TestConfig.backendRuntimeConfigDescriptor, expectedResponse) - val backend = createBackend(jobDescriptorFromSingleCallWorkflow(workflow.workflowDescriptor, Map.empty, WorkflowOptions.empty, runtimeAttributeDefinitions), workflow.config) + val expectedResponse = + JobFailedNonRetryableResponse(mockBackendJobDescriptorKey, + new FileNotFoundException("Could not process output, file not found:"), + Option(0) + ) + val workflow = TestWorkflow(buildWdlWorkflowDescriptor(MissingOutputProcess), + TestConfig.backendRuntimeConfigDescriptor, + expectedResponse + ) + val backend = createBackend(jobDescriptorFromSingleCallWorkflow(workflow.workflowDescriptor, + Map.empty, + WorkflowOptions.empty, + runtimeAttributeDefinitions + ), + workflow.config + ) testWorkflow(workflow, backend) } - def createCannedFile(prefix: String, contents: String, parent: Option[Path] = None): Path = { + def createCannedFile(prefix: String, contents: String, parent: Option[Path] = None): Path = DefaultPathBuilder.createTempFile(prefix, ".out", parent).write(contents) - } } diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala index 34d5ca3cc73..409ef3c281f 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala @@ -14,8 +14,12 @@ import wom.values.WomSingleFile import scala.io.Source -class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers - with TableDrivenPropertyChecks with BackendSpec { +class SharedFileSystemSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks + with BackendSpec { behavior of "SharedFileSystem" @@ -27,14 +31,14 @@ class SharedFileSystemSpec extends AnyFlatSpec with 
CromwellTimeoutSpec with Mat ConfigFactory.parseString("""{localization: [cached-copy], max-hardlinks: 3 }""") private val localPathBuilder = List(DefaultPathBuilder) - def localizationTest(config: Config, docker: Boolean, fileInCallDir: Boolean = false, fileAlreadyExists: Boolean = false, symlink: Boolean = false, cachedCopy: Boolean = false, - linkNb: Int = 1): Path = { + linkNb: Int = 1 + ): Path = { val callDir = DefaultPathBuilder.createTempDirectory("SharedFileSystem") val orig = if (fileInCallDir) callDir.createChild("inputFile") else DefaultPathBuilder.createTempFile("inputFile") val dest = if (fileInCallDir) orig else callDir./(orig.parent.pathAsString.hashCode.toString)./(orig.name) @@ -54,11 +58,12 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val sharedFS: SharedFileSystem = new SharedFileSystem { override val pathBuilders: PathBuilders = localPathBuilder override val sharedFileSystemConfig: Config = config - override implicit def actorContext: ActorContext = null + implicit override def actorContext: ActorContext = null override lazy val cachedCopyDir: Option[Path] = Option(DefaultPathBuilder.createTempDirectory("cached-copy")) } val cachedFile: Option[Path] = sharedFS.cachedCopyDir.map( - _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name)) + _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name) + ) val localizedinputs = Map(inputs.head._1 -> WomSingleFile(dest.pathAsString)) val result = sharedFS.localizeInputs(callDir, docker = docker)(inputs) @@ -114,7 +119,7 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val sharedFS: SharedFileSystem = new SharedFileSystem { override val pathBuilders: PathBuilders = localPathBuilder override val sharedFileSystemConfig: Config = defaultLocalization - override implicit def actorContext: ActorContext = null + implicit override def actorContext: ActorContext = null } val result = sharedFS.localizeInputs(callDir, docker = false)(inputs) result.isFailure shouldBe true @@ -130,11 +135,12 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val sharedFS: SharedFileSystem = new SharedFileSystem { override val pathBuilders: PathBuilders = localPathBuilder override val sharedFileSystemConfig: Config = cachedCopyLocalization - override implicit def actorContext: ActorContext = null + implicit override def actorContext: ActorContext = null override lazy val cachedCopyDir: Option[Path] = Option(DefaultPathBuilder.createTempDirectory("cached-copy")) } val cachedFile: Option[Path] = sharedFS.cachedCopyDir.map( - _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name)) + _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name) + ) val results = callDirs.map(sharedFS.localizeInputs(_, docker = true)(inputs)) @@ -148,7 +154,7 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat dests.foreach(_.delete(swallowIOExceptions = true)) } -it should "copy the file again when the copy-cached file has exceeded the maximum number of hardlinks" in { + it should "copy the file again when the copy-cached file has exceeded the maximum number of hardlinks" in { val callDirs: IndexedSeq[Path] = 1 to 3 map { _ => DefaultPathBuilder.createTempDirectory("SharedFileSystem") } val orig = DefaultPathBuilder.createTempFile("inputFile") val 
dests = callDirs.map(_./(orig.parent.pathAsString.hashCode.toString)./(orig.name)) @@ -157,11 +163,12 @@ it should "copy the file again when the copy-cached file has exceeded the maximu val sharedFS: SharedFileSystem = new SharedFileSystem { override val pathBuilders: PathBuilders = localPathBuilder override val sharedFileSystemConfig: Config = cachedCopyLocalizationMaxHardlinks - override implicit def actorContext: ActorContext = null + implicit override def actorContext: ActorContext = null override lazy val cachedCopyDir: Option[Path] = Option(DefaultPathBuilder.createTempDirectory("cached-copy")) } val cachedFile: Option[Path] = sharedFS.cachedCopyDir.map( - _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name)) + _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name) + ) val results = callDirs.map(sharedFS.localizeInputs(_, docker = true)(inputs)) @@ -175,7 +182,6 @@ it should "copy the file again when the copy-cached file has exceeded the maximu dests.foreach(_.delete(swallowIOExceptions = true)) } - private[this] def countLinks(file: Path): Int = file.getAttribute("unix:nlink").asInstanceOf[Int] private[this] def isSymLink(file: Path): Boolean = file.isSymbolicLink diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/TestLocalAsyncJobExecutionActor.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/TestLocalAsyncJobExecutionActor.scala index 58fec3dd74f..f9881908894 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/TestLocalAsyncJobExecutionActor.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/TestLocalAsyncJobExecutionActor.scala @@ -10,15 +10,14 @@ import cromwell.core.SimpleIoActor import cromwell.services.keyvalue.InMemoryKvServiceActor class TestLocalAsyncJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends BackgroundAsyncJobExecutionActor { + extends BackgroundAsyncJobExecutionActor { override lazy val processArgs: SharedFileSystemCommand = { val script = jobPaths.script.pathAsString if (isDockerRun) { val docker = RuntimeAttributesValidation.extract(DockerValidation.instance, validatedRuntimeAttributes) val cwd = jobPaths.callRoot.pathAsString val dockerCwd = jobPathsWithDocker.callDockerRoot.pathAsString - SharedFileSystemCommand("/bin/bash", "-c", - s"docker run --rm -v $cwd:$dockerCwd -i $docker /bin/bash < $script") + SharedFileSystemCommand("/bin/bash", "-c", s"docker run --rm -v $cwd:$dockerCwd -i $docker /bin/bash < $script") } else { SharedFileSystemCommand("/bin/bash", script) } @@ -28,19 +27,26 @@ class TestLocalAsyncJobExecutionActor(override val standardParams: StandardAsync } object TestLocalAsyncJobExecutionActor { - def createBackend(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor) - (implicit system: ActorSystem): StandardSyncExecutionActor = { + def createBackend(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor)( + implicit system: ActorSystem + ): StandardSyncExecutionActor = createBackendRef(jobDescriptor, configurationDescriptor).underlyingActor - } - def createBackendRef(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor) - (implicit system: ActorSystem): TestActorRef[StandardSyncExecutionActor] = { - val serviceRegistryActor = system.actorOf(Props(new InMemoryKvServiceActor)) // We only really need the KV 
store for now + def createBackendRef(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor)( + implicit system: ActorSystem + ): TestActorRef[StandardSyncExecutionActor] = { + val serviceRegistryActor = + system.actorOf(Props(new InMemoryKvServiceActor)) // We only really need the KV store for now val ioActor = system.actorOf(SimpleIoActor.props) - val workflowPaths = new WorkflowPathsWithDocker(jobDescriptor.workflowDescriptor, configurationDescriptor.backendConfig) - val initializationData = new StandardInitializationData(workflowPaths, - StandardValidatedRuntimeAttributesBuilder.default(configurationDescriptor.backendRuntimeAttributesConfig).withValidation(DockerValidation.optional), - classOf[SharedFileSystemExpressionFunctions]) + val workflowPaths = + new WorkflowPathsWithDocker(jobDescriptor.workflowDescriptor, configurationDescriptor.backendConfig) + val initializationData = new StandardInitializationData( + workflowPaths, + StandardValidatedRuntimeAttributesBuilder + .default(configurationDescriptor.backendRuntimeAttributesConfig) + .withValidation(DockerValidation.optional), + classOf[SharedFileSystemExpressionFunctions] + ) val asyncClass = classOf[TestLocalAsyncJobExecutionActor] val params = DefaultStandardSyncExecutionActorParams( @@ -52,7 +58,8 @@ object TestLocalAsyncJobExecutionActor { backendInitializationDataOption = Option(initializationData), backendSingletonActorOption = None, asyncJobExecutionActorClass = asyncClass, - MinimumRuntimeSettings()) + MinimumRuntimeSettings() + ) TestActorRef(new StandardSyncExecutionActor(params)) } diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/DeclarationValidationSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/DeclarationValidationSpec.scala index 4a6a28a47ba..681e4a91baa 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/DeclarationValidationSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/DeclarationValidationSpec.scala @@ -12,16 +12,23 @@ import wdl.draft2.model._ import wom.types.WomIntegerType import wom.values.WomInteger -class DeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { +class DeclarationValidationSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks { behavior of "DeclarationValidation" - + def validateCpu(key: String) = { val expression = WdlExpression.fromString("5") val declarationValidation = DeclarationValidation.fromDeclaration(callCachedRuntimeAttributesMap = Map.empty)( - Declaration(WomIntegerType, key, Option(expression), None, null)) - declarationValidation.extractWdlValueOption(ValidatedRuntimeAttributes(Map(key -> refineMV[Positive](5)))) shouldBe Some(WomInteger(5)) + Declaration(WomIntegerType, key, Option(expression), None, null) + ) + declarationValidation.extractWdlValueOption( + ValidatedRuntimeAttributes(Map(key -> refineMV[Positive](5))) + ) shouldBe Some(WomInteger(5)) } - + it should "validate cpu attributes" in { val keys = Table( "key", @@ -29,7 +36,7 @@ class DeclarationValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec wit "cpuMin", "cpuMax" ) - - forAll(keys) { validateCpu } + + forAll(keys)(validateCpu) } } diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/HttpFilesystemEnablementSpec.scala 
b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/HttpFilesystemEnablementSpec.scala index 0c63b2b8db8..bb96ce4b1ee 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/HttpFilesystemEnablementSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/config/HttpFilesystemEnablementSpec.scala @@ -10,7 +10,11 @@ import org.scalatest.BeforeAndAfterAll import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike -class HttpFilesystemEnablementSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers with BeforeAndAfterAll { +class HttpFilesystemEnablementSpec + extends AnyWordSpecLike + with CromwellTimeoutSpec + with Matchers + with BeforeAndAfterAll { "The http filesystem on the default Local backend" should { "be enabled unless explicitly disabled" in { configuredFilesystems(LocalConfig) shouldEqual Set("http") @@ -20,7 +24,6 @@ class HttpFilesystemEnablementSpec extends AnyWordSpecLike with CromwellTimeoutS } } - object HttpFilesystemEnablementSpec { def configuredFilesystems(configs: Configurations): Set[String] = { val descriptor = new BackendConfigurationDescriptor(configs.local, configs.global) { @@ -34,10 +37,10 @@ object HttpFilesystemEnablementSpec { // This is only checking the http filesystem, no need to load all those other filesystems with their // filesystem classes that live in other subprojects. val globalFilesystemsConfig = - """ - |http { - | class = "cromwell.filesystems.http.HttpPathBuilderFactory" - |} + """ + |http { + | class = "cromwell.filesystems.http.HttpPathBuilderFactory" + |} """.stripMargin val globalFilesystems = ConfigFactory.parseString(globalFilesystemsConfig) diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala index bd171d757b4..d7a19685cd8 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActor.scala @@ -14,10 +14,23 @@ import common.exception.AggregatedMessageException import common.validation.ErrorOr.ErrorOr import common.validation.Validation._ import cromwell.backend.BackendJobLifecycleActor -import cromwell.backend.async.{AbortedExecutionHandle, ExecutionHandle, FailedNonRetryableExecutionHandle, PendingExecutionHandle} -import cromwell.backend.impl.tes.TesAsyncBackendJobExecutionActor.{determineWSMSasEndpointFromInputs, generateLocalizedSasScriptPreamble} +import cromwell.backend.async.{ + AbortedExecutionHandle, + ExecutionHandle, + FailedNonRetryableExecutionHandle, + PendingExecutionHandle +} +import cromwell.backend.impl.tes.TesAsyncBackendJobExecutionActor.{ + determineWSMSasEndpointFromInputs, + generateLocalizedSasScriptPreamble +} import cromwell.backend.impl.tes.TesResponseJsonFormatter._ -import cromwell.backend.standard.{ScriptPreambleData, StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} +import cromwell.backend.standard.{ + ScriptPreambleData, + StandardAsyncExecutionActor, + StandardAsyncExecutionActorParams, + StandardAsyncJob +} import cromwell.core.logging.JobLogger import cromwell.core.path.{DefaultPathBuilder, Path} import cromwell.core.retry.Retry._ @@ -63,7 +76,7 @@ case object Cancelled extends TesRunStatus { object TesAsyncBackendJobExecutionActor { val JobIdKey = 
"tes_job_id" - def generateLocalizedSasScriptPreamble(environmentVariableName: String, getSasWsmEndpoint: String) : String = { + def generateLocalizedSasScriptPreamble(environmentVariableName: String, getSasWsmEndpoint: String): String = // BEARER_TOKEN: https://learn.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/how-to-use-vm-token#get-a-token-using-http // NB: Scala string interpolation and bash variable substitution use similar syntax. $$ is an escaped $, much like \\ is an escaped \. s""" @@ -128,11 +141,9 @@ object TesAsyncBackendJobExecutionActor { |echo "Saving sas token: $${$environmentVariableName:0:4}**** to environment variable $environmentVariableName..." |### END ACQUIRE LOCAL SAS TOKEN ### |""".stripMargin - } - private def maybeConvertToBlob(pathToTest: Try[Path]): Try[BlobPath] = { + private def maybeConvertToBlob(pathToTest: Try[Path]): Try[BlobPath] = pathToTest.collect { case blob: BlobPath => blob } - } /** * Computes an endpoint that can be used to retrieve a sas token for a particular blob storage container. @@ -149,29 +160,44 @@ object TesAsyncBackendJobExecutionActor { def determineWSMSasEndpointFromInputs(taskInputs: List[Input], pathGetter: String => Try[Path], logger: JobLogger, - blobConverter: Try[Path] => Try[BlobPath] = maybeConvertToBlob): Try[String] = { + blobConverter: Try[Path] => Try[BlobPath] = maybeConvertToBlob + ): Try[String] = { // Collect all of the inputs that are valid blob paths val blobFiles = taskInputs - .collect{ case Input(_, _, Some(url), _, _, _) => blobConverter(pathGetter(url)) } - .collect{ case Success(blob) => blob } + .collect { case Input(_, _, Some(url), _, _, _) => blobConverter(pathGetter(url)) } + .collect { case Success(blob) => blob } // Log if not all input files live in the same container. if (blobFiles.map(_.container).distinct.size > 1) { - logger.info(s"While parsing blob inputs, found more than one container. Generating SAS token based on first file in the list.") + logger.info( + s"While parsing blob inputs, found more than one container. Generating SAS token based on first file in the list." + ) } // We use the first blob file in the list to determine the correct blob container. - blobFiles.headOption.map{blobPath => - blobPath.getFilesystemManager.blobTokenGenerator match { - case wsmGenerator: WSMBlobSasTokenGenerator => wsmGenerator.getWSMSasFetchEndpoint(blobPath, Some(Duration.of(24, ChronoUnit.HOURS))) - case _ => Failure(new UnsupportedOperationException("Blob file does not have an associated WSMBlobSasTokenGenerator")) + blobFiles.headOption + .map { blobPath => + blobPath.getFilesystemManager.blobTokenGenerator match { + case wsmGenerator: WSMBlobSasTokenGenerator => + wsmGenerator.getWSMSasFetchEndpoint(blobPath, Some(Duration.of(24, ChronoUnit.HOURS))) + case _ => + Failure(new UnsupportedOperationException("Blob file does not have an associated WSMBlobSasTokenGenerator")) + } } - }.getOrElse(Failure(new NoSuchElementException("Could not infer blob storage container from task inputs: No valid blob files provided."))) + .getOrElse( + Failure( + new NoSuchElementException( + "Could not infer blob storage container from task inputs: No valid blob files provided." 
+ ) + ) + ) } } class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends BackendJobLifecycleActor with StandardAsyncExecutionActor with TesJobCachingActorHelper { + extends BackendJobLifecycleActor + with StandardAsyncExecutionActor + with TesJobCachingActorHelper { implicit val actorSystem = context.system implicit val materializer = ActorMaterializer() @@ -183,7 +209,8 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn override lazy val pollBackOff: SimpleExponentialBackoff = tesConfiguration.pollBackoff override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff = tesConfiguration.executeOrRecoverBackoff - private lazy val realDockerImageUsed: String = jobDescriptor.maybeCallCachingEligible.dockerHash.getOrElse(runtimeAttributes.dockerImage) + private lazy val realDockerImageUsed: String = + jobDescriptor.maybeCallCachingEligible.dockerHash.getOrElse(runtimeAttributes.dockerImage) override lazy val dockerImageUsed: Option[String] = Option(realDockerImageUsed) private val tesEndpoint = workflowDescriptor.workflowOptions.getOrElse("endpoint", tesConfiguration.endpointURL) @@ -194,7 +221,8 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn OutputMode.withName( configurationDescriptor.backendConfig .getAs[String]("output-mode") - .getOrElse("granular").toUpperCase + .getOrElse("granular") + .toUpperCase ) } @@ -215,22 +243,28 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn * * @return Bash code to run at the start of a task. */ - override def scriptPreamble: ErrorOr[ScriptPreambleData] = { + override def scriptPreamble: ErrorOr[ScriptPreambleData] = runtimeAttributes.localizedSasEnvVar match { - case Some(environmentVariableName) => { // Case: user wants a sas token. Return the computed preamble or die trying. - val workflowName = workflowDescriptor.callable.name - val callInputFiles = jobDescriptor.fullyQualifiedInputs.safeMapValues { - _.collectAsSeq { case w: WomFile => w } - } - val taskInputs: List[Input] = TesTask.buildTaskInputs(callInputFiles, workflowName, mapCommandLineWomFile) - val computedEndpoint = determineWSMSasEndpointFromInputs(taskInputs, getPath, jobLogger) - computedEndpoint.map(endpoint => ScriptPreambleData(generateLocalizedSasScriptPreamble(environmentVariableName, endpoint), executeInSubshell = false)) - }.toErrorOr - case _ => ScriptPreambleData("", executeInSubshell = false).valid // Case: user doesn't want a sas token. Empty preamble is the correct preamble. + case Some(environmentVariableName) => + { // Case: user wants a sas token. Return the computed preamble or die trying. + val workflowName = workflowDescriptor.callable.name + val callInputFiles = jobDescriptor.fullyQualifiedInputs.safeMapValues { + _.collectAsSeq { case w: WomFile => w } + } + val taskInputs: List[Input] = TesTask.buildTaskInputs(callInputFiles, workflowName, mapCommandLineWomFile) + val computedEndpoint = determineWSMSasEndpointFromInputs(taskInputs, getPath, jobLogger) + computedEndpoint.map(endpoint => + ScriptPreambleData(generateLocalizedSasScriptPreamble(environmentVariableName, endpoint), + executeInSubshell = false + ) + ) + }.toErrorOr + case _ => + ScriptPreambleData("", + executeInSubshell = false + ).valid // Case: user doesn't want a sas token. Empty preamble is the correct preamble. 
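// A hedged, self-contained sketch of the ErrorOr convention `scriptPreamble`
// returns above, assuming (as elsewhere in Cromwell) that ErrorOr[A] aliases
// cats' ValidatedNel[String, A] and that cats is on the classpath.
import cats.data.ValidatedNel
import cats.syntax.validated._

object ErrorOrDemo extends App {
  type ErrorOr[A] = ValidatedNel[String, A]
  val ok: ErrorOr[String] = "preamble text".valid
  val failed: ErrorOr[String] = "could not compute SAS endpoint".invalidNel
  // Validated accumulates errors instead of short-circuiting like Either,
  // which is why validation-heavy code paths prefer it.
  println((ok, failed))
}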
} - } - - override def mapCommandLineWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineWomFile(womFile: WomFile): WomFile = womFile.mapFile(value => (getPath(value), asAdHocFile(womFile)) match { case (Success(path: Path), Some(adHocFile)) => @@ -240,9 +274,8 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn case _ => mapCommandLineJobInputWomFile(womFile).value } ) - } - override def mapCommandLineJobInputWomFile(womFile: WomFile): WomFile = { + override def mapCommandLineJobInputWomFile(womFile: WomFile): WomFile = womFile.mapFile(value => getPath(value) match { case Success(drsPath: DrsPath) => @@ -261,33 +294,35 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn // lives in the workflow execution directory, strip off that prefix from the path we're // generating inside `inputs/` to keep the total path length under control. // In Terra on Azure, this saves us 200+ characters. - tesJobPaths.callInputsDockerRoot.resolve( - path.pathStringWithoutPrefix(tesJobPaths.workflowPaths.workflowRoot) - ).pathAsString + tesJobPaths.callInputsDockerRoot + .resolve( + path.pathStringWithoutPrefix(tesJobPaths.workflowPaths.workflowRoot) + ) + .pathAsString case Success(path: BlobPath) if path.startsWith(tesJobPaths.workflowPaths.executionRoot) => // See comment above... if this file is in the execution root, strip that off. // In Terra on Azure, this saves us 160+ characters. - tesJobPaths.callInputsDockerRoot.resolve( - path.pathStringWithoutPrefix(tesJobPaths.workflowPaths.executionRoot) - ).pathAsString + tesJobPaths.callInputsDockerRoot + .resolve( + path.pathStringWithoutPrefix(tesJobPaths.workflowPaths.executionRoot) + ) + .pathAsString case Success(path: Path) => tesJobPaths.callInputsDockerRoot.resolve(path.pathWithoutScheme.stripPrefix("/")).pathAsString case _ => value } ) - } - override lazy val commandDirectory: Path = { + override lazy val commandDirectory: Path = runtimeAttributes.dockerWorkingDir match { case Some(path) => DefaultPathBuilder.get(path) case None => tesJobPaths.callExecutionDockerRoot } - } def createTaskMessage(): ErrorOr[Task] = { - val tesTask = (commandScriptContents, outputMode).mapN({ - case (contents, mode) => TesTask( + val tesTask = (commandScriptContents, outputMode).mapN { case (contents, mode) => + TesTask( jobDescriptor, configurationDescriptor, jobLogger, @@ -299,25 +334,26 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn realDockerImageUsed, mapCommandLineWomFile, jobShell, - mode) - }) + mode + ) + } tesTask.map(TesTask.makeTask) } - def writeScriptFile(): Future[Unit] = { + def writeScriptFile(): Future[Unit] = commandScriptContents.fold( errors => Future.failed(new RuntimeException(errors.toList.mkString(", "))), asyncIo.writeAsync(jobPaths.script, _, Seq.empty) ) - } override def executeAsync(): Future[ExecutionHandle] = { // create call exec dir tesJobPaths.callExecutionRoot.createPermissionedDirectories() val taskMessageFuture = createTaskMessage().fold( errors => Future.failed(new RuntimeException(errors.toList.mkString(", "))), - Future.successful) + Future.successful + ) for { _ <- writeScriptFile() @@ -328,7 +364,12 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn } override def reconnectAsync(jobId: StandardAsyncJob) = { - val handle = PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState](jobDescriptor, jobId, None, previousState = None) + val handle = 
PendingExecutionHandle[StandardAsyncJob, StandardAsyncRunInfo, StandardAsyncRunState](jobDescriptor, + jobId, + None, + previousState = + None + ) Future.successful(handle) } @@ -343,15 +384,17 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn val returnCodeTmp = jobPaths.returnCode.plusExt("kill") returnCodeTmp.write(s"$SIGTERM\n") - try { + try returnCodeTmp.moveTo(jobPaths.returnCode) - } catch { + catch { case _: FileAlreadyExistsException => // If the process has already completed, there will be an existing rc file. returnCodeTmp.delete(true) } - makeRequest[CancelTaskResponse](HttpRequest(method = HttpMethods.POST, uri = s"$tesEndpoint/${job.jobId}:cancel")) onComplete { + makeRequest[CancelTaskResponse]( + HttpRequest(method = HttpMethods.POST, uri = s"$tesEndpoint/${job.jobId}:cancel") + ) onComplete { case Success(_) => jobLogger.info("{} Aborted {}", tag: Any, job.jobId) case Failure(ex) => jobLogger.warn("{} Failed to abort {}: {}", tag, job.jobId, ex.getMessage) } @@ -361,12 +404,12 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn override def requestsAbortAndDiesImmediately: Boolean = false - override def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[TesRunStatus] = { + override def pollStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[TesRunStatus] = for { status <- queryStatusAsync(handle) errorLog <- status match { - case Error(_) | Failed(_) => getErrorLogs(handle) - case _ => Future.successful(Seq.empty[String]) + case Error(_) | Failed(_) => getErrorLogs(handle) + case _ => Future.successful(Seq.empty[String]) } statusWithLog = status match { case Error(_) => Error(errorLog) @@ -374,9 +417,8 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn case _ => status } } yield statusWithLog - } - private def queryStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[TesRunStatus] = { + private def queryStatusAsync(handle: StandardAsyncPendingExecutionHandle): Future[TesRunStatus] = makeRequest[MinimalTaskView](HttpRequest(uri = s"$tesEndpoint/${handle.pendingJob.jobId}?view=MINIMAL")) map { response => val state = response.state @@ -400,16 +442,14 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn case _ => Running } } - } - private def getErrorLogs(handle: StandardAsyncPendingExecutionHandle): Future[Seq[String]] = { + private def getErrorLogs(handle: StandardAsyncPendingExecutionHandle): Future[Seq[String]] = makeRequest[Task](HttpRequest(uri = s"$tesEndpoint/${handle.pendingJob.jobId}?view=FULL")) map { response => response.logs.flatMap(_.lastOption).flatMap(_.system_logs).getOrElse(Seq.empty[String]) } - } override def customPollStatusFailure: PartialFunction[(ExecutionHandle, Exception), ExecutionHandle] = { - case (oldHandle: StandardAsyncPendingExecutionHandle@unchecked, e: Exception) => + case (oldHandle: StandardAsyncPendingExecutionHandle @unchecked, e: Exception) => jobLogger.error(s"$tag TES Job ${oldHandle.pendingJob.jobId} has not been found, failing call") FailedNonRetryableExecutionHandle(e, kvPairsToSave = None) } @@ -421,26 +461,23 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn Future.successful(FailedNonRetryableExecutionHandle(exception, returnCode, None)) } - override def handleExecutionFailure(status: StandardAsyncRunState, returnCode: Option[Int]) = { + override def handleExecutionFailure(status: StandardAsyncRunState, returnCode: 
Option[Int]) = status match { case Cancelled => Future.successful(AbortedExecutionHandle) case Error(_) | Failed(_) => handleExecutionError(status, returnCode) case _ => super.handleExecutionFailure(status, returnCode) } - } - override def isTerminal(runStatus: TesRunStatus): Boolean = { + override def isTerminal(runStatus: TesRunStatus): Boolean = runStatus.isTerminal - } - override def isDone(runStatus: TesRunStatus): Boolean = { + override def isDone(runStatus: TesRunStatus): Boolean = runStatus match { case Complete => true case _ => false } - } - override def mapOutputWomFile(womFile: WomFile): WomFile = { + override def mapOutputWomFile(womFile: WomFile): WomFile = womFile mapFile { path => val absPath = getPath(path) match { case Success(absoluteOutputPath) if absoluteOutputPath.isAbsolute => absoluteOutputPath @@ -451,7 +488,6 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn throw new FileNotFoundException(s"Could not process output, file not found: ${absPath.pathAsString}") } else absPath.pathAsString } - } // Headers that should be included with all requests to the TES server private def requestHeaders: List[HttpHeader] = @@ -462,16 +498,18 @@ class TesAsyncBackendJobExecutionActor(override val standardParams: StandardAsyn } }.toList - private def makeRequest[A](request: HttpRequest)(implicit um: Unmarshaller[ResponseEntity, A]): Future[A] = { + private def makeRequest[A](request: HttpRequest)(implicit um: Unmarshaller[ResponseEntity, A]): Future[A] = for { response <- withRetry(() => Http().singleRequest(request.withHeaders(requestHeaders))) - data <- if (response.status.isFailure()) { - response.entity.dataBytes.runFold(ByteString(""))(_ ++ _).map(_.utf8String) flatMap { errorBody => - Future.failed(new RuntimeException(s"Failed TES request: Code ${response.status.intValue()}, Body = $errorBody")) + data <- + if (response.status.isFailure()) { + response.entity.dataBytes.runFold(ByteString(""))(_ ++ _).map(_.utf8String) flatMap { errorBody => + Future.failed( + new RuntimeException(s"Failed TES request: Code ${response.status.intValue()}, Body = $errorBody") + ) + } + } else { + Unmarshal(response.entity).to[A] } - } else { - Unmarshal(response.entity).to[A] - } } yield data - } } diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendInitializationData.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendInitializationData.scala index 48f55e2f30b..53385ebb284 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendInitializationData.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendInitializationData.scala @@ -2,8 +2,7 @@ package cromwell.backend.impl.tes import cromwell.backend.standard.{StandardInitializationData, StandardValidatedRuntimeAttributesBuilder} -case class TesBackendInitializationData -( +case class TesBackendInitializationData( override val workflowPaths: TesWorkflowPaths, override val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, tesConfiguration: TesConfiguration diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendLifecycleActorFactory.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendLifecycleActorFactory.scala index 46770e40032..9cb502ef952 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendLifecycleActorFactory.scala +++ 
b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesBackendLifecycleActorFactory.scala @@ -6,7 +6,7 @@ import cromwell.backend.standard._ import wom.graph.CommandCallNode case class TesBackendLifecycleActorFactory(name: String, configurationDescriptor: BackendConfigurationDescriptor) - extends StandardLifecycleActorFactory { + extends StandardLifecycleActorFactory { override lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = classOf[TesInitializationActor] @@ -17,8 +17,11 @@ case class TesBackendLifecycleActorFactory(name: String, configurationDescriptor val tesConfiguration = new TesConfiguration(configurationDescriptor) - override def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], - serviceRegistryActor: ActorRef, restarting: Boolean): StandardInitializationActorParams = { + override def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, + ioActor: ActorRef, + calls: Set[CommandCallNode], + serviceRegistryActor: ActorRef, + restarting: Boolean + ): StandardInitializationActorParams = TesInitializationActorParams(workflowDescriptor, calls, tesConfiguration, serviceRegistryActor) - } } diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesConfiguration.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesConfiguration.scala index 253f60114be..b90b406569a 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesConfiguration.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesConfiguration.scala @@ -13,34 +13,30 @@ class TesConfiguration(val configurationDescriptor: BackendConfigurationDescript val endpointURL = configurationDescriptor.backendConfig.getString("endpoint") val runtimeConfig = configurationDescriptor.backendRuntimeAttributesConfig val useBackendParameters = - configurationDescriptor - .backendConfig + configurationDescriptor.backendConfig .as[Option[Boolean]](TesConfiguration.useBackendParametersKey) .getOrElse(false) val pollBackoff = - configurationDescriptor - .backendConfig + configurationDescriptor.backendConfig .as[Option[Config]]("poll-backoff") .map(SimpleExponentialBackoff(_)) .getOrElse(TesConfiguration.defaultPollBackoff) val executeOrRecoverBackoff = - configurationDescriptor - .backendConfig + configurationDescriptor.backendConfig .as[Option[Config]]("execute-or-recover-backoff") .map(SimpleExponentialBackoff(_)) .getOrElse(TesConfiguration.defaultExecOrRecoverBackoff) // Used for testing only. 
Include a bearer token for authenticating with the TES server final val bearerPrefix: String = "Bearer " - val token: Option[String] = { + val token: Option[String] = configurationDescriptor.backendConfig.as[Option[String]]("bearer-token").map { t => if (!t.startsWith(bearerPrefix)) s"${bearerPrefix}${t}" else t } - } } object TesConfiguration { diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesExpressionFunctions.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesExpressionFunctions.scala index 11b3b2bfb1b..19fe187c0f7 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesExpressionFunctions.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesExpressionFunctions.scala @@ -3,10 +3,10 @@ package cromwell.backend.impl.tes import cromwell.backend.sfs.SharedFileSystemExpressionFunctions import cromwell.backend.standard.StandardExpressionFunctionsParams -class TesExpressionFunctions(standardParams: StandardExpressionFunctionsParams) extends SharedFileSystemExpressionFunctions(standardParams) { +class TesExpressionFunctions(standardParams: StandardExpressionFunctionsParams) + extends SharedFileSystemExpressionFunctions(standardParams) { - override def preMapping(str: String) = { + override def preMapping(str: String) = if (str.startsWith("/") || str.startsWith("ftp://")) str else standardParams.callContext.root.resolve(str).toString - } } diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesInitializationActor.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesInitializationActor.scala index 101187e2364..9d4d1b4be53 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesInitializationActor.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesInitializationActor.scala @@ -12,8 +12,7 @@ import wom.graph.CommandCallNode import scala.concurrent.Future import scala.util.Try -case class TesInitializationActorParams -( +case class TesInitializationActorParams( workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], tesConfiguration: TesConfiguration, @@ -22,14 +21,12 @@ case class TesInitializationActorParams override val configurationDescriptor: BackendConfigurationDescriptor = tesConfiguration.configurationDescriptor } -class TesInitializationActor(params: TesInitializationActorParams) - extends StandardInitializationActor(params) { +class TesInitializationActor(params: TesInitializationActorParams) extends StandardInitializationActor(params) { private val tesConfiguration = params.tesConfiguration - override lazy val pathBuilders: Future[List[PathBuilder]] = { + override lazy val pathBuilders: Future[List[PathBuilder]] = standardParams.configurationDescriptor.pathBuildersWithDefault(workflowDescriptor.workflowOptions) - } override lazy val workflowPaths: Future[TesWorkflowPaths] = pathBuilders map { new TesWorkflowPaths(workflowDescriptor, tesConfiguration.configurationDescriptor.backendConfig, _) @@ -41,10 +38,13 @@ class TesInitializationActor(params: TesInitializationActorParams) override def validateWorkflowOptions(): Try[Unit] = { def validateIdentities() = { val optionsMap = workflowDescriptor.workflowOptions.toMap - (optionsMap.get(TesWorkflowOptionKeys.WorkflowExecutionIdentity), optionsMap.get(TesWorkflowOptionKeys.DataAccessIdentity)) match { + (optionsMap.get(TesWorkflowOptionKeys.WorkflowExecutionIdentity), + optionsMap.get(TesWorkflowOptionKeys.DataAccessIdentity) + ) 
match { case (None, None) => ().validNel case (Some(_), Some(_)) => ().validNel - case _ => s"Workflow options ${TesWorkflowOptionKeys.WorkflowExecutionIdentity} and ${TesWorkflowOptionKeys.DataAccessIdentity} are both required if one is provided.".invalidNel + case _ => + s"Workflow options ${TesWorkflowOptionKeys.WorkflowExecutionIdentity} and ${TesWorkflowOptionKeys.DataAccessIdentity} are both required if one is provided.".invalidNel } } @@ -76,8 +76,7 @@ class TesInitializationActor(params: TesInitializationActorParams) // This is difficult because we're dealing with WomExpression rather than WomValue. s"Key/s [$notSupportedAttrString] is/are not explicitly supported by backend. Those with string values will " + "be passed to TES server in backend_parameters map, other attributes will not be part of job executions." - } - else { + } else { s"Key/s [$notSupportedAttrString] is/are not supported by backend. " + s"Unsupported attributes will not be part of job executions." } @@ -86,11 +85,10 @@ class TesInitializationActor(params: TesInitializationActorParams) } } - override def beforeAll(): Future[Option[BackendInitializationData]] = { + override def beforeAll(): Future[Option[BackendInitializationData]] = workflowPaths map { paths => publishWorkflowRoot(paths.workflowRoot.toString) paths.workflowRoot.createPermissionedDirectories() Option(TesBackendInitializationData(paths, runtimeAttributesBuilder, tesConfiguration)) } - } } diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobCachingActorHelper.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobCachingActorHelper.scala index 6c4b495d362..0f132e55cc6 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobCachingActorHelper.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobCachingActorHelper.scala @@ -1,6 +1,5 @@ package cromwell.backend.impl.tes - import akka.actor.Actor import cromwell.backend.standard.StandardCachingActorHelper import cromwell.core.logging.JobLogging @@ -8,9 +7,8 @@ import cromwell.core.logging.JobLogging trait TesJobCachingActorHelper extends StandardCachingActorHelper { this: Actor with JobLogging => - lazy val initializationData: TesBackendInitializationData = { + lazy val initializationData: TesBackendInitializationData = backendInitializationDataAs[TesBackendInitializationData] - } lazy val tesWorkflowPaths: TesWorkflowPaths = workflowPaths.asInstanceOf[TesWorkflowPaths] @@ -18,7 +16,9 @@ trait TesJobCachingActorHelper extends StandardCachingActorHelper { lazy val tesConfiguration: TesConfiguration = initializationData.tesConfiguration - lazy val runtimeAttributes = TesRuntimeAttributes(validatedRuntimeAttributes, jobDescriptor.runtimeAttributes, tesConfiguration) - override protected def nonStandardMetadata: Map[String, Any] = super.nonStandardMetadata ++ tesJobPaths.azureLogPathsForMetadata + lazy val runtimeAttributes = + TesRuntimeAttributes(validatedRuntimeAttributes, jobDescriptor.runtimeAttributes, tesConfiguration) + override protected def nonStandardMetadata: Map[String, Any] = + super.nonStandardMetadata ++ tesJobPaths.azureLogPathsForMetadata } diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobPaths.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobPaths.scala index ab7b2b27916..05887fc3fa5 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobPaths.scala +++ 
b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesJobPaths.scala @@ -7,9 +7,7 @@ import cromwell.core.path._ import cromwell.filesystems.blob.BlobPath object TesJobPaths { - def apply(jobKey: BackendJobDescriptorKey, - workflowDescriptor: BackendWorkflowDescriptor, - config: Config) = { + def apply(jobKey: BackendJobDescriptorKey, workflowDescriptor: BackendWorkflowDescriptor, config: Config) = { val workflowPaths = TesWorkflowPaths(workflowDescriptor, config, WorkflowPaths.DefaultPathBuilders) new TesJobPaths(workflowPaths, jobKey) } @@ -17,13 +15,13 @@ object TesJobPaths { case class TesJobPaths private[tes] (override val workflowPaths: TesWorkflowPaths, jobKey: BackendJobDescriptorKey, - override val isCallCacheCopyAttempt: Boolean = false) extends JobPaths { + override val isCallCacheCopyAttempt: Boolean = false +) extends JobPaths { import JobPaths._ - override lazy val callExecutionRoot = { + override lazy val callExecutionRoot = callRoot.resolve("execution") - } val callDockerRoot = callPathBuilder(workflowPaths.dockerWorkflowRoot, jobKey, isCallCacheCopyAttempt) val callExecutionDockerRoot = callDockerRoot.resolve("execution") val callInputsDockerRoot = callDockerRoot.resolve("inputs") @@ -38,22 +36,21 @@ case class TesJobPaths private[tes] (override val workflowPaths: TesWorkflowPath * While passing it outside of terra won't do any harm, we could consider making this optional and/or configurable. */ private val taskFullPath = callRoot./("tes_task") - val tesTaskRoot : String = taskFullPath match { + val tesTaskRoot: String = taskFullPath match { case blob: BlobPath => blob.pathWithoutContainer case anyOtherPath: Path => anyOtherPath.pathAsString } // Like above: Nothing should rely on these files existing, since only the Azure TES implementation will actually create them. // Used to send the Azure TES log paths to the frontend. 
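// A short sketch of the type dispatch used for `tesTaskRoot` above.
// `renderForTes` is an invented name; `Path`, `BlobPath`, and
// `pathWithoutContainer` are assumed to be the types/helpers this file already
// imports (cromwell.core.path.Path, cromwell.filesystems.blob.BlobPath).
def renderForTes(path: Path): String = path match {
  case blob: BlobPath => blob.pathWithoutContainer // container prefix stripped for TES
  case other => other.pathAsString // any other filesystem path passes through unchanged
}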
- val azureLogPathsForMetadata : Map[String, Any] = Map( + val azureLogPathsForMetadata: Map[String, Any] = Map( "tes_stdout" -> taskFullPath./("stdout.txt").pathAsString, "tes_stderr" -> taskFullPath./("stderr.txt").pathAsString ) // Given an output path, return a path localized to the storage file system - def storageOutput(path: String): String = { + def storageOutput(path: String): String = callExecutionRoot.resolve(path).toString - } // Given an output path, return a path localized to the container file system def containerOutput(cwd: Path, path: String): String = containerExec(cwd, path) @@ -62,9 +59,8 @@ case class TesJobPaths private[tes] (override val workflowPaths: TesWorkflowPath // callDockerRoot.resolve("outputs").resolve(name).toString // Given an file name, return a path localized to the container's execution directory - def containerExec(cwd: Path, path: String): String = { + def containerExec(cwd: Path, path: String): String = cwd.resolve(path).toString - } override def forCallCacheCopyAttempts: JobPaths = this.copy(isCallCacheCopyAttempt = true) } diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala index 48ade7b234a..117e0ee79a7 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala @@ -5,7 +5,11 @@ import cats.syntax.validated._ import com.typesafe.config.Config import common.validation.ErrorOr.ErrorOr import cromwell.backend.google.pipelines.common.DisksValidation -import cromwell.backend.google.pipelines.common.io.{PipelinesApiAttachedDisk, PipelinesApiEmptyMountedDisk, PipelinesApiWorkingDisk} +import cromwell.backend.google.pipelines.common.io.{ + PipelinesApiAttachedDisk, + PipelinesApiEmptyMountedDisk, + PipelinesApiWorkingDisk +} import cromwell.backend.standard.StandardValidatedRuntimeAttributesBuilder import cromwell.backend.validation._ import eu.timepit.refined.api.Refined @@ -27,7 +31,8 @@ case class TesRuntimeAttributes(continueOnReturnCode: ContinueOnReturnCode, disk: Option[MemorySize], preemptible: Boolean, localizedSasEnvVar: Option[String], - backendParameters: Map[String, Option[String]]) + backendParameters: Map[String, Option[String]] +) object TesRuntimeAttributes { val DockerWorkingDirKey = "dockerWorkingDir" @@ -35,22 +40,29 @@ object TesRuntimeAttributes { val PreemptibleKey = "preemptible" val LocalizedSasKey = "azureSasEnvironmentVariable" - private def cpuValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Int Refined Positive] = CpuValidation.optional + private def cpuValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Int Refined Positive] = + CpuValidation.optional private def failOnStderrValidation(runtimeConfig: Option[Config]) = FailOnStderrValidation.default(runtimeConfig) - private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = ContinueOnReturnCodeValidation.default(runtimeConfig) + private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = + ContinueOnReturnCodeValidation.default(runtimeConfig) - private def diskSizeValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[MemorySize] = MemoryValidation.optional(DiskSizeKey) + private def diskSizeValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[MemorySize] = + 
MemoryValidation.optional(DiskSizeKey) - private def diskSizeCompatValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Seq[PipelinesApiAttachedDisk]] = + private def diskSizeCompatValidation( + runtimeConfig: Option[Config] + ): OptionalRuntimeAttributesValidation[Seq[PipelinesApiAttachedDisk]] = DisksValidation.optional - private def memoryValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[MemorySize] = MemoryValidation.optional(RuntimeAttributesKeys.MemoryKey) + private def memoryValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[MemorySize] = + MemoryValidation.optional(RuntimeAttributesKeys.MemoryKey) private val dockerValidation: RuntimeAttributesValidation[String] = DockerValidation.instance - private val dockerWorkingDirValidation: OptionalRuntimeAttributesValidation[String] = DockerWorkingDirValidation.optional + private val dockerWorkingDirValidation: OptionalRuntimeAttributesValidation[String] = + DockerWorkingDirValidation.optional private def preemptibleValidation(runtimeConfig: Option[Config]) = PreemptibleValidation.default(runtimeConfig) private def localizedSasValidation: OptionalRuntimeAttributesValidation[String] = LocalizedSasValidation.optional @@ -58,40 +70,44 @@ object TesRuntimeAttributes { // !! NOTE !! If new validated attributes are added to TesRuntimeAttributes, be sure to include // their validations here so that they will be handled correctly with backendParameters. // Location 2 of 2 - StandardValidatedRuntimeAttributesBuilder.default(backendRuntimeConfig).withValidation( - cpuValidation(backendRuntimeConfig), - memoryValidation(backendRuntimeConfig), - diskSizeValidation(backendRuntimeConfig), - diskSizeCompatValidation(backendRuntimeConfig), - dockerValidation, - dockerWorkingDirValidation, - preemptibleValidation(backendRuntimeConfig), - localizedSasValidation - ) + StandardValidatedRuntimeAttributesBuilder + .default(backendRuntimeConfig) + .withValidation( + cpuValidation(backendRuntimeConfig), + memoryValidation(backendRuntimeConfig), + diskSizeValidation(backendRuntimeConfig), + diskSizeCompatValidation(backendRuntimeConfig), + dockerValidation, + dockerWorkingDirValidation, + preemptibleValidation(backendRuntimeConfig), + localizedSasValidation + ) def makeBackendParameters(runtimeAttributes: Map[String, WomValue], keysToExclude: Set[String], - config: TesConfiguration): Map[String, Option[String]] = { - + config: TesConfiguration + ): Map[String, Option[String]] = if (config.useBackendParameters) - runtimeAttributes - .view.filterKeys(k => !keysToExclude.contains(k)) - .flatMap( _ match { + runtimeAttributes.view + .filterKeys(k => !keysToExclude.contains(k)) + .flatMap(_ match { case (key, WomString(s)) => Option((key, Option(s))) case (key, WomOptionalValue(WomStringType, Some(WomString(optS)))) => Option((key, Option(optS))) case (key, WomOptionalValue(WomStringType, None)) => Option((key, None)) case _ => None - }).toMap + }) + .toMap else Map.empty - } - private def detectDiskFormat(backendRuntimeConfig: Option[Config], validatedRuntimeAttributes: ValidatedRuntimeAttributes): Option[MemorySize] = { + private def detectDiskFormat(backendRuntimeConfig: Option[Config], + validatedRuntimeAttributes: ValidatedRuntimeAttributes + ): Option[MemorySize] = { - def adaptPapiDisks(disks: Seq[PipelinesApiAttachedDisk]): MemorySize = { + def adaptPapiDisks(disks: Seq[PipelinesApiAttachedDisk]): MemorySize = disks match { case disk :: Nil if 
disk.isInstanceOf[PipelinesApiWorkingDisk] => - MemorySize(disk.sizeGb.toDouble, MemoryUnit.GB) + MemorySize(disk.sizeGb.toDouble, MemoryUnit.GB) case _ :: _ => // When a user specifies only a custom disk, we add the default disk in the background, so we technically have multiple disks. // But we don't want to confuse the user with `multiple disks` message when they only put one. @@ -101,16 +117,21 @@ object TesRuntimeAttributes { // Multiple `local-disk` is not legal, but possible and should be detected throw new IllegalArgumentException("Expecting exactly one disk definition on this backend, found multiple") } - } val maybeTesDisk: Option[MemorySize] = - RuntimeAttributesValidation.extractOption(diskSizeValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) + RuntimeAttributesValidation.extractOption(diskSizeValidation(backendRuntimeConfig).key, + validatedRuntimeAttributes + ) val maybePapiDisk: Option[Seq[PipelinesApiAttachedDisk]] = - RuntimeAttributesValidation.extractOption(diskSizeCompatValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) + RuntimeAttributesValidation.extractOption(diskSizeCompatValidation(backendRuntimeConfig).key, + validatedRuntimeAttributes + ) (maybeTesDisk, maybePapiDisk) match { case (Some(tesDisk: MemorySize), _) => - Option(tesDisk) // If WDLs are in circulation with both `disk` and `disks`, pick the one intended for this backend + Option( + tesDisk + ) // If WDLs are in circulation with both `disk` and `disks`, pick the one intended for this backend case (None, Some(papiDisks: Seq[PipelinesApiAttachedDisk])) => Option(adaptPapiDisks(papiDisks)) case _ => @@ -118,21 +139,29 @@ object TesRuntimeAttributes { } } - def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, rawRuntimeAttributes: Map[String, WomValue], config: TesConfiguration): TesRuntimeAttributes = { + def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, + rawRuntimeAttributes: Map[String, WomValue], + config: TesConfiguration + ): TesRuntimeAttributes = { val backendRuntimeConfig = config.runtimeConfig val docker: String = RuntimeAttributesValidation.extract(dockerValidation, validatedRuntimeAttributes) - val dockerWorkingDir: Option[String] = RuntimeAttributesValidation.extractOption(dockerWorkingDirValidation.key, validatedRuntimeAttributes) - val cpu: Option[Int Refined Positive] = RuntimeAttributesValidation.extractOption(cpuValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val memory: Option[MemorySize] = RuntimeAttributesValidation.extractOption(memoryValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) + val dockerWorkingDir: Option[String] = + RuntimeAttributesValidation.extractOption(dockerWorkingDirValidation.key, validatedRuntimeAttributes) + val cpu: Option[Int Refined Positive] = + RuntimeAttributesValidation.extractOption(cpuValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) + val memory: Option[MemorySize] = + RuntimeAttributesValidation.extractOption(memoryValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) val disk: Option[MemorySize] = detectDiskFormat(backendRuntimeConfig, validatedRuntimeAttributes) val failOnStderr: Boolean = RuntimeAttributesValidation.extract(failOnStderrValidation(backendRuntimeConfig), validatedRuntimeAttributes) val continueOnReturnCode: ContinueOnReturnCode = - RuntimeAttributesValidation.extract(continueOnReturnCodeValidation(backendRuntimeConfig), validatedRuntimeAttributes) - val preemptible: Boolean = { + 
RuntimeAttributesValidation.extract(continueOnReturnCodeValidation(backendRuntimeConfig), + validatedRuntimeAttributes + ) + val preemptible: Boolean = RuntimeAttributesValidation.extract(preemptibleValidation(backendRuntimeConfig), validatedRuntimeAttributes) - } - val localizedSas: Option[String] = RuntimeAttributesValidation.extractOption(localizedSasValidation.key, validatedRuntimeAttributes) + val localizedSas: Option[String] = + RuntimeAttributesValidation.extractOption(localizedSasValidation.key, validatedRuntimeAttributes) // !! NOTE !! If new validated attributes are added to TesRuntimeAttributes, be sure to include // their validations here so that they will be handled correctly with backendParameters. @@ -177,8 +206,8 @@ object DockerWorkingDirValidation { class DockerWorkingDirValidation extends StringRuntimeAttributesValidation(TesRuntimeAttributes.DockerWorkingDirKey) { // NOTE: Docker's current test specs don't like WdlInteger, etc. auto converted to WdlString. - override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { - case WomString(value) => value.validNel + override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { case WomString(value) => + value.validNel } } @@ -196,9 +225,10 @@ class DockerWorkingDirValidation extends StringRuntimeAttributesValidation(TesRu object PreemptibleValidation { lazy val instance: RuntimeAttributesValidation[Boolean] = new PreemptibleValidation - def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = instance.withDefault( - configDefaultWdlValue(runtimeConfig) getOrElse WomBoolean(false)) - def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WomValue] = instance.configDefaultWomValue(runtimeConfig) + def default(runtimeConfig: Option[Config]): RuntimeAttributesValidation[Boolean] = + instance.withDefault(configDefaultWdlValue(runtimeConfig) getOrElse WomBoolean(false)) + def configDefaultWdlValue(runtimeConfig: Option[Config]): Option[WomValue] = + instance.configDefaultWomValue(runtimeConfig) } class PreemptibleValidation extends BooleanRuntimeAttributesValidation(TesRuntimeAttributes.PreemptibleKey) { @@ -241,11 +271,10 @@ class LocalizedSasValidation extends StringRuntimeAttributesValidation(TesRuntim matcher.find } - override protected def invalidValueMessage(value: WomValue): String = { + override protected def invalidValueMessage(value: WomValue): String = s"Invalid Runtime Attribute value for ${TesRuntimeAttributes.LocalizedSasKey}. Value must be a string containing only letters, numbers, and underscores." 
- } - override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { - case WomString(value) => if(isValidBashVariableName(value)) value.validNel else Validated.invalidNel(invalidValueMessage(WomString(value))) + override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { case WomString(value) => + if (isValidBashVariableName(value)) value.validNel else Validated.invalidNel(invalidValueMessage(WomString(value))) } } diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala index d775367ac74..54df895d617 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesTask.scala @@ -18,8 +18,8 @@ import wom.values._ import scala.collection.immutable.Map -final case class WorkflowExecutionIdentityConfig(value: String) {override def toString: String = value.toString} -final case class WorkflowExecutionIdentityOption(value: String) {override def toString: String = value} +final case class WorkflowExecutionIdentityConfig(value: String) { override def toString: String = value.toString } +final case class WorkflowExecutionIdentityOption(value: String) { override def toString: String = value } final case class TesTask(jobDescriptor: BackendJobDescriptor, configurationDescriptor: BackendConfigurationDescriptor, jobLogger: JobLogger, @@ -31,7 +31,8 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, dockerImageUsed: String, mapCommandLineWomFile: WomFile => WomFile, jobShell: String, - outputMode: OutputMode) { + outputMode: OutputMode +) { private val workflowDescriptor = jobDescriptor.workflowDescriptor private val workflowName = workflowDescriptor.callable.name @@ -41,8 +42,7 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, .getAs[String]("workflow-execution-identity") .map(WorkflowExecutionIdentityConfig) private val workflowExecutionIdentityOption: Option[WorkflowExecutionIdentityOption] = - workflowDescriptor - .workflowOptions + workflowDescriptor.workflowOptions .get(TesWorkflowOptionKeys.WorkflowExecutionIdentity) .toOption .map(WorkflowExecutionIdentityOption) @@ -50,9 +50,8 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, val description: String = jobDescriptor.toString // TODO validate "project" field of workflowOptions - val project = { + val project = workflowDescriptor.workflowOptions.getOrElse("project", "") - } // contains the script to be executed private val commandScript = Input( @@ -74,28 +73,34 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, private def writeFunctionFiles: Map[FullyQualifiedName, Seq[WomFile]] = instantiatedCommand.createdFiles map { f => f.file.value.md5SumShort -> List(f.file) } toMap - private val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = jobDescriptor - .fullyQualifiedInputs + private val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = jobDescriptor.fullyQualifiedInputs .safeMapValues { _.collectAsSeq { case w: WomFile => w } } lazy val inputs: Seq[Input] = { - val result = TesTask.buildTaskInputs(callInputFiles ++ writeFunctionFiles, workflowName, mapCommandLineWomFile) ++ Seq(commandScript) - jobLogger.info(s"Calculated TES inputs (found ${result.size}): " + result.mkString(System.lineSeparator(),System.lineSeparator(),System.lineSeparator())) + val result = + TesTask.buildTaskInputs(callInputFiles ++ writeFunctionFiles, 
workflowName, mapCommandLineWomFile) ++ Seq( + commandScript + ) + jobLogger.info( + s"Calculated TES inputs (found ${result.size}): " + result.mkString(System.lineSeparator(), + System.lineSeparator(), + System.lineSeparator() + ) + ) result } // TODO add TES logs to standard outputs - private lazy val standardOutputs = Seq("rc", "stdout", "stderr").map { - f => - Output( - name = Option(f), - description = Option(fullyQualifiedTaskName + "." + f), - url = Option(tesPaths.storageOutput(f)), - path = tesPaths.containerOutput(containerWorkDir, f), - `type` = Option("FILE") - ) + private lazy val standardOutputs = Seq("rc", "stdout", "stderr").map { f => + Output( + name = Option(f), + description = Option(fullyQualifiedTaskName + "." + f), + url = Option(tesPaths.storageOutput(f)), + path = tesPaths.containerOutput(containerWorkDir, f), + `type` = Option("FILE") + ) } // TODO extract output file variable names and match with Files below @@ -113,12 +118,13 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, // TODO WOM: this should be pushed back into WOM. // It's also a mess, evaluateFiles returns an ErrorOr but can still throw. We might want to use an EitherT, although // if it fails we just want to fallback to an empty list anyway... - def evaluateFiles(output: OutputDefinition): List[WomFile] = { - Try ( - output.expression.evaluateFiles(jobDescriptor.localInputs, NoIoFunctionSet, output.womType).map(_.toList map { _.file }) + def evaluateFiles(output: OutputDefinition): List[WomFile] = + Try( + output.expression + .evaluateFiles(jobDescriptor.localInputs, NoIoFunctionSet, output.womType) + .map(_.toList map { _.file }) ).getOrElse(List.empty[WomFile].validNel) - .getOrElse(List.empty) - } + .getOrElse(List.empty) jobDescriptor.taskCall.callable.outputs .flatMap(evaluateFiles) @@ -129,7 +135,7 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, val globName = GlobFunctions.globName(g.value) val globDirName = "globDir." + index val globDirectory = globName + "/" - val globListName = "globList." + index + val globListName = "globList." + index val globListFile = globName + ".list" Seq( Output( @@ -140,7 +146,7 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, `type` = Option("DIRECTORY") ), Output( - name = Option(globListName), + name = Option(globListName), description = Option(fullyQualifiedTaskName + "." + globListName), url = Option(tesPaths.storageOutput(globListFile)), path = tesPaths.containerOutput(containerWorkDir, globListFile), @@ -149,7 +155,8 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, ) } - private val womOutputs = outputWomFiles.flatMap(_.flattenFiles) + private val womOutputs = outputWomFiles + .flatMap(_.flattenFiles) .zipWithIndex .flatMap { case (f: WomSingleFile, index) => @@ -167,7 +174,7 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, case (d: WomUnlistedDirectory, index) => val directoryPathName = "dirPath." + index val directoryPath = d.value.ensureSlashed - val directoryListName = "dirList." + index + val directoryListName = "dirList." + index val directoryList = d.value.ensureUnslashed + ".list" Seq( Output( @@ -178,7 +185,7 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, `type` = Option("DIRECTORY") ), Output( - name = Option(directoryListName), + name = Option(directoryListName), description = Option(fullyQualifiedTaskName + "." 
+ directoryListName), url = Option(tesPaths.storageOutput(directoryList)), path = tesPaths.containerOutput(containerWorkDir, directoryList), @@ -187,7 +194,8 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, ) } - private val additionalGlobOutput = jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(handleGlobFile(_, womOutputs.size)) + private val additionalGlobOutput = + jobDescriptor.taskCall.callable.additionalGlob.toList.flatMap(handleGlobFile(_, womOutputs.size)) private lazy val cwdOutput = Output( name = Option("execution.dir.output"), @@ -198,30 +206,37 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, ) val outputs: Seq[Output] = { - val result = outputMode match { + val result = outputMode match { case OutputMode.GRANULAR => standardOutputs ++ Seq(commandScriptOut) ++ womOutputs ++ additionalGlobOutput case OutputMode.ROOT => List(cwdOutput) ++ additionalGlobOutput } - jobLogger.info(s"Calculated TES outputs (found ${result.size}): " + result.mkString(System.lineSeparator(),System.lineSeparator(),System.lineSeparator())) + jobLogger.info( + s"Calculated TES outputs (found ${result.size}): " + result.mkString(System.lineSeparator(), + System.lineSeparator(), + System.lineSeparator() + ) + ) result } val preferedWorkflowExecutionIdentity = TesTask.getPreferredWorkflowExecutionIdentity( - workflowExecutionIdentityConfig, - workflowExecutionIdentityOption + workflowExecutionIdentityConfig, + workflowExecutionIdentityOption ) - val executors = Seq(Executor( - image = dockerImageUsed, - command = Seq(jobShell, commandScript.path), - workdir = runtimeAttributes.dockerWorkingDir, - stdout = Option(tesPaths.containerOutput(containerWorkDir, "stdout")), - stderr = Option(tesPaths.containerOutput(containerWorkDir, "stderr")), - stdin = None, - env = None - )) + val executors = Seq( + Executor( + image = dockerImageUsed, + command = Seq(jobShell, commandScript.path), + workdir = runtimeAttributes.dockerWorkingDir, + stdout = Option(tesPaths.containerOutput(containerWorkDir, "stdout")), + stderr = Option(tesPaths.containerOutput(containerWorkDir, "stderr")), + stdin = None, + env = None + ) + ) val resources: Resources = TesTask.makeResources( runtimeAttributes, @@ -235,11 +250,13 @@ final case class TesTask(jobDescriptor: BackendJobDescriptor, object TesTask { // Helper to determine which source to use for a workflowExecutionIdentity def getPreferredWorkflowExecutionIdentity(configIdentity: Option[WorkflowExecutionIdentityConfig], - workflowOptionsIdentity: Option[WorkflowExecutionIdentityOption]): Option[String] = { + workflowOptionsIdentity: Option[WorkflowExecutionIdentityOption] + ): Option[String] = configIdentity.map(_.value).orElse(workflowOptionsIdentity.map(_.value)) - } def makeResources(runtimeAttributes: TesRuntimeAttributes, - workflowExecutionId: Option[String], internalPathPrefix: Option[String]): Resources = { + workflowExecutionId: Option[String], + internalPathPrefix: Option[String] + ): Resources = { /* * workflowExecutionId: This was added in BT-409 to let us pass information to an Azure * TES server about which user identity to run tasks as. @@ -249,7 +266,7 @@ object TesTask { * a working directory that the TES task can use. 
*/ val internalPathPrefixKey = "internal_path_prefix" - val backendParameters : Map[String, Option[String]] = runtimeAttributes.backendParameters ++ + val backendParameters: Map[String, Option[String]] = runtimeAttributes.backendParameters ++ workflowExecutionId .map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option(_)) .toMap ++ @@ -273,26 +290,27 @@ object TesTask { ) } - def buildTaskInputs(taskFiles: Map[FullyQualifiedName, Seq[WomFile]], workflowName: String, womMapFn: WomFile => WomFile): List[Input] = { - taskFiles.flatMap { - case (fullyQualifiedName, files) => files.flatMap(_.flattenFiles).zipWithIndex.map { - case (f, index) => - val inputType = f match { - case _: WomUnlistedDirectory => "DIRECTORY" - case _: WomSingleFile => "FILE" - case _: WomGlobFile => "FILE" - } - Input( - name = Option(fullyQualifiedName + "." + index), - description = Option(workflowName + "." + fullyQualifiedName + "." + index), - url = Option(f.value), - path = womMapFn(f).value, - `type` = Option(inputType), - content = None - ) + def buildTaskInputs(taskFiles: Map[FullyQualifiedName, Seq[WomFile]], + workflowName: String, + womMapFn: WomFile => WomFile + ): List[Input] = + taskFiles.flatMap { case (fullyQualifiedName, files) => + files.flatMap(_.flattenFiles).zipWithIndex.map { case (f, index) => + val inputType = f match { + case _: WomUnlistedDirectory => "DIRECTORY" + case _: WomSingleFile => "FILE" + case _: WomGlobFile => "FILE" + } + Input( + name = Option(fullyQualifiedName + "." + index), + description = Option(workflowName + "." + fullyQualifiedName + "." + index), + url = Option(f.value), + path = womMapFn(f).value, + `type` = Option(inputType), + content = None + ) } }.toList - } def makeTags(workflowDescriptor: BackendWorkflowDescriptor): Map[String, Option[String]] = { // In addition to passing through any workflow labels, include relevant workflow ids as tags. 
@@ -304,7 +322,7 @@ object TesTask { ) } - def makeTask(tesTask: TesTask): Task = { + def makeTask(tesTask: TesTask): Task = Task( id = None, state = None, @@ -318,7 +336,6 @@ object TesTask { tags = Option(tesTask.tags), logs = None ) - } } // Field requirements in classes below based off GA4GH schema @@ -332,7 +349,8 @@ final case class Task(id: Option[String], executors: Seq[Executor], volumes: Option[Seq[String]], tags: Option[Map[String, Option[String]]], - logs: Option[Seq[TaskLog]]) + logs: Option[Seq[TaskLog]] +) final case class Executor(image: String, command: Seq[String], @@ -340,42 +358,45 @@ final case class Executor(image: String, stdout: Option[String], stderr: Option[String], stdin: Option[String], - env: Option[Map[String, String]]) + env: Option[Map[String, String]] +) final case class Input(name: Option[String], description: Option[String], url: Option[String], path: String, `type`: Option[String], - content: Option[String]) + content: Option[String] +) final case class Output(name: Option[String], description: Option[String], url: Option[String], path: String, - `type`: Option[String]) + `type`: Option[String] +) final case class Resources(cpu_cores: Option[Int], ram_gb: Option[Double], disk_gb: Option[Double], preemptible: Option[Boolean], zones: Option[Seq[String]], - backend_parameters: Option[Map[String, Option[String]]]) + backend_parameters: Option[Map[String, Option[String]]] +) -final case class OutputFileLog(url: String, - path: String, - size_bytes: Int) +final case class OutputFileLog(url: String, path: String, size_bytes: Int) final case class TaskLog(start_time: Option[String], end_time: Option[String], metadata: Option[Map[String, String]], logs: Option[Seq[ExecutorLog]], outputs: Option[Seq[OutputFileLog]], - system_logs: Option[Seq[String]]) + system_logs: Option[Seq[String]] +) final case class ExecutorLog(start_time: Option[String], end_time: Option[String], stdout: Option[String], stderr: Option[String], - exit_code: Option[Int]) - + exit_code: Option[Int] +) diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesWorkflowPaths.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesWorkflowPaths.scala index c85cee1a0c7..0b6db1fa9a4 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesWorkflowPaths.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesWorkflowPaths.scala @@ -7,8 +7,9 @@ import cromwell.core.path.{PathBuilder, PathFactory} import net.ceedubs.ficus.Ficus._ case class TesWorkflowPaths(override val workflowDescriptor: BackendWorkflowDescriptor, - override val config: Config, - override val pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders) extends WorkflowPaths { + override val config: Config, + override val pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders +) extends WorkflowPaths { val DockerRootString = config.as[Option[String]]("dockerRoot").getOrElse("/cromwell-executions") var DockerRoot = PathFactory.buildPath(DockerRootString, pathBuilders) @@ -17,10 +18,9 @@ case class TesWorkflowPaths(override val workflowDescriptor: BackendWorkflowDesc } val dockerWorkflowRoot = workflowPathBuilder(DockerRoot) - override def toJobPaths(workflowPaths: WorkflowPaths, - jobKey: BackendJobDescriptorKey): TesJobPaths = { + override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): TesJobPaths = new TesJobPaths(workflowPaths.asInstanceOf[TesWorkflowPaths], jobKey) - } - override protected def 
withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = this.copy(workflowDescriptor = workflowDescriptor) + override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = + this.copy(workflowDescriptor = workflowDescriptor) } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala index 11267d238cf..53ba95606d0 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesAsyncBackendJobExecutionActorSpec.scala @@ -17,7 +17,8 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit val fullyQualifiedName = "this.name.is.more.than.qualified" val workflowName = "mockWorkflow" - val someBlobUrl = "https://lz813a3d637adefec2c6e88f.blob.core.windows.net/sc-d8143fd8-aa07-446d-9ba0-af72203f1794/nyxp6c/tes-internal/configuration/supported-vm-sizes" + val someBlobUrl = + "https://lz813a3d637adefec2c6e88f.blob.core.windows.net/sc-d8143fd8-aa07-446d-9ba0-af72203f1794/nyxp6c/tes-internal/configuration/supported-vm-sizes" val someNotBlobUrl = "https://www.google.com/path/to/exile" var index = 0 @@ -29,7 +30,7 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit `type` = Option("FILE"), content = None ) - index = index+1 + index = index + 1 val blobInput_1 = Input( name = Option(fullyQualifiedName + "." + index), @@ -39,7 +40,7 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit `type` = Option("FILE"), content = None ) - index = index+1 + index = index + 1 val notBlobInput_1 = Input( name = Option(fullyQualifiedName + "." + index), @@ -49,7 +50,7 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit `type` = Option("FILE"), content = None ) - index = index+1 + index = index + 1 val notBlobInput_2 = Input( name = Option(fullyQualifiedName + "." 
+ index), @@ -68,7 +69,9 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit def generateMockWsmTokenGenerator: WSMBlobSasTokenGenerator = { val mockTokenGenerator = mock[WSMBlobSasTokenGenerator] val expectedTokenDuration: Duration = Duration.of(24, ChronoUnit.HOURS) - mockTokenGenerator.getWSMSasFetchEndpoint(any[BlobPath], any[Option[Duration]]) returns Try(s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken?sasExpirationDuration=${expectedTokenDuration.getSeconds.toInt}") + mockTokenGenerator.getWSMSasFetchEndpoint(any[BlobPath], any[Option[Duration]]) returns Try( + s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken?sasExpirationDuration=${expectedTokenDuration.getSeconds.toInt}" + ) mockTokenGenerator } def generateMockFsm: BlobFileSystemManager = { @@ -77,7 +80,7 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit mockFsm.blobTokenGenerator returns mockGenerator mockFsm } - //path to a blob file + // path to a blob file def generateMockBlobPath: BlobPath = { val mockBlobPath = mock[BlobPath] mockBlobPath.pathAsString returns someBlobUrl @@ -90,7 +93,7 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit mockBlobPath } - //Path to a file that isn't a blob file + // Path to a file that isn't a blob file def generateMockDefaultPath: cromwell.core.path.Path = { val mockDefaultPath: cromwell.core.path.Path = mock[cromwell.core.path.Path] mockDefaultPath.pathAsString returns someNotBlobUrl @@ -99,60 +102,77 @@ class TesAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with Matchers wit def pathGetter(pathString: String): Try[cromwell.core.path.Path] = { val mockBlob: BlobPath = generateMockBlobPath val mockDefault: cromwell.core.path.Path = generateMockDefaultPath - if(pathString.contains(someBlobUrl)) Try(mockBlob) else Try(mockDefault) + if (pathString.contains(someBlobUrl)) Try(mockBlob) else Try(mockDefault) } def blobConverter(pathToConvert: Try[cromwell.core.path.Path]): Try[BlobPath] = { val mockBlob: BlobPath = generateMockBlobPath - if(pathToConvert.get.pathAsString.contains(someBlobUrl)) Try(mockBlob) else Failure(new Exception("failed")) + if (pathToConvert.get.pathAsString.contains(someBlobUrl)) Try(mockBlob) else Failure(new Exception("failed")) } it should "not return sas endpoint when no blob paths are provided" in { val mockLogger: JobLogger = mock[JobLogger] val emptyInputs: List[Input] = List() val bloblessInputs: List[Input] = List(notBlobInput_1, notBlobInput_2) - TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(emptyInputs, pathGetter, mockLogger, blobConverter).isFailure shouldBe true - TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(bloblessInputs, pathGetter, mockLogger, blobConverter).isFailure shouldBe true + TesAsyncBackendJobExecutionActor + .determineWSMSasEndpointFromInputs(emptyInputs, pathGetter, mockLogger, blobConverter) + .isFailure shouldBe true + TesAsyncBackendJobExecutionActor + .determineWSMSasEndpointFromInputs(bloblessInputs, pathGetter, mockLogger, blobConverter) + .isFailure shouldBe true } it should "return a sas endpoint based on inputs when blob paths are provided" in { val mockLogger: JobLogger = mock[JobLogger] - val expectedTokenLifetimeSeconds = 24 * 60 * 60 //assert that cromwell asks for 24h token duration. 
- val expected = s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken?sasExpirationDuration=${expectedTokenLifetimeSeconds}" + val expectedTokenLifetimeSeconds = 24 * 60 * 60 // assert that cromwell asks for 24h token duration. + val expected = + s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken?sasExpirationDuration=${expectedTokenLifetimeSeconds}" val blobInput: List[Input] = List(blobInput_0) val blobInputs: List[Input] = List(blobInput_0, blobInput_1) val mixedInputs: List[Input] = List(notBlobInput_1, blobInput_0, blobInput_1) - TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(blobInput, pathGetter, mockLogger, blobConverter).get shouldEqual expected - TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(blobInputs, pathGetter, mockLogger, blobConverter).get shouldEqual expected - TesAsyncBackendJobExecutionActor.determineWSMSasEndpointFromInputs(mixedInputs, pathGetter, mockLogger, blobConverter).get shouldEqual expected + TesAsyncBackendJobExecutionActor + .determineWSMSasEndpointFromInputs(blobInput, pathGetter, mockLogger, blobConverter) + .get shouldEqual expected + TesAsyncBackendJobExecutionActor + .determineWSMSasEndpointFromInputs(blobInputs, pathGetter, mockLogger, blobConverter) + .get shouldEqual expected + TesAsyncBackendJobExecutionActor + .determineWSMSasEndpointFromInputs(mixedInputs, pathGetter, mockLogger, blobConverter) + .get shouldEqual expected } it should "contain expected strings in the bash script" in { val mockEnvironmentVariableNameFromWom = "mock_env_var_for_storing_sas_token" - val expectedEndpoint = s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken" + val expectedEndpoint = + s"$testWsmEndpoint/api/workspaces/v1/$testWorkspaceId/resources/controlled/azure/storageContainer/$testContainerResourceId/getSasToken" val beginSubstring = "### BEGIN ACQUIRE LOCAL SAS TOKEN ###" val endSubstring = "### END ACQUIRE LOCAL SAS TOKEN ###" val curlCommandSubstring = s""" - |sas_response_json=$$(curl -s \\ - | --retry 3 \\ - | --retry-delay 2 \\ - | -X POST "$expectedEndpoint" \\ - | -H "Content-Type: application/json" \\ - | -H "accept: */*" \\ - | -H "Authorization: Bearer $${BEARER_TOKEN}" \\ - | -H "Content-Length: 0" \\ - | -d "") - |""".stripMargin - val exportCommandSubstring = s"""export $mockEnvironmentVariableNameFromWom=$$(echo "$${sas_response_json}" | jq -r '.token')""" - val echoCommandSubstring = s"""echo "Saving sas token: $${$mockEnvironmentVariableNameFromWom:0:4}**** to environment variable $mockEnvironmentVariableNameFromWom..."""" - val generatedBashScript = TesAsyncBackendJobExecutionActor.generateLocalizedSasScriptPreamble(mockEnvironmentVariableNameFromWom, expectedEndpoint) - - generatedBashScript should include (beginSubstring) - generatedBashScript should include (endSubstring) - generatedBashScript should include (curlCommandSubstring) - generatedBashScript should include (echoCommandSubstring) - generatedBashScript should include (exportCommandSubstring) + |sas_response_json=$$(curl -s \\ + | --retry 3 \\ + | --retry-delay 2 \\ + | -X POST "$expectedEndpoint" \\ + | -H "Content-Type: application/json" \\ + | -H "accept: */*" \\ + | -H "Authorization: Bearer $${BEARER_TOKEN}" \\ + | -H "Content-Length: 0" \\ + | -d "") + |""".stripMargin + val 
exportCommandSubstring = + s"""export $mockEnvironmentVariableNameFromWom=$$(echo "$${sas_response_json}" | jq -r '.token')""" + val echoCommandSubstring = + s"""echo "Saving sas token: $${$mockEnvironmentVariableNameFromWom:0:4}**** to environment variable $mockEnvironmentVariableNameFromWom..."""" + val generatedBashScript = + TesAsyncBackendJobExecutionActor.generateLocalizedSasScriptPreamble(mockEnvironmentVariableNameFromWom, + expectedEndpoint + ) + + generatedBashScript should include(beginSubstring) + generatedBashScript should include(endSubstring) + generatedBashScript should include(curlCommandSubstring) + generatedBashScript should include(echoCommandSubstring) + generatedBashScript should include(exportCommandSubstring) } } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesConfigurationSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesConfigurationSpec.scala index a695e5d6e33..144941859c3 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesConfigurationSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesConfigurationSpec.scala @@ -20,13 +20,15 @@ class TesConfigurationSpec extends AnyFlatSpec with Matchers { ) ) - def backoffsAreEquivalent(expectedBackoff: SimpleExponentialBackoff, actualBackoff: SimpleExponentialBackoff): Boolean = { + def backoffsAreEquivalent(expectedBackoff: SimpleExponentialBackoff, + actualBackoff: SimpleExponentialBackoff + ): Boolean = { val b1 = expectedBackoff.googleBackoff val b2 = actualBackoff.googleBackoff b1.getInitialIntervalMillis == b2.getInitialIntervalMillis && - b1.getMaxIntervalMillis == b2.getMaxIntervalMillis && - b1.getMultiplier == b2.getMultiplier && - b1.getRandomizationFactor == b2.getRandomizationFactor + b1.getMaxIntervalMillis == b2.getMaxIntervalMillis && + b1.getMultiplier == b2.getMultiplier && + b1.getRandomizationFactor == b2.getRandomizationFactor } it should "use default backoffs when no custom config provided" in { @@ -38,7 +40,9 @@ class TesConfigurationSpec extends AnyFlatSpec with Matchers { it should "use configured backoffs if they exist" in { val tesConfig = makeTesConfig(TesTestConfig.backendConfigWithBackoffs) backoffsAreEquivalent(SimpleExponentialBackoff(5 seconds, 1 minute, 2.5, .7), tesConfig.pollBackoff) shouldBe true - backoffsAreEquivalent(SimpleExponentialBackoff(3 minutes, 1 hours, 5, .1), tesConfig.executeOrRecoverBackoff) shouldBe true + backoffsAreEquivalent(SimpleExponentialBackoff(3 minutes, 1 hours, 5, .1), + tesConfig.executeOrRecoverBackoff + ) shouldBe true } it should "fail if user defines an invalid backoff" in { diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala index a081f26c910..a34062b3e88 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala @@ -18,8 +18,7 @@ import wom.graph.CommandCallNode import scala.concurrent.duration._ -class TesInitializationActorSpec extends TestKitSuite - with AnyWordSpecLike with Matchers with ImplicitSender { +class TesInitializationActorSpec extends TestKitSuite with AnyWordSpecLike with Matchers with ImplicitSender { val Timeout: FiniteDuration = 10.second.dilated val HelloWorld: String = @@ -67,9 +66,10 @@ class 
TesInitializationActorSpec extends TestKitSuite |} |""".stripMargin - - private def getActorRef(workflowDescriptor: BackendWorkflowDescriptor, calls: Set[CommandCallNode], - conf: BackendConfigurationDescriptor) = { + private def getActorRef(workflowDescriptor: BackendWorkflowDescriptor, + calls: Set[CommandCallNode], + conf: BackendConfigurationDescriptor + ) = { val params = TesInitializationActorParams(workflowDescriptor, calls, new TesConfiguration(conf), emptyActor) val props = Props(new TesInitializationActor(params)) system.actorOf(props, "TesInitializationActor" + UUID.randomUUID) @@ -84,8 +84,8 @@ class TesInitializationActorSpec extends TestKitSuite "TesInitializationActor" should { "log a warning message when there are unsupported runtime attributes" in { within(Timeout) { - val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, - runtime = """runtime { docker: "ubuntu/latest" test: true }""") + val workflowDescriptor = + buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { docker: "ubuntu/latest" test: true }""") val backend = getActorRef(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, conf) val eventPattern = "Key/s [test] is/are not supported by backend. Unsupported attributes will not be part of job executions." @@ -101,15 +101,17 @@ class TesInitializationActorSpec extends TestKitSuite def initializeActor(workflowOptions: WorkflowOptions): Unit = { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, - runtime = """runtime { docker: "ubuntu/latest" }""", - options = workflowOptions) + runtime = """runtime { docker: "ubuntu/latest" }""", + options = workflowOptions + ) val backend = getActorRef(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, conf) backend ! Initialize } def nonStringErrorMessage(key: String) = s"Workflow option $key must be a string" - val bothRequiredErrorMessage = s"Workflow options ${TesWorkflowOptionKeys.WorkflowExecutionIdentity} and ${TesWorkflowOptionKeys.DataAccessIdentity} are both required if one is provided" + val bothRequiredErrorMessage = + s"Workflow options ${TesWorkflowOptionKeys.WorkflowExecutionIdentity} and ${TesWorkflowOptionKeys.DataAccessIdentity} are both required if one is provided" "fail when WorkflowExecutionIdentity is not a string and DataAccessIdentity is missing" in { within(Timeout) { @@ -121,9 +123,13 @@ class TesInitializationActorSpec extends TestKitSuite case InitializationSuccess(s) => fail(s"InitializationFailed was expected but got $s") case InitializationFailed(failure) => val expectedMsg = nonStringErrorMessage(TesWorkflowOptionKeys.WorkflowExecutionIdentity) - if (!(failure.getMessage.contains(expectedMsg) && - failure.getMessage.contains(bothRequiredErrorMessage))) { - fail(s"Exception message did not contain both '$expectedMsg' and '$bothRequiredErrorMessage'. Was '$failure'") + if ( + !(failure.getMessage.contains(expectedMsg) && + failure.getMessage.contains(bothRequiredErrorMessage)) + ) { + fail( + s"Exception message did not contain both '$expectedMsg' and '$bothRequiredErrorMessage'. 
Was '$failure'" + ) } } } @@ -131,10 +137,14 @@ class TesInitializationActorSpec extends TestKitSuite "fail when WorkflowExecutionIdentity is a string but DataAccessIdentity is not a string" in { within(Timeout) { - val workflowOptions = WorkflowOptions(JsObject(Map( - TesWorkflowOptionKeys.WorkflowExecutionIdentity -> JsString("5"), - TesWorkflowOptionKeys.DataAccessIdentity -> JsNumber(6) - ))) + val workflowOptions = WorkflowOptions( + JsObject( + Map( + TesWorkflowOptionKeys.WorkflowExecutionIdentity -> JsString("5"), + TesWorkflowOptionKeys.DataAccessIdentity -> JsNumber(6) + ) + ) + ) initializeActor(workflowOptions) expectMsgPF() { case InitializationSuccess(s) => fail(s"InitializationFailed was expected but got $s") @@ -147,10 +157,14 @@ class TesInitializationActorSpec extends TestKitSuite "successfully start when both WorkflowExecutionIdentity and DataAccessIdentity are strings" in { within(Timeout) { - val workflowOptions = WorkflowOptions(JsObject(Map( - TesWorkflowOptionKeys.WorkflowExecutionIdentity -> JsString("5"), - TesWorkflowOptionKeys.DataAccessIdentity -> JsString("6") - ))) + val workflowOptions = WorkflowOptions( + JsObject( + Map( + TesWorkflowOptionKeys.WorkflowExecutionIdentity -> JsString("5"), + TesWorkflowOptionKeys.DataAccessIdentity -> JsString("6") + ) + ) + ) initializeActor(workflowOptions) expectMsgPF() { case InitializationSuccess(_) => @@ -164,13 +178,18 @@ class TesInitializationActorSpec extends TestKitSuite val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { }""") val backend = getActorRef(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, conf) backend ! Initialize - expectMsgPF() { - case InitializationFailed(failure) => - failure match { - case exception: RuntimeAttributeValidationFailures => - if (!exception.getMessage.equals("Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!")) - fail("Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'.") - } + expectMsgPF() { case InitializationFailed(failure) => + failure match { + case exception: RuntimeAttributeValidationFailures => + if ( + !exception.getMessage.equals( + "Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!" + ) + ) + fail( + "Exception message is not equal to 'Runtime validation failed:\nTask hello has an invalid runtime attribute docker = !! NOT FOUND !!'." 
+ ) + } } } } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala index 830e0cbe70c..1fb43503fb6 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala @@ -31,11 +31,14 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec val expectedDefaultsPlusUbuntuDocker = expectedDefaults.copy(dockerImage = "ubuntu:latest") - def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]) = { - WorkflowOptions(JsObject(Map( - "default_runtime_attributes" -> JsObject(defaults) - ))) - } + def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]) = + WorkflowOptions( + JsObject( + Map( + "default_runtime_attributes" -> JsObject(defaults) + ) + ) + ) "TesRuntimeAttributes" should { @@ -63,7 +66,10 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec "fail to validate an invalid failOnStderr entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "failOnStderr" -> WomString("yes")) - assertFailure(runtimeAttributes, "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'") + assertFailure( + runtimeAttributes, + "Expecting failOnStderr runtime attribute to be a Boolean or a String with values of 'true' or 'false'" + ) } "validate a valid preemptible entry" in { @@ -73,13 +79,16 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec } "validate a valid azureSasEnvironmentVariable entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), TesRuntimeAttributes.LocalizedSasKey -> WomString("THIS_IS_VALID")) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), TesRuntimeAttributes.LocalizedSasKey -> WomString("THIS_IS_VALID")) val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(localizedSasEnvVar = Some("THIS_IS_VALID")) assertSuccess(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid azureSasEnvironmentVariable entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), TesRuntimeAttributes.LocalizedSasKey -> WomString("THIS IS INVALID")) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), + TesRuntimeAttributes.LocalizedSasKey -> WomString("THIS IS INVALID") + ) assertFailure(runtimeAttributes, "Value must be a string containing only letters, numbers, and underscores.") } @@ -103,35 +112,53 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec "fail to validate an invalid string preemptible entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "preemptible" -> WomString("yes")) - assertFailure(runtimeAttributes, "Expecting preemptible runtime attribute to be an Integer, Boolean, or a String with values of 'true' or 'false'") + assertFailure( + runtimeAttributes, + "Expecting preemptible runtime attribute to be an Integer, Boolean, or a String with values of 'true' or 'false'" + ) } "fail to validate an invalid type preemptible entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "preemptible" -> WomFloat(3.14)) - assertFailure(runtimeAttributes, "Expecting preemptible runtime attribute to be an Integer, Boolean, or a String with values of 
'true' or 'false'") + assertFailure( + runtimeAttributes, + "Expecting preemptible runtime attribute to be an Integer, Boolean, or a String with values of 'true' or 'false'" + ) } "validate a valid continueOnReturnCode entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomInteger(1)) - val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1))) + val expectedRuntimeAttributes = + expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1))) assertSuccess(runtimeAttributes, expectedRuntimeAttributes) } "validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) - val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), + "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2))) + ) + val expectedRuntimeAttributes = + expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertSuccess(runtimeAttributes, expectedRuntimeAttributes) } "coerce then validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2")))) - val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), + "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2"))) + ) + val expectedRuntimeAttributes = + expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertSuccess(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid continueOnReturnCode entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomString("value")) - assertFailure(runtimeAttributes, "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]") + assertFailure( + runtimeAttributes, + "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" + ) } "validate a valid cpu entry" in assertSuccess( @@ -158,7 +185,9 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec "fail to validate an invalid memory entry" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "memory" -> WomString("blah")) - assertFailure(runtimeAttributes, "Expecting memory runtime attribute to be an Integer or String with format '8 GB'") + assertFailure(runtimeAttributes, + "Expecting memory runtime attribute to be an Integer or String with format '8 GB'" + ) } "validate a valid disk entry" in { @@ -185,7 +214,8 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec } "refuse multiple `local-disk` instances" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomString("local-disk 10 SSD, local-disk 20 SSD")) + val runtimeAttributes = + Map("docker" -> 
WomString("ubuntu:latest"), "disks" -> WomString("local-disk 10 SSD, local-disk 20 SSD")) assertFailure(runtimeAttributes, "Expecting exactly one disk definition on this backend, found multiple") } @@ -195,29 +225,41 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec } "refuse custom AND multiple mount points" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomString("/mnt/tmp 10 LOCAL, local-disk 20 HDD")) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomString("/mnt/tmp 10 LOCAL, local-disk 20 HDD")) assertFailure(runtimeAttributes, "Disks with custom mount points are not supported by this backend") } "not accept a single comma" ignore { // Surprisingly, the PAPI code we call under the covers validates `,` and give the user a default disk. val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomString(",")) - assertFailure(runtimeAttributes, "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: ','") + assertFailure( + runtimeAttributes, + "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: ','" + ) } "not accept empty string" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomString("")) - assertFailure(runtimeAttributes, "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: ''") + assertFailure( + runtimeAttributes, + "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: ''" + ) } "not accept `banana`" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomString("banana")) - assertFailure(runtimeAttributes, "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: 'banana'") + assertFailure( + runtimeAttributes, + "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: 'banana'" + ) } "not accept a random number (chosen by fair dice roll)" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomInteger(4)) - assertFailure(runtimeAttributes, "Expecting disks runtime attribute to be a comma separated String or Array[String]") + assertFailure(runtimeAttributes, + "Expecting disks runtime attribute to be a comma separated String or Array[String]" + ) } "validate a valid dockerWorkingDir entry" in { @@ -248,12 +290,12 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec } "exclude unknown non-string attributes from backend parameters" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "foo" -> WomInteger(5), "bar" -> WomString("baz")) + val runtimeAttributes = + Map("docker" -> WomString("ubuntu:latest"), "foo" -> WomInteger(5), "bar" -> WomString("baz")) val expectedRuntimeAttributes = expectedDefaults.copy(backendParameters = Map("bar" -> Option("baz"))) assertSuccess(runtimeAttributes, expectedRuntimeAttributes, tesConfig = mockTesConfigWithBackendParams) } - "turn populated optional unknown string attributes into backend parameters" in { val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "foo" -> WomOptionalValue(WomString("bar"))) val expectedRuntimeAttributes = expectedDefaults.copy(backendParameters = Map("foo" -> Option("bar"))) @@ -267,7 +309,8 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec } 
} - private val mockConfigurationDescriptor = BackendConfigurationDescriptor(TesTestConfig.backendConfig, TestConfig.globalConfig) + private val mockConfigurationDescriptor = + BackendConfigurationDescriptor(TesTestConfig.backendConfig, TestConfig.globalConfig) private val mockTesConfiguration = new TesConfiguration(mockConfigurationDescriptor) private val mockTesConfigWithBackendParams = new TesConfiguration( mockConfigurationDescriptor.copy(backendConfig = TesTestConfig.backendConfigWithBackendParams) @@ -276,7 +319,8 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec private def assertSuccess(runtimeAttributes: Map[String, WomValue], expectedRuntimeAttributes: TesRuntimeAttributes, workflowOptions: WorkflowOptions = emptyWorkflowOptions, - tesConfig: TesConfiguration = mockTesConfiguration): Unit = { + tesConfig: TesConfiguration = mockTesConfiguration + ): Unit = { try { val actualRuntimeAttributes = toTesRuntimeAttributes(runtimeAttributes, workflowOptions, tesConfig) @@ -290,7 +334,8 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec private def assertFailure(runtimeAttributes: Map[String, WomValue], exMsg: String, workflowOptions: WorkflowOptions = emptyWorkflowOptions, - tesConfig: TesConfiguration = mockTesConfiguration): Unit = { + tesConfig: TesConfiguration = mockTesConfiguration + ): Unit = { try { toTesRuntimeAttributes(runtimeAttributes, workflowOptions, tesConfig) fail("A RuntimeException was expected.") @@ -304,13 +349,15 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec private val staticRuntimeAttributeDefinitions: Set[RuntimeAttributeDefinition] = TesRuntimeAttributes.runtimeAttributesBuilder(mockTesConfiguration.runtimeConfig).definitions.toSet - private def toTesRuntimeAttributes(runtimeAttributes: Map[String, WomValue], workflowOptions: WorkflowOptions, - tesConfiguration: TesConfiguration): TesRuntimeAttributes = { + tesConfiguration: TesConfiguration + ): TesRuntimeAttributes = { val runtimeAttributesBuilder = TesRuntimeAttributes.runtimeAttributesBuilder(tesConfiguration.runtimeConfig) - val defaultedAttributes = RuntimeAttributeDefinition.addDefaultsToAttributes( - staticRuntimeAttributeDefinitions, workflowOptions)(runtimeAttributes) + val defaultedAttributes = + RuntimeAttributeDefinition.addDefaultsToAttributes(staticRuntimeAttributeDefinitions, workflowOptions)( + runtimeAttributes + ) val validatedRuntimeAttributes = runtimeAttributesBuilder.build(defaultedAttributes, NOPLogger.NOP_LOGGER) TesRuntimeAttributes(validatedRuntimeAttributes, runtimeAttributes, tesConfiguration) } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala index a5fd3a3a7e2..21e818bbc5a 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTaskSpec.scala @@ -15,12 +15,7 @@ import wom.InstantiatedCommand import java.util.UUID -class TesTaskSpec - extends AnyFlatSpec - with CromwellTimeoutSpec - with Matchers - with BackendSpec - with MockSugar { +class TesTaskSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with BackendSpec with MockSugar { val runtimeAttributes = new TesRuntimeAttributes( ContinueOnReturnCodeSet(Set(0)), @@ -40,19 +35,25 @@ class TesTaskSpec it should "create the correct resources when an identity is passed in WorkflowOptions" 
in { val wei = Option("abc123") TesTask.makeResources(runtimeAttributes, wei, internalPathPrefix) shouldEqual - Resources(None, None, None, Option(false), None, - Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), - expectedTuple)) - ) + Resources(None, + None, + None, + Option(false), + None, + Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), expectedTuple)) + ) } it should "create the correct resources when an empty identity is passed in WorkflowOptions" in { val wei = Option("") TesTask.makeResources(runtimeAttributes, wei, internalPathPrefix) shouldEqual - Resources(None, None, None, Option(false), None, - Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option(""), - expectedTuple)) - ) + Resources(None, + None, + None, + Option(false), + None, + Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option(""), expectedTuple)) + ) } it should "create the correct resources when no identity is passed in WorkflowOptions" in { @@ -66,9 +67,13 @@ class TesTaskSpec val weio = Option(WorkflowExecutionIdentityOption("def456")) val wei = TesTask.getPreferredWorkflowExecutionIdentity(weic, weio) TesTask.makeResources(runtimeAttributes, wei, internalPathPrefix) shouldEqual - Resources(None, None, None, Option(false), None, Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), - expectedTuple)) - ) + Resources(None, + None, + None, + Option(false), + None, + Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), expectedTuple)) + ) } it should "create the correct resources when no identity is passed in via backend config" in { @@ -76,58 +81,65 @@ class TesTaskSpec val weio = Option(WorkflowExecutionIdentityOption("def456")) val wei = TesTask.getPreferredWorkflowExecutionIdentity(weic, weio) TesTask.makeResources(runtimeAttributes, wei, internalPathPrefix) shouldEqual - Resources(None, None, None, Option(false), None, Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("def456"), - expectedTuple)) - ) + Resources(None, + None, + None, + Option(false), + None, + Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("def456"), expectedTuple)) + ) } it should "correctly set the internal path prefix when provided as a backend parameter" in { val wei = Option("abc123") val internalPathPrefix = Option("mock/path/to/tes/task") TesTask.makeResources(runtimeAttributes, wei, internalPathPrefix) shouldEqual - Resources(None, None, None, Option(false), None, - Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), - "internal_path_prefix" -> internalPathPrefix) - )) + Resources( + None, + None, + None, + Option(false), + None, + Option( + Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), + "internal_path_prefix" -> internalPathPrefix + ) + ) + ) } it should "correctly resolve the path to .../tes_task and add the k/v pair to backend parameters" in { val emptyWorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])) - val workflowDescriptor = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld, - labels = Labels("foo" -> "bar")) - val jobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, - Map.empty, - emptyWorkflowOptions, - Set.empty) - val tesPaths = TesJobPaths(jobDescriptor.key, - jobDescriptor.workflowDescriptor, - TestConfig.emptyConfig) + val workflowDescriptor = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld, labels = Labels("foo" -> "bar")) + val 
jobDescriptor = + jobDescriptorFromSingleCallWorkflow(workflowDescriptor, Map.empty, emptyWorkflowOptions, Set.empty) + val tesPaths = TesJobPaths(jobDescriptor.key, jobDescriptor.workflowDescriptor, TestConfig.emptyConfig) val expectedKey = "internal_path_prefix" val expectedValue = Option(tesPaths.tesTaskRoot) - //Assert path correctly ends up in the resources + // Assert path correctly ends up in the resources val wei = Option("abc123") TesTask.makeResources(runtimeAttributes, wei, expectedValue) shouldEqual - Resources(None, None, None, Option(false), None, - Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), - expectedKey -> expectedValue)) + Resources( + None, + None, + None, + Option(false), + None, + Option(Map(TesWorkflowOptionKeys.WorkflowExecutionIdentity -> Option("abc123"), expectedKey -> expectedValue)) ) } it should "copy labels to tags" in { val jobLogger = mock[JobLogger] val emptyWorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])) - val workflowDescriptor = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld, - labels = Labels("foo" -> "bar")) - val jobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, - Map.empty, - emptyWorkflowOptions, - Set.empty) - val tesPaths = TesJobPaths(jobDescriptor.key, - jobDescriptor.workflowDescriptor, - TestConfig.emptyConfig) - val tesTask = TesTask(jobDescriptor, + val workflowDescriptor = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld, labels = Labels("foo" -> "bar")) + val jobDescriptor = + jobDescriptorFromSingleCallWorkflow(workflowDescriptor, Map.empty, emptyWorkflowOptions, Set.empty) + val tesPaths = TesJobPaths(jobDescriptor.key, jobDescriptor.workflowDescriptor, TestConfig.emptyConfig) + val tesTask = TesTask( + jobDescriptor, TestConfig.emptyBackendConfigDescriptor, jobLogger, tesPaths, @@ -138,7 +150,8 @@ class TesTaskSpec "", Map.empty, "", - OutputMode.ROOT) + OutputMode.ROOT + ) val task = TesTask.makeTask(tesTask) @@ -156,25 +169,23 @@ class TesTaskSpec val jobLogger = mock[JobLogger] val emptyWorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])) val workflowDescriptor = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld) - val jobDescriptor = jobDescriptorFromSingleCallWorkflow(workflowDescriptor, - Map.empty, - emptyWorkflowOptions, - Set.empty) - val tesPaths = TesJobPaths(jobDescriptor.key, - jobDescriptor.workflowDescriptor, - TestConfig.emptyConfig) - val tesTask = TesTask(jobDescriptor, - TestConfig.emptyBackendConfigDescriptor, - jobLogger, - tesPaths, - runtimeAttributes, - DefaultPathBuilder.build("").get, - "", - InstantiatedCommand("command"), - "", - Map.empty, - "", - OutputMode.ROOT) + val jobDescriptor = + jobDescriptorFromSingleCallWorkflow(workflowDescriptor, Map.empty, emptyWorkflowOptions, Set.empty) + val tesPaths = TesJobPaths(jobDescriptor.key, jobDescriptor.workflowDescriptor, TestConfig.emptyConfig) + val tesTask = TesTask( + jobDescriptor, + TestConfig.emptyBackendConfigDescriptor, + jobLogger, + tesPaths, + runtimeAttributes, + DefaultPathBuilder.build("").get, + "", + InstantiatedCommand("command"), + "", + Map.empty, + "", + OutputMode.ROOT + ) val task = TesTask.makeTask(tesTask) diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTestConfig.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTestConfig.scala index 7cbfd7e5348..7bb14c02a61 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTestConfig.scala +++ 
b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesTestConfig.scala @@ -112,4 +112,3 @@ object TesTestConfig { val backendConfigWithInvalidBackoffs = ConfigFactory.parseString(backendConfigStringWithInvalidBackoffs) } - diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala index 8d06277dbb5..e9d913eef0f 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala @@ -48,7 +48,9 @@ class TesWorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat subWd.id returns subWorkflowId val workflowPaths = TesWorkflowPaths(subWd, TesTestConfig.backendConfig) - workflowPaths.workflowRoot.toString shouldBe File(s"local-cromwell-executions/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId").pathAsString + workflowPaths.workflowRoot.toString shouldBe File( + s"local-cromwell-executions/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" + ).pathAsString workflowPaths.dockerWorkflowRoot.toString shouldBe s"/cromwell-executions/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" } } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TestWorkflows.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TestWorkflows.scala index e98ca521075..8c73d36d49a 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TestWorkflows.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TestWorkflows.scala @@ -7,26 +7,27 @@ object TestWorkflows { case class TestWorkflow(workflowDescriptor: BackendWorkflowDescriptor, config: BackendConfigurationDescriptor, - expectedResponse: BackendJobExecutionResponse) + expectedResponse: BackendJobExecutionResponse + ) val HelloWorld = s""" - |task hello { - | String addressee = "you " - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - | - | RUNTIME - |} - | - |workflow wf_hello { - | call hello - |} - |""".stripMargin + |task hello { + | String addressee = "you " + | command { + | echo "Hello $${addressee}!" 
+ | } + | output { + | String salutation = read_string(stdout()) + | } + | + | RUNTIME + |} + | + |workflow wf_hello { + | call hello + |} + |""".stripMargin val GoodbyeWorld = """ @@ -46,26 +47,26 @@ object TestWorkflows { val InputFiles = s""" - |task localize { - | File inputFileFromJson - | File inputFileFromCallInputs - | command { - | cat $${inputFileFromJson} - | echo "" - | cat $${inputFileFromCallInputs} - | } - | output { - | Array[String] out = read_lines(stdout()) - | } - | - | RUNTIME - |} - | - |workflow wf_localize { - | File workflowFile - | call localize { input: inputFileFromCallInputs = workflowFile } - |} - |""".stripMargin + |task localize { + | File inputFileFromJson + | File inputFileFromCallInputs + | command { + | cat $${inputFileFromJson} + | echo "" + | cat $${inputFileFromCallInputs} + | } + | output { + | Array[String] out = read_lines(stdout()) + | } + | + | RUNTIME + |} + | + |workflow wf_localize { + | File workflowFile + | call localize { input: inputFileFromCallInputs = workflowFile } + |} + |""".stripMargin val Sleep10 = """ @@ -83,25 +84,25 @@ object TestWorkflows { val Scatter = s""" - |task scattering { - | Int intNumber - | command { - | echo $${intNumber} - | } - | output { - | Int out = read_string(stdout()) - | } - |} - | - |workflow wf_scattering { - | Array[Int] numbers = [1, 2, 3] - | scatter (i in numbers) { - | call scattering { input: intNumber = i } - | } - |} - |""".stripMargin + |task scattering { + | Int intNumber + | command { + | echo $${intNumber} + | } + | output { + | Int out = read_string(stdout()) + | } + |} + | + |workflow wf_scattering { + | Array[Int] numbers = [1, 2, 3] + | scatter (i in numbers) { + | call scattering { input: intNumber = i } + | } + |} + |""".stripMargin - val OutputProcess = { + val OutputProcess = """ |task localize { | File inputFile @@ -121,9 +122,8 @@ object TestWorkflows { | call localize |} |""".stripMargin - } - val MissingOutputProcess = { + val MissingOutputProcess = """ |task localize { | command { @@ -137,5 +137,4 @@ object TestWorkflows { | call localize |} |""".stripMargin - } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala index 24194807f17..f681b45ad62 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala @@ -16,102 +16,113 @@ import scala.language.postfixOps object AstTools { - class InterpolatedTerminal(val rootTerminal: Terminal, innerTerminal: Terminal, columnOffset: Int) extends Terminal( - innerTerminal.getId, - innerTerminal.getTerminalStr, - innerTerminal.getSourceString, - innerTerminal.getResource, - // We want the line of the rootTerminal because innerTerminal is created purely from the expression string - // So its first line will be line 0, which is very unlikely to be line 0 in the WDL file - rootTerminal.getLine, - // Column offset tells us the position of innerTerminal in the rootTerminal string - // Therefore rootTerminal.getColumn + columnOffset gives us the column position for innerTerminal in the file (+1 to be on the $) - rootTerminal.getColumn + columnOffset + 1 - ) + class InterpolatedTerminal(val rootTerminal: Terminal, innerTerminal: Terminal, columnOffset: Int) + extends Terminal( + innerTerminal.getId, + innerTerminal.getTerminalStr, + innerTerminal.getSourceString, + innerTerminal.getResource, + // We want the line of the rootTerminal because innerTerminal is created purely from 
the expression string + // So its first line will be line 0, which is very unlikely to be line 0 in the WDL file + rootTerminal.getLine, + // Column offset tells us the position of innerTerminal in the rootTerminal string + // Therefore rootTerminal.getColumn + columnOffset gives us the column position for innerTerminal in the file (+1 to be on the $) + rootTerminal.getColumn + columnOffset + 1 + ) implicit class EnhancedAstNode(val astNode: AstNode) extends AnyVal { def findAsts(name: String): Seq[Ast] = AstTools.findAsts(astNode, name) - def findAstsWithTrail(name: String, trail: Seq[AstNode] = Seq.empty): Map[Ast, Seq[AstNode]] = { + def findAstsWithTrail(name: String, trail: Seq[AstNode] = Seq.empty): Map[Ast, Seq[AstNode]] = astNode match { case x: Ast => val thisAst = if (x.getName.equals(name)) Map(x -> trail) else Map.empty[Ast, Seq[AstNode]] - combine(x.getAttributes.values.asScala.flatMap{_.findAstsWithTrail(name, trail :+ x)}.toMap, thisAst) - case x: AstList => x.asScala.toVector.flatMap{_.findAstsWithTrail(name, trail :+ x)}.toMap + combine(x.getAttributes.values.asScala.flatMap(_.findAstsWithTrail(name, trail :+ x)).toMap, thisAst) + case x: AstList => x.asScala.toVector.flatMap(_.findAstsWithTrail(name, trail :+ x)).toMap case _: Terminal => Map.empty[Ast, Seq[AstNode]] case _ => Map.empty[Ast, Seq[AstNode]] } - } private def findTerminalsInInterpolatedString(t: Terminal, terminalType: String, trail: Seq[AstNode], parentTerminal: Option[Terminal], - columnOffset: Int) = { + columnOffset: Int + ) = { /* - * Find all interpolations in the string terminal. - * e.g: String a = "hello ${you}" - * We'll create an expression from "you" and remember the position in the string - * "hello ${you}" at which we found "${you}". + * Find all interpolations in the string terminal. + * e.g: String a = "hello ${you}" + * We'll create an expression from "you" and remember the position in the string + * "hello ${you}" at which we found "${you}". */ val interpolatedExpressionAstNodesAndTheirMatchPosition = InterpolationTagPattern .findAllMatchIn(t.getSourceString) - .foldLeft(List.empty[(AstNode, Int)])((nodes, exprValue) => { + .foldLeft(List.empty[(AstNode, Int)]) { (nodes, exprValue) => // This is the interpolated expression e.g ${my_var} val v = exprValue.group(0) // Create an expression from the content and remember the position of the match in the overall terminal string // so we can point to it in the error message if needed (WdlExpression.fromString(v.substring(2, v.length - 1)).ast, exprValue.start) +: nodes - }) + } interpolatedExpressionAstNodesAndTheirMatchPosition match { // If there's no interpolated expression and the parent terminal is of the right type, // create an interpolated terminal and we're done case Nil if t.getTerminalStr == terminalType => - val finalTerminal = parentTerminal map { parent => new InterpolatedTerminal(parent, t, columnOffset) } getOrElse t + val finalTerminal = parentTerminal map { parent => + new InterpolatedTerminal(parent, t, columnOffset) + } getOrElse t Map(finalTerminal -> trail) // No interpolated terminal and the parent terminal is not of the right type, we're done case Nil => Map.empty[Terminal, Seq[AstNode]] // We found some interpolated terminals, recursively find their inner terminals and propagate the root terminal. // Also propagate the accumulated columnOffset. 
The regex index match will start // over at 0 in the next round of matching so we need to keep track of the offset as we recurse - case expressions => expressions.flatMap({ - case (innerNode, offset) => innerNode.findTerminalsWithTrail(terminalType, trail :+ t, Option(parentTerminal.getOrElse(t)), columnOffset + offset) - }).toMap + case expressions => + expressions.flatMap { case (innerNode, offset) => + innerNode.findTerminalsWithTrail(terminalType, + trail :+ t, + Option(parentTerminal.getOrElse(t)), + columnOffset + offset + ) + }.toMap } } def findTerminalsWithTrail(terminalType: String, trail: Seq[AstNode] = Seq.empty, parentTerminal: Option[Terminal] = None, - columnOffset: Int = 0): Map[Terminal, Seq[AstNode]] = { + columnOffset: Int = 0 + ): Map[Terminal, Seq[AstNode]] = astNode match { - case o: Ast if o.isObjectLiteral => o.getAttribute("map").astListAsVector flatMap { - case a: Ast => a.getAttribute("value").findTerminalsWithTrail(terminalType, trail :+ o, parentTerminal) - case _: AstNode => Seq.empty - } toMap - case a: Ast => a.getAttributes.values.asScala flatMap { _.findTerminalsWithTrail(terminalType, trail :+ a) } toMap + case o: Ast if o.isObjectLiteral => + o.getAttribute("map").astListAsVector flatMap { + case a: Ast => a.getAttribute("value").findTerminalsWithTrail(terminalType, trail :+ o, parentTerminal) + case _: AstNode => Seq.empty + } toMap + case a: Ast => + a.getAttributes.values.asScala flatMap { _.findTerminalsWithTrail(terminalType, trail :+ a) } toMap case a: AstList => a.asScala.toVector flatMap { _.findTerminalsWithTrail(terminalType, trail :+ a) } toMap case t: Terminal if t.getTerminalStr == terminalType => - val finalTerminal = parentTerminal map { parent => new InterpolatedTerminal(parent, t, columnOffset) } getOrElse t + val finalTerminal = parentTerminal map { parent => + new InterpolatedTerminal(parent, t, columnOffset) + } getOrElse t Map(finalTerminal -> trail) case t: Terminal => findTerminalsInInterpolatedString(t, terminalType, trail, parentTerminal, columnOffset) case _ => Map.empty[Terminal, Seq[AstNode]] } - } - def findFirstTerminal: Option[Terminal] = { + def findFirstTerminal: Option[Terminal] = Option(astNode) flatMap { case l: AstList => l.astListAsVector.flatMap(_.findFirstTerminal).headOption - case a: Ast => a.getAttributes.asScala.toMap.flatMap({ case (_, v) => v.findFirstTerminal }).headOption + case a: Ast => a.getAttributes.asScala.toMap.flatMap { case (_, v) => v.findFirstTerminal }.headOption case t: Terminal => Option(t) } - } def findTopLevelMemberAccesses(): Iterable[Ast] = AstTools.findTopLevelMemberAccesses(astNode) def sourceString: String = astNode match { case t: Terminal => t.getSourceString case a: Ast => a.toPrettyString } def astListAsVector: Seq[AstNode] = astNode.asInstanceOf[AstList].asScala.toVector - def womType(wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): WomType = { + def womType(wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): WomType = astNode match { case t: Terminal => t.getSourceString match { @@ -150,10 +161,11 @@ object AstTools { } case _ => throw new UnsupportedOperationException(s"Unexpected WDL type AST: ${astNode.sourceString}") } - } def isOptionalType(a: Ast) = a.getName.equals("OptionalType") - def optionalType(a: Ast, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter) = WomOptionalType(a.getAttribute("innerType").womType(wdlSyntaxErrorFormatter)) + def optionalType(a: Ast, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter) = WomOptionalType( + 
a.getAttribute("innerType").womType(wdlSyntaxErrorFormatter) + ) def isNonEmptyType(a: Ast) = a.getName.equals("NonEmptyType") def nonEmptyType(a: Ast, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter) = { @@ -168,21 +180,33 @@ object AstTools { def astToMap(ast: Ast) = { val mapType = womType.asInstanceOf[WomMapType] - val elements = ast.getAttribute("map").asInstanceOf[AstList].asScala.toVector.map({ kvnode => - val k = kvnode.asInstanceOf[Ast].getAttribute("key").womValue(mapType.keyType, wdlSyntaxErrorFormatter) - val v = kvnode.asInstanceOf[Ast].getAttribute("value").womValue(mapType.valueType, wdlSyntaxErrorFormatter) - k -> v - }).toMap + val elements = ast + .getAttribute("map") + .asInstanceOf[AstList] + .asScala + .toVector + .map { kvnode => + val k = kvnode.asInstanceOf[Ast].getAttribute("key").womValue(mapType.keyType, wdlSyntaxErrorFormatter) + val v = kvnode.asInstanceOf[Ast].getAttribute("value").womValue(mapType.valueType, wdlSyntaxErrorFormatter) + k -> v + } + .toMap WomMap(mapType, elements) } def astToObject(ast: Ast) = { - val elements = ast.getAttribute("map").asInstanceOf[AstList].asScala.toVector.map({ kvnode => - val k = kvnode.asInstanceOf[Ast].getAttribute("key").sourceString - val v = kvnode.asInstanceOf[Ast].getAttribute("value").womValue(WomStringType, wdlSyntaxErrorFormatter) - k -> v - }).toMap + val elements = ast + .getAttribute("map") + .asInstanceOf[AstList] + .asScala + .toVector + .map { kvnode => + val k = kvnode.asInstanceOf[Ast].getAttribute("key").sourceString + val v = kvnode.asInstanceOf[Ast].getAttribute("value").womValue(WomStringType, wdlSyntaxErrorFormatter) + k -> v + } + .toMap WomObject(elements) } @@ -194,9 +218,13 @@ object AstTools { a.womValue(womType, wdlSyntaxErrorFormatter) } else if (subElements.lengthCompare(2) == 0 && womType.isInstanceOf[WomPairType]) { val pairType = womType.asInstanceOf[WomPairType] - WomPair(subElements.head.womValue(pairType.leftType, wdlSyntaxErrorFormatter), subElements(1).womValue(pairType.rightType, wdlSyntaxErrorFormatter)) + WomPair(subElements.head.womValue(pairType.leftType, wdlSyntaxErrorFormatter), + subElements(1).womValue(pairType.rightType, wdlSyntaxErrorFormatter) + ) } else { - throw new SyntaxError(s"Could not convert AST to a $womType (${Option(astNode).map(_.toString).getOrElse("No AST")})") + throw new SyntaxError( + s"Could not convert AST to a $womType (${Option(astNode).map(_.toString).getOrElse("No AST")})" + ) } } @@ -208,31 +236,41 @@ object AstTools { WomSingleFile(t.getSourceString) case t: Terminal if t.getTerminalStr == "string" && womType == WomGlobFileType => WomGlobFile(t.getSourceString) - case t: Terminal if t.getTerminalStr == "integer" && womType == WomIntegerType => WomInteger(t.getSourceString.toInt) - case t: Terminal if t.getTerminalStr == "float" && womType == WomFloatType => WomFloat(t.getSourceString.toDouble) - case t: Terminal if t.getTerminalStr == "boolean" && womType == WomBooleanType => t.getSourceString.toLowerCase match { - case "true" => WomBoolean.True - case "false" => WomBoolean.False - } + case t: Terminal if t.getTerminalStr == "integer" && womType == WomIntegerType => + WomInteger(t.getSourceString.toInt) + case t: Terminal if t.getTerminalStr == "float" && womType == WomFloatType => + WomFloat(t.getSourceString.toDouble) + case t: Terminal if t.getTerminalStr == "boolean" && womType == WomBooleanType => + t.getSourceString.toLowerCase match { + case "true" => WomBoolean.True + case "false" => WomBoolean.False + } // TODO: The below cases, 
ArrayLiteral and MapLiteral, ObjectLiteral are brittle. They recursively call this womValue(). // However, those recursive calls might contain full-on expressions instead of just other literals. This // whole thing ought to be part of the regular expression evaluator, though I imagine that's non-trivial. case a: Ast if a.getName == "ArrayLiteral" && womType.isInstanceOf[WomArrayType] => val arrType = womType.asInstanceOf[WomArrayType] - val elements = a.getAttribute("values").astListAsVector map {node => node.womValue(arrType.memberType, wdlSyntaxErrorFormatter)} + val elements = a.getAttribute("values").astListAsVector map { node => + node.womValue(arrType.memberType, wdlSyntaxErrorFormatter) + } WomArray(arrType, elements) case a: Ast if a.getName == "TupleLiteral" => astTupleToValue(a) case a: Ast if a.getName == "MapLiteral" && womType.isInstanceOf[WomMapType] => astToMap(a) case a: Ast if a.getName == "ObjectLiteral" && womType == WomObjectType => astToObject(a) - case _ => throw new SyntaxError(s"Could not convert AST to a $womType (${Option(astNode).map(_.toString).getOrElse("No AST")})") + case _ => + throw new SyntaxError( + s"Could not convert AST to a $womType (${Option(astNode).map(_.toString).getOrElse("No AST")})" + ) } } } implicit class EnhancedAstSeq(val astSeq: Seq[Ast]) extends AnyVal { - def duplicatesByName: Seq[Ast] = { - astSeq.groupBy(_.getAttribute("name").sourceString).collect({case (_ ,v) if v.lengthCompare(1) > 0 => v.head}).toVector - } + def duplicatesByName: Seq[Ast] = + astSeq + .groupBy(_.getAttribute("name").sourceString) + .collect { case (_, v) if v.lengthCompare(1) > 0 => v.head } + .toVector } object AstNodeName { @@ -261,7 +299,7 @@ object AstTools { def getAst(workflowSource: WorkflowSource, resource: String): Ast = { val parser = new WdlParser() val tokens = parser.lex(workflowSource, resource) - val terminalMap = (tokens.asScala.toVector map {(_, workflowSource)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, workflowSource) }).toMap val syntaxErrorFormatter = WdlSyntaxErrorFormatter(terminalMap) parser.parse(tokens, syntaxErrorFormatter).toAst.asInstanceOf[Ast] } @@ -275,7 +313,7 @@ object AstTools { */ def getAst(wdlFile: Path): Ast = getAst(File(wdlFile).contentAsString, File(wdlFile).name) - def findAsts(ast: AstNode, name: String): Seq[Ast] = { + def findAsts(ast: AstNode, name: String): Seq[Ast] = ast match { case x: Ast => val thisAst = if (x.getName.equals(name)) Seq(x) else Seq.empty[Ast] @@ -284,16 +322,14 @@ object AstTools { case _: Terminal => Seq.empty[Ast] case _ => Seq.empty[Ast] } - } - def findTerminals(ast: AstNode): Seq[Terminal] = { + def findTerminals(ast: AstNode): Seq[Terminal] = ast match { case x: Ast => x.getAttributes.values.asScala.flatMap(findTerminals).toSeq case x: AstList => x.asScala.toVector.flatMap(findTerminals) case x: Terminal => Seq(x) case _ => Seq.empty[Terminal] } - } /** * All MemberAccess ASTs that are not contained in other MemberAccess ASTs @@ -301,14 +337,18 @@ object AstTools { * The reason this returns a collection would be expressions such as "a.b.c + a.b.d", each one of those * would have its own MemberAccess - "a.b.c" and "a.b.d" */ - def findTopLevelMemberAccesses(expr: AstNode): Iterable[Ast] = expr.findAstsWithTrail("MemberAccess").filterNot { - case (_, v) => v exists { - case a: Ast => a.getName == "MemberAccess" - case _ => false + def findTopLevelMemberAccesses(expr: AstNode): Iterable[Ast] = expr + .findAstsWithTrail("MemberAccess") + .filterNot { case (_, v) => + v exists 
{ + case a: Ast => a.getName == "MemberAccess" + case _ => false + } } - }.keys + .keys + + final case class VariableReference private[wdl] (terminal: Terminal, trail: Iterable[AstNode], from: Scope) { - final case class VariableReference private[wdl](terminal: Terminal, trail: Iterable[AstNode], from: Scope) { /** * If this is a simple MemberAccess (both sides are terminals), * find the rhs corresponding to "terminal" in the trail. @@ -318,31 +358,34 @@ object AstTools { * this will return Some(Terminal("b")) */ private lazy val terminalSubIdentifier: Option[Terminal] = trail.collectFirst { - case a: Ast if a.isMemberAccess - && a.getAttribute("lhs") == terminal - && a.getAttribute("rhs").isTerminal => a.getAttribute("rhs").asInstanceOf[Terminal] + case a: Ast + if a.isMemberAccess + && a.getAttribute("lhs") == terminal + && a.getAttribute("rhs").isTerminal => + a.getAttribute("rhs").asInstanceOf[Terminal] } - private lazy val fullVariableReferenceString: String = terminal.getSourceString + (terminalSubIdentifier map { "." + _.getSourceString } getOrElse "") + private lazy val fullVariableReferenceString: String = + terminal.getSourceString + (terminalSubIdentifier map { "." + _.getSourceString } getOrElse "") private def findResolvableSubstring(name: String, previous: String): Option[String] = { - lazy val popLastNamePiece: Option[String] = { + lazy val popLastNamePiece: Option[String] = name.lastIndexOf(".") match { case -1 => None case i => Option(name.substring(0, i)) } - } from.resolveVariable(name) match { - case Some(_: WdlTaskCall | _ : WdlWorkflowCall) => Option(previous) + case Some(_: WdlTaskCall | _: WdlWorkflowCall) => Option(previous) case Some(_) => Option(name) case None => popLastNamePiece.flatMap(findResolvableSubstring(_, name)) } } - lazy val referencedVariableName: String = { - findResolvableSubstring(fullVariableReferenceString, fullVariableReferenceString).getOrElse(fullVariableReferenceString) - } + lazy val referencedVariableName: String = + findResolvableSubstring(fullVariableReferenceString, fullVariableReferenceString).getOrElse( + fullVariableReferenceString + ) } /** @@ -351,16 +394,14 @@ object AstTools { * These represent anything that would need to be have scope resolution done on it to determine the value */ def findVariableReferences(expr: AstNode, from: Scope): Iterable[VariableReference] = { - def isMemberAccessRhs(identifier: Terminal, trail: Seq[AstNode]): Boolean = { + def isMemberAccessRhs(identifier: Terminal, trail: Seq[AstNode]): Boolean = // e.g. for MemberAccess ast representing source code A.B.C, this would return true for only B,C and not A - trail.collect({ case a: Ast if a.isMemberAccess && a.getAttribute("rhs") == identifier => a }).nonEmpty - } - def isFunctionName(identifier: Terminal, trail: Seq[AstNode]): Boolean = { + trail.collect { case a: Ast if a.isMemberAccess && a.getAttribute("rhs") == identifier => a }.nonEmpty + def isFunctionName(identifier: Terminal, trail: Seq[AstNode]): Boolean = trail.lastOption match { case Some(last: Ast) if last.isFunctionCall && last.getAttribute("name") == identifier => true case _ => false } - } /* terminal is the "lefter" lhs * trail is how we arrived to identifier from the original ast @@ -391,9 +432,10 @@ object AstTools { * * There also might be other types of nodes in trail than MemberAccess depending the expression. 
*/ - expr.findTerminalsWithTrail("identifier").collect({ - case (terminal, trail) if !isMemberAccessRhs(terminal, trail) && !isFunctionName(terminal, trail) => VariableReference(terminal, trail, from) - }) + expr.findTerminalsWithTrail("identifier").collect { + case (terminal, trail) if !isMemberAccessRhs(terminal, trail) && !isFunctionName(terminal, trail) => + VariableReference(terminal, trail, from) + } } @@ -410,9 +452,10 @@ object AstTools { val callTaskName = ast.getAttribute("task").asInstanceOf[Terminal] /* Filter out all empty 'input' sections first */ - val callInputSections = ast.findAsts(AstNodeName.Inputs).map {inputSectionAst => + val callInputSections = ast.findAsts(AstNodeName.Inputs).map { inputSectionAst => inputSectionAst.getAttribute("map").findAsts(AstNodeName.IOMapping) match { - case kvPairs: Seq[Ast] if kvPairs.isEmpty => throw new SyntaxError(wdlSyntaxErrorFormatter.emptyInputSection(callTaskName)) + case kvPairs: Seq[Ast] if kvPairs.isEmpty => + throw new SyntaxError(wdlSyntaxErrorFormatter.emptyInputSection(callTaskName)) case _ => inputSectionAst } } @@ -424,34 +467,42 @@ object AstTools { case asts: Seq[Ast] => /* Uses of .head here are assumed by the above code that ensures that there are no empty maps */ val secondInputSectionIOMappings = asts(1).getAttribute("map").astListAsVector - val firstKeyTerminal = secondInputSectionIOMappings.head.asInstanceOf[Ast].getAttribute("key").asInstanceOf[Terminal] + val firstKeyTerminal = + secondInputSectionIOMappings.head.asInstanceOf[Ast].getAttribute("key").asInstanceOf[Terminal] throw new SyntaxError(wdlSyntaxErrorFormatter.multipleInputStatementsOnCall(firstKeyTerminal)) } } - def terminalMap(ast: Ast, source: WorkflowSource) = (findTerminals(ast) map {(_, source)}).toMap + def terminalMap(ast: Ast, source: WorkflowSource) = (findTerminals(ast) map { (_, source) }).toMap - def wdlSectionToStringMap(ast: Ast, node: String, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Map[String, String] = { + def wdlSectionToStringMap(ast: Ast, + node: String, + wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter + ): Map[String, String] = ast.findAsts(node) match { case a if a.isEmpty => Map.empty[String, String] case a if a.lengthCompare(1) == 0 => // Yes, even 'meta {}' and 'parameter_meta {}' sections have RuntimeAttribute ASTs. // In hindsight, this was a poor name for the AST. - a.head.findAsts(AstNodeName.RuntimeAttribute).map({ ast => - val key = ast.getAttribute("key").asInstanceOf[Terminal] - val value = ast.getAttribute("value") - if (!value.isInstanceOf[Terminal] || value.asInstanceOf[Terminal].getTerminalStr != "string") { - // Keys are parsed as identifiers, but values are parsed as expressions. - // For now, only accept expressions that are strings - throw new SyntaxError(wdlSyntaxErrorFormatter.expressionExpectedToBeString(key)) + a.head + .findAsts(AstNodeName.RuntimeAttribute) + .map { ast => + val key = ast.getAttribute("key").asInstanceOf[Terminal] + val value = ast.getAttribute("value") + if (!value.isInstanceOf[Terminal] || value.asInstanceOf[Terminal].getTerminalStr != "string") { + // Keys are parsed as identifiers, but values are parsed as expressions. 
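// (For example, in draft-2 WDL `meta { author: "Broad" }` passes this check,
// while `meta { retries: 3 }` would be rejected below via
// expressionExpectedToBeString, because 3 parses as an integer terminal
// rather than a string terminal.)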
+ // For now, only accept expressions that are strings + throw new SyntaxError(wdlSyntaxErrorFormatter.expressionExpectedToBeString(key)) + } + key.sourceString -> value.sourceString } - key.sourceString -> value.sourceString - }).toMap - case _ => throw new SyntaxError(wdlSyntaxErrorFormatter.expectedAtMostOneSectionPerTask(node, ast.getAttribute("name").asInstanceOf[Terminal])) + .toMap + case _ => + throw new SyntaxError( + wdlSyntaxErrorFormatter.expectedAtMostOneSectionPerTask(node, ast.getAttribute("name").asInstanceOf[Terminal]) + ) } - } - private def combine[T, U](map1: Map[T, Seq[U]], map2: Map[T, Seq[U]]): Map[T, Seq[U]] = { - map1 ++ map2.map{ case (k,v) => k -> (v ++ map1.getOrElse(k, Seq.empty)) } - } + private def combine[T, U](map1: Map[T, Seq[U]], map2: Map[T, Seq[U]]): Map[T, Seq[U]] = + map1 ++ map2.map { case (k, v) => k -> (v ++ map1.getOrElse(k, Seq.empty)) } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/Declaration.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/Declaration.scala index 0d6d5ed49e0..9956562a753 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/Declaration.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/Declaration.scala @@ -6,12 +6,18 @@ import common.validation.ErrorOr.{ErrorOr, ShortCircuitingFlatMap} import wdl.draft2.model import wdl.draft2.model.AstTools.EnhancedAstNode import wdl.draft2.parser.WdlParser.{Ast, AstNode} -import wom.callable.Callable.{InputDefinition, OverridableInputDefinitionWithDefault, OptionalInputDefinition, RequiredInputDefinition} +import wom.callable.Callable.{ + InputDefinition, + OptionalInputDefinition, + OverridableInputDefinitionWithDefault, + RequiredInputDefinition +} import wom.graph._ import wom.graph.expression.{ExposedExpressionNode, ExpressionNode} import wom.types.{WomArrayType, WomOptionalType, WomType} object DeclarationInterface { + /** * Depending on who is asking, the type of a declaration can vary. 
* e.g @@ -30,15 +36,14 @@ object DeclarationInterface { * Array[String] s2 = a.o # Outside the scatter it's an Array[String] * } */ - def relativeWdlType(from: Scope, target: DeclarationInterface, womType: WomType): WomType = { + def relativeWdlType(from: Scope, target: DeclarationInterface, womType: WomType): WomType = target.closestCommonAncestor(from) map { ancestor => - target.ancestrySafe.takeWhile(_ != ancestor).foldLeft(womType){ + target.ancestrySafe.takeWhile(_ != ancestor).foldLeft(womType) { case (acc, _: Scatter) => WomArrayType(acc) case (acc, _: If) => WomOptionalType(acc).flatOptionalType case (acc, _) => acc } } getOrElse womType - } } /** @@ -90,9 +95,8 @@ trait DeclarationInterface extends WdlGraphNodeWithUpstreamReferences { final lazy val upstreamReferences = expression.toSeq.flatMap(_.variableReferences(this)) - override def toString: String = { + override def toString: String = s"[Declaration type=${womType.stableName} name=$unqualifiedName expr=${expression.map(_.toWomString)}]" - } } object Declaration { @@ -114,7 +118,7 @@ object Declaration { final case class IntermediateValueDeclarationNode(expressionNode: ExpressionNode) extends WdlDeclarationNode final case class GraphOutputDeclarationNode(graphOutputNode: GraphOutputNode) extends WdlDeclarationNode - def apply(ast: Ast, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter, parent: Option[Scope]): Declaration = { + def apply(ast: Ast, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter, parent: Option[Scope]): Declaration = Declaration( ast.getAttribute("type").womType(wdlSyntaxErrorFormatter), ast.getAttribute("name").sourceString, @@ -125,31 +129,62 @@ object Declaration { parent, ast ) - } - def buildWdlDeclarationNode(decl: DeclarationInterface, localLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean): ErrorOr[WdlDeclarationNode] = { + def buildWdlDeclarationNode(decl: DeclarationInterface, + localLookup: Map[String, GraphNodePort.OutputPort], + outerLookup: Map[String, GraphNodePort.OutputPort], + preserveIndexForOuterLookups: Boolean + ): ErrorOr[WdlDeclarationNode] = { def declarationAsExpressionNode(wdlExpression: WdlExpression) = { val womExpression = WdlWomExpression(wdlExpression, decl) for { - inputMapping <- WdlWomExpression.findInputsforExpression(womExpression, localLookup, outerLookup, preserveIndexForOuterLookups, decl) - expressionNode <- ExposedExpressionNode.fromInputMapping(decl.womIdentifier, womExpression, decl.womType, inputMapping) + inputMapping <- WdlWomExpression.findInputsforExpression(womExpression, + localLookup, + outerLookup, + preserveIndexForOuterLookups, + decl + ) + expressionNode <- ExposedExpressionNode.fromInputMapping(decl.womIdentifier, + womExpression, + decl.womType, + inputMapping + ) } yield IntermediateValueDeclarationNode(expressionNode) } def workflowOutputAsGraphOutputNode(wdlExpression: WdlExpression) = { val womExpression = WdlWomExpression(wdlExpression, decl) for { - inputMapping <- WdlWomExpression.findInputsforExpression(womExpression, localLookup, outerLookup, preserveIndexForOuterLookups, decl) - graphOutputNode <- ExpressionBasedGraphOutputNode.fromInputMapping(decl.womIdentifier, womExpression, decl.womType, inputMapping) + inputMapping <- WdlWomExpression.findInputsforExpression(womExpression, + localLookup, + outerLookup, + preserveIndexForOuterLookups, + decl + ) + graphOutputNode <- ExpressionBasedGraphOutputNode.fromInputMapping(decl.womIdentifier, + womExpression, + 
decl.womType, + inputMapping + ) } yield GraphOutputDeclarationNode(graphOutputNode) } def asWorkflowInput(inputDefinition: InputDefinition): GraphInputNode = inputDefinition match { - case RequiredInputDefinition(_, womType, _, _) => RequiredGraphInputNode(decl.womIdentifier, womType, decl.womIdentifier.fullyQualifiedName.value) - case OptionalInputDefinition(_, optionalType, _, _) => OptionalGraphInputNode(decl.womIdentifier, optionalType, decl.womIdentifier.fullyQualifiedName.value) - case OverridableInputDefinitionWithDefault(_, womType, default, _, _) => OptionalGraphInputNodeWithDefault(decl.womIdentifier, womType, default, decl.womIdentifier.fullyQualifiedName.value) - case other => throw new RuntimeException(s"Programmer Error! If you got here you probably changed draft 2 to try to do some draft 3 like things, but this draft 2 function isn't set up to produce or handle ${other.getClass.getSimpleName} yet!") + case RequiredInputDefinition(_, womType, _, _) => + RequiredGraphInputNode(decl.womIdentifier, womType, decl.womIdentifier.fullyQualifiedName.value) + case OptionalInputDefinition(_, optionalType, _, _) => + OptionalGraphInputNode(decl.womIdentifier, optionalType, decl.womIdentifier.fullyQualifiedName.value) + case OverridableInputDefinitionWithDefault(_, womType, default, _, _) => + OptionalGraphInputNodeWithDefault(decl.womIdentifier, + womType, + default, + decl.womIdentifier.fullyQualifiedName.value + ) + case other => + throw new RuntimeException( + s"Programmer Error! If you got here you probably changed draft 2 to try to do some draft 3 like things, but this draft 2 function isn't set up to produce or handle ${other.getClass.getSimpleName} yet!" + ) } (decl.asWorkflowInput, decl) match { @@ -165,4 +200,5 @@ case class Declaration(womType: WomType, unqualifiedName: String, expression: Option[WdlExpression], override val parent: Option[Scope], - ast: Ast) extends DeclarationInterface + ast: Ast +) extends DeclarationInterface diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/If.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/If.scala index 9bba2d1797d..e6bcdcce9b8 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/If.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/If.scala @@ -8,7 +8,9 @@ import wdl.draft2.parser.WdlParser.Ast * @param index Index of the if block. The index is computed during tree generation to reflect WDL scope structure. * @param condition WDL Expression representing the condition in which to execute this If-block */ -case class If(index: Int, condition: WdlExpression, ast: Ast) extends WdlGraphNodeWithUpstreamReferences with WorkflowScoped { +case class If(index: Int, condition: WdlExpression, ast: Ast) + extends WdlGraphNodeWithUpstreamReferences + with WorkflowScoped { val unqualifiedName = s"${If.FQNIdentifier}_$index" override def appearsInFqn = false @@ -23,7 +25,6 @@ object If { /** * @param index Index of the if block. The index is computed during tree generation to reflect WDL scope structure. 
*/ - def apply(ast: Ast, index: Int): If = { + def apply(ast: Ast, index: Int): If = new If(index, WdlExpression(ast.getAttribute("expression")), ast) - } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scatter.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scatter.scala index 9613b27bafb..aac86ce859c 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scatter.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scatter.scala @@ -8,7 +8,9 @@ import wdl.draft2.parser.WdlParser.{Ast, Terminal} * @param item Item which this block is scattering over * @param collection Wdl Expression corresponding to the collection this scatter is looping through */ -case class Scatter(index: Int, item: String, collection: WdlExpression, ast: Ast) extends WdlGraphNodeWithUpstreamReferences with WorkflowScoped { +case class Scatter(index: Int, item: String, collection: WdlExpression, ast: Ast) + extends WdlGraphNodeWithUpstreamReferences + with WorkflowScoped { val unqualifiedName = s"${Scatter.FQNIdentifier}_$index" override def appearsInFqn = false diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scope.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scope.scala index caeec3ef3a8..243a1235b20 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scope.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/Scope.scala @@ -26,21 +26,19 @@ trait Scope { */ def parent: Option[Scope] = _parent private var _parent: Option[Scope] = None - def parent_=[Child <: Scope](scope: Scope): Unit = { + def parent_=[Child <: Scope](scope: Scope): Unit = if (this._parent.isEmpty) this._parent = Option(scope) else throw new UnsupportedOperationException("parent is write-once") - } /** * Child scopes, in the order that they appear in the source code */ def children: Seq[Scope] = _children private var _children: Seq[Scope] = Seq.empty - def children_=[Child <: Scope](children: Seq[Child]): Unit = { + def children_=[Child <: Scope](children: Seq[Child]): Unit = if (this._children.isEmpty) { this._children = children } else throw new UnsupportedOperationException("children is write-once") - } lazy val childGraphNodes: Set[WdlGraphNode] = children.toSet.filterByType[WdlGraphNode] lazy val childGraphNodesSorted: Checked[List[WdlGraphNode]] = { @@ -56,7 +54,8 @@ trait Scope { case Left(_) => val cycleNodes = childGraphNodes.filter(cgn => cgn.upstreamAncestry.contains(cgn)).map(_.toString).toList.sorted s"This workflow contains cyclic dependencies containing these edges: ${cycleNodes.mkString(", ")}".invalidNelCheck - case Right(topologicalOrder) => topologicalOrder.toList.map(_.value).filter(childGraphNodes.contains).validNelCheck + case Right(topologicalOrder) => + topologicalOrder.toList.map(_.value).filter(childGraphNodes.contains).validNelCheck } } @@ -65,11 +64,10 @@ trait Scope { */ def namespace: WdlNamespace = _namespace private var _namespace: WdlNamespace = _ - def namespace_=[Child <: WdlNamespace](ns: WdlNamespace): Unit = { + def namespace_=[Child <: WdlNamespace](ns: WdlNamespace): Unit = if (Option(this._namespace).isEmpty) { this._namespace = ns } else throw new UnsupportedOperationException("namespace is write-once") - } /** * Seq(parent, grandparent, great grandparent, ..., WdlNamespace) @@ -82,7 +80,7 @@ trait Scope { // This is needed in one specific case during WdlNamespace validation // where we need to compute the ancestries at a point where the full // parent branch has not been set yet. 
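// Illustrative sketch of the ancestry walk this comment refers to: each scope
// holds a single write-once parent link, and ancestry is simply that chain
// followed up to the root namespace, e.g. Seq(parent, grandparent, ...,
// WdlNamespace). The MiniScope name below is hypothetical; it mirrors the
// recursion in ancestrySafe, which must also tolerate a parent link that has
// not been wired up yet.
final case class MiniScope(name: String, parent: Option[MiniScope]) {
  def ancestors: Seq[MiniScope] = parent match {
    case Some(p) => p +: p.ancestors
    case None => Seq.empty
  }
}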
- private [wdl] def ancestrySafe: Seq[Scope] = parent match { + private[wdl] def ancestrySafe: Seq[Scope] = parent match { case Some(p) => Seq(p) ++ p.ancestrySafe case None => Seq.empty[Scope] } @@ -95,7 +93,7 @@ trait Scope { /** * Descendants that are Calls */ - lazy val calls: Set[WdlCall] = descendants.collect({ case c: WdlCall => c }) + lazy val calls: Set[WdlCall] = descendants.collect { case c: WdlCall => c } lazy val taskCalls: Set[WdlTaskCall] = calls collect { case c: WdlTaskCall => c } @@ -104,28 +102,27 @@ trait Scope { /** * Descendants that are Scatters */ - lazy val scatters: Set[Scatter] = descendants.collect({ case s: Scatter => s }) + lazy val scatters: Set[Scatter] = descendants.collect { case s: Scatter => s } /** * Declarations within this Scope, in the order that they appear in source code */ - lazy val declarations: Seq[Declaration] = children.collect({ case d: Declaration => d}) + lazy val declarations: Seq[Declaration] = children.collect { case d: Declaration => d } /** * String identifier for this scope. this.namespace.resolve(this.fullyQualifiedName) == this */ - lazy val fullyQualifiedName = { + lazy val fullyQualifiedName = (ancestry.filter(_.appearsInFqn).map(_.unqualifiedName).reverse :+ unqualifiedName).mkString(".") - } - + lazy val womIdentifier = { /* Limit the fully qualified name to the parent workflow, if it exists. * The reason for this is if this scope comes from an imported namespace, * the name of the namespace will be part of the FQN. While useful to construct the hierarchy and dependencies, * this is not desirable when being used by underlying engines. - */ - val womFullyQualifiedName = ancestry.collectFirst { - case workflow: WdlWorkflow => locallyQualifiedName(workflow) + */ + val womFullyQualifiedName = ancestry.collectFirst { case workflow: WdlWorkflow => + locallyQualifiedName(workflow) } getOrElse fullyQualifiedName WomIdentifier(unqualifiedName, womFullyQualifiedName) @@ -141,31 +138,30 @@ trait Scope { * o.locallyQualified(a) = "a.o" * o.locallyQualified(w) = o.fullyQualifiedName = "w.a.o" */ - def locallyQualifiedName(relativeTo: Scope): String = { + def locallyQualifiedName(relativeTo: Scope): String = // Take ancestries until we reach relativeTo - (ancestry.takeWhile(_ != relativeTo) + (ancestry + .takeWhile(_ != relativeTo) // we want relativeTo in the lqn but it's been rejected by takeWhile wo add it back .:+(relativeTo) - // Reverse because we start from the scope and climb up the ancestry tree but in the end we want a top-bottom lqn + // Reverse because we start from the scope and climb up the ancestry tree but in the end we want a top-bottom lqn .reverse // Get rid of scatters, ifs... because we don't want them here .filter(_.appearsInFqn) // Take the unqualifiedName of each scope .map(_.unqualifiedName) - // Add the current scope + // Add the current scope :+ unqualifiedName) // Concatenate all of this .mkString(".") - } /** * String identifier for this scope, with hidden scope information. 
* * this.namespace.resolve(this.fullyQualifiedNameWithIndexScopes) == this */ - def fullyQualifiedNameWithIndexScopes = { + def fullyQualifiedNameWithIndexScopes = (Seq(this) ++ ancestry).reverse.map(_.unqualifiedName).filter(_.nonEmpty).mkString(".") - } /** * Given another scope, returns the closest common ancestor between the two scopes, @@ -180,7 +176,7 @@ trait Scope { def closestCommonScatter(other: Scope): Option[Scatter] = { val otherAncestry = other.ancestrySafe - ancestrySafe collectFirst { + ancestrySafe collectFirst { case s: Scatter if otherAncestry.contains(s) => s } } @@ -190,10 +186,11 @@ trait Scope { * until it finds a GraphNode with the `name` as its unqualifiedName */ def resolveVariable(name: String, relativeTo: Scope = this, ignoreLocal: Boolean = false): Option[WdlGraphNode] = { - val siblingScopes = if (children.contains(relativeTo)) - // For declarations, only resolve to declarations that are lexically before this declaration - children.dropRight(children.size - children.indexOf(relativeTo) ) - else children + val siblingScopes = + if (children.contains(relativeTo)) + // For declarations, only resolve to declarations that are lexically before this declaration + children.dropRight(children.size - children.indexOf(relativeTo)) + else children val siblingCallOutputs = siblingScopes flatMap { case c: WdlTaskCall => c.outputs @@ -203,12 +200,13 @@ trait Scope { val localLookup = if (ignoreLocal) Seq.empty - else (siblingScopes ++ siblingCallOutputs) collect { - case d: Declaration if d.unqualifiedName == name => d - case c: WdlTaskCall if c.unqualifiedName == name => c - case co: CallOutput if co.unqualifiedName == name => co - case o: TaskOutput if o.unqualifiedName == name => o - } + else + (siblingScopes ++ siblingCallOutputs) collect { + case d: Declaration if d.unqualifiedName == name => d + case c: WdlTaskCall if c.unqualifiedName == name => c + case co: CallOutput if co.unqualifiedName == name => co + case o: TaskOutput if o.unqualifiedName == name => o + } // If this is a scatter and the variable being resolved is the item val scatterLookup = Seq(this) collect { @@ -237,39 +235,63 @@ trait Scope { wdlFunctions: WdlFunctions[WomValue], outputResolver: OutputResolver = NoOutputResolver, shards: Map[Scatter, Int] = Map.empty[Scatter, Int], - relativeTo: Scope = this): String => WomValue = { + relativeTo: Scope = this + ): String => WomValue = { def handleScatterResolution(scatter: Scatter): Try[WomValue] = { // This case will happen if `name` references a Scatter.item (i.e. 
`x` in expression scatter(x in y) {...}) - val evaluatedCollection = scatter.collection.evaluate(scatter.lookupFunction(knownInputs, wdlFunctions, outputResolver, shards), wdlFunctions) + val evaluatedCollection = scatter.collection.evaluate( + scatter.lookupFunction(knownInputs, wdlFunctions, outputResolver, shards), + wdlFunctions + ) val scatterShard = shards.get(scatter) (evaluatedCollection, scatterShard) match { case (Success(WomArrayLike(array)), Some(shard)) if 0 <= shard && shard < array.value.size => array.value.lift(shard) match { case Some(v) => Success(v) - case None => Failure(new VariableLookupException(s"Could not find value for shard index $shard in scatter collection $array")) + case None => + Failure( + new VariableLookupException(s"Could not find value for shard index $shard in scatter collection $array") + ) } case (Success(WomArrayLike(array)), Some(shard)) => - Failure(new VariableLookupException(s"Scatter expression (${scatter.collection.toWomString}) evaluated to an array of ${array.value.size} elements, but element $shard was requested.")) + Failure( + new VariableLookupException( + s"Scatter expression (${scatter.collection.toWomString}) evaluated to an array of ${array.value.size} elements, but element $shard was requested." + ) + ) case (Success(_: WomArrayLike), None) => - Failure(ScatterIndexNotFound(s"Could not find the shard mapping to this scatter ${scatter.fullyQualifiedName}")) + Failure( + ScatterIndexNotFound(s"Could not find the shard mapping to this scatter ${scatter.fullyQualifiedName}") + ) case (Success(value: WomValue), _) => - Failure(new VariableLookupException(s"Expected scatter expression (${scatter.collection.toWomString}) to evaluate to an Array. Instead, got a $value")) + Failure( + new VariableLookupException( + s"Expected scatter expression (${scatter.collection.toWomString}) to evaluate to an Array. 
Instead, got a $value" + ) + ) case (failure @ Failure(_), _) => failure case (_, None) => - Failure(new VariableLookupException(s"Could not find a shard for scatter block with expression (${scatter.collection.toWomString})")) + Failure( + new VariableLookupException( + s"Could not find a shard for scatter block with expression (${scatter.collection.toWomString})" + ) + ) } } def fromOutputs(node: WdlGraphNode) = { - def withShard(s: Scatter) = { + def withShard(s: Scatter) = shards.get(s) map { shard => outputResolver(node, Option(shard)) } getOrElse { - Failure(ScatterIndexNotFound(s"Could not find a shard for scatter block with expression (${s.collection.toWomString})")) + Failure( + ScatterIndexNotFound( + s"Could not find a shard for scatter block with expression (${s.collection.toWomString})" + ) + ) } - } this match { case s: Scatter if descendants.contains(node) => withShard(s) @@ -282,18 +304,22 @@ trait Scope { } def handleDeclarationEvaluation(declaration: DeclarationInterface): Try[WomValue] = { - def declarationExcludingLookup(lookup: String => WomValue): String => WomValue = name => { - if (name.equals(declaration.unqualifiedName)) throw new IllegalArgumentException(s"Declaration for '$name' refers to its own value") + def declarationExcludingLookup(lookup: String => WomValue): String => WomValue = name => + if (name.equals(declaration.unqualifiedName)) + throw new IllegalArgumentException(s"Declaration for '$name' refers to its own value") else lookup(name) - } def evaluate = declaration.expression match { case Some(e) => - val parentLookup = declaration.parent.map(_.lookupFunction(knownInputs, wdlFunctions, outputResolver, shards)).getOrElse(NoLookup) + val parentLookup = declaration.parent + .map(_.lookupFunction(knownInputs, wdlFunctions, outputResolver, shards)) + .getOrElse(NoLookup) e.evaluate(declarationExcludingLookup(parentLookup), wdlFunctions) case None => - Failure(new VariableLookupException(s"Declaration ${declaration.fullyQualifiedName} does not have an expression")) + Failure( + new VariableLookupException(s"Declaration ${declaration.fullyQualifiedName} does not have an expression") + ) } fromOutputs(declaration) recoverWith { case _ => evaluate } @@ -308,14 +334,32 @@ trait Scope { knownInputs.get(scope.fullyQualifiedName) map Success.apply getOrElse { Failure(new VariableLookupException(s"Could not find value in inputs map.")) } - case callOutput: CallOutput => handleCallEvaluation(callOutput.call) flatMap { - case outputs: WdlCallOutputsObject => outputs.outputs.get(callOutput.unqualifiedName).map(Success(_)).getOrElse(Failure(new Exception(s"No output ${callOutput.unqualifiedName} found in ${callOutput.call.unqualifiedName}'s outputs"))) - case other => Failure(new Exception(s"Call outputs unexpectedly evaluated to a ${other.womType.stableName}")) - } + case callOutput: CallOutput => + handleCallEvaluation(callOutput.call) flatMap { + case outputs: WdlCallOutputsObject => + outputs.outputs + .get(callOutput.unqualifiedName) + .map(Success(_)) + .getOrElse( + Failure( + new Exception( + s"No output ${callOutput.unqualifiedName} found in ${callOutput.call.unqualifiedName}'s outputs" + ) + ) + ) + case other => + Failure(new Exception(s"Call outputs unexpectedly evaluated to a ${other.womType.stableName}")) + } case call: WdlCall => handleCallEvaluation(call) case scatter: Scatter => handleScatterResolution(scatter) - case declaration: DeclarationInterface if declaration.expression.isDefined => handleDeclarationEvaluation(declaration) - case scope => 
Failure(new VariableLookupException(s"Variable $name resolved to scope ${scope.fullyQualifiedName} but cannot be evaluated.")) + case declaration: DeclarationInterface if declaration.expression.isDefined => + handleDeclarationEvaluation(declaration) + case scope => + Failure( + new VariableLookupException( + s"Variable $name resolved to scope ${scope.fullyQualifiedName} but cannot be evaluated." + ) + ) } getOrElse { Failure(VariableNotFoundException(name)) } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/TaskOutput.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/TaskOutput.scala index 569ccc84cf1..34df17efad4 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/TaskOutput.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/TaskOutput.scala @@ -14,11 +14,18 @@ object TaskOutput { TaskOutput(name, womType, expression, ast, parent) } - def buildWomOutputDefinition(taskOutput: TaskOutput) = { - OutputDefinition(LocalName(taskOutput.unqualifiedName), taskOutput.womType, WdlWomExpression(taskOutput.requiredExpression, from = taskOutput)) - } + def buildWomOutputDefinition(taskOutput: TaskOutput) = + OutputDefinition(LocalName(taskOutput.unqualifiedName), + taskOutput.womType, + WdlWomExpression(taskOutput.requiredExpression, from = taskOutput) + ) } -final case class TaskOutput(unqualifiedName: String, womType: WomType, requiredExpression: WdlExpression, ast: Ast, override val parent: Option[Scope]) extends Output { +final case class TaskOutput(unqualifiedName: String, + womType: WomType, + requiredExpression: WdlExpression, + ast: Ast, + override val parent: Option[Scope] +) extends Output { lazy val womOutputDefinition: OutputDefinition = TaskOutput.buildWomOutputDefinition(this) } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala index 8d1973bd109..42723ee9a97 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala @@ -16,7 +16,8 @@ object WdlCall { namespaces: Seq[WdlNamespace], tasks: Seq[WdlTask], workflows: Seq[WdlWorkflow], - wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): WdlCall = { + wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter + ): WdlCall = { val alias: Option[String] = ast.getAttribute("alias") match { case x: Terminal => Option(x.getSourceString) case _ => None @@ -36,14 +37,12 @@ object WdlCall { } } - private def processCallInput(ast: Ast, - wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Map[String, WdlExpression] = { + private def processCallInput(ast: Ast, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Map[String, WdlExpression] = AstTools.callInputSectionIOMappings(ast, wdlSyntaxErrorFormatter) map { a => val key = a.getAttribute("key").sourceString val expression = new WdlExpression(a.getAttribute("value")) (key, expression) } toMap - } } /** @@ -61,7 +60,9 @@ object WdlCall { sealed abstract class WdlCall(val alias: Option[String], val callable: WdlCallable, val inputMappings: Map[String, WdlExpression], - val ast: Ast) extends WdlGraphNodeWithInputs with WorkflowScoped { + val ast: Ast +) extends WdlGraphNodeWithInputs + with WorkflowScoped { val unqualifiedName: String = alias getOrElse callable.unqualifiedName def callType: String @@ -83,7 +84,8 @@ sealed abstract class WdlCall(val alias: Option[String], * * NB Only used in tests, womtool and some external tools (eg FC's workflow input enumerator) */ - def workflowInputs: 
Seq[InputDefinition] = declarations.filterNot(i => inputMappings.contains(i.unqualifiedName)).flatMap(_.asWorkflowInput) + def workflowInputs: Seq[InputDefinition] = + declarations.filterNot(i => inputMappings.contains(i.unqualifiedName)).flatMap(_.asWorkflowInput) override def toString: String = s"[Call $fullyQualifiedName]" @@ -98,12 +100,13 @@ sealed abstract class WdlCall(val alias: Option[String], def evaluateTaskInputs(inputs: WorkflowCoercedInputs, wdlFunctions: WdlFunctions[WomValue], outputResolver: OutputResolver = NoOutputResolver, - shards: Map[Scatter, Int] = Map.empty[Scatter, Int]): Try[EvaluatedTaskInputs] = { + shards: Map[Scatter, Int] = Map.empty[Scatter, Int] + ): Try[EvaluatedTaskInputs] = { type EvaluatedDeclarations = Map[Declaration, Try[WomValue]] def doDeclaration(currentInputs: EvaluatedDeclarations, declaration: Declaration): EvaluatedDeclarations = { - val newInputs = inputs ++ currentInputs.collect{ - case (decl, Success(value)) => decl.fullyQualifiedName -> value + val newInputs = inputs ++ currentInputs.collect { case (decl, Success(value)) => + decl.fullyQualifiedName -> value } val lookup = lookupFunction(newInputs, wdlFunctions, outputResolver, shards, relativeTo = declaration) val evaluatedDeclaration = Try(lookup(declaration.unqualifiedName)) @@ -141,21 +144,30 @@ sealed abstract class WdlCall(val alias: Option[String], wdlFunctions: WdlFunctions[WomValue], outputResolver: OutputResolver = NoOutputResolver, shards: Map[Scatter, Int] = Map.empty[Scatter, Int], - relativeTo: Scope = this): (String => WomValue) = + relativeTo: Scope = this + ): (String => WomValue) = (name: String) => { val inputMappingsWithMatchingName = Try( - inputMappings.getOrElse(name, throw new Exception(s"Could not find $name in input section of call $fullyQualifiedName")) + inputMappings.getOrElse( + name, + throw new Exception(s"Could not find $name in input section of call $fullyQualifiedName") + ) ) val declarationsWithMatchingName = Try( - declarations.find(_.unqualifiedName == name).getOrElse(throw new Exception(s"No declaration named $name for call $fullyQualifiedName")) + declarations + .find(_.unqualifiedName == name) + .getOrElse(throw new Exception(s"No declaration named $name for call $fullyQualifiedName")) ) val inputMappingsLookup = for { inputExpr <- inputMappingsWithMatchingName parent <- Try(parent.getOrElse(throw new Exception(s"Call $unqualifiedName has no parent"))) - evaluatedExpr <- inputExpr.evaluate(parent.lookupFunction(inputs, wdlFunctions, outputResolver, shards, relativeTo), wdlFunctions) + evaluatedExpr <- inputExpr.evaluate( + parent.lookupFunction(inputs, wdlFunctions, outputResolver, shards, relativeTo), + wdlFunctions + ) // Coerce the input into the declared type: declaration <- declarationsWithMatchingName coerced <- declaration.womType.coerceRawValue(evaluatedExpr) @@ -174,7 +186,10 @@ sealed abstract class WdlCall(val alias: Option[String], val declarationExprLookup = for { declaration <- declarationsWithMatchingName declarationExpr <- Try(declaration.expression.getOrElse(throw VariableNotFoundException(declaration))) - evaluatedExpr <- declarationExpr.evaluate(lookupFunction(inputs, wdlFunctions, outputResolver, shards, relativeTo), wdlFunctions) + evaluatedExpr <- declarationExpr.evaluate( + lookupFunction(inputs, wdlFunctions, outputResolver, shards, relativeTo), + wdlFunctions + ) } yield evaluatedExpr val taskParentResolution = for { @@ -185,11 +200,11 @@ sealed abstract class WdlCall(val alias: Option[String], val resolutions = 
Seq(inputMappingsLookup, declarationExprLookup, declarationLookup, taskParentResolution) resolutions collectFirst { case Success(value) => value } getOrElse { - resolutions.toList.flatMap({ + resolutions.toList.flatMap { case Failure(_: VariableNotFoundException) => None case Failure(ex) => Option(ex) // Only take failures that are not VariableNotFoundExceptions case _ => None - }) match { + } match { case Nil => throw VariableNotFoundException(name) case exs => throw new VariableLookupException(name, exs) } @@ -197,9 +212,17 @@ sealed abstract class WdlCall(val alias: Option[String], } } -case class WdlTaskCall(override val alias: Option[String], task: WdlTask, override val inputMappings: Map[String, WdlExpression], override val ast: Ast) extends WdlCall(alias, task, inputMappings, ast) { +case class WdlTaskCall(override val alias: Option[String], + task: WdlTask, + override val inputMappings: Map[String, WdlExpression], + override val ast: Ast +) extends WdlCall(alias, task, inputMappings, ast) { override val callType = "call" } -case class WdlWorkflowCall(override val alias: Option[String], calledWorkflow: WdlWorkflow, override val inputMappings: Map[String, WdlExpression], override val ast: Ast) extends WdlCall(alias, calledWorkflow, inputMappings, ast) { +case class WdlWorkflowCall(override val alias: Option[String], + calledWorkflow: WdlWorkflow, + override val inputMappings: Map[String, WdlExpression], + override val ast: Ast +) extends WdlCall(alias, calledWorkflow, inputMappings, ast) { override val callType = "workflow" } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala index 14e32d76ce0..0784371cdfe 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala @@ -47,9 +47,11 @@ object WdlExpression { def isMapLiteral: Boolean = ast.getName == "MapLiteral" def isObjectLiteral: Boolean = ast.getName == "ObjectLiteral" def isArrayOrMapLookup: Boolean = ast.getName == "ArrayOrMapLookup" - def params: Vector[AstNode] = Option(ast.getAttribute("params")).map(_.asInstanceOf[AstList].asScala.toVector).getOrElse(Vector.empty) + def params: Vector[AstNode] = + Option(ast.getAttribute("params")).map(_.asInstanceOf[AstList].asScala.toVector).getOrElse(Vector.empty) def name = ast.getAttribute("name").asInstanceOf[Terminal].getSourceString - def isFunctionCallWithFirstParameterBeingFile = ast.isFunctionCall && ast.params.nonEmpty && WdlFunctionsWithFirstParameterBeingFile.contains(ast.functionName) + def isFunctionCallWithFirstParameterBeingFile = + ast.isFunctionCall && ast.params.nonEmpty && WdlFunctionsWithFirstParameterBeingFile.contains(ast.functionName) def isGlobFunctionCall = ast.isFunctionCall && ast.params.size == 1 && "glob".equals(ast.functionName) } @@ -76,9 +78,19 @@ object WdlExpression { type ScopedLookupFunction = String => WomValue val BinaryOperators = Set( - "Add", "Subtract", "Multiply", "Divide", "Remainder", - "GreaterThan", "LessThan", "GreaterThanOrEqual", "LessThanOrEqual", - "Equals", "NotEquals", "LogicalAnd", "LogicalOr" + "Add", + "Subtract", + "Multiply", + "Divide", + "Remainder", + "GreaterThan", + "LessThan", + "GreaterThanOrEqual", + "LessThanOrEqual", + "Equals", + "NotEquals", + "LogicalAnd", + "LogicalOr" ) val UnaryOperators = Set("LogicalNot", "UnaryPlus", "UnaryNegation") @@ -100,24 +112,33 @@ object WdlExpression { def evaluate(ast: AstNode, lookup: 
ScopedLookupFunction, functions: WdlFunctions[WomValue]): Try[WomValue] = ValueEvaluator(lookup, functions).evaluate(ast) - def evaluateFiles(ast: AstNode, lookup: ScopedLookupFunction, functions: WdlFunctions[WomValue], coerceTo: WomType = WomAnyType) = + def evaluateFiles(ast: AstNode, + lookup: ScopedLookupFunction, + functions: WdlFunctions[WomValue], + coerceTo: WomType = WomAnyType + ) = FileEvaluator(ValueEvaluator(lookup, functions), coerceTo).evaluate(ast) - def evaluateType(ast: AstNode, lookup: (String) => WomType, functions: WdlFunctions[WomType], from: Option[Scope] = None) = + def evaluateType(ast: AstNode, + lookup: (String) => WomType, + functions: WdlFunctions[WomType], + from: Option[Scope] = None + ) = TypeEvaluator(lookup, functions, from).evaluate(ast) def fromString(expression: WorkflowSource): WdlExpression = { val tokens = parser.lex(expression, "string") - val terminalMap = (tokens.asScala.toVector map {(_, expression)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, expression) }).toMap val parseTree = parser.parse_e(tokens, WdlSyntaxErrorFormatter(terminalMap)) new WdlExpression(parseTree.toAst) } - def toString(ast: AstNode, highlighter: SyntaxHighlighter = NullSyntaxHighlighter): String = { + def toString(ast: AstNode, highlighter: SyntaxHighlighter = NullSyntaxHighlighter): String = ast match { - case t: Terminal if Seq("identifier", "integer", "float", "boolean").contains(t.getTerminalStr) => t.getSourceString + case t: Terminal if Seq("identifier", "integer", "float", "boolean").contains(t.getTerminalStr) => + t.getSourceString case t: Terminal if t.getTerminalStr == "string" => s""""${t.getSourceString.replaceAll("\"", "\\" + "\"")}"""" - case a:Ast if a.isBinaryOperator => + case a: Ast if a.isBinaryOperator => val lhs = Option(a.getAttribute("lhs")).map(toString(_, highlighter)).getOrElse("") val rhs = Option(a.getAttribute("rhs")).map(toString(_, highlighter)).getOrElse("") a.getName match { @@ -148,10 +169,10 @@ object WdlExpression { val f = toString(ifFalse, highlighter) s"if $c then $t else $f" case a: Ast if a.isArrayLiteral => - val evaluatedElements = a.getAttribute("values").astListAsVector map {x => toString(x, highlighter)} + val evaluatedElements = a.getAttribute("values").astListAsVector map { x => toString(x, highlighter) } s"[${evaluatedElements.mkString(", ")}]" case a: Ast if a.isTupleLiteral => - val evaluatedElements = a.getAttribute("values").astListAsVector map { x => toString(x, highlighter)} + val evaluatedElements = a.getAttribute("values").astListAsVector map { x => toString(x, highlighter) } s"(${evaluatedElements.mkString(", ")})" case a: Ast if a.isMapLiteral => val evaluatedMap = a.getAttribute("map").astListAsVector map { kv => @@ -179,7 +200,6 @@ object WdlExpression { val params = a.params map { a => toString(a, highlighter) } s"${highlighter.function(a.name)}(${params.mkString(", ")})" } - } } case class WdlExpression(ast: AstNode) extends WomValue { @@ -188,23 +208,27 @@ case class WdlExpression(ast: AstNode) extends WomValue { def evaluate(lookup: ScopedLookupFunction, functions: WdlFunctions[WomValue]): Try[WomValue] = WdlExpression.evaluate(ast, lookup, functions) - def evaluateFiles(lookup: ScopedLookupFunction, functions: WdlFunctions[WomValue], coerceTo: WomType): Try[Seq[WomFile]] = + def evaluateFiles(lookup: ScopedLookupFunction, + functions: WdlFunctions[WomValue], + coerceTo: WomType + ): Try[Seq[WomFile]] = WdlExpression.evaluateFiles(ast, lookup, functions, coerceTo) - def evaluateType(lookup: 
String => WomType, functions: WdlFunctions[WomType], from: Option[Scope] = None): Try[WomType] = + def evaluateType(lookup: String => WomType, + functions: WdlFunctions[WomType], + from: Option[Scope] = None + ): Try[WomType] = WdlExpression.evaluateType(ast, lookup, functions, from) def containsFunctionCall = ast.containsFunctionCalls - def toString(highlighter: SyntaxHighlighter): String = { + def toString(highlighter: SyntaxHighlighter): String = WdlExpression.toString(ast, highlighter) - } override def toWomString: String = toString(NullSyntaxHighlighter) - def prerequisiteCallNames: Set[String] = { + def prerequisiteCallNames: Set[String] = this.topLevelMemberAccesses map { _.lhsString } - } def topLevelMemberAccesses: Set[MemberAccess] = AstTools.findTopLevelMemberAccesses(ast) map { MemberAccess(_) } toSet def variableReferences(from: Scope): Iterable[VariableReference] = AstTools.findVariableReferences(ast, from) } @@ -220,7 +244,8 @@ final case class WdlWomExpression(wdlExpression: WdlExpression, from: Scope) ext override def inputs: Set[String] = wdlExpression.variableReferences(from) map { _.referencedVariableName } toSet override def evaluateValue(variableValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { - lazy val wdlFunctions = WdlStandardLibraryFunctions.fromIoFunctionSet(ioFunctionSet, FileSizeLimitationConfig.fileSizeLimitationConfig) + lazy val wdlFunctions = + WdlStandardLibraryFunctions.fromIoFunctionSet(ioFunctionSet, FileSizeLimitationConfig.fileSizeLimitationConfig) wdlExpression.evaluate(variableValues.apply, wdlFunctions).toErrorOr } @@ -229,26 +254,42 @@ final case class WdlWomExpression(wdlExpression: WdlExpression, from: Scope) ext // case in the brave new WOM-world. wdlExpression.evaluateType(inputTypes.apply, new WdlStandardLibraryFunctionsType, Option(from)).toErrorOr - override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = { + override def evaluateFiles(inputTypes: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = { lazy val wdlFunctions = new WdlStandardLibraryFunctions { - override def readFile(path: String, sizeLimit: Int): String = Await.result(ioFunctionSet.readFile(path, Option(sizeLimit), failOnOverflow = true), Duration.Inf) + override def readFile(path: String, sizeLimit: Int): String = + Await.result(ioFunctionSet.readFile(path, Option(sizeLimit), failOnOverflow = true), Duration.Inf) - override def writeFile(path: String, content: String): Try[WomFile] = Try(Await.result(ioFunctionSet.writeFile(path, content), Duration.Inf)) + override def writeFile(path: String, content: String): Try[WomFile] = Try( + Await.result(ioFunctionSet.writeFile(path, content), Duration.Inf) + ) - override def stdout(params: Seq[Try[WomValue]]): Try[WomFile] = Success(WomSingleFile(ioFunctionSet.pathFunctions.stdout)) + override def stdout(params: Seq[Try[WomValue]]): Try[WomFile] = Success( + WomSingleFile(ioFunctionSet.pathFunctions.stdout) + ) - override def stderr(params: Seq[Try[WomValue]]): Try[WomFile] = Success(WomSingleFile(ioFunctionSet.pathFunctions.stderr)) + override def stderr(params: Seq[Try[WomValue]]): Try[WomFile] = Success( + WomSingleFile(ioFunctionSet.pathFunctions.stderr) + ) override def globHelper(pattern: String): Seq[String] = Await.result(ioFunctionSet.glob(pattern), Duration.Inf) - override def size(params: Seq[Try[WomValue]]): Try[WomFloat] = Failure(new 
Exception("You shouldn't call 'size' from a FileEvaluator")) + override def size(params: Seq[Try[WomValue]]): Try[WomFloat] = Failure( + new Exception("You shouldn't call 'size' from a FileEvaluator") + ) - override protected val fileSizeLimitationConfig: FileSizeLimitationConfig = FileSizeLimitationConfig.fileSizeLimitationConfig + override protected val fileSizeLimitationConfig: FileSizeLimitationConfig = + FileSizeLimitationConfig.fileSizeLimitationConfig } - wdlExpression.evaluateFiles(inputTypes.apply, wdlFunctions, coerceTo).toErrorOr.map(_.toSet[WomFile] map { file => - FileEvaluation(file, optional = areAllFileTypesInWomTypeOptional(coerceTo), secondary = false) - }) + wdlExpression + .evaluateFiles(inputTypes.apply, wdlFunctions, coerceTo) + .toErrorOr + .map(_.toSet[WomFile] map { file => + FileEvaluation(file, optional = areAllFileTypesInWomTypeOptional(coerceTo), secondary = false) + }) } } @@ -263,7 +304,8 @@ object WdlWomExpression { innerLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean, - owningScope: Scope): ErrorOr[Map[String, GraphNodePort.OutputPort]] = { + owningScope: Scope + ): ErrorOr[Map[String, GraphNodePort.OutputPort]] = { def resolveVariable(v: AstTools.VariableReference): ErrorOr[(String, GraphNodePort.OutputPort)] = { val name = v.referencedVariableName @@ -272,8 +314,11 @@ object WdlWomExpression { // If we can find the value locally, use it. // It might be a local value or it might be an OGIN already created by another Node for an outerLookup. Valid(name -> port) - case (None, Some(port)) => Valid(name -> OuterGraphInputNode(WomIdentifier(name), port, preserveIndexForOuterLookups).singleOutputPort) - case (None, None) => s"No input $name found evaluating inputs for expression ${expression.wdlExpression.toWomString} in (${(innerLookup.keys ++ outerLookup.keys).mkString(", ")})".invalidNel + case (None, Some(port)) => + Valid(name -> OuterGraphInputNode(WomIdentifier(name), port, preserveIndexForOuterLookups).singleOutputPort) + case (None, None) => + s"No input $name found evaluating inputs for expression ${expression.wdlExpression.toWomString} in (${(innerLookup.keys ++ outerLookup.keys) + .mkString(", ")})".invalidNel } } @@ -288,21 +333,22 @@ object WdlWomExpression { outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean, owningScope: Scope, - constructor: AnonymousExpressionConstructor[T]): ErrorOr[T] = { + constructor: AnonymousExpressionConstructor[T] + ): ErrorOr[T] = { import common.validation.ErrorOr.ShortCircuitingFlatMap - findInputsforExpression(expression, innerLookup, outerLookup, preserveIndexForOuterLookups, owningScope) flatMap { resolvedVariables => - AnonymousExpressionNode.fromInputMapping(nodeIdentifier, expression, resolvedVariables, constructor) + findInputsforExpression(expression, innerLookup, outerLookup, preserveIndexForOuterLookups, owningScope) flatMap { + resolvedVariables => + AnonymousExpressionNode.fromInputMapping(nodeIdentifier, expression, resolvedVariables, constructor) } } } object TernaryIf { - def unapply(arg: Ast): Option[(AstNode, AstNode, AstNode)] = { + def unapply(arg: Ast): Option[(AstNode, AstNode, AstNode)] = if (arg.getName.equals("TernaryIf")) { Option((arg.getAttribute("cond"), arg.getAttribute("iftrue"), arg.getAttribute("iffalse"))) } else { None } - } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlGraphNode.scala 
b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlGraphNode.scala index a2c4eaaf9fa..b833dfeeca3 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlGraphNode.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlGraphNode.scala @@ -3,7 +3,6 @@ package wdl.draft2.model import common.collections.EnhancedCollections._ import wdl.draft2.model.AstTools.{EnhancedAstNode, VariableReference} - sealed trait WdlGraphNode extends Scope { /** @@ -19,9 +18,9 @@ sealed trait WdlGraphNode extends Scope { final lazy val upstream: Set[WdlGraphNode] = { // If we are inside the scope of another graph node (i.e. via document element ancestry), then // that is also upstream of us. - val closestScopedAncestor = ancestry.collectFirst({ - case ancestor: WdlGraphNode => ancestor - }) + val closestScopedAncestor = ancestry.collectFirst { case ancestor: WdlGraphNode => + ancestor + } // We want: // - Nodes that this node references @@ -30,21 +29,20 @@ sealed trait WdlGraphNode extends Scope { // But because our children's upstream might also include these (which we don't want), filter out: // - This // - Any other WdlGraphNode descendants of this - (referencedNodes ++ closestScopedAncestor.toSeq ++ childGraphNodes.flatMap(_.upstream)).toSet - this -- descendants.filterByType[WdlGraphNode] + (referencedNodes ++ closestScopedAncestor.toSeq ++ childGraphNodes.flatMap(_.upstream)).toSet - this -- descendants + .filterByType[WdlGraphNode] } - final lazy val downstream: Set[WdlGraphNode] = { + final lazy val downstream: Set[WdlGraphNode] = for { - node <- namespace.descendants.collect({ + node <- namespace.descendants.collect { case n: WdlGraphNode if n.fullyQualifiedName != fullyQualifiedName => n - }) + } if node.upstream.contains(this) } yield node - } - def isUpstreamFrom(other: WdlGraphNode): Boolean = { + def isUpstreamFrom(other: WdlGraphNode): Boolean = other.upstreamAncestry.contains(this) || (other.childGraphNodes exists isUpstreamFrom) - } } object WdlGraphNode { @@ -59,17 +57,17 @@ trait WdlGraphNodeWithUpstreamReferences extends WdlGraphNode { def upstreamReferences: Iterable[VariableReference] // If we have variable reference to other graph nodes, then they are upstream from us. 
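  // (A minimal illustrative sketch, not taken from this patch: in a draft-2
  // workflow body such as
  //   call a
  //   call b { input: x = a.out }
  // the reference `a.out` resolves to the `a` call node, so `a` ends up in
  // `b.upstream` and, symmetrically, `b` ends up in `a.downstream`.)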
- override final def referencedNodes = for { - variable <- upstreamReferences - node <- resolveVariable(variable.terminal.sourceString) - if node.fullyQualifiedNameWithIndexScopes != fullyQualifiedNameWithIndexScopes - } yield node + final override def referencedNodes = for { + variable <- upstreamReferences + node <- resolveVariable(variable.terminal.sourceString) + if node.fullyQualifiedNameWithIndexScopes != fullyQualifiedNameWithIndexScopes + } yield node } trait WdlGraphNodeWithInputs extends WdlGraphNode { def inputMappings: Map[String, WdlExpression] - override final def referencedNodes = for { + final override def referencedNodes = for { expr <- inputMappings.values variable <- expr.variableReferences(this) scope <- parent diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala index d1f75461a46..8691c55ab57 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala @@ -23,7 +23,6 @@ import scala.jdk.CollectionConverters._ import scala.collection.mutable import scala.util.{Failure, Success, Try} - /** * Represents a parsed WDL file */ @@ -34,10 +33,10 @@ sealed trait WdlNamespace extends WomValue with Scope { def importedAs: Option[String] // Used when imported with `as` def imports: Seq[Import] def namespaces: Seq[WdlNamespace] + /** Return this `WdlNamespace` and its child `WdlNamespace`s recursively. */ - def allNamespacesRecursively: List[WdlNamespace] = { + def allNamespacesRecursively: List[WdlNamespace] = this +: (namespaces.toList flatMap { _.allNamespacesRecursively }) - } def tasks: Seq[WdlTask] def workflows: Seq[WdlWorkflow] def terminalMap: Map[Terminal, WorkflowSource] @@ -45,9 +44,8 @@ sealed trait WdlNamespace extends WomValue with Scope { override def unqualifiedName: LocallyQualifiedName = importedAs.getOrElse("") override def appearsInFqn: Boolean = importedAs.isDefined override def namespace: WdlNamespace = this - def resolve(fqn: FullyQualifiedName): Option[Scope] = { + def resolve(fqn: FullyQualifiedName): Option[Scope] = (descendants + this).find(d => d.fullyQualifiedName == fqn || d.fullyQualifiedNameWithIndexScopes == fqn) - } def resolveCallOrOutputOrDeclaration(fqn: FullyQualifiedName): Option[Scope] = { val callsAndOutputs = descendants collect { case c: WdlCall => c @@ -73,7 +71,9 @@ case class WdlNamespaceWithoutWorkflow(importedAs: Option[String], ast: Ast, sourceString: String, importUri: Option[String] = None, - resolvedImportRecords: Set[ResolvedImportRecord] = Set.empty[ResolvedImportRecord]) extends WdlNamespace { + resolvedImportRecords: Set[ResolvedImportRecord] = + Set.empty[ResolvedImportRecord] +) extends WdlNamespace { val workflows = Seq.empty[WdlWorkflow] } @@ -91,7 +91,8 @@ case class WdlNamespaceWithWorkflow(importedAs: Option[String], ast: Ast, sourceString: String, importUri: Option[String] = None, - resolvedImportRecords: Set[ResolvedImportRecord] = Set.empty[ResolvedImportRecord]) extends WdlNamespace { + resolvedImportRecords: Set[ResolvedImportRecord] = Set.empty[ResolvedImportRecord] +) extends WdlNamespace { override val workflows = Seq(workflow) @@ -102,26 +103,36 @@ case class WdlNamespaceWithWorkflow(importedAs: Option[String], * For the declarations that have an expression attached to it already, evaluate the expression * and return the value. Only evaluates workflow level declarations. Other declarations will be evaluated at runtime. 
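   * For example (an illustrative sketch, values assumed): with workflow-level
   * declarations `Int n = 3` and `Int m = n + 1`, both are evaluated and coerced
   * here and keyed by their fully qualified names in the returned map, while a
   * declaration inside a task or call is left for runtime evaluation.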
*/ - def staticDeclarationsRecursive(userInputs: WorkflowCoercedInputs, wdlFunctions: WdlStandardLibraryFunctions): Try[WorkflowCoercedInputs] = { + def staticDeclarationsRecursive(userInputs: WorkflowCoercedInputs, + wdlFunctions: WdlStandardLibraryFunctions + ): Try[WorkflowCoercedInputs] = { import common.exception.Aggregation._ - def evalDeclaration(accumulated: Map[FullyQualifiedName, Try[WomValue]], current: Declaration): Map[FullyQualifiedName, Try[WomValue]] = { + def evalDeclaration(accumulated: Map[FullyQualifiedName, Try[WomValue]], + current: Declaration + ): Map[FullyQualifiedName, Try[WomValue]] = current.expression match { case Some(expr) => - val successfulAccumulated = accumulated.collect({ case (k, v) if v.isSuccess => k -> v.get }) - val value = expr.evaluate(current.lookupFunction(successfulAccumulated ++ userInputs, wdlFunctions, NoOutputResolver, Map.empty[Scatter, Int]), wdlFunctions) + val successfulAccumulated = accumulated.collect { case (k, v) if v.isSuccess => k -> v.get } + val value = expr.evaluate(current.lookupFunction(successfulAccumulated ++ userInputs, + wdlFunctions, + NoOutputResolver, + Map.empty[Scatter, Int] + ), + wdlFunctions + ) val correctlyCoerced = value flatMap current.womType.coerceRawValue accumulated + (current.fullyQualifiedName -> correctlyCoerced) case None => accumulated } - } def evalScope: Map[FullyQualifiedName, Try[WomValue]] = { - val workflowDeclarations = children.collect({ case w: WdlWorkflow => w.declarations }).flatten + val workflowDeclarations = children.collect { case w: WdlWorkflow => w.declarations }.flatten (declarations ++ workflowDeclarations).foldLeft(Map.empty[FullyQualifiedName, Try[WomValue]])(evalDeclaration) } - val filteredExceptions: Set[Class[_ <: Throwable]] = Set(classOf[OutputVariableLookupException], classOf[ScatterIndexNotFound]) + val filteredExceptions: Set[Class[_ <: Throwable]] = + Set(classOf[OutputVariableLookupException], classOf[ScatterIndexNotFound]) // Filter out declarations for which evaluation failed because a call output variable could not be resolved, or a shard could not be found, // as this method is meant for pre-execution validation @@ -153,27 +164,34 @@ object WdlNamespace { val WorkflowResourceString = "string" - def loadUsingPath(wdlFile: Path, resource: Option[String], importResolver: Option[Seq[Draft2ImportResolver]]): Try[WdlNamespace] = { + def loadUsingPath(wdlFile: Path, + resource: Option[String], + importResolver: Option[Seq[Draft2ImportResolver]] + ): Try[WdlNamespace] = load(readFile(wdlFile), resource.getOrElse(wdlFile.toString), importResolver.getOrElse(Seq(fileResolver)), None) - } - def loadUsingSource(workflowSource: WorkflowSource, resource: Option[String], importResolver: Option[Seq[Draft2ImportResolver]]): Try[WdlNamespace] = { + def loadUsingSource(workflowSource: WorkflowSource, + resource: Option[String], + importResolver: Option[Seq[Draft2ImportResolver]] + ): Try[WdlNamespace] = load(workflowSource, resource.getOrElse(WorkflowResourceString), importResolver.getOrElse(Seq(fileResolver)), None) - } private def load(workflowSource: WorkflowSource, resource: String, importResolver: Seq[Draft2ImportResolver], importedAs: Option[String], resolvedImportRecords: Set[ResolvedImportRecord] = Set.empty[ResolvedImportRecord], - root: Boolean = true): Try[WdlNamespace] = Try { + root: Boolean = true + ): Try[WdlNamespace] = Try { val maybeAst = Option(AstTools.getAst(workflowSource, resource)) maybeAst match { case Some(ast) => WdlNamespace(ast, resource, workflowSource, 
importResolver, importedAs, resolvedImportRecords, root = root) case None => - throw new IllegalArgumentException("Could not build AST from workflow source. Source is empty or contains only comments and whitespace.") + throw new IllegalArgumentException( + "Could not build AST from workflow source. Source is empty or contains only comments and whitespace." + ) } } @@ -183,14 +201,18 @@ object WdlNamespace { importResolvers: Seq[Draft2ImportResolver], namespaceName: Option[String], resolvedImportRecords: Set[ResolvedImportRecord], - root: Boolean = false): WdlNamespace = { + root: Boolean = false + ): WdlNamespace = { val imports = for { importAst <- Option(ast).map(_.getAttribute("imports")).toSeq importStatement <- importAst.astListAsVector.map(Import(_)) } yield importStatement - def tryResolve(str: String, remainingResolvers: Seq[Draft2ImportResolver], errors: List[Throwable]): Draft2ResolvedImportBundle = { + def tryResolve(str: String, + remainingResolvers: Seq[Draft2ImportResolver], + errors: List[Throwable] + ): Draft2ResolvedImportBundle = remainingResolvers match { case resolver :: tail => Try(resolver(str)) match { @@ -203,27 +225,28 @@ object WdlNamespace { case _ => throw ValidationException(s"Failed to import workflow $str.", errors) } } - } // Translates all import statements to sub-namespaces val namespaces: Seq[WdlNamespace] = for { imp <- imports draft2ResolvedImportBundle = tryResolve(imp.uri, importResolvers, List.empty) - } yield WdlNamespace.load( - draft2ResolvedImportBundle.source, - imp.uri, - importResolvers, - Option(imp.namespaceName), - resolvedImportRecords + draft2ResolvedImportBundle.resolvedImportRecord, - root = false - ).get + } yield WdlNamespace + .load( + draft2ResolvedImportBundle.source, + imp.uri, + importResolvers, + Option(imp.namespaceName), + resolvedImportRecords + draft2ResolvedImportBundle.resolvedImportRecord, + root = false + ) + .get // Map of Terminal -> WDL Source Code so the syntax error formatter can show line numbers val terminalMap = AstTools.terminalMap(ast, source) val combinedTerminalMap = ((namespaces map { x => x.terminalMap }) ++ Seq(terminalMap)) reduce (_ ++ _) val wdlSyntaxErrorFormatter = WdlSyntaxErrorFormatter(combinedTerminalMap) - val topLevelAsts = ast.getAttribute("body").astListAsVector.collect({ case a: Ast => a }) + val topLevelAsts = ast.getAttribute("body").astListAsVector.collect { case a: Ast => a } // All `task` definitions of primary workflow. 
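    // (Sketch of the expected shape, for an assumed source like
    //   task t1 { ... }  task t2 { ... }  workflow w { ... }
    // `topLevelAsts` holds the three body Asts, from which `topLevelTasks` below
    // collects the tasks t1 and t2, and `workflows` collects Seq(w).)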
val topLevelTasks: Seq[WdlTask] = for { @@ -259,13 +282,18 @@ object WdlNamespace { def getChildren(scopeAst: Ast, scope: Option[Scope]): Seq[Scope] = { val ScopeAstNames = Seq( - AstNodeName.Call, AstNodeName.Workflow, AstNodeName.Namespace, - AstNodeName.Scatter, AstNodeName.If, AstNodeName.Declaration + AstNodeName.Call, + AstNodeName.Workflow, + AstNodeName.Namespace, + AstNodeName.Scatter, + AstNodeName.If, + AstNodeName.Declaration ) - def getScopeAsts(root: Ast, astAttribute: String): Seq[Ast] = { - root.getAttribute(astAttribute).astListAsVector.collect({ case a: Ast if ScopeAstNames.contains(a.getName) => a }) - } + def getScopeAsts(root: Ast, astAttribute: String): Seq[Ast] = + root.getAttribute(astAttribute).astListAsVector.collect { + case a: Ast if ScopeAstNames.contains(a.getName) => a + } def getTaskInputsOutputs(ast: Ast) = { val inputDeclarations = getScopeAsts(ast, "declarations").map(getScope(_, scope)) @@ -279,7 +307,8 @@ object WdlNamespace { case AstNodeName.Task => getTaskInputsOutputs(scopeAst) case AstNodeName.Declaration | AstNodeName.Output | AstNodeName.WorkflowOutputDeclaration => Seq.empty[Scope] case AstNodeName.Call => - val referencedTask = findCallable(scopeAst.getAttribute("task").sourceString, namespaces, topLevelTasks ++ workflows) + val referencedTask = + findCallable(scopeAst.getAttribute("task").sourceString, namespaces, topLevelTasks ++ workflows) referencedTask match { case Some(task: WdlTask) => getScopeAsts(task.ast, "declarations").map(d => getScope(d, scope)) @@ -301,45 +330,49 @@ object WdlNamespace { if ast.getName != AstNodeName.Task && ast.getName != AstNodeName.Workflow } yield ast - val children = topLevelTasks ++ namespaces ++ workflows ++ topLevelDeclarationScopes.map(ast => getScope(ast, parent = None)) + val children = + topLevelTasks ++ namespaces ++ workflows ++ topLevelDeclarationScopes.map(ast => getScope(ast, parent = None)) val newResolvedImportRecordsSet = resolvedImportRecords ++ namespaces.flatMap(_.resolvedImportRecords).toSet val namespace = workflows match { - case Nil => WdlNamespaceWithoutWorkflow( - importedAs = namespaceName, - imports = imports, - namespaces = namespaces, - tasks = topLevelTasks, - terminalMap = terminalMap, - ast = ast, - sourceString = source, - importUri = Option(uri), - resolvedImportRecords = newResolvedImportRecordsSet - ) - case Seq(w) => WdlNamespaceWithWorkflow( - ast = ast, - workflow = w, - namespace = namespaceName, - imports = imports, - namespaces = namespaces, - tasks = topLevelTasks, - terminalMap = terminalMap, - wdlSyntaxErrorFormatter = wdlSyntaxErrorFormatter, - sourceString = source, - importUri = Option(uri), - resolvedImportRecords = newResolvedImportRecordsSet - ) - case _ => throw new SyntaxError(wdlSyntaxErrorFormatter.tooManyWorkflows(ast.findAsts(AstNodeName.Workflow).asJava)) + case Nil => + WdlNamespaceWithoutWorkflow( + importedAs = namespaceName, + imports = imports, + namespaces = namespaces, + tasks = topLevelTasks, + terminalMap = terminalMap, + ast = ast, + sourceString = source, + importUri = Option(uri), + resolvedImportRecords = newResolvedImportRecordsSet + ) + case Seq(w) => + WdlNamespaceWithWorkflow( + ast = ast, + workflow = w, + namespace = namespaceName, + imports = imports, + namespaces = namespaces, + tasks = topLevelTasks, + terminalMap = terminalMap, + wdlSyntaxErrorFormatter = wdlSyntaxErrorFormatter, + sourceString = source, + importUri = Option(uri), + resolvedImportRecords = newResolvedImportRecordsSet + ) + case _ => + throw new 
SyntaxError(wdlSyntaxErrorFormatter.tooManyWorkflows(ast.findAsts(AstNodeName.Workflow).asJava)) } // Write-once var setting for parent/child relationships def descendants(scope: Scope): Seq[Scope] = { val children = scope.children - val childDescendants = scope.children.flatMap({ + val childDescendants = scope.children.flatMap { case _: WdlNamespace => Seq.empty case s => descendants(s) - }) + } children ++ childDescendants } @@ -359,9 +392,11 @@ object WdlNamespace { descendants(namespace).foreach(_.namespace = namespace) // SYNTAX CHECKS - val callInputSectionErrors = namespace.descendants.collect({ case c: WdlCall => c }).flatMap( - validateCallInputSection(_, wdlSyntaxErrorFormatter) - ) + val callInputSectionErrors = namespace.descendants + .collect { case c: WdlCall => c } + .flatMap( + validateCallInputSection(_, wdlSyntaxErrorFormatter) + ) val workflowOutputErrors = workflows flatMap { _.workflowCalls map { _.calledWorkflow } } collect { case calledWorkflow if calledWorkflow.workflowOutputWildcards.nonEmpty => @@ -400,37 +435,39 @@ object WdlNamespace { scatter <- namespace.descendants.collect { case sc: Scatter => sc } expression = scatter.collection badVariable <- referencesToAbsentValues(scatter, expression) - } yield new SyntaxError(wdlSyntaxErrorFormatter.scatterCollectionContainsInvalidVariableReference(scatter, badVariable)) + } yield new SyntaxError( + wdlSyntaxErrorFormatter.scatterCollectionContainsInvalidVariableReference(scatter, badVariable) + ) - def scopeNameAndTerminal(scope: Scope): (String, Terminal) = { + def scopeNameAndTerminal(scope: Scope): (String, Terminal) = scope match { case ns: WdlNamespace => ("Namespace", imports.find(_.uri == ns.resource).get.namespaceTerminal) case s: Scope => (s.getClass.getSimpleName, s.ast.findFirstTerminal.get) } - } case class ScopeAccumulator(accumulated: Seq[Scope] = Seq.empty, errors: Seq[String] = Seq.empty) - def lookForDuplicates(scopes: Iterable[Scope]) = { + def lookForDuplicates(scopes: Iterable[Scope]) = scopes.foldLeft(ScopeAccumulator()) { (acc, cur) => val possibleError = acc.accumulated.find(_.unqualifiedName == cur.unqualifiedName) map { duplicate => val (dupName, dupTerminal) = scopeNameAndTerminal(duplicate) val (curName, curTerminal) = scopeNameAndTerminal(cur) wdlSyntaxErrorFormatter.twoSiblingScopesHaveTheSameName( - dupName, dupTerminal, curName, curTerminal + dupName, + dupTerminal, + curName, + curTerminal ) } ScopeAccumulator(acc.accumulated :+ cur, acc.errors ++ possibleError.toSeq) } - } val scopeDuplicationErrors = (namespace.descendants + namespace) collect { case scope if scope.namespace == namespace => lookForDuplicates(scope.children) } - val expandedWorkflowOutputsDuplicationErrors = { + val expandedWorkflowOutputsDuplicationErrors = (namespace.descendants + namespace) collect { case workflow: WdlWorkflow => lookForDuplicates(workflow.outputs) } - } val accumulatedErrors = expandedWorkflowOutputsDuplicationErrors ++ scopeDuplicationErrors @@ -438,14 +475,20 @@ object WdlNamespace { val taskCommandReferenceErrors = for { task <- namespace.tasks - param <- task.commandTemplate.collect({ case p: ParameterCommandPart => p }) + param <- task.commandTemplate.collect { case p: ParameterCommandPart => p } variable <- param.expression.variableReferences(task) if !task.declarations.map(_.unqualifiedName).contains(variable.terminal.getSourceString) - } yield new SyntaxError(wdlSyntaxErrorFormatter.commandExpressionContainsInvalidVariableReference(task.ast.getAttribute("name").asInstanceOf[Terminal], 
variable.terminal)) + } yield new SyntaxError( + wdlSyntaxErrorFormatter.commandExpressionContainsInvalidVariableReference( + task.ast.getAttribute("name").asInstanceOf[Terminal], + variable.terminal + ) + ) - val all = workflowOutputErrors ++ declarationErrors ++ runtimeErrors ++ scatterErrors ++ callInputSectionErrors ++ taskCommandReferenceErrors ++ duplicateSiblingScopeNameErrors + val all = + workflowOutputErrors ++ declarationErrors ++ runtimeErrors ++ scatterErrors ++ callInputSectionErrors ++ taskCommandReferenceErrors ++ duplicateSiblingScopeNameErrors - all.sortWith({ case (l, r) => l.getMessage < r.getMessage }) match { + all.sortWith { case (l, r) => l.getMessage < r.getMessage } match { case s: Seq[SyntaxError] if s.nonEmpty => throw s.head case _ => } @@ -453,37 +496,46 @@ object WdlNamespace { namespace } - private def getDecls(scope: Scope): Seq[DeclarationInterface] = { + private def getDecls(scope: Scope): Seq[DeclarationInterface] = scope match { case t: WdlTask => t.declarations ++ t.outputs case w: WdlWorkflow => w.declarations ++ w.outputs case s => s.declarations } - } - private def declarationName(declarationAst: Ast): Terminal = declarationAst.getAttribute("name").asInstanceOf[Terminal] + private def declarationName(declarationAst: Ast): Terminal = + declarationAst.getAttribute("name").asInstanceOf[Terminal] /** * Determine the list of references in this expression to values which were never declared */ private def referencesToAbsentValues(container: Scope, expression: WdlExpression): Iterable[Terminal] = - expression.variableReferences(container) collect { case variable if container.resolveVariable(variable.terminal.sourceString).isEmpty => variable.terminal } + expression.variableReferences(container) collect { + case variable if container.resolveVariable(variable.terminal.sourceString).isEmpty => variable.terminal + } - private def validateDeclaration(declaration: DeclarationInterface, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Seq[SyntaxError] = { + private def validateDeclaration(declaration: DeclarationInterface, + wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter + ): Seq[SyntaxError] = { val invalidVariableReferences = for { expr <- declaration.expression.toSeq variable <- referencesToAbsentValues(declaration, expr) - } yield new SyntaxError(wdlSyntaxErrorFormatter.declarationContainsReferenceToAbsentValue( - declaration.parent, - variable - )) + } yield new SyntaxError( + wdlSyntaxErrorFormatter.declarationContainsReferenceToAbsentValue( + declaration.parent, + variable + ) + ) val typeMismatches = typeCheckDeclaration(declaration, wdlSyntaxErrorFormatter).toSeq invalidVariableReferences ++ typeMismatches } - private def validateRuntime(attributeExpression: WdlExpression, task: WdlTask, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Seq[SyntaxError] = { + private def validateRuntime(attributeExpression: WdlExpression, + task: WdlTask, + wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter + ): Seq[SyntaxError] = { val invalidVariableReferences = for { variable <- referencesToAbsentValues(task, attributeExpression) } yield new SyntaxError(wdlSyntaxErrorFormatter.declarationContainsReferenceToAbsentValue(Option(task), variable)) @@ -491,50 +543,80 @@ object WdlNamespace { invalidVariableReferences.toSeq } - private [wdl] def lookupType(from: Scope)(n: String): WomType = { + private[wdl] def lookupType(from: Scope)(n: String): WomType = { val resolved = from.resolveVariable(n) resolved match { case Some(d: DeclarationInterface) => 
d.relativeWdlType(from) case Some(c: WdlCall) => WdlCallOutputsObjectType(c) - case Some(s: Scatter) => s.collection.evaluateType(lookupType(s), new WdlStandardLibraryFunctionsType, Option(from)) match { - case Success(WomArrayType(aType)) => aType - // We don't need to check for a WOM map type, because - // of the custom unapply in object WomArrayType - case _ => throw new VariableLookupException(s"Variable $n references a scatter block ${s.fullyQualifiedName}, but the collection does not evaluate to an array") - } + case Some(s: Scatter) => + s.collection.evaluateType(lookupType(s), new WdlStandardLibraryFunctionsType, Option(from)) match { + case Success(WomArrayType(aType)) => aType + // We don't need to check for a WOM map type, because + // of the custom unapply in object WomArrayType + case _ => + throw new VariableLookupException( + s"Variable $n references a scatter block ${s.fullyQualifiedName}, but the collection does not evaluate to an array" + ) + } case Some(_: WdlNamespace) => WdlNamespaceType case _ => throw new VariableLookupException(s"Could not resolve $n from scope ${from.fullyQualifiedName}") } } - private def typeCheckDeclaration(decl: DeclarationInterface, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Option[SyntaxError] = { + private def typeCheckDeclaration(decl: DeclarationInterface, + wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter + ): Option[SyntaxError] = decl.expression flatMap { expr => expr.evaluateType(lookupType(decl), new WdlStandardLibraryFunctionsType, Option(decl)) match { case Success(womType) => if (!decl.womType.isCoerceableFrom(womType)) { - Option(new SyntaxError(wdlSyntaxErrorFormatter.taskOutputExpressionTypeDoesNotMatchDeclaredType( - declarationName(decl.ast), decl.womType, womType - ))) + Option( + new SyntaxError( + wdlSyntaxErrorFormatter.taskOutputExpressionTypeDoesNotMatchDeclaredType( + declarationName(decl.ast), + decl.womType, + womType + ) + ) + ) } else { expr.evaluate(NoLookup, NoFunctions) match { case Success(value) if decl.womType.coerceRawValue(value).isFailure => - Option(new SyntaxError(wdlSyntaxErrorFormatter.declarationExpressionNotCoerceableToTargetType(declarationName(decl.ast), decl.womType, value.womType))) + Option( + new SyntaxError( + wdlSyntaxErrorFormatter.declarationExpressionNotCoerceableToTargetType(declarationName(decl.ast), + decl.womType, + value.womType + ) + ) + ) case _ => None } } case Failure(_) => None } } - } - private def validateCallInputSection(call: WdlCall, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Seq[SyntaxError] = { + private def validateCallInputSection(call: WdlCall, + wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter + ): Seq[SyntaxError] = { val callInputSections = AstTools.callInputSectionIOMappings(call.ast, wdlSyntaxErrorFormatter) val invalidCallInputReferences = callInputSections flatMap { ast => val lhs = ast.getAttribute("key").sourceString call.callable.inputNames.find(_ == lhs) match { case Some(_) => None - case None => Option(new SyntaxError(wdlSyntaxErrorFormatter.callReferencesAbsentTaskInput(ast, call.callable.ast, lhs, call.unqualifiedName, call.isInstanceOf[WdlWorkflowCall]))) + case None => + Option( + new SyntaxError( + wdlSyntaxErrorFormatter.callReferencesAbsentTaskInput(ast, + call.callable.ast, + lhs, + call.unqualifiedName, + call.isInstanceOf[WdlWorkflowCall] + ) + ) + ) } } @@ -552,24 +634,32 @@ object WdlNamespace { case Some(s: Scatter) => s.collection.evaluateType(lookupType(s), new WdlStandardLibraryFunctionsType) map { case 
WomArrayType(WomObjectType) => None - case WomArrayType(_: WomPairType) if memberAccess.rhsString == "left" || memberAccess.rhsString == "right" => None + case WomArrayType(_: WomPairType) + if memberAccess.rhsString == "left" || memberAccess.rhsString == "right" => + None // Maps get coerced into arrays of pairs, so this is also ok: case _: WomMapType if memberAccess.rhsString == "left" || memberAccess.rhsString == "right" => None - case other => Option(new SyntaxError(wdlSyntaxErrorFormatter.badTargetTypeForMemberAccess(memberAccess, other))) + case other => + Option(new SyntaxError(wdlSyntaxErrorFormatter.badTargetTypeForMemberAccess(memberAccess, other))) } getOrElse None - case Some(d: DeclarationInterface) => d.womType match { - case _: WomPairType if memberAccess.rhsString == "left" || memberAccess.rhsString == "right" => None - case WomObjectType => None - case other => Option(new SyntaxError(wdlSyntaxErrorFormatter.badTargetTypeForMemberAccess(memberAccess, other))) - } - case Some(other) => Option(new SyntaxError(wdlSyntaxErrorFormatter.badTargetScopeForMemberAccess(memberAccess, other))) - case None => None + case Some(d: DeclarationInterface) => + d.womType match { + case _: WomPairType if memberAccess.rhsString == "left" || memberAccess.rhsString == "right" => None + case WomObjectType => None + case other => + Option(new SyntaxError(wdlSyntaxErrorFormatter.badTargetTypeForMemberAccess(memberAccess, other))) + } + case Some(other) => + Option(new SyntaxError(wdlSyntaxErrorFormatter.badTargetScopeForMemberAccess(memberAccess, other))) + case None => + None // In cases where there are many member accesses in the same Ast, it might be we can look up one layer but // not an inner layer. // It looks like we couldn't find this layer, so check whether there's an outer layer we can test access // for instead. memberAccess.lhsAst match { - case outerMemberAccessAst: Ast if outerMemberAccessAst.getName == "MemberAccess" => checkValidityOfMemberAccess(outerMemberAccessAst) + case outerMemberAccessAst: Ast if outerMemberAccessAst.getName == "MemberAccess" => + checkValidityOfMemberAccess(outerMemberAccessAst) case _ => Option(new SyntaxError(wdlSyntaxErrorFormatter.noTargetForMemberAccess(memberAccess))) } } @@ -594,18 +684,17 @@ object WdlNamespace { * Given a name, a collection of WdlNamespaces and a collection of Tasks, this method will attempt to find * a Task with that name within those collections. 
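   * For example (illustrative): `findTask("ns.t", namespaces, tasks)` first finds
   * the namespace imported `as ns` and recurses into it with the remainder "t",
   * whereas an undotted `findTask("t", namespaces, tasks)` simply scans the given
   * tasks, keeping (via `findCallable`) only matches that are WdlTasks.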
*/ - def findTask(name: String, namespaces: Seq[WdlNamespace], tasks: Seq[WdlTask]): Option[WdlTask] = { + def findTask(name: String, namespaces: Seq[WdlNamespace], tasks: Seq[WdlTask]): Option[WdlTask] = findCallable(name, namespaces, tasks) collect { case t: WdlTask => t } - } - def findCallable(name: String, namespaces: Seq[WdlNamespace], callables: Seq[WdlCallable]): Option[WdlCallable] = { + def findCallable(name: String, namespaces: Seq[WdlNamespace], callables: Seq[WdlCallable]): Option[WdlCallable] = if (name.contains(".")) { val parts = name.split("\\.", 2) - namespaces find (_.importedAs.contains(parts(0))) flatMap { x => findCallable(parts(1), x.namespaces, x.workflows ++ x.tasks) } + namespaces find (_.importedAs.contains(parts(0))) flatMap { x => + findCallable(parts(1), x.namespaces, x.workflows ++ x.tasks) + } } else callables.find(_.unqualifiedName == name) - } - private def readFile(wdlFile: Path): WorkflowSource = File(wdlFile).contentAsString @@ -626,38 +715,52 @@ object WdlNamespace { } object WdlNamespaceWithWorkflow { - def load(workflowSource: WorkflowSource, importsResolvers: Seq[Draft2ImportResolver]): Try[WdlNamespaceWithWorkflow] = { + def load(workflowSource: WorkflowSource, importsResolvers: Seq[Draft2ImportResolver]): Try[WdlNamespaceWithWorkflow] = from(WdlNamespace.loadUsingSource(workflowSource, None, Option(importsResolvers))) - } - @deprecated("To avoid unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", "23") - def load(workflowSource: WorkflowSource): Try[WdlNamespaceWithWorkflow] = from(WdlNamespace.loadUsingSource(workflowSource, None, None)) - - @deprecated("To avoid unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", "23") + @deprecated( + "To avoid unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", + "23" + ) + def load(workflowSource: WorkflowSource): Try[WdlNamespaceWithWorkflow] = from( + WdlNamespace.loadUsingSource(workflowSource, None, None) + ) + + @deprecated( + "To avoid unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", + "23" + ) def load(workflowSource: WorkflowSource, importsDirectory: File): Try[WdlNamespaceWithWorkflow] = { - val resolvers: Seq[Draft2ImportResolver] = Seq(WdlNamespace.directoryResolver(importsDirectory), WdlNamespace.fileResolver) + val resolvers: Seq[Draft2ImportResolver] = + Seq(WdlNamespace.directoryResolver(importsDirectory), WdlNamespace.fileResolver) load(workflowSource, resolvers) } - @deprecated("To avoid unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", "23") - def load(wdlFile: Path, importResolver: Draft2ImportResolver): Try[WdlNamespaceWithWorkflow] = from(WdlNamespace.loadUsingPath(wdlFile, None, Option(Seq(importResolver)))) - - @deprecated("To avoid unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", "23") - def load(workflowSource: WorkflowSource, importResolver: Draft2ImportResolver): Try[WdlNamespaceWithWorkflow] = { + @deprecated( + "To avoid unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", + "23" + ) + def load(wdlFile: Path, importResolver: Draft2ImportResolver): Try[WdlNamespaceWithWorkflow] = from( + WdlNamespace.loadUsingPath(wdlFile, None, Option(Seq(importResolver))) + ) + + @deprecated( + "To avoid 
unexpected default resolutions, I recommend using the load(String, Seq[ImportResolver] method of loading.", + "23" + ) + def load(workflowSource: WorkflowSource, importResolver: Draft2ImportResolver): Try[WdlNamespaceWithWorkflow] = WdlNamespaceWithWorkflow.from(WdlNamespace.loadUsingSource(workflowSource, None, Option(Seq(importResolver)))) - } /** * Used to safely cast a WdlNamespace to a NamespaceWithWorkflow. Throws an IllegalArgumentException if another * form of WdlNamespace is passed to it */ - private def from(namespace: Try[WdlNamespace]): Try[WdlNamespaceWithWorkflow] = { + private def from(namespace: Try[WdlNamespace]): Try[WdlNamespaceWithWorkflow] = namespace match { case Success(n: WdlNamespaceWithWorkflow) => Success(n) case Success(_) => Failure(new IllegalArgumentException("Namespace does not have a local workflow to run")) case Failure(f) => Failure(f) } - } def apply(ast: Ast, workflow: WdlWorkflow, @@ -669,7 +772,18 @@ object WdlNamespaceWithWorkflow { wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter, sourceString: String, importUri: Option[String], - resolvedImportRecords: Set[ResolvedImportRecord]): WdlNamespaceWithWorkflow = { - new WdlNamespaceWithWorkflow(namespace, workflow, imports, namespaces, tasks, terminalMap, wdlSyntaxErrorFormatter, ast, sourceString, importUri, resolvedImportRecords) - } + resolvedImportRecords: Set[ResolvedImportRecord] + ): WdlNamespaceWithWorkflow = + new WdlNamespaceWithWorkflow(namespace, + workflow, + imports, + namespaces, + tasks, + terminalMap, + wdlSyntaxErrorFormatter, + ast, + sourceString, + importUri, + resolvedImportRecords + ) } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala index 84508d001c6..3b30379f281 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala @@ -15,7 +15,8 @@ object WdlRuntimeAttributes { def apply(ast: Ast): WdlRuntimeAttributes = { val asts = ast.findAsts(AstNodeName.Runtime) if (asts.size > 1) throw new UnsupportedOperationException("Only one runtime block may be defined per task") - val kvPairAsts = asts.headOption.map(_.getAttribute("map").asInstanceOf[AstList].asScala.toVector.map(_.asInstanceOf[Ast])) + val kvPairAsts = + asts.headOption.map(_.getAttribute("map").asInstanceOf[AstList].asScala.toVector.map(_.asInstanceOf[Ast])) val runtimeAttributeMap: Map[String, WdlExpression] = kvPairAsts match { case Some(vector) => vector.map(ast => processRuntimeAttribute(ast)).toMap case None => Map.empty[String, WdlExpression] diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala index 158037a6105..bc1a7764efd 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala @@ -21,94 +21,110 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e case classicTerminal => terminalMap.get(classicTerminal) } - private def line(t:Terminal): String = getTerminal(t).map(_.split("\n")(t.getLine - 1)).getOrElse(s"Cannot highlight line. It was probably in an imported file.") + private def line(t: Terminal): String = getTerminal(t) + .map(_.split("\n")(t.getLine - 1)) + .getOrElse(s"Cannot highlight line. 
It was probably in an imported file.") - def unexpectedEof(method: String, expected: java.util.List[TerminalIdentifier], nt_rules: java.util.List[String]): String = "ERROR: Unexpected end of file" + def unexpectedEof(method: String, + expected: java.util.List[TerminalIdentifier], + nt_rules: java.util.List[String] + ): String = "ERROR: Unexpected end of file" - def excessTokens(method: String, terminal: Terminal): String = { + def excessTokens(method: String, terminal: Terminal): String = s"""ERROR: Finished parsing without consuming all tokens. - | - |${pointToSource(terminal)} + | + |${pointToSource(terminal)} """.stripMargin - } - def unexpectedSymbol(method: String, actual: Terminal, expected: java.util.List[TerminalIdentifier], rule: String): String = { + def unexpectedSymbol(method: String, + actual: Terminal, + expected: java.util.List[TerminalIdentifier], + rule: String + ): String = { val expectedTokens = expected.asScala.map(_.string).mkString(", ") s"""ERROR: Unexpected symbol (line ${actual.getLine}, col ${actual.getColumn}) when parsing '$method'. - | - |Expected $expectedTokens, got ${actual.getSourceString}. - | - |${pointToSource(actual)} - | - |$rule + | + |Expected $expectedTokens, got ${actual.getSourceString}. + | + |${pointToSource(actual)} + | + |$rule """.stripMargin } - def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = { + def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = s"""ERROR: No more tokens. Expecting ${expecting.string} - | - |${pointToSource(last)} + | + |${pointToSource(last)} """.stripMargin - } - def invalidTerminal(method: String, invalid: Terminal): String = { + def invalidTerminal(method: String, invalid: Terminal): String = s"""ERROR: Invalid symbol ID: ${invalid.getId} (${invalid.getTerminalStr}) - | - |${pointToSource(invalid)} + | + |${pointToSource(invalid)} """.stripMargin - } // TODO: these next two methods won't be called by the parser because there are no lists in the WDL grammar that // cause these to be triggered. Currently the parser is passing in 'null' for the value of 'last' and when that // changes, these errors can be made more helpful. 
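  // (Hypothetical rendering of `unexpectedSymbol` above, with assumed values:
  //   ERROR: Unexpected symbol (line 3, col 8) when parsing 'document'.
  //   Expected identifier, got 42.
  //   <offending source line, marked by pointToSource>
  //   <the grammar rule that failed>)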
- def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = { + def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = s"ERROR: $method requires $required items, but only found $found" - } - def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = { + def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = s"ERROR: $method requires a terminator after each element" - } def tooManyWorkflows(workflowAsts: java.util.List[Ast]): String = { - val otherWorkflows = workflowAsts.asScala.map({ ast => - val name: Terminal = ast.getAttribute("name").asInstanceOf[Terminal] - s"""Prior workflow definition (line ${name.getLine} col ${name.getColumn}): - | - |${pointToSource(name)} + val otherWorkflows = workflowAsts.asScala + .map { ast => + val name: Terminal = ast.getAttribute("name").asInstanceOf[Terminal] + s"""Prior workflow definition (line ${name.getLine} col ${name.getColumn}): + | + |${pointToSource(name)} """.stripMargin - }).mkString("\n") + } + .mkString("\n") s"""ERROR: Only one workflow definition allowed, found ${workflowAsts.size} workflows: - | - |$otherWorkflows + | + |$otherWorkflows """.stripMargin } def duplicateTask(taskAsts: Seq[Ast]): String = { - val otherTasks = taskAsts.map({ ast => - val name: Terminal = ast.getAttribute("name").asInstanceOf[Terminal] - s"""Prior task definition (line ${name.getLine} col ${name.getColumn}): - | - |${pointToSource(name)} + val otherTasks = taskAsts + .map { ast => + val name: Terminal = ast.getAttribute("name").asInstanceOf[Terminal] + s"""Prior task definition (line ${name.getLine} col ${name.getColumn}): + | + |${pointToSource(name)} """.stripMargin - }).mkString("\n") + } + .mkString("\n") - s"""ERROR: Two tasks defined with the name '${taskAsts.head.getAttribute("name").asInstanceOf[Terminal].getSourceString}': - | - |$otherTasks + s"""ERROR: Two tasks defined with the name '${taskAsts.head + .getAttribute("name") + .asInstanceOf[Terminal] + .getSourceString}': + | + |$otherTasks """.stripMargin } def callReferencesBadTaskName(callAst: Ast, taskName: String): String = { val callTask: Terminal = callAst.getAttribute("task").asInstanceOf[Terminal] s"""ERROR: Call references a task ($taskName) that doesn't exist (line ${callTask.getLine}, col ${callTask.getColumn}) - | - |${pointToSource(callTask)} + | + |${pointToSource(callTask)} """.stripMargin } - def callReferencesAbsentTaskInput(callInputAst: Ast, taskAst: Ast, missingInput: String, callName: String, forSubworkflowCall: Boolean): String = { + def callReferencesAbsentTaskInput(callInputAst: Ast, + taskAst: Ast, + missingInput: String, + callName: String, + forSubworkflowCall: Boolean + ): String = { val callParameter: Terminal = callInputAst.getAttribute("key").asInstanceOf[Terminal] val taskName: Terminal = taskAst.getAttribute("name").asInstanceOf[Terminal] val taskNameString = taskName.getSourceString @@ -129,44 +145,48 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e } else { "" } s"""ERROR: Call supplied an unexpected input: The '$taskNameString' task doesn't have an input called '$missingInput': - | - |${pointToSource(callParameter)} - | - |Options: - | - Add the input '$missingInput' to the '$taskNameString' task (defined on line ${taskName.getLine}).$subworkflowWarning - | - Remove '$missingInput = ...' from $callName's inputs (on line ${callParameter.getLine}). 
+ | + |${pointToSource(callParameter)} + | + |Options: + | - Add the input '$missingInput' to the '$taskNameString' task (defined on line ${taskName.getLine}).$subworkflowWarning + | - Remove '$missingInput = ...' from $callName's inputs (on line ${callParameter.getLine}). """.stripMargin } def taskAndNamespaceHaveSameName(taskAst: Ast, namespace: Terminal): String = { val taskName = taskAst.getAttribute("name").asInstanceOf[Terminal] s"""ERROR: Task and namespace have the same name: - | - |Task defined here (line ${taskName.getLine}, col ${taskName.getColumn}): - | - |${pointToSource(taskName)} - | - |Import statement defined here (line ${namespace.getLine}, col ${namespace.getColumn}): - | - |${pointToSource(namespace)} + | + |Task defined here (line ${taskName.getLine}, col ${taskName.getColumn}): + | + |${pointToSource(taskName)} + | + |Import statement defined here (line ${namespace.getLine}, col ${namespace.getColumn}): + | + |${pointToSource(namespace)} """.stripMargin } def workflowAndNamespaceHaveSameName(workflowAst: Ast, namespace: Terminal): String = { val workflowName = workflowAst.getAttribute("name").asInstanceOf[Terminal] s"""ERROR: Workflow and namespace have the same name: - | - |Task defined here (line ${workflowName.getLine}, col ${workflowName.getColumn}): - | - |${pointToSource(workflowName)} - | - |Import statement defined here (line ${namespace.getLine}, col ${namespace.getColumn}): - | - |${pointToSource(namespace)} + | + |Task defined here (line ${workflowName.getLine}, col ${workflowName.getColumn}): + | + |${pointToSource(workflowName)} + | + |Import statement defined here (line ${namespace.getLine}, col ${namespace.getColumn}): + | + |${pointToSource(namespace)} """.stripMargin } - def twoSiblingScopesHaveTheSameName(firstScopeType: String, firstScopeName: Terminal, secondScopeType: String, secondScopeName: Terminal): String = { + def twoSiblingScopesHaveTheSameName(firstScopeType: String, + firstScopeName: Terminal, + secondScopeType: String, + secondScopeName: Terminal + ): String = s"""ERROR: Sibling nodes have conflicting names: | |$firstScopeType defined here (line ${firstScopeName.getLine}, col ${firstScopeName.getColumn}): @@ -177,13 +197,12 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e | |${pointToSource(secondScopeName)} """.stripMargin - } def multipleCallsAndHaveSameName(names: Seq[(String, Terminal)]): String = { val duplicatedCallNames = names.map { case (astType, name) => s"""$astType statement here (line ${name.getLine}, column ${name.getColumn}): - | - |${pointToSource(name)} + | + |${pointToSource(name)} """.stripMargin } @@ -193,28 +212,26 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e """.stripMargin } - def multipleInputStatementsOnCall(secondInputStatement: Terminal): String = { + def multipleInputStatementsOnCall(secondInputStatement: Terminal): String = s"""ERROR: Call has multiple 'input' sections defined: | |${pointToSource(secondInputStatement)} | |Instead of multiple 'input' sections, use commas to separate the values. 
""".stripMargin - } - def emptyInputSection(callTaskName: Terminal) = { + def emptyInputSection(callTaskName: Terminal) = s"""ERROR: empty "input" section for call '${callTaskName.getSourceString}': | |${pointToSource(callTaskName)} """.stripMargin - } def noTargetForMemberAccess(memberAccess: MemberAccess): String = { val rhsAst = memberAccess.ast.getAttribute("rhs").asInstanceOf[Terminal] s"""ERROR: Cannot find reference to '${memberAccess.lhsString}' for member access '${memberAccess.memberAccessString}' (line ${rhsAst.getLine}, col ${rhsAst.getColumn}): - | - |${pointToSource(rhsAst)} + | + |${pointToSource(rhsAst)} """.stripMargin } @@ -223,7 +240,7 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e s"""ERROR: Bad target for member access '${memberAccess.memberAccessString}': '${memberAccess.lhsString}' was a ${unexpectedType.stableName} (line ${rhsAst.getLine}, col ${rhsAst.getColumn}): | - |${pointToSource(rhsAst)} + |${pointToSource(rhsAst)} """.stripMargin } @@ -232,7 +249,7 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e s"""ERROR: Bad target for member access '${memberAccess.memberAccessString}': '${memberAccess.lhsString}' was a ${unexpectedScope.getClass.getSimpleName} (line ${rhsAst.getLine}, col ${rhsAst.getColumn}): | - |${pointToSource(rhsAst)} + |${pointToSource(rhsAst)} """.stripMargin } @@ -240,15 +257,16 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e val rhsAst = memberAccessAst.getAttribute("rhs").asInstanceOf[Terminal] val memberAccess = MemberAccess(memberAccessAst) val taskName = call.unqualifiedName - val goodOutputs = s" (current outputs of '$taskName': " + call.outputs.map("'" + _.unqualifiedName + "'").mkString(", ") + ")" + val goodOutputs = + s" (current outputs of '$taskName': " + call.outputs.map("'" + _.unqualifiedName + "'").mkString(", ") + ")" s"""ERROR: Call output not found: Call '${memberAccess.lhsString}' doesn't have an output '${memberAccess.rhsString}' (line ${rhsAst.getLine}, col ${rhsAst.getColumn}). - | - |${pointToSource(rhsAst)} - | - |Options: - | - Add the output '${memberAccess.rhsString}' to '$taskName'. - | - Modify the member access (on line ${rhsAst.getLine}) to use an existing output$goodOutputs. + | + |${pointToSource(rhsAst)} + | + |Options: + | - Add the output '${memberAccess.rhsString}' to '$taskName'. + | - Modify the member access (on line ${rhsAst.getLine}) to use an existing output$goodOutputs. 
""".stripMargin } @@ -256,104 +274,95 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e val rhsAst = ast.getAttribute("fqn").asInstanceOf[Terminal] s"""ERROR: Old style workflow output references '${rhsAst.getSourceString}' which doesn't exist (line ${rhsAst.getLine}, col ${rhsAst.getColumn}): - | - |${pointToSource(rhsAst)} - |""".stripMargin + | + |${pointToSource(rhsAst)} + |""".stripMargin } - def pairMustHaveExactlyTwoTypeParameters(arrayDecl: Terminal): String = { + def pairMustHaveExactlyTwoTypeParameters(arrayDecl: Terminal): String = s"""ERROR: Pair type should have exactly two parameterized types (line ${arrayDecl.getLine}, col ${arrayDecl.getColumn}): - | - |${pointToSource(arrayDecl)} + | + |${pointToSource(arrayDecl)} """.stripMargin - } - def arrayMustHaveOnlyOneTypeParameter(arrayDecl: Terminal): String = { + def arrayMustHaveOnlyOneTypeParameter(arrayDecl: Terminal): String = s"""ERROR: Array type should only have one parameterized type (line ${arrayDecl.getLine}, col ${arrayDecl.getColumn}): - | - |${pointToSource(arrayDecl)} + | + |${pointToSource(arrayDecl)} """.stripMargin - } - def mapMustHaveExactlyTwoTypeParameters(mapDecl: Terminal): String = { + def mapMustHaveExactlyTwoTypeParameters(mapDecl: Terminal): String = s"""ERROR: Map type should have two parameterized types (line ${mapDecl.getLine}, col ${mapDecl.getColumn}): - | - |${pointToSource(mapDecl)} + | + |${pointToSource(mapDecl)} """.stripMargin - } - def arrayMustHaveATypeParameter(arrayDecl: Terminal): String = { + def arrayMustHaveATypeParameter(arrayDecl: Terminal): String = s"""ERROR: Array type should have exactly one parameterized type (line ${arrayDecl.getLine}, col ${arrayDecl.getColumn}): - | - |${pointToSource(arrayDecl)} + | + |${pointToSource(arrayDecl)} """.stripMargin - } - def taskOutputExpressionTypeDoesNotMatchDeclaredType(outputName: Terminal, outputType: WomType, expressionType: WomType) = { + def taskOutputExpressionTypeDoesNotMatchDeclaredType(outputName: Terminal, + outputType: WomType, + expressionType: WomType + ) = s"""ERROR: ${outputName.getSourceString} is declared as a ${outputType.stableName} but the expression evaluates to a ${expressionType.stableName}: | |${pointToSource(outputName)} """.stripMargin - } - def declarationExpressionNotCoerceableToTargetType(declName: Terminal, declType: WomType, evaluatedType: WomType) = { + def declarationExpressionNotCoerceableToTargetType(declName: Terminal, declType: WomType, evaluatedType: WomType) = s"""ERROR: Value '${declName.getSourceString}' is declared as a '${declType.stableName}' but the expression evaluates to '${evaluatedType.stableName}': - | + | |${pointToSource(declName)} """.stripMargin - } - def failedToDetermineTypeOfDeclaration(declName: Terminal) = { + def failedToDetermineTypeOfDeclaration(declName: Terminal) = s"""ERROR: Could not determine type of declaration ${declName.getSourceString}: | |${pointToSource(declName)} """.stripMargin - } - def trueAndFalseAttributesAreRequired(firstAttribute: Terminal) = { + def trueAndFalseAttributesAreRequired(firstAttribute: Terminal) = s"""ERROR: Both 'true' and 'false' attributes must be specified if either is specified: | |${pointToSource(firstAttribute)} """.stripMargin - } - def expressionExpectedToBeString(key: Terminal) = { + def expressionExpectedToBeString(key: Terminal) = s"""ERROR: Value for this attribute is expected to be a string: | |${pointToSource(key)} """.stripMargin - } - def expectedAtMostOneSectionPerTask(section: String, taskName: 
Terminal) = { + def expectedAtMostOneSectionPerTask(section: String, taskName: Terminal) = s"""ERROR: Expecting to find at most one '$section' section in the task: | |${pointToSource(taskName)} """.stripMargin - } - def expectedExactlyOneCommandSectionPerTask(taskName: Terminal) = { + def expectedExactlyOneCommandSectionPerTask(taskName: Terminal) = s"""ERROR: Expecting to find at most one 'command' section in the task: | |${pointToSource(taskName)} """.stripMargin - } - def commandExpressionContainsInvalidVariableReference(taskName: Terminal, variable: Terminal) = { + def commandExpressionContainsInvalidVariableReference(taskName: Terminal, variable: Terminal) = s"""ERROR: Variable ${variable.getSourceString} does not reference any declaration in the task (line ${variable.getLine}, col ${variable.getColumn}): - | - |${pointToSource(variable)} - | - |Task defined here (line ${taskName.getLine}, col ${taskName.getColumn}): - | - |${pointToSource(taskName)} + | + |${pointToSource(variable)} + | + |Task defined here (line ${taskName.getLine}, col ${taskName.getColumn}): + | + |${pointToSource(taskName)} """.stripMargin - } def declarationContainsReferenceToAbsentValue(parent: Option[Scope], variable: Terminal) = { val (parentName, missingType) = parent match { - case Some(t: WdlTask) => (s"task '${t.unqualifiedName}'" , "value") - case Some(t: WdlTaskCall) => (s"task '${t.task.unqualifiedName}'" , "value") + case Some(t: WdlTask) => (s"task '${t.unqualifiedName}'", "value") + case Some(t: WdlTaskCall) => (s"task '${t.task.unqualifiedName}'", "value") case Some(_) => ("workflow", "value or call") case None => ("", "") } @@ -361,7 +370,8 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e invalidVariableReference(variable, missingType, parentName) } - def scatterCollectionContainsInvalidVariableReference(scatter: Scatter, variable: Terminal) = invalidVariableReference(variable, "value", "workflow") + def scatterCollectionContainsInvalidVariableReference(scatter: Scatter, variable: Terminal) = + invalidVariableReference(variable, "value", "workflow") private def invalidVariableReference(variable: Terminal, missingType: String, parentName: String) = s""" @@ -370,12 +380,11 @@ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) e |${pointToSource(variable)} |""".stripMargin - def memoryRuntimeAttributeInvalid(expressionStart: Terminal) = { + def memoryRuntimeAttributeInvalid(expressionStart: Terminal) = s"""ERROR: 'memory' runtime attribute should have the format "size unit" (e.g. "8 GB"). 
- | - |Expression starts here (line ${expressionStart.getLine}, col ${expressionStart.getColumn}): - | - |${pointToSource(expressionStart)} + | + |Expression starts here (line ${expressionStart.getLine}, col ${expressionStart.getColumn}): + | + |${pointToSource(expressionStart)} """.stripMargin - } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala index 130c4b0f706..89ea5d67aab 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala @@ -19,7 +19,7 @@ import scala.language.postfixOps object WdlTask { val Ws = Pattern.compile("[\\ \\t]+") - private implicit val instantiatedCommandMonoid = cats.derived.MkMonoid[InstantiatedCommand] + implicit private val instantiatedCommandMonoid = cats.derived.MkMonoid[InstantiatedCommand] /** The function validateDeclaration() and the DeclarationAccumulator class are used * to accumulate errors and keep track of which Declarations/TaskOutputs have been examined. @@ -38,7 +38,8 @@ object WdlTask { val meta = AstTools.wdlSectionToStringMap(ast, AstNodeName.Meta, wdlSyntaxErrorFormatter) val parameterMeta = AstTools.wdlSectionToStringMap(ast, AstNodeName.ParameterMeta, wdlSyntaxErrorFormatter) - if (commandAsts.size != 1) throw new SyntaxError(wdlSyntaxErrorFormatter.expectedExactlyOneCommandSectionPerTask(taskNameTerminal)) + if (commandAsts.size != 1) + throw new SyntaxError(wdlSyntaxErrorFormatter.expectedExactlyOneCommandSectionPerTask(taskNameTerminal)) val commandTemplate = commandAsts.head.getAttribute("parts").asInstanceOf[AstList].asScala.toVector map { case x: Terminal => StringCommandPart(x.getSourceString) case x: Ast => ParameterCommandPart(x, wdlSyntaxErrorFormatter) @@ -47,7 +48,13 @@ object WdlTask { WdlTask(name, commandTemplate, runtimeAttributes, meta, parameterMeta, ast) } - def empty: WdlTask = new WdlTask("taskName", Seq.empty, WdlRuntimeAttributes(Map.empty[String, WdlExpression]), Map.empty, Map.empty, null) + def empty: WdlTask = new WdlTask("taskName", + Seq.empty, + WdlRuntimeAttributes(Map.empty[String, WdlExpression]), + Map.empty, + Map.empty, + null + ) } @@ -66,7 +73,8 @@ case class WdlTask(name: String, runtimeAttributes: WdlRuntimeAttributes, meta: Map[String, String], parameterMeta: Map[String, String], - ast: Ast) extends WdlCallable { + ast: Ast +) extends WdlCallable { override val unqualifiedName: LocallyQualifiedName = name @@ -111,18 +119,20 @@ case class WdlTask(name: String, */ def instantiateCommand(taskInputs: EvaluatedTaskInputs, functions: WdlFunctions[WomValue], - valueMapper: WomValue => WomValue = identity): ErrorOr[List[InstantiatedCommand]] = { + valueMapper: WomValue => WomValue = identity + ): ErrorOr[List[InstantiatedCommand]] = { - val mappedInputs = taskInputs.map({case (k, v) => k.unqualifiedName -> v}) + val mappedInputs = taskInputs.map { case (k, v) => k.unqualifiedName -> v } // `foldMap`: `map` over the elements of the `List[WdlCommandPart]`s, transforming each `WdlCommandPart` to an // `ErrorOr[InstantiatedCommand]`. Then fold the resulting `List[ErrorOr[InstantiatedCommand]]` into a single // `ErrorOr[InstantiatedCommand]`. 
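    // Hedged sketch (illustrative only; assumes the cats-derived Monoid
    // combines field-wise, i.e. String concatenation for commandString and
    // list concatenation for createdFiles): the fold below then behaves like
    //   List(ic1, ic2).combineAll ==
    //     InstantiatedCommand(ic1.commandString + ic2.commandString,
    //                         createdFiles = ic1.createdFiles ++ ic2.createdFiles)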
import WdlTask.instantiatedCommandMonoid val fullInstantiatedCommand: ErrorOr[InstantiatedCommand] = commandTemplate.toList - .flatTraverse(_.instantiate(declarations, mappedInputs, functions, valueMapper)).map(_.combineAll) + .flatTraverse(_.instantiate(declarations, mappedInputs, functions, valueMapper)) + .map(_.combineAll) // `normalize` the instantiation (i.e. don't break Python code indentation) - fullInstantiatedCommand map { c => List(c.copy(commandString = StringUtil.normalize(c.commandString)))} + fullInstantiatedCommand map { c => List(c.copy(commandString = StringUtil.normalize(c.commandString))) } } def commandTemplateString: String = StringUtil.normalize(commandTemplate.map(_.toString).mkString) @@ -139,10 +149,10 @@ case class WdlTask(name: String, * inputMap = Map("t.s" -> WdlString("hello")) */ // TODO WOM: Unused except in Specs - def inputsFromMap(inputs: Map[FullyQualifiedName, WomValue]): EvaluatedTaskInputs = { + def inputsFromMap(inputs: Map[FullyQualifiedName, WomValue]): EvaluatedTaskInputs = declarations flatMap { declaration => inputs collectFirst { - case (fqn, value) if fqn == declaration.fullyQualifiedName => declaration -> value } + case (fqn, value) if fqn == declaration.fullyQualifiedName => declaration -> value + } } toMap - } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlWorkflow.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlWorkflow.scala index 8f92f7db363..0eeef921775 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlWorkflow.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlWorkflow.scala @@ -13,10 +13,10 @@ object WdlWorkflow { throw new UnsupportedOperationException(s"Expecting Workflow AST, got a ${ast.getName} AST") } val name = ast.getAttribute("name").asInstanceOf[Terminal].getSourceString - val callNames = ast.findAsts(AstNodeName.Call).map {call => + val callNames = ast.findAsts(AstNodeName.Call).map { call => ("Call", Option(call.getAttribute("alias")).getOrElse(call.getAttribute("task"))) } - val declarationNames = ast.findAsts(AstNodeName.Declaration).map {decl => + val declarationNames = ast.findAsts(AstNodeName.Declaration).map { decl => ("Declaration", decl.getAttribute("name")) } @@ -28,7 +28,8 @@ object WdlWorkflow { (callNames ++ declarationNames) groupBy { _._2.sourceString } foreach { case (_, terminals) if terminals.size > 1 => - val castedToTerminal = terminals map { case (astType, terminalAst) => (astType, terminalAst.asInstanceOf[Terminal]) + val castedToTerminal = terminals map { case (astType, terminalAst) => + (astType, terminalAst.asInstanceOf[Terminal]) } @@ -48,7 +49,8 @@ case class WdlWorkflow(unqualifiedName: String, wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter, meta: Map[String, String], parameterMeta: Map[String, String], - ast: Ast) extends WdlCallable { + ast: Ast +) extends WdlCallable { /** * Also include workflow outputs which are not technically children but should be processed as such @@ -79,9 +81,13 @@ case class WdlWorkflow(unqualifiedName: String, /** First tries to find any Call with name `name`. 
If not found, * Fallback to looking at immediate children or delegating to parent node */ - override def resolveVariable(name: String, relativeTo: Scope = this, ignoreLocal: Boolean = false): Option[WdlGraphNode] = { - findCallByName(name) orElse findDeclarationByName(name) orElse findWorkflowOutputByName(name, relativeTo) orElse super.resolveVariable(name, relativeTo) - } + override def resolveVariable(name: String, + relativeTo: Scope = this, + ignoreLocal: Boolean = false + ): Option[WdlGraphNode] = + findCallByName(name) orElse findDeclarationByName(name) orElse findWorkflowOutputByName(name, + relativeTo + ) orElse super.resolveVariable(name, relativeTo) /** * Find a Call object by name. For example, @@ -104,28 +110,27 @@ case class WdlWorkflow(unqualifiedName: String, * Declarations within the workflow scope (including inside scatters and ifs) */ lazy val transitiveDeclarations = { - def isValid(d: Declaration) = { + def isValid(d: Declaration) = d.parent match { case Some(w: WdlWorkflow) if w == this => true case Some(_: Scatter) => true case Some(_: If) => true case _ => false } - } descendants collect { case declaration: Declaration if isValid(declaration) => declaration } } - def findDeclarationByName(name: String): Option[Declaration] = { + def findDeclarationByName(name: String): Option[Declaration] = transitiveDeclarations.find(_.unqualifiedName == name) - } def findWorkflowOutputByName(name: String, relativeTo: Scope) = { - val leftOutputs = if (outputs.contains(relativeTo)) - outputs.dropRight(outputs.size - outputs.indexOf(relativeTo)) - else outputs + val leftOutputs = + if (outputs.contains(relativeTo)) + outputs.dropRight(outputs.size - outputs.indexOf(relativeTo)) + else outputs leftOutputs.find(_.unqualifiedName == name) } @@ -142,7 +147,8 @@ case class WdlWorkflow(unqualifiedName: String, * @return a Map[FullyQualifiedName, WomType] representing the union * of all outputs from all `call`s within this workflow */ - lazy val expandedWildcardOutputs: Seq[WorkflowOutput] = if (isTopLevelWorkflow) calculateExpandedWildcardOutputs else Seq.empty + lazy val expandedWildcardOutputs: Seq[WorkflowOutput] = + if (isTopLevelWorkflow) calculateExpandedWildcardOutputs else Seq.empty private def calculateExpandedWildcardOutputs = { @@ -151,7 +157,12 @@ case class WdlWorkflow(unqualifiedName: String, throw new RuntimeException(s"output ${output.fullyQualifiedName} has no parent Scope") } - new WorkflowOutput(locallyQualifiedName, womType, WdlExpression.fromString(locallyQualifiedName), output.ast, Option(this)) + new WorkflowOutput(locallyQualifiedName, + womType, + WdlExpression.fromString(locallyQualifiedName), + output.ast, + Option(this) + ) } def toWorkflowOutputs(scope: Scope): Seq[WorkflowOutput] = { @@ -159,7 +170,7 @@ case class WdlWorkflow(unqualifiedName: String, val outputs = scope match { case call: WdlCall => call.outputs case outputDeclaration: Output => Seq(outputDeclaration) - // For non output declaration, don't return an array but return the raw value + // For non output declaration, don't return an array but return the raw value case otherDeclaration: DeclarationInterface => Seq(otherDeclaration) case _ => Seq.empty } @@ -169,7 +180,9 @@ case class WdlWorkflow(unqualifiedName: String, // No outputs means all outputs val effectiveOutputWildcards = if (hasEmptyOutputSection) { - calls map { call => WorkflowOutputWildcard(unqualifiedName + "." 
+ call.unqualifiedName, wildcard = true, call.ast) } toSeq + calls map { call => + WorkflowOutputWildcard(unqualifiedName + "." + call.unqualifiedName, wildcard = true, call.ast) + } toSeq } else workflowOutputWildcards effectiveOutputWildcards flatMap { output => @@ -180,13 +193,16 @@ case class WdlWorkflow(unqualifiedName: String, } namespace.resolveCallOrOutputOrDeclaration(outputFqn) match { case Some(call: WdlCall) if output.wildcard && calls.contains(call) => toWorkflowOutputs(call) - case Some(declaration: DeclarationInterface) if descendants.contains(declaration) => toWorkflowOutputs(declaration) + case Some(declaration: DeclarationInterface) if descendants.contains(declaration) => + toWorkflowOutputs(declaration) case _ => throw new SyntaxError(wdlSyntaxErrorFormatter.badOldStyleWorkflowOutput(output.ast)) } } } - override lazy val outputs: Seq[WorkflowOutput] = expandedWildcardOutputs ++ children collect { case o: WorkflowOutput => o } + override lazy val outputs: Seq[WorkflowOutput] = expandedWildcardOutputs ++ children collect { + case o: WorkflowOutput => o + } override def toString = s"[Workflow $fullyQualifiedName]" } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutput.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutput.scala index 0369618373e..878502a8adc 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutput.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutput.scala @@ -13,4 +13,9 @@ object WorkflowOutput { } } -case class WorkflowOutput(unqualifiedName: String, womType: WomType, requiredExpression: WdlExpression, ast: Ast, override val parent: Option[Scope]) extends Output +case class WorkflowOutput(unqualifiedName: String, + womType: WomType, + requiredExpression: WdlExpression, + ast: Ast, + override val parent: Option[Scope] +) extends Output diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutputWildcard.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutputWildcard.scala index 556595592b8..78f5b1d7f38 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutputWildcard.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowOutputWildcard.scala @@ -4,12 +4,11 @@ import wdl.draft2.parser.WdlParser.Ast case class WorkflowOutputWildcard(fqn: String, wildcard: Boolean, ast: Ast) { - def outputMatchesDeclaration(outputFqn: String, wildcardsAllowed: Boolean): Boolean = { + def outputMatchesDeclaration(outputFqn: String, wildcardsAllowed: Boolean): Boolean = if (wildcard) { val callFqn = outputFqn.substring(0, outputFqn.lastIndexOf('.')) wildcardsAllowed && callFqn.equals(fqn) } else { outputFqn.equals(fqn) } - } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowScoped.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowScoped.scala index 0b64be6cf52..0ed48e61c0a 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowScoped.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WorkflowScoped.scala @@ -1,7 +1,11 @@ package wdl.draft2.model trait WorkflowScoped extends Scope { - def parentWorkflow: WdlWorkflow = ancestry.collectFirst({ case w: WdlWorkflow => w }).getOrElse( - throw new IllegalStateException(s"Grammar constraint violation: $fullyQualifiedName should be contained in a workflow") - ) + def parentWorkflow: WdlWorkflow = ancestry + .collectFirst { case w: WdlWorkflow => w } + .getOrElse( + throw new IllegalStateException( + s"Grammar constraint 
violation: $fullyQualifiedName should be contained in a workflow" + ) + ) } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/ParameterCommandPart.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/ParameterCommandPart.scala index d1347a35a1e..aea8d35aefa 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/ParameterCommandPart.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/ParameterCommandPart.scala @@ -21,9 +21,13 @@ object ParameterCommandPart { (ast.getAttribute("key").sourceString, ast.getAttribute("value").sourceString) } toMap val expression = WdlExpression(ast.getAttribute("expr")) - if ((attributes.contains("true") && !attributes.contains("false")) || (attributes.contains("false") && !attributes.contains("true"))) { + if ( + (attributes.contains("true") && !attributes.contains("false")) || (attributes.contains("false") && !attributes + .contains("true")) + ) { // .head because we can't get here without there being at least one attribute - val firstAttr = ast.getAttribute("attributes").astListAsVector.head.asInstanceOf[Ast].getAttribute("key").asInstanceOf[Terminal] + val firstAttr = + ast.getAttribute("attributes").astListAsVector.head.asInstanceOf[Ast].getAttribute("key").asInstanceOf[Terminal] throw new SyntaxError(wdlSyntaxErrorFormatter.trueAndFalseAttributesAreRequired(firstAttr)) } new ParameterCommandPart(attributes, expression) @@ -31,11 +35,16 @@ object ParameterCommandPart { } case class ParameterCommandPart(attributes: Map[String, String], expression: WdlExpression) extends WdlCommandPart { - def attributesToString: String = if (attributes.nonEmpty) attributes.map({case (k,v) => s"$k=${WomString(v).toWomString}"}).mkString(" ") + " " else "" + def attributesToString: String = if (attributes.nonEmpty) + attributes.map { case (k, v) => s"$k=${WomString(v).toWomString}" }.mkString(" ") + " " + else "" override def toString: String = "${" + s"$attributesToString${expression.toWomString}" + "}" - override def instantiate(declarations: Seq[Declaration], inputs: Map[String, WomValue], functions: WdlFunctions[WomValue], - valueMapper: (WomValue) => WomValue): ErrorOr[List[InstantiatedCommand]] = { + override def instantiate(declarations: Seq[Declaration], + inputs: Map[String, WomValue], + functions: WdlFunctions[WomValue], + valueMapper: (WomValue) => WomValue + ): ErrorOr[List[InstantiatedCommand]] = { // This is a safety net. // In Cromwell's production code, optional declarations are always passed to instantiate, as WdlOptionalValue.none(type) if necessary. def lookupDeclaration(s: String) = declarations.collectFirst { @@ -48,17 +57,17 @@ case class ParameterCommandPart(attributes: Map[String, String], expression: Wdl // Note that evaluating this expression may have the side effect of causing a file to be created if `writeFile` // is invoked. That file will be written to an "engine-relative" location which may be in cloud storage. 
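    // Hedged example of the instantiation performed below (names are
    // hypothetical): for a command fragment `${true="--verbose" false="" flag}`
    // a WomBoolean input selects the matching attribute, so flag == true
    // instantiates to "--verbose" and flag == false to "".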
val evaluatedCommandPartExpression: ErrorOr[WomValue] = expression.evaluate(lookup, functions) match { - case Success(v) => v match { - case WomOptionalValue(_, opt) => opt.getOrElse(defaultString).validNel - case _ => v.validNel - } + case Success(v) => + v match { + case WomOptionalValue(_, opt) => opt.getOrElse(defaultString).validNel + case _ => v.validNel + } case Failure(OptionalNotSuppliedException(_)) => defaultString.validNel case Failure(e) => s"Could not evaluate expression: ${expression.toWomString}: ${e.getMessage}".invalidNel } // Create the stringified version of the command and record any file created in the process. - def instantiateCommand(value: WomValue): ErrorOr[InstantiatedCommand] = { - + def instantiateCommand(value: WomValue): ErrorOr[InstantiatedCommand] = (valueMapper(value), value) match { case (b: WomBoolean, _) if attributes.contains("true") && attributes.contains("false") => InstantiatedCommand(if (b.value) attributes.get("true").head else attributes.get("false").head).validNel @@ -66,7 +75,9 @@ case class ParameterCommandPart(attributes: Map[String, String], expression: Wdl // Files generated by writeFiles have "engine-relative" paths which will be different from the container paths // calculated by `valueMapper`. "engine-relative" may mean either the non-Docker container path on the host // running Cromwell, or a cloud path. Capture these newly created files and their engine paths here. - InstantiatedCommand(commandString = f.valueString, createdFiles = List(CommandSetupSideEffectFile(unmappedFile))).validNel + InstantiatedCommand(commandString = f.valueString, + createdFiles = List(CommandSetupSideEffectFile(unmappedFile)) + ).validNel case (f: WomFile, _) => InstantiatedCommand(f.valueString).validNel case (p: WomPrimitive, _) => InstantiatedCommand(p.valueString).validNel @@ -77,7 +88,6 @@ case class ParameterCommandPart(attributes: Map[String, String], expression: Wdl case _ => s"Could not string-ify value: $value".invalidNel } - } for { value <- evaluatedCommandPartExpression diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/StringCommandPart.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/StringCommandPart.scala index ecb522eb02f..aa003949628 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/StringCommandPart.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/StringCommandPart.scala @@ -10,6 +10,9 @@ import wom.values.WomValue case class StringCommandPart(literal: String) extends WdlCommandPart { override def toString: String = literal - override def instantiate(declarations: Seq[Declaration], inputsMap: Map[String, WomValue], functions: WdlFunctions[WomValue], - valueMapper: (WomValue) => WomValue): ErrorOr[List[InstantiatedCommand]] = List(InstantiatedCommand(literal)).validNel + override def instantiate(declarations: Seq[Declaration], + inputsMap: Map[String, WomValue], + functions: WdlFunctions[WomValue], + valueMapper: (WomValue) => WomValue + ): ErrorOr[List[InstantiatedCommand]] = List(InstantiatedCommand(literal)).validNel } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala index 39aea939c39..c2657b2eaf0 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala @@ -10,19 +10,25 @@ import wom.graph.LocalName import wom.values.WomValue 
import wom.{CommandPart, InstantiatedCommand} - trait WdlCommandPart extends CommandPart { def instantiate(declarations: Seq[Declaration], inputsMap: Map[String, WomValue], functions: WdlFunctions[WomValue], - valueMapper: WomValue => WomValue): ErrorOr[List[InstantiatedCommand]] + valueMapper: WomValue => WomValue + ): ErrorOr[List[InstantiatedCommand]] override def instantiate(inputsMap: Map[LocalName, WomValue], functions: IoFunctionSet, valueMapper: WomValue => WomValue, - runtimeEnvironment: RuntimeEnvironment): ErrorOr[List[InstantiatedCommand]] = { - val wdlFunctions = WdlStandardLibraryFunctions.fromIoFunctionSet(functions, FileSizeLimitationConfig.fileSizeLimitationConfig) - instantiate(Seq.empty, inputsMap.map({case (localName, value) => localName.value -> value}), wdlFunctions, valueMapper) + runtimeEnvironment: RuntimeEnvironment + ): ErrorOr[List[InstantiatedCommand]] = { + val wdlFunctions = + WdlStandardLibraryFunctions.fromIoFunctionSet(functions, FileSizeLimitationConfig.fileSizeLimitationConfig) + instantiate(Seq.empty, + inputsMap.map { case (localName, value) => localName.value -> value }, + wdlFunctions, + valueMapper + ) } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex1.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex1.scala index 6bf3c7e8316..906776c460b 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex1.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex1.scala @@ -5,21 +5,21 @@ import wdl.draft2.model.WdlNamespaceWithWorkflow object ex1 { def main(args: Array[String]): Unit = { val wdl = """ - |task a { - | command { ps } - |} - |workflow wf { - | call a - |}""".stripMargin + |task a { + | command { ps } + |} + |workflow wf { + | call a + |}""".stripMargin val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get println(s"Workflow: ${ns.workflow.unqualifiedName}") - ns.workflow.calls foreach {call => + ns.workflow.calls foreach { call => println(s"Call: ${call.unqualifiedName}") } - ns.tasks foreach {task => + ns.tasks foreach { task => println(s"Task: ${task.name}") println(s"Command: ${task.commandTemplate}") } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex2.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex2.scala index bfceb9754c2..f246712fa3c 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex2.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex2.scala @@ -7,26 +7,26 @@ import wom.ResolvedImportRecord object ex2 { def main(args: Array[String]): Unit = { val wdl = """ - |import "some_string" - |task a { - | command { ps } - |} - |workflow wf { - | call a - |}""".stripMargin + |import "some_string" + |task a { + | command { ps } + |} + |workflow wf { + | call a + |}""".stripMargin - def resolver(importString: String): Draft2ResolvedImportBundle = { + def resolver(importString: String): Draft2ResolvedImportBundle = importString match { - case "some_string" => Draft2ResolvedImportBundle("task imported { command {ps} }", ResolvedImportRecord("some_string")) + case "some_string" => + Draft2ResolvedImportBundle("task imported { command {ps} }", ResolvedImportRecord("some_string")) case s if s.startsWith("http://") => // issue HTTP request throw new UnsupportedOperationException("not implemented") } - } val ns = WdlNamespaceWithWorkflow.load(wdl, Seq(resolver _)).get - ns.tasks foreach {task => + ns.tasks foreach { task => println(s"Task: ${task.name}") } } diff --git 
a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex3.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex3.scala index f9f78754e49..98f80e0c90a 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex3.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex3.scala @@ -7,24 +7,24 @@ import wom.ResolvedImportRecord object ex3 { def main(args: Array[String]): Unit = { val wdl = """ - |import "some_string" as my_namespace - |task a { - | command { ps } - |} - |workflow wf { - | call a - |}""".stripMargin + |import "some_string" as my_namespace + |task a { + | command { ps } + |} + |workflow wf { + | call a + |}""".stripMargin - def resolver(importString: String): Draft2ResolvedImportBundle = { + def resolver(importString: String): Draft2ResolvedImportBundle = importString match { - case "some_string" => Draft2ResolvedImportBundle("task imported { command {ps} }", ResolvedImportRecord("some_string")) + case "some_string" => + Draft2ResolvedImportBundle("task imported { command {ps} }", ResolvedImportRecord("some_string")) case _ => throw new UnsupportedOperationException() } - } val ns = WdlNamespaceWithWorkflow.load(wdl, Seq(resolver _)).get - ns.tasks foreach {task => + ns.tasks foreach { task => println(s"Task: ${task.name}") } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex4.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex4.scala index 7573f414ace..7f1b3b803b2 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex4.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex4.scala @@ -5,13 +5,13 @@ import wdl.draft2.model.WdlNamespaceWithWorkflow object ex4 { def main(args: Array[String]): Unit = { val wdl = """ - |task a { - | command { ps } - |} - |workflow wf { - | call a - | call a as b - |}""".stripMargin + |task a { + | command { ps } + |} + |workflow wf { + | call a + | call a as b + |}""".stripMargin val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex5.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex5.scala index a796737f1fc..3c868058066 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex5.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex5.scala @@ -5,20 +5,20 @@ import wdl.draft2.model.{WdlNamespaceWithWorkflow, WdlTaskCall} object ex5 { def main(args: Array[String]): Unit = { val wdl = """ - |task a { - | command { ps } - | output { File procs = stdout() } - |} - | - |task b { - | File s - | command { wc -l ${s} } - |} - | - |workflow wf { - | call a - | call b {input: s=a.procs} - |}""".stripMargin + |task a { + | command { ps } + | output { File procs = stdout() } + |} + | + |task b { + | File s + | command { wc -l ${s} } + |} + | + |workflow wf { + | call a + | call b {input: s=a.procs} + |}""".stripMargin val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex6.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex6.scala index 01fbd4735b8..13de4743697 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex6.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex6.scala @@ -7,19 +7,18 @@ import wom.values.{WomString, WomValue} object ex6 { def main(args: Array[String]): Unit = { val wdl = """ - |workflow wf { - | String a = "foo" + "bar" - | String b = "hello " + variable - 
| String c = "hello " + other_variable - |}""".stripMargin + |workflow wf { + | String a = "foo" + "bar" + | String b = "hello " + variable + | String c = "hello " + other_variable + |}""".stripMargin val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get - def lookup(name: String): WomValue = { + def lookup(name: String): WomValue = name match { case "variable" => WomString("world") case _ => throw new NoSuchElementException } - } ns.workflow.declarations foreach { decl => val value = decl.expression.get.evaluate(lookup, NoFunctions) println(s"Declaration '${decl.toWdlString}' evaluates to: $value") diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex7.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex7.scala index 7bf0638cb58..6e347e16a43 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex7.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex7.scala @@ -11,31 +11,30 @@ import scala.util.{Success, Try} object ex7 { def main(args: Array[String]): Unit = { val wdl = s""" - |task a { - | String prefix - | Array[Int] ints - | command { - | python script.py $${write_lines(ints)} > $${prefix + ".out"} - | } - |} - |workflow wf { - | call a - |}""".stripMargin + |task a { + | String prefix + | Array[Int] ints + | command { + | python script.py $${write_lines(ints)} > $${prefix + ".out"} + | } + |} + |workflow wf { + | call a + |}""".stripMargin val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get val inputs = Map( "prefix" -> WomString("some_prefix"), - "ints" -> WomArray(WomArrayType(WomIntegerType), Seq(1,2,3,4,5).map(WomInteger.apply)) + "ints" -> WomArray(WomArrayType(WomIntegerType), Seq(1, 2, 3, 4, 5).map(WomInteger.apply)) ) class CustomFunctions extends WdlFunctions[WomValue] { - def write_lines(params: Seq[Try[WomValue]]): Try[WomValue] = { + def write_lines(params: Seq[Try[WomValue]]): Try[WomValue] = // Validate `params`, write the result to a file, return file path Success(WomSingleFile("/tmp/array.txt")) - } } - ns.taskCalls.find( _.unqualifiedName == "a") foreach { call => + ns.taskCalls.find(_.unqualifiedName == "a") foreach { call => val wdlFunctions: CustomFunctions = new CustomFunctions val evaluatedInputs = call.evaluateTaskInputs(inputs, wdlFunctions).get println(call.task.instantiateCommand(evaluatedInputs, wdlFunctions).toTry.get) diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex8.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex8.scala index 40fe900b670..12a69a10482 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex8.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/examples/ex8.scala @@ -20,7 +20,7 @@ object ex8 { println(ast.toPrettyString) /* Traverse the tree to find all Task definitions */ - AstTools.findAsts(ast, "Task") foreach {ast => + AstTools.findAsts(ast, "Task") foreach { ast => println(s"Task name: ${ast.getAttribute("name").sourceString}") } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/LookupException.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/LookupException.scala index 4222d289b86..2790746a0d2 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/LookupException.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/LookupException.scala @@ -9,19 +9,28 @@ sealed trait LookupException { this: Exception => } /** * When a variable does not reference any known scope in the namespace. 
*/ -sealed abstract case class VariableNotFoundException(variable: String, quoteName: Boolean = true) extends Exception(s"Variable $variable not found") with LookupException +sealed abstract case class VariableNotFoundException(variable: String, quoteName: Boolean = true) + extends Exception(s"Variable $variable not found") + with LookupException object VariableNotFoundException { def apply(variable: String): VariableNotFoundException = new VariableNotFoundException(s"'$variable'") {} - def apply(variable: String, variableType: WomType): VariableNotFoundException= new VariableNotFoundException(s"'$variable': ${variableType.stableName}") {} - def apply(declaration: Declaration): VariableNotFoundException = VariableNotFoundException.apply(declaration.fullyQualifiedName, declaration.womType) + def apply(variable: String, variableType: WomType): VariableNotFoundException = new VariableNotFoundException( + s"'$variable': ${variableType.stableName}" + ) {} + def apply(declaration: Declaration): VariableNotFoundException = + VariableNotFoundException.apply(declaration.fullyQualifiedName, declaration.womType) } /** * When an unexpected exception occurred while attempting to resolve a variable. * Might act a single exception or aggregate multiple exceptions. */ -class VariableLookupException(override val exceptionContext: String, override val throwables: List[Throwable] = List.empty) extends RuntimeException with ThrowableAggregation with LookupException +class VariableLookupException(override val exceptionContext: String, + override val throwables: List[Throwable] = List.empty +) extends RuntimeException + with ThrowableAggregation + with LookupException /** * Raised when attempting to resolve a variable in a scatter but the index could not be found. @@ -31,4 +40,5 @@ final case class ScatterIndexNotFound(message: String) extends VariableLookupExc /** * Raised when attempting to resolve an output variable but the output resolver failed to return a value. 
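 * Illustrative (hypothetical call name): resolving outputs of `wf.t` at shard
 * index Some(0) yields the message "Could not find outputs for call wf.t at index Some(0)".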
*/ -final case class OutputVariableLookupException(node: WdlGraphNode, index: Option[Int]) extends VariableLookupException(s"Could not find outputs for call ${node.fullyQualifiedName} at index $index") +final case class OutputVariableLookupException(node: WdlGraphNode, index: Option[Int]) + extends VariableLookupException(s"Could not find outputs for call ${node.fullyQualifiedName} at index $index") diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/UnsatisfiedInputException.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/UnsatisfiedInputException.scala index c35906b8228..a4cc56bd30b 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/UnsatisfiedInputException.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/UnsatisfiedInputException.scala @@ -1,3 +1,3 @@ package wdl.draft2.model.exception -class UnsatisfiedInputException(message: String) extends Exception(message) \ No newline at end of file +class UnsatisfiedInputException(message: String) extends Exception(message) diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/ValidationException.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/ValidationException.scala index 1d1580519b6..d7933d4d089 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/ValidationException.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/exception/ValidationException.scala @@ -2,4 +2,4 @@ package wdl.draft2.model.exception import common.exception.ThrowableAggregation -case class ValidationException(exceptionContext: String, throwables: List[Throwable]) extends ThrowableAggregation \ No newline at end of file +case class ValidationException(exceptionContext: String, throwables: List[Throwable]) extends ThrowableAggregation diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/Evaluator.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/Evaluator.scala index 221f899e0aa..4594f6200c1 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/Evaluator.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/Evaluator.scala @@ -12,4 +12,3 @@ trait Evaluator { def functions: Functions def evaluate(ast: AstNode): Try[T] } - diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/FileEvaluator.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/FileEvaluator.scala index 224ced12d50..7b01cecfa17 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/FileEvaluator.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/FileEvaluator.scala @@ -37,21 +37,20 @@ case class FileEvaluator(valueEvaluator: ValueEvaluator, coerceTo: WomType = Wom private def evalValue(ast: AstNode): Try[WomValue] = valueEvaluator.evaluate(ast) - private def evalValueToWdlFile(ast: AstNode): Try[WomFile] = { + private def evalValueToWdlFile(ast: AstNode): Try[WomFile] = evalValue(ast) match { case Success(p: WomPrimitive) => Success(WomSingleFile(p.valueString)) - case Success(_) => Failure(new WomExpressionException(s"Expecting a primitive type from AST:\n${ast.toPrettyString}")) + case Success(_) => + Failure(new WomExpressionException(s"Expecting a primitive type from AST:\n${ast.toPrettyString}")) case Failure(e) => Failure(e) } - } - def evaluate(ast: AstNode, anticipatedType: WomType = coerceTo): Try[Seq[WomFile]] = { + def evaluate(ast: AstNode, anticipatedType: WomType = coerceTo): Try[Seq[WomFile]] = 
valueEvaluator.evaluate(ast) match { - // If the ast can successfully be evaluated, then just find the WdlFiles that it contains + // If the ast can successfully be evaluated, then just find the WdlFiles that it contains case Success(v) => Success(FileEvaluatorUtil.findFilesToDelocalize(v, coerceTo)) case Failure(_) => evaluateRecursive(ast, anticipatedType) } - } /** * Recursively traverse the ast and collect asts that evaluate successfully to a WdlFile @@ -69,7 +68,7 @@ case class FileEvaluator(valueEvaluator: ValueEvaluator, coerceTo: WomType = Wom * Because we know that read_string takes a file parameter, WdlString("out") is transformed to a WdlFile (see evalValueToWdlFile) * We can now deduce that this task is expected to produce a WdlFile called "out" */ - private def evaluateRecursive(ast: AstNode, anticipatedType: WomType): Try[Seq[WomFile]] = { + private def evaluateRecursive(ast: AstNode, anticipatedType: WomType): Try[Seq[WomFile]] = ast match { case a: Ast if a.isGlobFunctionCall => evalValueToWdlFile(a.params.head) map { wdlFile => Seq(WomGlobFile(wdlFile.value)) } @@ -83,17 +82,18 @@ case class FileEvaluator(valueEvaluator: ValueEvaluator, coerceTo: WomType = Wom case a: Ast if a.isBinaryOperator => evalValueToWdlFile(a) match { case Success(f: WomFile) => Success(Seq(f)) - case _ => for { - left <- evaluateRecursive(a.getAttribute("lhs"), anticipatedType) - right <- evaluateRecursive(a.getAttribute("rhs"), anticipatedType) - } yield left ++ right + case _ => + for { + left <- evaluateRecursive(a.getAttribute("lhs"), anticipatedType) + right <- evaluateRecursive(a.getAttribute("rhs"), anticipatedType) + } yield left ++ right } case a: Ast if a.isUnaryOperator => evalValueToWdlFile(a) match { case Success(f: WomFile) => Success(Seq(f)) case _ => evaluateRecursive(a.getAttribute("expression"), anticipatedType) match { - case Success(a:Seq[WomFile]) => Success(a) + case Success(a: Seq[WomFile]) => Success(a) case _ => Failure(new WomExpressionException(s"Could not evaluate:\n${a.toPrettyString}")) } } @@ -106,10 +106,11 @@ case class FileEvaluator(valueEvaluator: ValueEvaluator, coerceTo: WomType = Wom case a: Ast if a.isArrayOrMapLookup => evalValue(a) match { case Success(f: WomFile) => Success(Seq(f)) - case _ => for { - left <- evaluateRecursive(a.getAttribute("lhs"), anticipatedType) - right <- evaluateRecursive(a.getAttribute("rhs"), anticipatedType) - } yield left ++ right + case _ => + for { + left <- evaluateRecursive(a.getAttribute("lhs"), anticipatedType) + right <- evaluateRecursive(a.getAttribute("rhs"), anticipatedType) + } yield left ++ right } case a: Ast if a.isMemberAccess => evalValue(a) match { @@ -124,7 +125,12 @@ case class FileEvaluator(valueEvaluator: ValueEvaluator, coerceTo: WomType = Wom case Success(v) => Success(v.flatten) case f => f.map(_.flatten) } - case _ => Failure(new WomExpressionException(s"Failed to parse $a for files. Found an unexpected Array literal but anticipated a ${anticipatedType.stableName}")) + case _ => + Failure( + new WomExpressionException( + s"Failed to parse $a for files. 
Found an unexpected Array literal but anticipated a ${anticipatedType.stableName}"
+            )
+          )
       }
     case a: Ast if a.isTupleLiteral =>
       val unevaluatedElements = a.getAttribute("values").astListAsVector
@@ -137,7 +143,12 @@ case class FileEvaluator(valueEvaluator: ValueEvaluator, coerceTo: WomType = Wom
             left <- evaluate(unevaluatedElements.head, leftType)
             right <- evaluate(unevaluatedElements(1), rightType)
           } yield left ++ right
-        case _ => Failure(new WomExpressionException(s"Failed to parse $a for files. Found an unexpected Pair literal but anticipated a ${anticipatedType.stableName}"))
+        case _ =>
+          Failure(
+            new WomExpressionException(
+              s"Failed to parse $a for files. Found an unexpected Pair literal but anticipated a ${anticipatedType.stableName}"
+            )
+          )
       }
     } else {
@@ -152,20 +163,22 @@ case class FileEvaluator(valueEvaluator: ValueEvaluator, coerceTo: WomType = Wom
               key -> value
             }
 
-            val flattenedTries = evaluatedMap flatMap { case (k,v) => Seq(k,v) }
-            flattenedTries partition {_.isSuccess} match {
+            val flattenedTries = evaluatedMap flatMap { case (k, v) => Seq(k, v) }
+            flattenedTries partition { _.isSuccess } match {
               case (_, failures) if failures.nonEmpty =>
                 val message = failures.collect { case f: Failure[_] => f.exception.getMessage }.mkString("\n")
                 Failure(new WomExpressionException(s"Could not evaluate expression:\n$message"))
               case (successes, _) => Success(successes.flatMap(_.get))
             }
-          case _ => Failure(new WomExpressionException(s"Failed to parse $a for files. Found an unexpected Map literal but anticipated a ${anticipatedType.stableName}"))
+          case _ =>
+            Failure(
+              new WomExpressionException(
+                s"Failed to parse $a for files. Found an unexpected Map literal but anticipated a ${anticipatedType.stableName}"
+              )
+            )
         }
-      case _ => Success(Seq.empty[WomFile])
     }
-  }
 }
-
diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/TypeEvaluator.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/TypeEvaluator.scala
index be3fe3cb9a6..ac8fe551cc1 100644
--- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/TypeEvaluator.scala
+++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/TypeEvaluator.scala
@@ -11,7 +11,10 @@ import wom.types._
 
 import scala.util.{Failure, Success, Try}
 
-case class TypeEvaluator(override val lookup: String => WomType, override val functions: WdlFunctions[WomType], from: Option[Scope] = None) extends Evaluator {
+case class TypeEvaluator(override val lookup: String => WomType,
+                         override val functions: WdlFunctions[WomType],
+                         from: Option[Scope] = None
+) extends Evaluator {
   override type T = WomType
 
   override def evaluate(ast: AstNode): Try[WomType] = ast match {
@@ -25,35 +28,36 @@ case class TypeEvaluator(override val lookup: String => WomType, override val fu
       val lhs = evaluate(a.getAttribute("lhs"))
       val rhs = evaluate(a.getAttribute("rhs"))
       a.getName match {
-        case "Add" => for(l <- lhs; r <- rhs) yield l.add(r).get
-        case "Subtract" => for(l <- lhs; r <- rhs) yield l.subtract(r).get
-        case "Multiply" => for(l <- lhs; r <- rhs) yield l.multiply(r).get
-        case "Divide" => for(l <- lhs; r <- rhs) yield l.divide(r).get
-        case "Remainder" => for(l <- lhs; r <- rhs) yield l.mod(r).get
-        case "Equals" => for(l <- lhs; r <- rhs) yield l.equalsType(r).get
-        case "NotEquals" => for(l <- lhs; r <- rhs) yield l.notEquals(r).get
-        case "LessThan" => for(l <- lhs; r <- rhs) yield l.lessThan(r).get
-        case "LessThanOrEqual" => for(l <- lhs; r <- rhs) yield l.lessThanOrEqual(r).get
-        case "GreaterThan" => for(l <- lhs; r <- rhs) yield l.greaterThan(r).get
-        case "GreaterThanOrEqual" => for(l <- lhs; r <- rhs) yield l.greaterThanOrEqual(r).get
-        case "LogicalOr" => for(l <- lhs; r <- rhs) yield l.or(r).get
-        case "LogicalAnd" => for(l <- lhs; r <- rhs) yield l.and(r).get
+        case "Add" => for (l <- lhs; r <- rhs) yield l.add(r).get
+        case "Subtract" => for (l <- lhs; r <- rhs) yield l.subtract(r).get
+        case "Multiply" => for (l <- lhs; r <- rhs) yield l.multiply(r).get
+        case "Divide" => for (l <- lhs; r <- rhs) yield l.divide(r).get
+        case "Remainder" => for (l <- lhs; r <- rhs) yield l.mod(r).get
+        case "Equals" => for (l <- lhs; r <- rhs) yield l.equalsType(r).get
+        case "NotEquals" => for (l <- lhs; r <- rhs) yield l.notEquals(r).get
+        case "LessThan" => for (l <- lhs; r <- rhs) yield l.lessThan(r).get
+        case "LessThanOrEqual" => for (l <- lhs; r <- rhs) yield l.lessThanOrEqual(r).get
+        case "GreaterThan" => for (l <- lhs; r <- rhs) yield l.greaterThan(r).get
+        case "GreaterThanOrEqual" => for (l <- lhs; r <- rhs) yield l.greaterThanOrEqual(r).get
+        case "LogicalOr" => for (l <- lhs; r <- rhs) yield l.or(r).get
+        case "LogicalAnd" => for (l <- lhs; r <- rhs) yield l.and(r).get
         case _ => Failure(new WomExpressionException(s"Invalid operator: ${a.getName}"))
       }
     case a: Ast if a.isUnaryOperator =>
       val expression = evaluate(a.getAttribute("expression"))
       a.getName match {
-        case "LogicalNot" => for(e <- expression) yield e.not.get
-        case "UnaryPlus" => for(e <- expression) yield e.unaryPlus.get
-        case "UnaryNegation" => for(e <- expression) yield e.unaryMinus.get
+        case "LogicalNot" => for (e <- expression) yield e.not.get
+        case "UnaryPlus" => for (e <- expression) yield e.unaryPlus.get
+        case "UnaryNegation" => for (e <- expression) yield e.unaryMinus.get
         case _ => Failure(new WomExpressionException(s"Invalid operator: ${a.getName}"))
       }
     case TernaryIf(condition, ifTrue, ifFalse) =>
       evaluate(condition) flatMap {
-        case WomBooleanType => for {
-          ifTrueType <- evaluate(ifTrue)
-          ifFalseType <- evaluate(ifFalse)
-        } yield WomType.lowestCommonSubtype(Seq(ifTrueType, ifFalseType))
+        case WomBooleanType =>
+          for {
+            ifTrueType <- evaluate(ifTrue)
+            ifFalseType <- evaluate(ifFalse)
+          } yield WomType.lowestCommonSubtype(Seq(ifTrueType, ifFalseType))
         case _ => Failure(new WomExpressionException("The condition of a ternary 'if' must be a Boolean."))
       }
     case a: Ast if a.isArrayLiteral =>
@@ -71,14 +75,14 @@ case class TypeEvaluator(override val lookup: String => WomType, override val fu
           key -> value
         }
 
-      val flattenedTries = evaluatedMap flatMap { case (k,v) => Seq(k,v) }
-      flattenedTries partition {_.isSuccess} match {
+      val flattenedTries = evaluatedMap flatMap { case (k, v) => Seq(k, v) }
+      flattenedTries partition { _.isSuccess } match {
         case (_, failures) if failures.nonEmpty =>
           val message = failures.collect { case f: Failure[_] => f.exception.getMessage }.mkString("\n")
           Failure(new WomExpressionException(s"Could not evaluate expression:\n$message"))
         case good @ _ =>
-          val keyType = WomType.homogeneousTypeFromTypes(evaluatedMap map { case (k, _) => k.get} )
-          val valueType = WomType.homogeneousTypeFromTypes(evaluatedMap map { case (_, v) => v.get} )
+          val keyType = WomType.homogeneousTypeFromTypes(evaluatedMap map { case (k, _) => k.get })
+          val valueType = WomType.homogeneousTypeFromTypes(evaluatedMap map { case (_, v) => v.get })
           Success(WomMapType(keyType, valueType))
       }
     case a: Ast if a.isMemberAccess =>
@@ -102,10 +106,12 @@ case class TypeEvaluator(override val lookup: String => WomType, override val fu
             case
"right" => Success(rightType) } case WomObjectType => Success(WomAnyType) - case ns: WdlNamespace => Success(lookup(ns.importedAs.map{ n => s"$n.${rhs.getSourceString}" }.getOrElse(rhs.getSourceString))) - case _ => Failure(new WomExpressionException("Left-hand side of expression must be a WdlObject or Namespace")) - } recoverWith { - case _ => Try(lookup(a.getAttribute("lhs").sourceString + "." + rhs.sourceString)) + case ns: WdlNamespace => + Success(lookup(ns.importedAs.map(n => s"$n.${rhs.getSourceString}").getOrElse(rhs.getSourceString))) + case _ => + Failure(new WomExpressionException("Left-hand side of expression must be a WdlObject or Namespace")) + } recoverWith { case _ => + Try(lookup(a.getAttribute("lhs").sourceString + "." + rhs.sourceString)) } case _ => Failure(new WomExpressionException("Right-hand side of expression must be identifier")) } @@ -113,7 +119,12 @@ case class TypeEvaluator(override val lookup: String => WomType, override val fu (evaluate(a.getAttribute("lhs")), evaluate(a.getAttribute("rhs"))) match { case (Success(a: WomArrayType), Success(WomIntegerType)) => Success(a.memberType) case (Success(m: WomMapType), Success(_: WomType)) => Success(m.valueType) - case (Success(otherLhs), Success(_)) => Failure(new WomExpressionException(s"Invalid indexing target. You cannot index a value of type '${otherLhs.stableName}'")) + case (Success(otherLhs), Success(_)) => + Failure( + new WomExpressionException( + s"Invalid indexing target. You cannot index a value of type '${otherLhs.stableName}'" + ) + ) case (f: Failure[_], _) => f case (_, f: Failure[_]) => f } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/ValueEvaluator.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/ValueEvaluator.scala index e57ead11abb..c60cb511b56 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/ValueEvaluator.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/ValueEvaluator.scala @@ -16,13 +16,15 @@ object ValueEvaluator { val InterpolationTagPattern = "\\$\\{\\s*([^\\}]*)\\s*\\}".r } -case class ValueEvaluator(override val lookup: String => WomValue, override val functions: WdlFunctions[WomValue]) extends Evaluator { +case class ValueEvaluator(override val lookup: String => WomValue, override val functions: WdlFunctions[WomValue]) + extends Evaluator { override type T = WomValue private val InterpolationTagPattern = "\\$\\{\\s*([^\\}]*)\\s*\\}".r - - private def interpolate(strToProcess: String, resultSoFar: Try[WomString] = Success(WomString(""))): Try[WomString] = { + private def interpolate(strToProcess: String, + resultSoFar: Try[WomString] = Success(WomString("")) + ): Try[WomString] = { def evaluateTag(tag: String): Try[WomString] = { val expr = WdlExpression.fromString(tag.substring(2, tag.length - 1)) @@ -57,40 +59,47 @@ case class ValueEvaluator(override val lookup: String => WomValue, override val val lhs = evaluate(a.getAttribute("lhs")) val rhs = () => evaluate(a.getAttribute("rhs")) a.getName match { - case "Add" => for(l <- lhs; r <- rhs()) yield l.add(r).get - case "Subtract" => for(l <- lhs; r <- rhs()) yield l.subtract(r).get - case "Multiply" => for(l <- lhs; r <- rhs()) yield l.multiply(r).get - case "Divide" => for(l <- lhs; r <- rhs()) yield l.divide(r).get - case "Remainder" => for(l <- lhs; r <- rhs()) yield l.mod(r).get - case "Equals" => for(l <- lhs; r <- rhs()) yield l.equals(r).get - case "NotEquals" => for(l <- lhs; r <- rhs()) yield l.notEquals(r).get - case "LessThan" => 
for(l <- lhs; r <- rhs()) yield l.lessThan(r).get - case "LessThanOrEqual" => for(l <- lhs; r <- rhs()) yield l.lessThanOrEqual(r).get - case "GreaterThan" => for(l <- lhs; r <- rhs()) yield l.greaterThan(r).get - case "GreaterThanOrEqual" => for(l <- lhs; r <- rhs()) yield l.greaterThanOrEqual(r).get - case "LogicalOr" => lhs flatMap { - case WomBoolean(true) => Success(WomBoolean(true)) - case b => rhs() flatMap b.or - } - case "LogicalAnd" => lhs flatMap { - case WomBoolean(false) => Success(WomBoolean(false)) - case b => rhs() flatMap b.and - } + case "Add" => for (l <- lhs; r <- rhs()) yield l.add(r).get + case "Subtract" => for (l <- lhs; r <- rhs()) yield l.subtract(r).get + case "Multiply" => for (l <- lhs; r <- rhs()) yield l.multiply(r).get + case "Divide" => for (l <- lhs; r <- rhs()) yield l.divide(r).get + case "Remainder" => for (l <- lhs; r <- rhs()) yield l.mod(r).get + case "Equals" => for (l <- lhs; r <- rhs()) yield l.equals(r).get + case "NotEquals" => for (l <- lhs; r <- rhs()) yield l.notEquals(r).get + case "LessThan" => for (l <- lhs; r <- rhs()) yield l.lessThan(r).get + case "LessThanOrEqual" => for (l <- lhs; r <- rhs()) yield l.lessThanOrEqual(r).get + case "GreaterThan" => for (l <- lhs; r <- rhs()) yield l.greaterThan(r).get + case "GreaterThanOrEqual" => for (l <- lhs; r <- rhs()) yield l.greaterThanOrEqual(r).get + case "LogicalOr" => + lhs flatMap { + case WomBoolean(true) => Success(WomBoolean(true)) + case b => rhs() flatMap b.or + } + case "LogicalAnd" => + lhs flatMap { + case WomBoolean(false) => Success(WomBoolean(false)) + case b => rhs() flatMap b.and + } case _ => Failure(new WomExpressionException(s"Invalid operator: ${a.getName}")) } case a: Ast if a.isUnaryOperator => val expression = evaluate(a.getAttribute("expression")) a.getName match { - case "LogicalNot" => for(e <- expression) yield e.not.get - case "UnaryPlus" => for(e <- expression) yield e.unaryPlus.get - case "UnaryNegation" => for(e <- expression) yield e.unaryMinus.get + case "LogicalNot" => for (e <- expression) yield e.not.get + case "UnaryPlus" => for (e <- expression) yield e.unaryPlus.get + case "UnaryNegation" => for (e <- expression) yield e.unaryMinus.get case _ => Failure(new WomExpressionException(s"Invalid operator: ${a.getName}")) } case TernaryIf(condition, ifTrue, ifFalse) => evaluate(condition) flatMap { case WomBoolean(true) => evaluate(ifTrue) case WomBoolean(false) => evaluate(ifFalse) - case other => Failure(new WomExpressionException("'if' expression must be given a boolean argument but got: " + other.toWomString)) + case other => + Failure( + new WomExpressionException( + "'if' expression must be given a boolean argument but got: " + other.toWomString + ) + ) } case a: Ast if a.isArrayLiteral => val evaluatedElements = a.getAttribute("values").astListAsVector map evaluate @@ -134,56 +143,66 @@ case class ValueEvaluator(override val lookup: String => WomValue, override val } case a: Ast if a.isMemberAccess => a.getAttribute("rhs") match { - case rhs:Terminal if rhs.getTerminalStr == "identifier" => + case rhs: Terminal if rhs.getTerminalStr == "identifier" => val memberAccessAsString = s"${a.getAttribute("lhs").sourceString}.${a.getAttribute("rhs").sourceString}" - Try(lookup(memberAccessAsString)).recoverWith { - case _ => - evaluate(a.getAttribute("lhs")).flatMap { - case o: WomObjectLike => - o.values.get(rhs.getSourceString) match { - case Some(v:WomValue) => Success(v) - case None => - o match { - // o is a CallOutputsObject which means we failed to find an 
output value for rhs - // Give a specific error message based on the type of Callable - case callOutputObject: WdlCallOutputsObject => - callOutputObject.call match { - case workflowCall: WdlWorkflowCall => - Failure(new WomExpressionException( + Try(lookup(memberAccessAsString)).recoverWith { case _ => + evaluate(a.getAttribute("lhs")).flatMap { + case o: WomObjectLike => + o.values.get(rhs.getSourceString) match { + case Some(v: WomValue) => Success(v) + case None => + o match { + // o is a CallOutputsObject which means we failed to find an output value for rhs + // Give a specific error message based on the type of Callable + case callOutputObject: WdlCallOutputsObject => + callOutputObject.call match { + case workflowCall: WdlWorkflowCall => + Failure( + new WomExpressionException( s"""${rhs.getSourceString} is not declared as an output of the sub workflow ${workflowCall.calledWorkflow.fullyQualifiedName}. |If you want to use workflow ${workflowCall.calledWorkflow.fullyQualifiedName} as a sub workflow, make sure that its output section is up to date with the latest syntax. |See the WDL specification for how to write outputs: https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#outputs""".stripMargin - )) - case taskCall: WdlTaskCall => - Failure(new WomExpressionException( + ) + ) + case taskCall: WdlTaskCall => + Failure( + new WomExpressionException( s"""${rhs.getSourceString} is not declared as an output of the task ${taskCall.task.fullyQualifiedName}. |Make sure to declare it as an output to be able to use it in the workflow.""".stripMargin - )) - case unknownCall => - Failure(new WomExpressionException( + ) + ) + case unknownCall => + Failure( + new WomExpressionException( s"Could not find key ${rhs.getSourceString} in Call ${unknownCall.fullyQualifiedName} of unknown type." - )) - } - case _ => Failure(new WomExpressionException(s"Could not find key ${rhs.getSourceString} in WdlObject")) - } - } - case array: WomArray if array.womType == WomArrayType(WomObjectType) => - /* - * This case is for slicing an Array[Object], used mainly for scatter-gather. - * For example, if 'call foo' was in a scatter block, foo's outputs (e.g. Int x) - * would be an Array[Int]. If a downstream call has an input expression "foo.x", - * then 'foo' would evaluate to an Array[Objects] and foo.x would result in an - * Array[Int] - */ - Success(array map {_.asInstanceOf[WomObject].values.get(rhs.sourceString).get}) - case p: WomPair => - val identifier = rhs.getSourceString - if (identifier.equals("left")) Success(p.left) - else if (identifier.equals("right")) Success(p.right) - else Failure(new WomExpressionException("A pair only has the members: 'left' and 'right'")) - case ns: WdlNamespace => Success(lookup(ns.importedAs.map{ n => s"$n.${rhs.getSourceString}" }.getOrElse(rhs.getSourceString))) - case _ => Failure(new WomExpressionException("Left-hand side of expression must be a WdlObject or Namespace")) - } + ) + ) + } + case _ => + Failure(new WomExpressionException(s"Could not find key ${rhs.getSourceString} in WdlObject")) + } + } + case array: WomArray if array.womType == WomArrayType(WomObjectType) => + /* + * This case is for slicing an Array[Object], used mainly for scatter-gather. + * For example, if 'call foo' was in a scatter block, foo's outputs (e.g. Int x) + * would be an Array[Int]. 
If a downstream call has an input expression "foo.x",
+             * then 'foo' would evaluate to an Array[Objects] and foo.x would result in an
+             * Array[Int]
+             */
+            Success(array map { _.asInstanceOf[WomObject].values.get(rhs.sourceString).get })
+          case p: WomPair =>
+            val identifier = rhs.getSourceString
+            if (identifier.equals("left")) Success(p.left)
+            else if (identifier.equals("right")) Success(p.right)
+            else Failure(new WomExpressionException("A pair only has the members: 'left' and 'right'"))
+          case ns: WdlNamespace =>
+            Success(
+              lookup(ns.importedAs.map(n => s"$n.${rhs.getSourceString}").getOrElse(rhs.getSourceString))
+            )
+          case _ =>
+            Failure(new WomExpressionException("Left-hand side of expression must be a WdlObject or Namespace"))
+        }
         }
       case _ => Failure(new WomExpressionException("Right-hand side of expression must be identifier"))
     }
@@ -193,8 +212,13 @@ case class ValueEvaluator(override val lookup: String => WomValue, override val
       (mapOrArray, index) match {
         case (Success(a: WomArray), Success(i: WomInteger)) =>
           Try(a.value(i.value)) match {
-            case s:Success[WomValue] => s
-            case Failure(ex) => Failure(new WomExpressionException(s"Failed to find index $index on array:\n\n$mapOrArray\n\n${ex.getMessage}"))
+            case s: Success[WomValue] => s
+            case Failure(ex) =>
+              Failure(
+                new WomExpressionException(
+                  s"Failed to find index $index on array:\n\n$mapOrArray\n\n${ex.getMessage}"
+                )
+              )
           }
         case (Success(m: WomMap), Success(v: WomValue)) =>
           m.value.get(v) match {
@@ -212,4 +236,3 @@
       }
     }
   }
-
diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlFunctions.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlFunctions.scala
index 087210de953..cd00d6fd754 100644
--- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlFunctions.scala
+++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlFunctions.scala
@@ -15,10 +15,10 @@ trait WdlFunctions[T] {
    * Extract a single `WomValue` from the specified `Seq`, returning `Failure` if the parameters
    * represent something other than a single `WomValue`.
    */
-  def extractSingleArgument(functionName: String, params: Seq[Try[T]]): Try[T] = {
-    if (params.length != 1) Failure(new UnsupportedOperationException(s"Expected one argument for $functionName, got ${params.length}"))
+  def extractSingleArgument(functionName: String, params: Seq[Try[T]]): Try[T] =
+    if (params.length != 1)
+      Failure(new UnsupportedOperationException(s"Expected one argument for $functionName, got ${params.length}"))
     else params.head
-  }
 
   /*
    * Below are methods that can be overridden, if necessary, by engine implementations of the standard library
   */
 
  /**
    * Path where to write files created by standard functions (write_*).
    */
-  def tempFilePath: String = throw new UnsupportedOperationException("write_* functions are not supported by this implementation")
+  def tempFilePath: String = throw new UnsupportedOperationException(
+    "write_* functions are not supported by this implementation"
+  )
 }
diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala
index 5cd98ad7a61..0a9d6480174 100644
--- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala
+++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala
@@ -34,75 +34,97 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] {
 
   protected def fileSizeLimitationConfig: FileSizeLimitationConfig
 
-  private def writeContent(baseName: String, content: String): Try[WomFile] = writeFile(s"${baseName}_${content.md5Sum}.tmp", content)
+  private def writeContent(baseName: String, content: String): Try[WomFile] =
+    writeFile(s"${baseName}_${content.md5Sum}.tmp", content)
 
-  private def writeToTsv[A <: WomValue with TsvSerializable](functionName: String, params: Seq[Try[WomValue]], defaultIfOptionalEmpty: A): Try[WomFile] = {
+  private def writeToTsv[A <: WomValue with TsvSerializable](functionName: String,
+                                                             params: Seq[Try[WomValue]],
+                                                             defaultIfOptionalEmpty: A
+  ): Try[WomFile] =
     for {
      singleArgument <- extractSingleArgument(functionName, params)
      serialized <- ValueEvaluation.serializeWomValue(functionName, singleArgument, defaultIfOptionalEmpty)
      file <- writeContent(functionName, serialized)
    } yield file
-  }
 
-  def read_objects(params: Seq[Try[WomValue]]): Try[WomArray] = extractObjects("read_objects", params) map { a => WomArray(WomArrayType(WomObjectType), a.toIndexedSeq) }
-  def read_string(params: Seq[Try[WomValue]]): Try[WomString] = readContentsFromSingleFileParameter("read_string", params, fileSizeLimitationConfig.readStringLimit).map(s => WomString(s.trim))
-  def read_json(params: Seq[Try[WomValue]]): Try[WomValue] = readContentsFromSingleFileParameter("read_json", params, fileSizeLimitationConfig.readJsonLimit).map(_.parseJson).flatMap(WomObjectType.coerceRawValue)
-  def read_int(params: Seq[Try[WomValue]]): Try[WomInteger] = readContentsFromSingleFileParameter("read_int", params, fileSizeLimitationConfig.readIntLimit).map(s => WomString(s.trim)) map { s => WomInteger(s.value.trim.toInt) }
-  def read_float(params: Seq[Try[WomValue]]): Try[WomFloat] = readContentsFromSingleFileParameter("read_float", params, fileSizeLimitationConfig.readFloatLimit).map(s => WomString(s.trim)) map { s => WomFloat(s.value.trim.toDouble) }
-
-  def write_lines(params: Seq[Try[WomValue]]): Try[WomFile] = writeToTsv("write_lines", params, WomArray(WomArrayType(WomStringType), List.empty[WomValue]))
-  def write_map(params: Seq[Try[WomValue]]): Try[WomFile] = writeToTsv("write_map", params, WomMap(WomMapType(WomStringType, WomStringType), Map.empty[WomValue, WomValue]))
-  def write_object(params: Seq[Try[WomValue]]): Try[WomFile] = writeToTsv("write_object", params, WomObject(Map.empty[String, WomValue]))
-  def write_objects(params: Seq[Try[WomValue]]): Try[WomFile] = writeToTsv("write_objects", params, WomArray(WomArrayType(WomObjectType), List.empty[WomObject]))
-  def write_tsv(params: Seq[Try[WomValue]]): Try[WomFile] = writeToTsv("write_tsv", params, WomArray(WomArrayType(WomStringType), List.empty[WomValue]))
+  def read_objects(params:
Seq[Try[WomValue]]): Try[WomArray] = extractObjects("read_objects", params) map { a => + WomArray(WomArrayType(WomObjectType), a.toIndexedSeq) + } + def read_string(params: Seq[Try[WomValue]]): Try[WomString] = + readContentsFromSingleFileParameter("read_string", params, fileSizeLimitationConfig.readStringLimit).map(s => + WomString(s.trim) + ) + def read_json(params: Seq[Try[WomValue]]): Try[WomValue] = readContentsFromSingleFileParameter( + "read_json", + params, + fileSizeLimitationConfig.readJsonLimit + ).map(_.parseJson).flatMap(WomObjectType.coerceRawValue) + def read_int(params: Seq[Try[WomValue]]): Try[WomInteger] = + readContentsFromSingleFileParameter("read_int", params, fileSizeLimitationConfig.readIntLimit).map(s => + WomString(s.trim) + ) map { s => WomInteger(s.value.trim.toInt) } + def read_float(params: Seq[Try[WomValue]]): Try[WomFloat] = + readContentsFromSingleFileParameter("read_float", params, fileSizeLimitationConfig.readFloatLimit).map(s => + WomString(s.trim) + ) map { s => WomFloat(s.value.trim.toDouble) } + + def write_lines(params: Seq[Try[WomValue]]): Try[WomFile] = + writeToTsv("write_lines", params, WomArray(WomArrayType(WomStringType), List.empty[WomValue])) + def write_map(params: Seq[Try[WomValue]]): Try[WomFile] = + writeToTsv("write_map", params, WomMap(WomMapType(WomStringType, WomStringType), Map.empty[WomValue, WomValue])) + def write_object(params: Seq[Try[WomValue]]): Try[WomFile] = + writeToTsv("write_object", params, WomObject(Map.empty[String, WomValue])) + def write_objects(params: Seq[Try[WomValue]]): Try[WomFile] = + writeToTsv("write_objects", params, WomArray(WomArrayType(WomObjectType), List.empty[WomObject])) + def write_tsv(params: Seq[Try[WomValue]]): Try[WomFile] = + writeToTsv("write_tsv", params, WomArray(WomArrayType(WomStringType), List.empty[WomValue])) def write_json(params: Seq[Try[WomValue]]): Try[WomFile] = for { value <- extractSingleArgument("write_json", params) jsonContent = ValueEvaluation.valueToJson(value) written <- writeContent("write_json", jsonContent.compactPrint) } yield written - def read_lines(params: Seq[Try[WomValue]]): Try[WomArray] = { + def read_lines(params: Seq[Try[WomValue]]): Try[WomArray] = for { contents <- readContentsFromSingleFileParameter("read_lines", params, fileSizeLimitationConfig.readLinesLimit) lines = contents.split("\n") } yield WomArray(WomArrayType(WomStringType), lines.toIndexedSeq map WomString) - } - def read_map(params: Seq[Try[WomValue]]): Try[WomMap] = { + def read_map(params: Seq[Try[WomValue]]): Try[WomMap] = for { contents <- readContentsFromSingleFileParameter("read_map", params, fileSizeLimitationConfig.readMapLimit) wdlMap <- WomMap.fromTsv(contents, WomMapType(WomAnyType, WomAnyType)) } yield wdlMap - } - def read_object(params: Seq[Try[WomValue]]): Try[WomObject] = { + def read_object(params: Seq[Try[WomValue]]): Try[WomObject] = extractObjects("read_object", params) map { case array if array.length == 1 => array.head - case _ => throw new IllegalArgumentException("read_object yields an Object and thus can only read 2-rows TSV files. Try using read_objects instead.") + case _ => + throw new IllegalArgumentException( + "read_object yields an Object and thus can only read 2-rows TSV files. Try using read_objects instead." 
+ ) } - } - def read_tsv(params: Seq[Try[WomValue]]): Try[WomArray] = { + def read_tsv(params: Seq[Try[WomValue]]): Try[WomArray] = for { contents <- readContentsFromSingleFileParameter("read_tsv", params, fileSizeLimitationConfig.readTsvLimit) wdlArray = WomArray.fromTsv(contents) } yield wdlArray - } - def read_boolean(params: Seq[Try[WomValue]]): Try[WomBoolean] = { - readContentsFromSingleFileParameter("read_boolean", params, fileSizeLimitationConfig.readBoolLimit).map(s => WomString(s.trim)) map { s => WomBoolean(java.lang.Boolean.parseBoolean(s.value.trim.toLowerCase)) } - } + def read_boolean(params: Seq[Try[WomValue]]): Try[WomBoolean] = + readContentsFromSingleFileParameter("read_boolean", params, fileSizeLimitationConfig.readBoolLimit).map(s => + WomString(s.trim) + ) map { s => WomBoolean(java.lang.Boolean.parseBoolean(s.value.trim.toLowerCase)) } def globHelper(pattern: String): Seq[String] - final def glob(params: Seq[Try[WomValue]]): Try[WomArray] = { + final def glob(params: Seq[Try[WomValue]]): Try[WomArray] = for { pattern <- extractSingleArgument("glob", params) womString <- WomStringType.coerceRawValue(pattern) patternString = womString.valueString filePaths <- Try(globHelper(patternString)) } yield WomArray(WomArrayType(WomSingleFileType), filePaths.map(WomSingleFile)) - } def basename(params: Seq[Try[WomValue]]): Try[WomString] = { @@ -113,8 +135,8 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { val message = s"Bad number of arguments to basename(filename, suffixToStrip = ''): ${params.size}" Failure(new IllegalArgumentException(message)) case _ => - val failures = params collect { - case Failure(e) => e + val failures = params collect { case Failure(e) => + e } Failure(AggregatedException("Failures evaluating basename parameters", failures)) } @@ -128,34 +150,51 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { } yield WomString(suffixless) } - def floor(params: Seq[Try[WomValue]]): Try[WomInteger] = { - extractSingleArgument("floor", params) flatMap { f => WomFloatType.coerceRawValue(f) } map { f => WomInteger(Math.floor(f.asInstanceOf[WomFloat].value).toInt) } - } + def floor(params: Seq[Try[WomValue]]): Try[WomInteger] = + extractSingleArgument("floor", params) flatMap { f => WomFloatType.coerceRawValue(f) } map { f => + WomInteger(Math.floor(f.asInstanceOf[WomFloat].value).toInt) + } - def round(params: Seq[Try[WomValue]]): Try[WomInteger] = { - extractSingleArgument("round", params) flatMap { f => WomFloatType.coerceRawValue(f) } map { f => WomInteger(Math.round(f.asInstanceOf[WomFloat].value).toInt) } - } + def round(params: Seq[Try[WomValue]]): Try[WomInteger] = + extractSingleArgument("round", params) flatMap { f => WomFloatType.coerceRawValue(f) } map { f => + WomInteger(Math.round(f.asInstanceOf[WomFloat].value).toInt) + } - def ceil(params: Seq[Try[WomValue]]): Try[WomInteger] = { - extractSingleArgument("ceil", params) flatMap { f => WomFloatType.coerceRawValue(f) } map { f => WomInteger(Math.ceil(f.asInstanceOf[WomFloat].value).toInt) } - } + def ceil(params: Seq[Try[WomValue]]): Try[WomInteger] = + extractSingleArgument("ceil", params) flatMap { f => WomFloatType.coerceRawValue(f) } map { f => + WomInteger(Math.ceil(f.asInstanceOf[WomFloat].value).toInt) + } - def transpose(params: Seq[Try[WomValue]]): Try[WomArray] = { + def transpose(params: Seq[Try[WomValue]]): Try[WomArray] = params.size match { case 1 => params.head flatMap EngineFunctions.transpose - case n => Failure(new IllegalArgumentException(s"Invalid 
number of parameters for engine function transpose: $n. Ensure transpose(x: Array[Array[X]]) takes exactly 1 parameters.")) + case n => + Failure( + new IllegalArgumentException( + s"Invalid number of parameters for engine function transpose: $n. Ensure transpose(x: Array[Array[X]]) takes exactly 1 parameters." + ) + ) } - } def length(params: Seq[Try[WomValue]]): Try[WomInteger] = { def extractArguments: Try[WomValue] = params.size match { case 1 => params.head - case n => Failure(new IllegalArgumentException(s"Invalid number of parameters for engine function length(): $n. length() takes exactly 1 parameter.")) + case n => + Failure( + new IllegalArgumentException( + s"Invalid number of parameters for engine function length(): $n. length() takes exactly 1 parameter." + ) + ) } def arrayLength(value: WomValue): Try[WomInteger] = value match { case WomArray(_, arrayValues) => Success(WomInteger(arrayValues.length)) - case bad => Failure(new UnsupportedOperationException(s"length() expects one parameter of type Array but got one parameter of type ${bad.womType.stableName}")) + case bad => + Failure( + new UnsupportedOperationException( + s"length() expects one parameter of type Array but got one parameter of type ${bad.womType.stableName}" + ) + ) } extractArguments flatMap arrayLength @@ -164,7 +203,12 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { def flatten(params: Seq[Try[WomValue]]): Try[WomValue] = { def getFlatValues(v: WomValue): Try[Seq[WomValue]] = v match { case WomArrayLike(WomArray(_, values)) => Success(values.toList) - case other => Failure(new IllegalArgumentException(s"Invalid argument to flatten: ${other.womType.stableName}, flatten requires an Array[Array[_]]")) + case other => + Failure( + new IllegalArgumentException( + s"Invalid argument to flatten: ${other.womType.stableName}, flatten requires an Array[Array[_]]" + ) + ) } val arg: Try[WomValue] = extractSingleArgument("flatten", params) @@ -173,7 +217,11 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { val llt: Try[Seq[Seq[WomValue]]] = TryUtil.sequence(arrayValues.map(getFlatValues)) llt.map(ll => WomArray(WomArrayType(elemType), ll.flatten)) case bad => - Failure(new UnsupportedOperationException(s"flatten() expects one parameter of type Array[Array[T]] but got one parameter of type ${bad.womType.stableName}")) + Failure( + new UnsupportedOperationException( + s"flatten() expects one parameter of type Array[Array[T]] but got one parameter of type ${bad.womType.stableName}" + ) + ) } } @@ -183,13 +231,18 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { case n => Failure(new UnsupportedOperationException(s"prefix() expects two parameters but got $n")) } - val makePrefixedString = (prefixString: WomValue, elements: WomValue) => (prefixString, elements) match { - case (WomString(p), WomArray(WomArrayType(etype), es)) if etype.isInstanceOf[WomPrimitiveType] => - val result = es map { e => WomString(p + e.valueString) } - Success(WomArray(WomArrayType(WomStringType), result)) - case (_, _) => - Failure(new UnsupportedOperationException(s"The function prefix expect arguments (String, Array[X]) where X is a primitive type, but got (${prefixString.womType.stableName}, ${elements.womType.stableName})")) - } + val makePrefixedString = (prefixString: WomValue, elements: WomValue) => + (prefixString, elements) match { + case (WomString(p), WomArray(WomArrayType(etype), es)) if etype.isInstanceOf[WomPrimitiveType] => + val result = es map { e => WomString(p + 
e.valueString) } + Success(WomArray(WomArrayType(WomStringType), result)) + case (_, _) => + Failure( + new UnsupportedOperationException( + s"The function prefix expect arguments (String, Array[X]) where X is a primitive type, but got (${prefixString.womType.stableName}, ${elements.womType.stableName})" + ) + ) + } extractTwoArguments flatMap makePrefixedString.tupled } @@ -197,31 +250,44 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { def range(params: Seq[Try[WomValue]]): Try[WomArray] = { def extractAndValidateArguments = params.size match { case 1 => validateArguments(params.head) - case n => Failure(new IllegalArgumentException(s"Invalid number of parameters for engine function range: $n. Ensure range(x: WdlInteger) takes exactly 1 parameters.")) + case n => + Failure( + new IllegalArgumentException( + s"Invalid number of parameters for engine function range: $n. Ensure range(x: WdlInteger) takes exactly 1 parameters." + ) + ) } def validateArguments(value: Try[WomValue]) = value match { case Success(intValue: WomValue) if WomIntegerType.isCoerceableFrom(intValue.womType) => Integer.valueOf(intValue.valueString) match { case i if i >= 0 => Success(i) - case n => Failure(new IllegalArgumentException(s"Parameter to seq must be greater than or equal to 0 (but got $n)")) + case n => + Failure(new IllegalArgumentException(s"Parameter to seq must be greater than or equal to 0 (but got $n)")) } case _ => Failure(new IllegalArgumentException(s"Invalid parameter for engine function seq: $value.")) } - extractAndValidateArguments map { intValue => WomArray(WomArrayType(WomIntegerType), (0 until intValue).map(WomInteger)) } + extractAndValidateArguments map { intValue => + WomArray(WomArrayType(WomIntegerType), (0 until intValue).map(WomInteger)) + } } def sub(params: Seq[Try[WomValue]]): Try[WomString] = { def extractArguments = params.size match { case 3 => Success((params.head, params(1), params(2))) - case n => Failure(new IllegalArgumentException(s"Invalid number of parameters for engine function sub: $n. sub takes exactly 3 parameters.")) + case n => + Failure( + new IllegalArgumentException( + s"Invalid number of parameters for engine function sub: $n. sub takes exactly 3 parameters." + ) + ) } def validateArguments(values: (Try[WomValue], Try[WomValue], Try[WomValue])) = values match { case (Success(strValue), Success(WomString(pattern)), Success(replaceValue)) - if WomStringType.isCoerceableFrom(strValue.womType) && - WomStringType.isCoerceableFrom(replaceValue.womType) => + if WomStringType.isCoerceableFrom(strValue.womType) && + WomStringType.isCoerceableFrom(replaceValue.womType) => Success((strValue.valueString, pattern, replaceValue.valueString)) case _ => Failure(new IllegalArgumentException(s"Invalid parameters for engine function sub: $values.")) } @@ -232,23 +298,40 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { } yield WomString(pattern.r.replaceAllIn(str, replace)) } - private val SelectFirstEmptyInput = Failure(new IllegalArgumentException("select_first failed. The input array was empty.")) + private val SelectFirstEmptyInput = Failure( + new IllegalArgumentException("select_first failed. 
The input array was empty.") + ) def select_first(params: Seq[Try[WomValue]]): Try[WomValue] = extractSingleArgument("select_first", params) flatMap { case WomArray(WomArrayType(WomOptionalType(memberType)), arrayValue) => - if (arrayValue.isEmpty) SelectFirstEmptyInput else (arrayValue collectFirst { - case WomOptionalValue(_, Some(womValue)) => womValue - case womValue if memberType.isCoerceableFrom(womValue.womType) => - memberType.coerceRawValue(womValue).get}).map(Success(_)).getOrElse(Failure(new IllegalArgumentException("select_first failed. All provided values were empty."))) - case WomArray(WomArrayType(_), arrayValue) => if (arrayValue.isEmpty) SelectFirstEmptyInput else Success(arrayValue.head) - case other => Failure(new IllegalArgumentException(s"select_first must take an array but got ${other.womType.stableName}: ${other.toWomString}")) + if (arrayValue.isEmpty) SelectFirstEmptyInput + else + (arrayValue collectFirst { + case WomOptionalValue(_, Some(womValue)) => womValue + case womValue if memberType.isCoerceableFrom(womValue.womType) => + memberType.coerceRawValue(womValue).get + }).map(Success(_)) + .getOrElse(Failure(new IllegalArgumentException("select_first failed. All provided values were empty."))) + case WomArray(WomArrayType(_), arrayValue) => + if (arrayValue.isEmpty) SelectFirstEmptyInput else Success(arrayValue.head) + case other => + Failure( + new IllegalArgumentException( + s"select_first must take an array but got ${other.womType.stableName}: ${other.toWomString}" + ) + ) } def select_all(params: Seq[Try[WomValue]]): Try[WomArray] = extractSingleArgument("select_all", params) flatMap { case WomArray(WomArrayType(WomOptionalType(memberType)), arrayValue) => - Success(WomArray(WomArrayType(memberType), arrayValue collect { - case WomOptionalValue(_, Some(womValue)) => womValue - case womValue if memberType.isCoerceableFrom(womValue.womType) => memberType.coerceRawValue(womValue).get - })) + Success( + WomArray( + WomArrayType(memberType), + arrayValue collect { + case WomOptionalValue(_, Some(womValue)) => womValue + case womValue if memberType.isCoerceableFrom(womValue.womType) => memberType.coerceRawValue(womValue).get + } + ) + ) case allValid @ WomArray(WomArrayType(_), _) => Success(allValid) case other => Failure(new IllegalArgumentException("select_all must take an array but got: " + other.toWomString)) } @@ -259,26 +342,34 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { } def zip(params: Seq[Try[WomValue]]): Try[WomArray] = { - val badArgsFailure = Failure(new IllegalArgumentException(s"Invalid parameters for engine function zip: $params. Requires exactly two evaluated array values of equal length.")) + val badArgsFailure = Failure( + new IllegalArgumentException( + s"Invalid parameters for engine function zip: $params. Requires exactly two evaluated array values of equal length." + ) + ) for { values <- extractTwoParams(params, badArgsFailure) (left, right) <- assertEquallySizedArrays(values, badArgsFailure) leftType = left.womType.memberType rightType = right.womType.memberType - zipped = left.value.zip(right.value) map { case (l,r) => WomPair(l, r) } + zipped = left.value.zip(right.value) map { case (l, r) => WomPair(l, r) } } yield WomArray(WomArrayType(WomPairType(leftType, rightType)), zipped) } def cross(params: Seq[Try[WomValue]]): Try[WomArray] = { - val badArgsFailure = Failure(new IllegalArgumentException(s"Invalid parameters for engine function cross: $params. 
Requires exactly two evaluated array values of equal length.")) + val badArgsFailure = Failure( + new IllegalArgumentException( + s"Invalid parameters for engine function cross: $params. Requires exactly two evaluated array values of equal length." + ) + ) for { values <- extractTwoParams(params, badArgsFailure) (left, right) <- assertArrays(values, badArgsFailure) leftType = left.womType.memberType rightType = right.womType.memberType - crossed = stdLibCrossProduct(left.value, right.value) map { case (l,r) => WomPair(l, r) } + crossed = stdLibCrossProduct(left.value, right.value) map { case (l, r) => WomPair(l, r) } } yield WomArray(WomArrayType(WomPairType(leftType, rightType)), crossed) } @@ -287,12 +378,14 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { * as a File and attempts to read the contents of that file and returns back the contents * as a String */ - private def readContentsFromSingleFileParameter(functionName: String, params: Seq[Try[WomValue]], sizeLimit: Int): Try[String] = { + private def readContentsFromSingleFileParameter(functionName: String, + params: Seq[Try[WomValue]], + sizeLimit: Int + ): Try[String] = for { singleArgument <- extractSingleArgument(functionName, params) string = readFile(singleArgument.valueString, sizeLimit) } yield string - } private def extractObjects(functionName: String, params: Seq[Try[WomValue]]): Try[Array[WomObject]] = for { contents <- readContentsFromSingleFileParameter(functionName, params, fileSizeLimitationConfig.readObjectLimit) @@ -301,18 +394,26 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { } object WdlStandardLibraryFunctions { - def fromIoFunctionSet(ioFunctionSet: IoFunctionSet, _fileSizeLimitationConfig: FileSizeLimitationConfig) = new WdlStandardLibraryFunctions { - override def readFile(path: String, sizeLimit: Int): String = Await.result(ioFunctionSet.readFile(path, Option(sizeLimit), failOnOverflow = true), Duration.Inf) + def fromIoFunctionSet(ioFunctionSet: IoFunctionSet, _fileSizeLimitationConfig: FileSizeLimitationConfig) = + new WdlStandardLibraryFunctions { + override def readFile(path: String, sizeLimit: Int): String = + Await.result(ioFunctionSet.readFile(path, Option(sizeLimit), failOnOverflow = true), Duration.Inf) - override def writeFile(path: String, content: String): Try[WomFile] = Try(Await.result(ioFunctionSet.writeFile(path, content), Duration.Inf)) + override def writeFile(path: String, content: String): Try[WomFile] = Try( + Await.result(ioFunctionSet.writeFile(path, content), Duration.Inf) + ) - override def stdout(params: Seq[Try[WomValue]]): Try[WomFile] = Success(WomSingleFile(ioFunctionSet.pathFunctions.stdout)) + override def stdout(params: Seq[Try[WomValue]]): Try[WomFile] = Success( + WomSingleFile(ioFunctionSet.pathFunctions.stdout) + ) - override def stderr(params: Seq[Try[WomValue]]): Try[WomFile] = Success(WomSingleFile(ioFunctionSet.pathFunctions.stderr)) + override def stderr(params: Seq[Try[WomValue]]): Try[WomFile] = Success( + WomSingleFile(ioFunctionSet.pathFunctions.stderr) + ) - override def globHelper(pattern: String): Seq[String] = Await.result(ioFunctionSet.glob(pattern), Duration.Inf) + override def globHelper(pattern: String): Seq[String] = Await.result(ioFunctionSet.glob(pattern), Duration.Inf) - override def size(params: Seq[Try[WomValue]]): Try[WomFloat] = { + override def size(params: Seq[Try[WomValue]]): Try[WomFloat] = { // Inner function: get the memory unit from the second (optional) parameter def toUnit(womValue: Try[WomValue]) 
= womValue flatMap { unit => Try(MemoryUnit.fromSuffix(unit.valueString)) } @@ -325,68 +426,98 @@ object WdlStandardLibraryFunctions { // Inner function: Get the file size, allowing for unpacking of optionals def optionalSafeFileSize(value: WomValue): Try[Long] = value match { - case f if f.isInstanceOf[WomSingleFile] || WomSingleFileType.isCoerceableFrom(f.womType) => Try(Await.result(ioFunctionSet.size(f.valueString), Duration.Inf)) + case f if f.isInstanceOf[WomSingleFile] || WomSingleFileType.isCoerceableFrom(f.womType) => + Try(Await.result(ioFunctionSet.size(f.valueString), Duration.Inf)) case WomOptionalValue(_, Some(o)) => optionalSafeFileSize(o) case WomOptionalValue(f, None) if isOptionalOfFileType(f) => Success(0L) - case _ => Failure(new Exception(s"The 'size' method expects a 'File' or 'File?' argument but instead got ${value.womType.stableName}.")) + case _ => + Failure( + new Exception( + s"The 'size' method expects a 'File' or 'File?' argument but instead got ${value.womType.stableName}." + ) + ) } // Inner function: get the file size and convert into the requested memory unit - def fileSize(womValue: Try[WomValue], convertTo: Try[MemoryUnit] = Success(MemoryUnit.Bytes)): Try[Double] = { + def fileSize(womValue: Try[WomValue], convertTo: Try[MemoryUnit] = Success(MemoryUnit.Bytes)): Try[Double] = for { value <- womValue unit <- convertTo fileSize <- optionalSafeFileSize(value) } yield MemorySize(fileSize.toDouble, MemoryUnit.Bytes).to(unit).amount - } params match { case _ if params.length == 1 => fileSize(params.head) map WomFloat.apply case _ if params.length == 2 => fileSize(params.head, toUnit(params.tail.head)) map WomFloat.apply - case _ => Failure(new UnsupportedOperationException(s"Expected one or two parameters but got ${params.length} instead.")) + case _ => + Failure( + new UnsupportedOperationException(s"Expected one or two parameters but got ${params.length} instead.") + ) } - } + } - override protected def fileSizeLimitationConfig: FileSizeLimitationConfig = _fileSizeLimitationConfig - } + override protected def fileSizeLimitationConfig: FileSizeLimitationConfig = _fileSizeLimitationConfig + } def crossProduct[A, B](as: Seq[A], bs: Seq[B]): Seq[(A, B)] = for { a <- as b <- bs } yield (a, b) - def extractTwoParams[A](params: Seq[Try[A]], badArgsFailure: Failure[Nothing]): Try[(A, A)] = { + def extractTwoParams[A](params: Seq[Try[A]], badArgsFailure: Failure[Nothing]): Try[(A, A)] = if (params.lengthCompare(2) != 0) { badArgsFailure } - else for { - left <- params.head - right <- params(1) - } yield (left, right) - } - - def assertEquallySizedArrays[A](values: (WomValue, WomValue), badArgsFailure: Failure[Nothing] ): Try[(WomArray, WomArray)] = values match { - case (leftArray: WomArray, rightArray: WomArray) if leftArray.value.lengthCompare(rightArray.value.size) == 0 => Success((leftArray, rightArray)) + else + for { + left <- params.head + right <- params(1) + } yield (left, right) + + def assertEquallySizedArrays[A](values: (WomValue, WomValue), + badArgsFailure: Failure[Nothing] + ): Try[(WomArray, WomArray)] = values match { + case (leftArray: WomArray, rightArray: WomArray) if leftArray.value.lengthCompare(rightArray.value.size) == 0 => + Success((leftArray, rightArray)) case _ => badArgsFailure } - def assertArrays(values: (WomValue, WomValue), badArgsFailure: Failure[Nothing] ): Try[(WomArray, WomArray)] = values match { - case (leftArray: WomArray, rightArray: WomArray) => Success((leftArray, rightArray)) - case _ => badArgsFailure - } + def 
assertArrays(values: (WomValue, WomValue), badArgsFailure: Failure[Nothing]): Try[(WomArray, WomArray)] = + values match { + case (leftArray: WomArray, rightArray: WomArray) => Success((leftArray, rightArray)) + case _ => badArgsFailure + } } trait PureStandardLibraryFunctionsLike extends WdlStandardLibraryFunctions { def className = this.getClass.getCanonicalName - override def readFile(path: String, sizeLimit: Int): String = throw new UnsupportedOperationException(s"readFile not available in $className.") - override def writeFile(path: String, content: String): Try[WomFile] = throw new UnsupportedOperationException(s"writeFile not available in $className.") - override def read_json(params: Seq[Try[WomValue]]): Try[WomValue] = Failure(new UnsupportedOperationException(s"read_json not available in $className.")) - override def write_json(params: Seq[Try[WomValue]]): Try[WomFile] = Failure(new UnsupportedOperationException(s"write_json not available in $className.")) - override def size(params: Seq[Try[WomValue]]): Try[WomFloat] = Failure(new UnsupportedOperationException(s"size not available in $className.")) - override def write_tsv(params: Seq[Try[WomValue]]): Try[WomFile] = Failure(new UnsupportedOperationException(s"write_tsv not available in $className.")) - override def stdout(params: Seq[Try[WomValue]]): Try[WomFile] = Failure(new UnsupportedOperationException(s"stdout not available in $className.")) - override def globHelper(pattern: String): Seq[String] = throw new UnsupportedOperationException(s"glob not available in $className.") - override def stderr(params: Seq[Try[WomValue]]): Try[WomFile] = Failure(new UnsupportedOperationException(s"stderr not available in $className.")) + override def readFile(path: String, sizeLimit: Int): String = throw new UnsupportedOperationException( + s"readFile not available in $className." + ) + override def writeFile(path: String, content: String): Try[WomFile] = throw new UnsupportedOperationException( + s"writeFile not available in $className." + ) + override def read_json(params: Seq[Try[WomValue]]): Try[WomValue] = Failure( + new UnsupportedOperationException(s"read_json not available in $className.") + ) + override def write_json(params: Seq[Try[WomValue]]): Try[WomFile] = Failure( + new UnsupportedOperationException(s"write_json not available in $className.") + ) + override def size(params: Seq[Try[WomValue]]): Try[WomFloat] = Failure( + new UnsupportedOperationException(s"size not available in $className.") + ) + override def write_tsv(params: Seq[Try[WomValue]]): Try[WomFile] = Failure( + new UnsupportedOperationException(s"write_tsv not available in $className.") + ) + override def stdout(params: Seq[Try[WomValue]]): Try[WomFile] = Failure( + new UnsupportedOperationException(s"stdout not available in $className.") + ) + override def globHelper(pattern: String): Seq[String] = throw new UnsupportedOperationException( + s"glob not available in $className." 
+ ) + override def stderr(params: Seq[Try[WomValue]]): Try[WomFile] = Failure( + new UnsupportedOperationException(s"stderr not available in $className.") + ) override def fileSizeLimitationConfig: FileSizeLimitationConfig = FileSizeLimitationConfig.default } @@ -422,20 +553,36 @@ class WdlStandardLibraryFunctionsType extends WdlFunctions[WomType] { params.toList match { case Success(f) :: Nil if isGoodFirstSizeParam(f) => Success(WomFloatType) case Success(f) :: Success(WomStringType) :: Nil if isGoodFirstSizeParam(f) => Success(WomFloatType) - case other => Failure(new Exception(s"Unexpected arguments to function `size`. Expected 'size(file: File [, unit: String])' but got 'size(${other.map(_.map(_.stableName)).mkString(", ")})'")) + case other => + Failure( + new Exception( + s"Unexpected arguments to function `size`. Expected 'size(file: File [, unit: String])' but got 'size(${other + .map(_.map(_.stableName)) + .mkString(", ")})'" + ) + ) } } def length(params: Seq[Try[WomType]]): Try[WomType] = params.toList match { case Success(WomArrayType(_)) :: Nil => Success(WomIntegerType) case _ => val badArgs = params.mkString(", ") - Failure(new Exception(s"Unexpected arguments to function `length`. `length` takes a parameter of type Array but got: $badArgs")) + Failure( + new Exception( + s"Unexpected arguments to function `length`. `length` takes a parameter of type Array but got: $badArgs" + ) + ) } def prefix(params: Seq[Try[WomType]]): Try[WomType] = params.toList match { - case Success(WomStringType) :: Success(WomArrayType(_: WomPrimitiveType)) :: Nil => Success(WomArrayType(WomStringType)) + case Success(WomStringType) :: Success(WomArrayType(_: WomPrimitiveType)) :: Nil => + Success(WomArrayType(WomStringType)) case _ => val badArgs = params.mkString(", ") - Failure(new Exception(s"Unexpected arguments to function `prefix`. `prefix` takes parameters of type String and Array[] but got: $badArgs")) + Failure( + new Exception( + s"Unexpected arguments to function `prefix`. `prefix` takes parameters of type String and Array[] but got: $badArgs" + ) + ) } def sub(params: Seq[Try[WomType]]): Try[WomType] = Success(WomStringType) def range(params: Seq[Try[WomType]]): Try[WomType] = Success(WomArrayType(WomIntegerType)) @@ -453,7 +600,9 @@ class WdlStandardLibraryFunctionsType extends WdlFunctions[WomType] { } def basename(params: Seq[Try[WomType]]): Try[WomType] = params.toList match { case Success(fType) :: Nil if WomStringType.isCoerceableFrom(fType) => Success(WomStringType) - case Success(fType) :: Success(sType) :: Nil if WomStringType.isCoerceableFrom(fType) && WomStringType.isCoerceableFrom(sType) => Success(WomStringType) + case Success(fType) :: Success(sType) :: Nil + if WomStringType.isCoerceableFrom(fType) && WomStringType.isCoerceableFrom(sType) => + Success(WomStringType) case _ => Failure(new Exception(s"Unexpected basename arguments: $params")) } def transpose(params: Seq[Try[WomType]]): Try[WomType] = params.toList match { @@ -462,17 +611,29 @@ class WdlStandardLibraryFunctionsType extends WdlFunctions[WomType] { } def select_first(params: Seq[Try[WomType]]): Try[WomType] = extractSingleArgument("select_first", params) flatMap { case WomArrayType(WomOptionalType(innerType)) => Success(innerType) - case other => Failure(new IllegalArgumentException(s"select_first failed. It expects an array of optional values but got ${other.stableName}.")) + case other => + Failure( + new IllegalArgumentException( + s"select_first failed. 
It expects an array of optional values but got ${other.stableName}." + ) + ) } def select_all(params: Seq[Try[WomType]]): Try[WomType] = extractSingleArgument("select_all", params) flatMap { case WomArrayType(WomOptionalType(innerType)) => Success(WomArrayType(innerType)) - case other => Failure(new IllegalArgumentException(s"select_all failed. It expects an array of optional values but got ${other.stableName}.")) - } - def defined(params: Seq[Try[WomType]]): Try[WomType] = extractSingleArgument("defined", params).map(_ => WomBooleanType) + case other => + Failure( + new IllegalArgumentException( + s"select_all failed. It expects an array of optional values but got ${other.stableName}." + ) + ) + } + def defined(params: Seq[Try[WomType]]): Try[WomType] = + extractSingleArgument("defined", params).map(_ => WomBooleanType) def zip(params: Seq[Try[WomType]]): Try[WomType] = { val badArgsFailure = Failure(new Exception(s"Unexpected zip parameters: $params")) WdlStandardLibraryFunctions.extractTwoParams(params, badArgsFailure) flatMap { - case (arrayType1: WomArrayType, arrayType2: WomArrayType) => Success(WomArrayType(WomPairType(arrayType1.memberType, arrayType2.memberType))) + case (arrayType1: WomArrayType, arrayType2: WomArrayType) => + Success(WomArrayType(WomPairType(arrayType1.memberType, arrayType2.memberType))) case _ => badArgsFailure } } @@ -480,7 +641,8 @@ class WdlStandardLibraryFunctionsType extends WdlFunctions[WomType] { def flatten(params: Seq[Try[WomType]]): Try[WomType] = extractSingleArgument("flatten", params) flatMap { case WomArrayType(inner @ WomArrayType(_)) => Success(inner) - case otherType => Failure(new Exception(s"flatten requires an Array[Array[_]] argument but instead got ${otherType.stableName}")) + case otherType => + Failure(new Exception(s"flatten requires an Array[Array[_]] argument but instead got ${otherType.stableName}")) } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala index d96b8fee102..4a1dbcfd651 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala @@ -48,9 +48,8 @@ object HtmlSyntaxHighlighter extends SyntaxHighlighter { class SyntaxFormatter(highlighter: SyntaxHighlighter = NullSyntaxHighlighter) { val indentLevel = 2 - private def indent(s: String, i: Int): String = { - s.split("\n").map {" " * (i * indentLevel) + _}.mkString("\n") - } + private def indent(s: String, i: Int): String = + s.split("\n").map(" " * (i * indentLevel) + _).mkString("\n") def format(namespace: WdlNamespace): String = { val imports = namespace.imports.map(formatImport) match { @@ -61,16 +60,22 @@ class SyntaxFormatter(highlighter: SyntaxHighlighter = NullSyntaxHighlighter) { /* TODO/FIXME: If 'body' is really a function of `namespace` then `WdlNamespace` should have a func which does the first part, and `NamespaceWithWorkflow` override it, call super and then adds on the second part - */ + */ val namespaceDefinitions = namespace.ast.getAttribute("body").asInstanceOf[AstList].asScala.toVector - val taskDefinitions = namespaceDefinitions collect { case a: Ast if a.getName == "Task" => - formatTask(namespace.findTask(a.getAttribute("name").sourceString).getOrElse(throw new UnsupportedOperationException("Shouldn't happen"))) + val taskDefinitions = namespaceDefinitions collect { + case a: Ast if a.getName == "Task" => + 
formatTask( + namespace + .findTask(a.getAttribute("name").sourceString) + .getOrElse(throw new UnsupportedOperationException("Shouldn't happen")) + ) } val workflowDefinitions = namespace match { - case n: WdlNamespaceWithWorkflow => namespaceDefinitions collect {case a: Ast if a.getName == "Workflow" => formatWorkflow(n.workflow)} + case n: WdlNamespaceWithWorkflow => + namespaceDefinitions collect { case a: Ast if a.getName == "Workflow" => formatWorkflow(n.workflow) } case _ => Vector.empty[AstNode] } val definitions = taskDefinitions ++ workflowDefinitions @@ -96,39 +101,38 @@ class SyntaxFormatter(highlighter: SyntaxHighlighter = NullSyntaxHighlighter) { val parameterMeta = formatMetaSection("parameter_meta", task.parameterMeta, 1) val sections = List(declarations, command, outputs, runtime, meta, parameterMeta).filter(_.nonEmpty) val header = s"""${highlighter.keyword("task")} ${highlighter.name(task.name)} { - |${sections.mkString("\n")} - |}""" - .stripMargin + |${sections.mkString("\n")} + |}""".stripMargin header } - private def formatMetaSection(section: String, attrs: Map[String, String], level: Int): String = { + private def formatMetaSection(section: String, attrs: Map[String, String], level: Int): String = attrs match { case m: Map[String, String] if m.nonEmpty => val wdlAttrs = m map { case (k, v) => indent(s"$k: " + "\"" + v + "\"", 1) } - indent( - s"""${highlighter.keyword(section)} { - |${wdlAttrs.mkString("\n")} - |}""".stripMargin, level) + indent(s"""${highlighter.keyword(section)} { + |${wdlAttrs.mkString("\n")} + |}""".stripMargin, + level + ) case _ => "" } - } - private def formatRuntimeSection(runtimeAttributes: WdlRuntimeAttributes, level: Int): String = { + private def formatRuntimeSection(runtimeAttributes: WdlRuntimeAttributes, level: Int): String = runtimeAttributes.attrs match { case m if m.nonEmpty => val attrs = m map { case (k, v) => indent(s"$k: ${v.toWomString}", level) } - indent( - s"""${highlighter.keyword("runtime")} { - |${attrs.mkString("\n")} - |}""".stripMargin, level) + indent(s"""${highlighter.keyword("runtime")} { + |${attrs.mkString("\n")} + |}""".stripMargin, + level + ) case _ => "" } - } - private def formatCommandSection(task: WdlTask, level:Int): String = { + private def formatCommandSection(task: WdlTask, level: Int): String = { val (sdelim: String, edelim: String) = - if (task.commandTemplate.collect({case s:StringCommandPart => s.literal}).mkString.contains("}")) ("<<<", ">>>") + if (task.commandTemplate.collect { case s: StringCommandPart => s.literal }.mkString.contains("}")) ("<<<", ">>>") else ("{", "}") val section = s"""${highlighter.section("command")} $sdelim @@ -137,46 +141,48 @@ class SyntaxFormatter(highlighter: SyntaxHighlighter = NullSyntaxHighlighter) { indent(section.stripMargin, level) } - private def formatOutputs(outputs: Seq[TaskOutput], level:Int): String = { + private def formatOutputs(outputs: Seq[TaskOutput], level: Int): String = { val section = s"""${highlighter.section("output")} { |${outputs.map(formatOutput(_, 1)).mkString("\n")} |}""" indent(section.stripMargin, level) } - private def formatOutput(output: TaskOutput, level:Int): String = { - indent(s"${highlighter.womType(output.womType)} ${highlighter.variable(output.unqualifiedName)} = ${output.requiredExpression.toString(highlighter)}", level) - } + private def formatOutput(output: TaskOutput, level: Int): String = + indent(s"${highlighter.womType(output.womType)} ${highlighter + .variable(output.unqualifiedName)} = 
${output.requiredExpression.toString(highlighter)}", + level + ) private def formatWorkflow(workflow: WdlWorkflow): String = { val declarations = workflow.declarations.map(formatDeclaration(_, 1)) - val children = workflow.children.collect({case c if !workflow.declarations.contains(c) => formatScope(c, 1) }) + val children = workflow.children.collect { case c if !workflow.declarations.contains(c) => formatScope(c, 1) } val outputs = formatWorkflowOutputs(workflow.workflowOutputWildcards, 1) val meta = formatMetaSection("meta", workflow.meta, 1) val parameterMeta = formatMetaSection("parameter_meta", workflow.parameterMeta, 1) val sections = (declarations ++ children ++ Seq(meta, parameterMeta, outputs)).filter(_.nonEmpty) s"""${highlighter.keyword("workflow")} ${highlighter.name(workflow.unqualifiedName)} { - |${sections.mkString("\n")} - |}""".stripMargin + |${sections.mkString("\n")} + |}""".stripMargin } - private def formatWorkflowOutputs(outputs: Seq[WorkflowOutputWildcard], level: Int): String = { + private def formatWorkflowOutputs(outputs: Seq[WorkflowOutputWildcard], level: Int): String = outputs match { case x: Seq[WorkflowOutputWildcard] if x.nonEmpty => val outputStrings = outputs.map(formatWorkflowOutput(_, 1)) indent(s"""${highlighter.keyword("output")} { |${outputStrings.mkString("\n")} - |}""".stripMargin, level) + |}""".stripMargin, + level + ) case _ => "" } - } - private def formatWorkflowOutput(output: WorkflowOutputWildcard, level: Int): String = { + private def formatWorkflowOutput(output: WorkflowOutputWildcard, level: Int): String = output.wildcard match { case true => indent(s"${formatWorkflowOutputFqn(output.fqn)}.*", level) case false => indent(formatWorkflowOutputFqn(output.fqn), level) } - } private def formatWorkflowOutputFqn(fqn: String) = fqn.replaceFirst("[a-zA-Z0-9]+\\.", "") @@ -196,24 +202,29 @@ class SyntaxFormatter(highlighter: SyntaxHighlighter = NullSyntaxHighlighter) { if (call.inputMappings.isEmpty) { indent(header, level) } else { - val inputString = call.inputMappings.map {case (k, v) => - s"$k=${v.toString(highlighter)}" - }.mkString(", ") + val inputString = call.inputMappings + .map { case (k, v) => + s"$k=${v.toString(highlighter)}" + } + .mkString(", ") indent(s"""$header { - | input: $inputString - |}""".stripMargin, level) + | input: $inputString + |}""".stripMargin, + level + ) } } private def formatScatter(scatter: Scatter, level: Int): String = { - val children = scatter.children.collect({case c if !c.isInstanceOf[Declaration] => formatScope(c, 1) }) + val children = scatter.children.collect { case c if !c.isInstanceOf[Declaration] => formatScope(c, 1) } indent( s"""${highlighter.keyword("scatter")} (${scatter.item} in ${scatter.collection.toString(highlighter)}) { - |${children.mkString("\n")} - |}""".stripMargin, level) + |${children.mkString("\n")} + |}""".stripMargin, + level + ) } - private def formatCallAlias(call: WdlTaskCall): String = { - call.alias.map {a => s" as ${highlighter.alias(a)}"}.getOrElse("") - } + private def formatCallAlias(call: WdlTaskCall): String = + call.alias.map(a => s" as ${highlighter.alias(a)}").getOrElse("") } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/package.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/package.scala index c3326d26f36..4dd52a9eabf 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/package.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/package.scala @@ -18,5 +18,6 @@ package object model { type Draft2ImportResolver = String => 
Draft2ResolvedImportBundle type OutputResolver = (WdlGraphNode, Option[Int]) => Try[WomValue] - val NoOutputResolver: OutputResolver = (node: WdlGraphNode, i: Option[Int]) => Failure(OutputVariableLookupException(node, i)) + val NoOutputResolver: OutputResolver = (node: WdlGraphNode, i: Option[Int]) => + Failure(OutputVariableLookupException(node, i)) } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlCallOutputsObjectType.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlCallOutputsObjectType.scala index 257eca6dd59..107a68410e5 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlCallOutputsObjectType.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlCallOutputsObjectType.scala @@ -7,7 +7,7 @@ import wom.types.WomObjectTypeLike case class WdlCallOutputsObjectType(call: WdlCall) extends WomObjectTypeLike { val stableName: String = "Object" - override protected def coercion = { - case o: WdlCallOutputsObject => o + override protected def coercion = { case o: WdlCallOutputsObject => + o } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala index 4a74ef94842..fb38714109e 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala @@ -13,23 +13,25 @@ object WdlFlavoredWomType { private val parser = new WdlParser() implicit class FromString(val womType: WomType) extends AnyVal { + /** * Converts WDL source into a WomValue of this type, if possible. * * @param workflowSource source code representing the WomValue * @return The WomValue */ - //TODO: return a Try ? - def fromWorkflowSource(workflowSource: WorkflowSource): WomValue = { + // TODO: return a Try ? + def fromWorkflowSource(workflowSource: WorkflowSource): WomValue = womType match { case WomFloatType => WomFloat(workflowSource.toDouble) case WomIntegerType => WomInteger(workflowSource.toInt) case WdlExpressionType => WdlExpression.fromString(workflowSource) case WomBooleanType => WomBoolean(workflowSource.toBoolean) - case WdlNamespaceType => throw new UnsupportedOperationException // This is what the original code was doing and clearly this is right. + case WdlNamespaceType => + throw new UnsupportedOperationException // This is what the original code was doing and clearly this is right. case _ => val tokens = parser.lex(workflowSource, "string") - val terminalMap = tokens.asScala.toVector.map {(_, workflowSource)}.toMap + val terminalMap = tokens.asScala.toVector.map((_, workflowSource)).toMap val wdlSyntaxErrorFormatter = WdlSyntaxErrorFormatter(terminalMap) /* Parsing as an expression is not sufficient... 
only a subset of these @@ -40,15 +42,14 @@ object WdlFlavoredWomType { ast.womValue(womType, wdlSyntaxErrorFormatter) } - } } - def fromDisplayString(wdlString: String): WomType = { + def fromDisplayString(wdlString: String): WomType = wdlString match { case "Expression" => WdlExpressionType case _ => val tokens = parser.lex(wdlString, "string") - val terminalMap = tokens.asScala.toVector.map {(_, wdlString)}.toMap + val terminalMap = tokens.asScala.toVector.map((_, wdlString)).toMap val wdlSyntaxErrorFormatter = WdlSyntaxErrorFormatter(terminalMap) /* parse_type_e() is the parse function for the $type_e nonterminal in grammar.hgr */ @@ -56,5 +57,4 @@ object WdlFlavoredWomType { ast.womType(wdlSyntaxErrorFormatter) } - } } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlNamespaceType.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlNamespaceType.scala index 41431bd6248..7e83ec259e0 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlNamespaceType.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlNamespaceType.scala @@ -6,7 +6,7 @@ import wom.types.WomType case object WdlNamespaceType extends WomType { override def stableName: String = "Namespace" - override protected def coercion = { - case n: WdlNamespace => n + override protected def coercion = { case n: WdlNamespace => + n } } diff --git a/wdl/model/draft2/src/test/scala/wdl/AstSpec.scala b/wdl/model/draft2/src/test/scala/wdl/AstSpec.scala index 1800ef2669e..20ca07d83a7 100644 --- a/wdl/model/draft2/src/test/scala/wdl/AstSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/AstSpec.scala @@ -17,9 +17,11 @@ class AstSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } it should "produce AST with 3 Call nodes in the Workflow node" in { - AstTools.findAsts( - AstTools.findAsts(namespace.ast, "Workflow").head, - "Call" - ).size shouldEqual 3 + AstTools + .findAsts( + AstTools.findAsts(namespace.ast, "Workflow").head, + "Call" + ) + .size shouldEqual 3 } } diff --git a/wdl/model/draft2/src/test/scala/wdl/DeclarationSpec.scala b/wdl/model/draft2/src/test/scala/wdl/DeclarationSpec.scala index 91303d3482c..9a66f5abc9e 100644 --- a/wdl/model/draft2/src/test/scala/wdl/DeclarationSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/DeclarationSpec.scala @@ -10,7 +10,6 @@ import wom.values._ import scala.util.{Failure, Success} - class DeclarationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { lazy val wdlSource = (new SampleWdl.DeclarationsWdl).workflowSource() lazy val namespace = WdlNamespaceWithWorkflow.load(wdlSource, Seq.empty).get @@ -148,44 +147,44 @@ class DeclarationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } "A workflow" should "allow for JIT evaluation of declarations" in { - val wdl = """task t { - | String i - | command { - | echo "${i}" - | } - | output { - | String o = read_string(stdout()) - | } - |} - | - |workflow declarations_as_nodes { - | call t as t1 { input: i = "hello" } - | - | String a = t1.o + " world" - | - | call t as t2 { input: i = a } - | - | Array[String] arr = [t1.o, t2.o] - | - | Map[String, String] map = { "key": t1.o } - | - | scatter(i in arr) { - | call t as t3 { input: i = i } - | String b = i + t3.o - | call t as t4 { input: i = b } - | String c = t3.o + " " + t4.o - | } - | - | Array[String] d = c - | - | output { - | String o1 = a - | Array[String] o2 = t4.o - | Array[String] o3 = d - | Array[String] o4 = b - | Array[String] o5 = c - | } - |} + val wdl = """task t { + | 
String i + | command { + | echo "${i}" + | } + | output { + | String o = read_string(stdout()) + | } + |} + | + |workflow declarations_as_nodes { + | call t as t1 { input: i = "hello" } + | + | String a = t1.o + " world" + | + | call t as t2 { input: i = a } + | + | Array[String] arr = [t1.o, t2.o] + | + | Map[String, String] map = { "key": t1.o } + | + | scatter(i in arr) { + | call t as t3 { input: i = i } + | String b = i + t3.o + | call t as t4 { input: i = b } + | String c = t3.o + " " + t4.o + | } + | + | Array[String] d = c + | + | output { + | String o1 = a + | Array[String] o2 = t4.o + | Array[String] o3 = d + | Array[String] o4 = b + | Array[String] o5 = c + | } + |} """.stripMargin val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get ns.staticDeclarationsRecursive(Map.empty[String, WomValue], NoFunctions) match { @@ -195,4 +194,3 @@ class DeclarationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } } } - diff --git a/wdl/model/draft2/src/test/scala/wdl/NamespaceSpec.scala b/wdl/model/draft2/src/test/scala/wdl/NamespaceSpec.scala index 24e3a0b8916..0c65778b5eb 100644 --- a/wdl/model/draft2/src/test/scala/wdl/NamespaceSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/NamespaceSpec.scala @@ -9,7 +9,10 @@ class NamespaceSpec extends WdlTest { val namespace = Try(loadWdl("type_checks.wdl")) namespace match { - case Failure(f) => f.getMessage should startWith("ERROR: oopsNotOptionalArray is declared as a Array[Int] but the expression evaluates to a Array[Int?]") + case Failure(f) => + f.getMessage should startWith( + "ERROR: oopsNotOptionalArray is declared as a Array[Int] but the expression evaluates to a Array[Int?]" + ) case Success(_) => fail("Should have failed to load namespace") } } diff --git a/wdl/model/draft2/src/test/scala/wdl/ParameterWdlCommandPartSpec.scala b/wdl/model/draft2/src/test/scala/wdl/ParameterWdlCommandPartSpec.scala index fe636fba4c9..997c1c3d595 100644 --- a/wdl/model/draft2/src/test/scala/wdl/ParameterWdlCommandPartSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/ParameterWdlCommandPartSpec.scala @@ -18,7 +18,7 @@ class ParameterWdlCommandPartSpec extends WdlTest { val task = namespace.tasks.find(_.name == "param_test") getOrElse { fail("task 'param_test' not found") } - val paramsByName = task.commandTemplate.collect({ case p: ParameterCommandPart => p }) + val paramsByName = task.commandTemplate.collect { case p: ParameterCommandPart => p } s"Stringify the $${...} tags correctly" in { paramsByName.size shouldEqual 6 @@ -33,15 +33,18 @@ class ParameterWdlCommandPartSpec extends WdlTest { "raise exception if 'true' attribute is specified but 'false' is not" in { WdlNamespace.loadUsingSource( s"""task param_test { - | Boolean f - | - | command <<< - | ./binary $${true="--true" f} - | >>> - |} - | - |workflow wf {call param_test} - """.stripMargin, None, None) match { + | Boolean f + | + | command <<< + | ./binary $${true="--true" f} + | >>> + |} + | + |workflow wf {call param_test} + """.stripMargin, + None, + None + ) match { case Failure(_: SyntaxError) => // expected case x => fail(s"Expecting a syntax error but got $x") } @@ -60,24 +63,29 @@ class ParameterWdlCommandPartSpec extends WdlTest { val ns = WdlNamespace.loadUsingSource(wdl, None, None).get val task: WdlTask = ns.findTask("t").get - val command = task.instantiateCommand(Map(task.declarations.head -> WomString("world")), NoFunctions).toTry.get.head + val command = + task.instantiateCommand(Map(task.declarations.head -> WomString("world")), NoFunctions).toTry.get.head 
command.commandString shouldBe "echo hello world" } - + "replace undefined values by their default value after evaluation" in { val wdl = s""" - |task t { - | String? none - | command { - | echo $${"hello" + none} - | } - |} + |task t { + | String? none + | command { + | echo $${"hello" + none} + | } + |} """.stripMargin - + val ns = WdlNamespace.loadUsingSource(wdl, None, None).get val task = ns.findTask("t").get - val command = task.instantiateCommand(Map(task.declarations.head -> WomOptionalValue.none(WomStringType)), NoFunctions).toTry.get.head + val command = task + .instantiateCommand(Map(task.declarations.head -> WomOptionalValue.none(WomStringType)), NoFunctions) + .toTry + .get + .head command.commandString shouldBe "echo" } } diff --git a/wdl/model/draft2/src/test/scala/wdl/RuntimeAttributeSpec.scala b/wdl/model/draft2/src/test/scala/wdl/RuntimeAttributeSpec.scala index da6b6b6d96f..6ff8c27d0e9 100644 --- a/wdl/model/draft2/src/test/scala/wdl/RuntimeAttributeSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/RuntimeAttributeSpec.scala @@ -79,15 +79,15 @@ object RuntimeAttributeSpec { """.stripMargin } - class RuntimeAttributeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with EitherValues { - val NamespaceWithRuntime = WdlNamespaceWithWorkflow.load(WorkflowWithRuntime, Seq.empty).get - val NamespaceWithoutRuntime = WdlNamespaceWithWorkflow.load(WorkflowWithoutRuntime, Seq.empty).get +class RuntimeAttributeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with EitherValues { + val NamespaceWithRuntime = WdlNamespaceWithWorkflow.load(WorkflowWithRuntime, Seq.empty).get + val NamespaceWithoutRuntime = WdlNamespaceWithWorkflow.load(WorkflowWithoutRuntime, Seq.empty).get - "WDL file with runtime attributes" should "have attribute maps" in { - NamespaceWithRuntime.tasks.forall(_.runtimeAttributes.attrs.nonEmpty) should be(true) - } + "WDL file with runtime attributes" should "have attribute maps" in { + NamespaceWithRuntime.tasks.forall(_.runtimeAttributes.attrs.nonEmpty) should be(true) + } - "WDL file without runtime attributes" should "not have attribute maps" in { - NamespaceWithoutRuntime.tasks.forall(_.runtimeAttributes.attrs.isEmpty) should be(true) - } + "WDL file without runtime attributes" should "not have attribute maps" in { + NamespaceWithoutRuntime.tasks.forall(_.runtimeAttributes.attrs.isEmpty) should be(true) } +} diff --git a/wdl/model/draft2/src/test/scala/wdl/SameNameParametersSpec.scala b/wdl/model/draft2/src/test/scala/wdl/SameNameParametersSpec.scala index c55583d38cc..79143389baa 100644 --- a/wdl/model/draft2/src/test/scala/wdl/SameNameParametersSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/SameNameParametersSpec.scala @@ -9,15 +9,18 @@ import wdl.draft2.model.expression.NoFunctions import wom.values.WomString class SameNameParametersSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - val namespace1 = WdlNamespaceWithWorkflow.load( - """ - |task test { - | String x - | command { ./script ${x} ${x} ${x} } - |} - |workflow wf { call test } - """.stripMargin, Seq.empty - ).get + val namespace1 = WdlNamespaceWithWorkflow + .load( + """ + |task test { + | String x + | command { ./script ${x} ${x} ${x} } + |} + |workflow wf { call test } + """.stripMargin, + Seq.empty + ) + .get val task = namespace1.findTask("test").get "A task with command that uses the same parameter more than once" should "only count it as one input" in { diff --git a/wdl/model/draft2/src/test/scala/wdl/SampleWdl.scala 
b/wdl/model/draft2/src/test/scala/wdl/SampleWdl.scala index c3b90302302..f756f81c700 100644 --- a/wdl/model/draft2/src/test/scala/wdl/SampleWdl.scala +++ b/wdl/model/draft2/src/test/scala/wdl/SampleWdl.scala @@ -79,147 +79,145 @@ object SampleWdl { withPlaceholders.stripMargin.replace(outputSectionPlaceholder, outputsSection) } - val PatternKey ="three_step.cgrep.pattern" + val PatternKey = "three_step.cgrep.pattern" } object ThreeStep extends ThreeStepTemplate - object NestedScatterWdl extends SampleWdl { override def workflowSource(runtime: String = "") = s""" task A { - | command { - | echo -n -e "jeff\nchris\nmiguel\nthibault\nkhalid\nscott" - | } - | output { - | Array[String] A_out = read_lines(stdout()) - | } - |} - | - |task B { - | String B_in - | command { - | python -c "print(len('$${B_in}'))" - | } - | output { - | Int B_out = read_int(stdout()) - | } - |} - | - |task C { - | Int C_in - | command { - | python -c "print($${C_in}*100)" - | } - | output { - | Int C_out = read_int(stdout()) - | } - |} - | - |task D { - | Array[Int] D_in - | command { - | python -c "print($${sep = '+' D_in})" - | } - | output { - | Int D_out = read_int(stdout()) - | } - |} - | - |task E { - | command { - | python -c "import random; print(random.randint(1,100))" - | } - | output { - | Int E_out = read_int(stdout()) - | } - |} - | - |workflow w { - | call A - | scatter (item in A.A_out) { # scatter 0 - | call B {input: B_in = item} - | call C {input: C_in = B.B_out} - | call E - | scatter (itemB in B.B_out) { # scatter 1 - | call E as G - | } - | scatter (itemB in B.B_out) { # scatter 2 - | call E as H - | } - | } - | scatter (item in A.A_out) { # scatter 3 - | call E as F - | } - | call D {input: D_in = B.B_out} - |} + | command { + | echo -n -e "jeff\nchris\nmiguel\nthibault\nkhalid\nscott" + | } + | output { + | Array[String] A_out = read_lines(stdout()) + | } + |} + | + |task B { + | String B_in + | command { + | python -c "print(len('$${B_in}'))" + | } + | output { + | Int B_out = read_int(stdout()) + | } + |} + | + |task C { + | Int C_in + | command { + | python -c "print($${C_in}*100)" + | } + | output { + | Int C_out = read_int(stdout()) + | } + |} + | + |task D { + | Array[Int] D_in + | command { + | python -c "print($${sep = '+' D_in})" + | } + | output { + | Int D_out = read_int(stdout()) + | } + |} + | + |task E { + | command { + | python -c "import random; print(random.randint(1,100))" + | } + | output { + | Int E_out = read_int(stdout()) + | } + |} + | + |workflow w { + | call A + | scatter (item in A.A_out) { # scatter 0 + | call B {input: B_in = item} + | call C {input: C_in = B.B_out} + | call E + | scatter (itemB in B.B_out) { # scatter 1 + | call E as G + | } + | scatter (itemB in B.B_out) { # scatter 2 + | call E as H + | } + | } + | scatter (item in A.A_out) { # scatter 3 + | call E as F + | } + | call D {input: D_in = B.B_out} + |} """.stripMargin } - class ScatterWdl extends SampleWdl { val tasks = s"""task A { - | command { - | echo -n -e "jeff\nchris\nmiguel\nthibault\nkhalid\nscott" - | } - | output { - | Array[String] A_out = read_lines(stdout()) - | } - |} - | - |task B { - | String B_in - | command { - | python -c "print(len('$${B_in}'))" - | } - | output { - | Int B_out = read_int(stdout()) - | } - |} - | - |task C { - | Int C_in - | command { - | python -c "print($${C_in}*100)" - | } - | output { - | Int C_out = read_int(stdout()) - | } - |} - | - |task D { - | Array[Int] D_in - | command { - | python -c "print($${sep = '+' D_in})" - | } - | output { - | Int D_out = 
read_int(stdout()) - | } - |} - | - |task E { - | command { - | python -c "print(9)" - | } - | output { - | Int E_out = read_int(stdout()) - | } - |} + | command { + | echo -n -e "jeff\nchris\nmiguel\nthibault\nkhalid\nscott" + | } + | output { + | Array[String] A_out = read_lines(stdout()) + | } + |} + | + |task B { + | String B_in + | command { + | python -c "print(len('$${B_in}'))" + | } + | output { + | Int B_out = read_int(stdout()) + | } + |} + | + |task C { + | Int C_in + | command { + | python -c "print($${C_in}*100)" + | } + | output { + | Int C_out = read_int(stdout()) + | } + |} + | + |task D { + | Array[Int] D_in + | command { + | python -c "print($${sep = '+' D_in})" + | } + | output { + | Int D_out = read_int(stdout()) + | } + |} + | + |task E { + | command { + | python -c "print(9)" + | } + | output { + | Int E_out = read_int(stdout()) + | } + |} """.stripMargin override def workflowSource(runtime: String = "") = s"""$tasks - | - |workflow w { - | call A - | scatter (item in A.A_out) { - | call B {input: B_in = item} - | call C {input: C_in = B.B_out} - | call E - | } - | call D {input: D_in = B.B_out} - |} + | + |workflow w { + | call A + | scatter (item in A.A_out) { + | call B {input: B_in = item} + | call C {input: C_in = B.B_out} + | call E + | } + | call D {input: D_in = B.B_out} + |} """.stripMargin } @@ -263,67 +261,66 @@ object SampleWdl { object TaskDeclarationsWdl extends SampleWdl { override def workflowSource(runtime: String = "") = """ - |task t { - | String s - | command { - | echo ${s} - | } - | output { - | String o = s - | Array[Int] outputArray = [0, 1, 2] - | } - |} - | - |task u { - | String a - | String b - | String c - | Int d - | String e = "e" - | String f - | String? g - | String? h - | String i - | File j - | Array[File] k - | String? l - | - | command { - | echo ${a} - | echo ${b} - | echo ${c} - | echo ${d} - | echo ${e} - | echo ${f} - | echo ${g} - | echo ${h} - | echo ${i} - | } - |} - | - |workflow wf { - | String workflowDeclarationFromInput - | String workflowDeclaration = "b" - | Array[File] files = ["a", "b", "c"] - | - | call t as t2 {input: s = "hey" } - | - | scatter (i in t2.outputArray) { - | call t {input: s = "c"} - | if (true) { - | call t as t3 {input: s = "c"} - | } - | call u as v {input: a = workflowDeclarationFromInput, - | b = workflowDeclaration, - | c = t.o, - | d = i, - | i = "${workflowDeclaration}", - | k = files, - | l = t3.o } - | } - |} - """. - stripMargin + |task t { + | String s + | command { + | echo ${s} + | } + | output { + | String o = s + | Array[Int] outputArray = [0, 1, 2] + | } + |} + | + |task u { + | String a + | String b + | String c + | Int d + | String e = "e" + | String f + | String? g + | String? h + | String i + | File j + | Array[File] k + | String? 
l + | + | command { + | echo ${a} + | echo ${b} + | echo ${c} + | echo ${d} + | echo ${e} + | echo ${f} + | echo ${g} + | echo ${h} + | echo ${i} + | } + |} + | + |workflow wf { + | String workflowDeclarationFromInput + | String workflowDeclaration = "b" + | Array[File] files = ["a", "b", "c"] + | + | call t as t2 {input: s = "hey" } + | + | scatter (i in t2.outputArray) { + | call t {input: s = "c"} + | if (true) { + | call t as t3 {input: s = "c"} + | } + | call u as v {input: a = workflowDeclarationFromInput, + | b = workflowDeclaration, + | c = t.o, + | d = i, + | i = "${workflowDeclaration}", + | k = files, + | l = t3.o } + | } + |} + """.stripMargin val workflowInputs = Map( "wf.workflowDeclarationFromInput" -> WomString("a"), diff --git a/wdl/model/draft2/src/test/scala/wdl/ScopeSpec.scala b/wdl/model/draft2/src/test/scala/wdl/ScopeSpec.scala index e3003f5d0cf..317bc0d2777 100644 --- a/wdl/model/draft2/src/test/scala/wdl/ScopeSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/ScopeSpec.scala @@ -11,12 +11,14 @@ class ScopeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val namespace = WdlNamespaceWithWorkflow.load(SampleWdl.NestedScatterWdl.workflowSource(), Seq.empty).get it should "throw an exception if trying to re-assign children on a scope" in { - the [UnsupportedOperationException] thrownBy { namespace.workflow.children = Seq.empty } should have message "children is write-once" + the[UnsupportedOperationException] thrownBy { + namespace.workflow.children = Seq.empty + } should have message "children is write-once" } it should "throw an exception if trying to generate a workflow from a non-workflow ast" in { val callAst: Ast = AstTools.findAsts(namespace.ast, AstNodeName.Call).head - the [UnsupportedOperationException] thrownBy { + the[UnsupportedOperationException] thrownBy { WdlWorkflow(callAst, namespace.wdlSyntaxErrorFormatter) } should have message "Expecting Workflow AST, got a Call AST" } diff --git a/wdl/model/draft2/src/test/scala/wdl/SyntaxErrorSpec.scala b/wdl/model/draft2/src/test/scala/wdl/SyntaxErrorSpec.scala index 0ddad39acad..6cea8fec619 100644 --- a/wdl/model/draft2/src/test/scala/wdl/SyntaxErrorSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/SyntaxErrorSpec.scala @@ -16,36 +16,36 @@ import scala.util.{Failure, Success} class SyntaxErrorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { private val psTaskWdl = """ - |task ps { - | command { - | ps - | } - | output { - | File procs = stdout() - | } - |}""".stripMargin + |task ps { + | command { + | ps + | } + | output { + | File procs = stdout() + | } + |}""".stripMargin private val cgrepTaskWdl = s""" - |task cgrep { - | String pattern - | File in_file - | command { - | grep '$${pattern}' $${in_file} | wc -l - | } - | output { - | Int count = read_int(stdout()) - | } - |}""".stripMargin - - private def resolver(importUri: String): Draft2ResolvedImportBundle = { + |task cgrep { + | String pattern + | File in_file + | command { + | grep '$${pattern}' $${in_file} | wc -l + | } + | output { + | Int count = read_int(stdout()) + | } + |}""".stripMargin + + private def resolver(importUri: String): Draft2ResolvedImportBundle = importUri match { case "ps" => Draft2ResolvedImportBundle(psTaskWdl, ResolvedImportRecord("ps")) case "cgrep" => Draft2ResolvedImportBundle(cgrepTaskWdl, ResolvedImportRecord("cgrep")) case _ => throw new RuntimeException(s"Can't resolve $importUri") } - } - private def normalizeErrorMessage(msg: String) = StringUtil.stripAll(msg, " \t\n\r", " 
\t\n\r").replaceAll("[ \t]+\n", "\n") + private def normalizeErrorMessage(msg: String) = + StringUtil.stripAll(msg, " \t\n\r", " \t\n\r").replaceAll("[ \t]+\n", "\n") trait ErrorWdl { def testString: String @@ -384,17 +384,17 @@ class SyntaxErrorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val testString = "detect when a call has multiple input sections" val wdl = s"""task x { - | String a - | String b - | command { ./script $${a} $${b} } - |} - | - |workflow wf { - | call x { - | input: a = "a" - | input: b = "b" - | } - |} + | String a + | String b + | command { ./script $${a} $${b} } + |} + | + |workflow wf { + | call x { + | input: a = "a" + | input: b = "b" + | } + |} """.stripMargin val errors = @@ -495,7 +495,7 @@ class SyntaxErrorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers | Boolean o = a.x | ^""".stripMargin } - + case object TypeMismatch4 extends ErrorWdl { val testString = "detect when a call output has a type mismatch (4)" val wdl = @@ -573,25 +573,25 @@ class SyntaxErrorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val testString = s"detect when expressions in command section reference missing task inputs" val wdl = s"""task a { - | Int x - | command { ./script $${x+y} } - |} - | - |workflow w { - | call a - |} + | Int x + | command { ./script $${x+y} } + |} + | + |workflow w { + | call a + |} """.stripMargin val errors = s"""ERROR: Variable y does not reference any declaration in the task (line 3, col 26): - | - | command { ./script $${x+y} } - | ^ - | - |Task defined here (line 1, col 6): - | - |task a { - | ^ + | + | command { ./script $${x+y} } + | ^ + | + |Task defined here (line 1, col 6): + | + |task a { + | ^ """.stripMargin } @@ -673,11 +673,11 @@ class SyntaxErrorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val testString = "detect when a variable is declared more than once (1)" val wdl = s"""task inputOops { - | Int a = 5 - | Int a = 10 - | command { echo $${a} } - |} - |workflow a { call inputOops } + | Int a = 5 + | Int a = 10 + | command { echo $${a} } + |} + |workflow a { call inputOops } """.stripMargin val errors = @@ -799,16 +799,16 @@ class SyntaxErrorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val errors = """|ERROR: Sibling nodes have conflicting names: - | - |WorkflowOutput defined here (line 4, col 4): - | - | String o = "output" - | ^ - | - |WorkflowOutput statement defined here (line 4, col 4): - | - | String o = "output" - | ^ + | + |WorkflowOutput defined here (line 4, col 4): + | + | String o = "output" + | ^ + | + |WorkflowOutput statement defined here (line 4, col 4): + | + | String o = "output" + | ^ """.stripMargin } @@ -1036,14 +1036,14 @@ class SyntaxErrorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers UnknownVariableInDeclaration ) - forAll(syntaxErrorWdlTable) { (errorWdl) => + forAll(syntaxErrorWdlTable) { errorWdl => it should errorWdl.testString in { - WdlNamespace.loadUsingSource(errorWdl.wdl, None, Option(Seq(resolver))) match { - case Failure(e: SyntaxError) => normalizeErrorMessage(e.getMessage) shouldEqual normalizeErrorMessage(errorWdl.errors) - case Failure(x) => throw new Exception(s"Expecting a SyntaxError but got $x", x) - case Success(_) => fail("Bad WDL unexpectedly validated.") + WdlNamespace.loadUsingSource(errorWdl.wdl, None, Option(Seq(resolver))) match { + case Failure(e: SyntaxError) => + normalizeErrorMessage(e.getMessage) shouldEqual normalizeErrorMessage(errorWdl.errors) + case Failure(x) => throw new 
Exception(s"Expecting a SyntaxError but got $x", x) + case Success(_) => fail("Bad WDL unexpectedly validated.") } } } } - diff --git a/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala b/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala index 95cf4333c2a..697ed979b97 100644 --- a/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala @@ -12,96 +12,101 @@ import scala.annotation.nowarn class SyntaxHighlightSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { "SyntaxFormatter for typical workflow" should { - val namespace = WdlNamespace.loadUsingSource( - """ - |task PairedFastQsToUnmappedBAM { - | File fastq_1 - | File fastq_2 - | String readgroup_name - | String sample_name - | String library_name - | String platform_unit - | String run_date - | String platform_name - | String sequencing_center - | Int disk_size - | String mem_size - | - | command { - | java -Xmx3000m -jar /usr/gitc/picard.jar \ - | FastqToSam \ - | FASTQ=${fastq_1} \ - | FASTQ2=${fastq_2} \ - | OUTPUT=${readgroup_name}.bam \ - | READ_GROUP_NAME=${readgroup_name} \ - | SAMPLE_NAME=${sample_name} \ - | LIBRARY_NAME=${library_name} \ - | PLATFORM_UNIT=${platform_unit} \ - | RUN_DATE=${run_date} \ - | PLATFORM=${platform_name} \ - | SEQUENCING_CENTER=${sequencing_center} - | } - | runtime { - | docker: "broadinstitute/genomes-in-the-cloud:2.2.4-1469632282" - | memory: mem_size - | cpu: "1" - | disks: "local-disk " + disk_size + " HDD" - | } - | output { - | File output_bam = "${readgroup_name}.bam" - | } - | parameter_meta { - | memory_mb: "Amount of memory to allocate to the JVM" - | param: "Some arbitrary parameter" - | sample_id: "The ID of the sample in format foo_bar_baz" - | } - | meta { - | author: "Joe Somebody" - | email: "joe@company.org" - | } - |} - | - |# WORKFLOW DEFINITION - |workflow ConvertPairedFastQsToUnmappedBamWf { - | Array[String] readgroup_list - | Map[String, Array[File]] fastq_pairs - | Map[String, Array[String]] metadata - | - | # Convert multiple pairs of input fastqs in parallel - | scatter (readgroup in readgroup_list) { - | - | # Convert pair of FASTQs to uBAM - | call PairedFastQsToUnmappedBAM { - | input: - | fastq_1 = fastq_pairs[readgroup][0], - | fastq_2 = fastq_pairs[readgroup][1], - | readgroup_name = readgroup, - | sample_name = metadata[readgroup][0], - | library_name = metadata[readgroup][1], - | platform_unit = metadata[readgroup][2], - | run_date = metadata[readgroup][3], - | platform_name = metadata[readgroup][4], - | sequencing_center = metadata[readgroup][5] - | } - | } - | - | # Outputs that will be retained when execution is complete - | output { - | Array[File] output_bams = PairedFastQsToUnmappedBAM.output_bam - | } - | - | parameter_meta { - | memory_mb: "Amount of memory to allocate to the JVM" - | param: "Some arbitrary parameter" - | sample_id: "The ID of the sample in format foo_bar_baz" - | } - | - | meta { - | author: "Joe Somebody" - | email: "joe@company.org" - | } - |} - """.stripMargin, None, None).get + val namespace = WdlNamespace + .loadUsingSource( + """ + |task PairedFastQsToUnmappedBAM { + | File fastq_1 + | File fastq_2 + | String readgroup_name + | String sample_name + | String library_name + | String platform_unit + | String run_date + | String platform_name + | String sequencing_center + | Int disk_size + | String mem_size + | + | command { + | java -Xmx3000m -jar /usr/gitc/picard.jar \ + | FastqToSam \ + | FASTQ=${fastq_1} \ + | 
FASTQ2=${fastq_2} \ + | OUTPUT=${readgroup_name}.bam \ + | READ_GROUP_NAME=${readgroup_name} \ + | SAMPLE_NAME=${sample_name} \ + | LIBRARY_NAME=${library_name} \ + | PLATFORM_UNIT=${platform_unit} \ + | RUN_DATE=${run_date} \ + | PLATFORM=${platform_name} \ + | SEQUENCING_CENTER=${sequencing_center} + | } + | runtime { + | docker: "broadinstitute/genomes-in-the-cloud:2.2.4-1469632282" + | memory: mem_size + | cpu: "1" + | disks: "local-disk " + disk_size + " HDD" + | } + | output { + | File output_bam = "${readgroup_name}.bam" + | } + | parameter_meta { + | memory_mb: "Amount of memory to allocate to the JVM" + | param: "Some arbitrary parameter" + | sample_id: "The ID of the sample in format foo_bar_baz" + | } + | meta { + | author: "Joe Somebody" + | email: "joe@company.org" + | } + |} + | + |# WORKFLOW DEFINITION + |workflow ConvertPairedFastQsToUnmappedBamWf { + | Array[String] readgroup_list + | Map[String, Array[File]] fastq_pairs + | Map[String, Array[String]] metadata + | + | # Convert multiple pairs of input fastqs in parallel + | scatter (readgroup in readgroup_list) { + | + | # Convert pair of FASTQs to uBAM + | call PairedFastQsToUnmappedBAM { + | input: + | fastq_1 = fastq_pairs[readgroup][0], + | fastq_2 = fastq_pairs[readgroup][1], + | readgroup_name = readgroup, + | sample_name = metadata[readgroup][0], + | library_name = metadata[readgroup][1], + | platform_unit = metadata[readgroup][2], + | run_date = metadata[readgroup][3], + | platform_name = metadata[readgroup][4], + | sequencing_center = metadata[readgroup][5] + | } + | } + | + | # Outputs that will be retained when execution is complete + | output { + | Array[File] output_bams = PairedFastQsToUnmappedBAM.output_bam + | } + | + | parameter_meta { + | memory_mb: "Amount of memory to allocate to the JVM" + | param: "Some arbitrary parameter" + | sample_id: "The ID of the sample in format foo_bar_baz" + | } + | + | meta { + | author: "Joe Somebody" + | email: "joe@company.org" + | } + |} + """.stripMargin, + None, + None + ) + .get @nowarn("msg=Unicode escapes in triple quoted strings are deprecated, use the literal character instead") val console = @@ -264,117 +269,116 @@ class SyntaxHighlightSpec extends AnyWordSpec with CromwellTimeoutSpec with Matc |} """.stripMargin - def resolver(importUri: String): Draft2ResolvedImportBundle = { + def resolver(importUri: String): Draft2ResolvedImportBundle = importUri match { case "foo.wdl" => Draft2ResolvedImportBundle(fooTaskWdl, ResolvedImportRecord("foo.wdl")) case _ => throw new RuntimeException(s"Can't resolve $importUri") } - } val source = s""" - |import "foo.wdl" as foo_ns - | - |task t { - | String f - | Int p - | command { - | ./cmd $${f} $${p} - | } - |} - | - |task s { - | Array[File] input_file - | command <<< - | cat $${sep=' ' input_file} | awk '{s+=$$1} END {print s}' - | >>> - | output { - | String s = read_string(stdout()) - | } - |} - | - |task r { - | command { python -c "import random; print(random.randint(1,100))" } - |} - | - |workflow w { - | Int p = 2+2 - | call t - | call t as u { - | input: f="abc", p=p - | } - |}""".stripMargin + |import "foo.wdl" as foo_ns + | + |task t { + | String f + | Int p + | command { + | ./cmd $${f} $${p} + | } + |} + | + |task s { + | Array[File] input_file + | command <<< + | cat $${sep=' ' input_file} | awk '{s+=$$1} END {print s}' + | >>> + | output { + | String s = read_string(stdout()) + | } + |} + | + |task r { + | command { python -c "import random; print(random.randint(1,100))" } + |} + | + |workflow w { + | Int p = 
2+2 + | call t + | call t as u { + | input: f="abc", p=p + | } + |}""".stripMargin val namespace = WdlNamespace.loadUsingSource(source, None, Option(Seq(resolver))).get val console = s"""\u001b[38;5;214mimport\u001b[0m 'foo.wdl' as foo_ns - | - |\u001b[38;5;214mtask\u001b[0m \u001b[38;5;253mt\u001b[0m { - | \u001b[38;5;33mString\u001b[0m \u001b[38;5;112mf\u001b[0m - | \u001b[38;5;33mInt\u001b[0m \u001b[38;5;112mp\u001b[0m - | \u001b[38;5;214mcommand\u001b[0m { - | ./cmd $${f} $${p} - | } - |} - | - |\u001b[38;5;214mtask\u001b[0m \u001b[38;5;253ms\u001b[0m { - | \u001b[38;5;33mArray[File]\u001b[0m \u001b[38;5;112minput_file\u001b[0m - | \u001b[38;5;214mcommand\u001b[0m <<< - | cat $${sep=" " input_file} | awk '{s+=$$1} END {print s}' - | >>> - | \u001b[38;5;214moutput\u001b[0m { - | \u001b[38;5;33mString\u001b[0m \u001b[38;5;112ms\u001b[0m = \u001b[38;5;13mread_string\u001b[0m(\u001b[38;5;13mstdout\u001b[0m()) - | } - |} - | - |\u001b[38;5;214mtask\u001b[0m \u001b[38;5;253mr\u001b[0m { - | \u001b[38;5;214mcommand\u001b[0m { - | python -c "import random; print(random.randint(1,100))" - | } - |} - | - |\u001b[38;5;214mworkflow\u001b[0m \u001b[38;5;253mw\u001b[0m { - | \u001b[38;5;33mInt\u001b[0m \u001b[38;5;112mp\u001b[0m = 2 + 2 - | \u001b[38;5;214mcall\u001b[0m \u001b[38;5;253mt\u001b[0m - | \u001b[38;5;214mcall\u001b[0m \u001b[38;5;253mt\u001b[0m as u { - | input: f="abc", p=p - | } - |}""".stripMargin + | + |\u001b[38;5;214mtask\u001b[0m \u001b[38;5;253mt\u001b[0m { + | \u001b[38;5;33mString\u001b[0m \u001b[38;5;112mf\u001b[0m + | \u001b[38;5;33mInt\u001b[0m \u001b[38;5;112mp\u001b[0m + | \u001b[38;5;214mcommand\u001b[0m { + | ./cmd $${f} $${p} + | } + |} + | + |\u001b[38;5;214mtask\u001b[0m \u001b[38;5;253ms\u001b[0m { + | \u001b[38;5;33mArray[File]\u001b[0m \u001b[38;5;112minput_file\u001b[0m + | \u001b[38;5;214mcommand\u001b[0m <<< + | cat $${sep=" " input_file} | awk '{s+=$$1} END {print s}' + | >>> + | \u001b[38;5;214moutput\u001b[0m { + | \u001b[38;5;33mString\u001b[0m \u001b[38;5;112ms\u001b[0m = \u001b[38;5;13mread_string\u001b[0m(\u001b[38;5;13mstdout\u001b[0m()) + | } + |} + | + |\u001b[38;5;214mtask\u001b[0m \u001b[38;5;253mr\u001b[0m { + | \u001b[38;5;214mcommand\u001b[0m { + | python -c "import random; print(random.randint(1,100))" + | } + |} + | + |\u001b[38;5;214mworkflow\u001b[0m \u001b[38;5;253mw\u001b[0m { + | \u001b[38;5;33mInt\u001b[0m \u001b[38;5;112mp\u001b[0m = 2 + 2 + | \u001b[38;5;214mcall\u001b[0m \u001b[38;5;253mt\u001b[0m + | \u001b[38;5;214mcall\u001b[0m \u001b[38;5;253mt\u001b[0m as u { + | input: f="abc", p=p + | } + |}""".stripMargin val html = s"""import 'foo.wdl' as foo_ns - | - |task t { - | String f - | Int p - | command { - | ./cmd $${f} $${p} - | } - |} - | - |task s { - | Array[File] input_file - | command <<< - | cat $${sep=" " input_file} | awk '{s+=$$1} END {print s}' - | >>> - | output { - | String s = read_string(stdout()) - | } - |} - | - |task r { - | command { - | python -c "import random; print(random.randint(1,100))" - | } - |} - | - |workflow w { - | Int p = 2 + 2 - | call t - | call t as u { - | input: f="abc", p=p - | } - |}""".stripMargin + | + |task t { + | String f + | Int p + | command { + | ./cmd $${f} $${p} + | } + |} + | + |task s { + | Array[File] input_file + | command <<< + | cat $${sep=" " input_file} | awk '{s+=$$1} END {print s}' + | >>> + | output { + | String s = read_string(stdout()) + | } + |} + | + |task r { + | command { + | python -c "import random; print(random.randint(1,100))" + | } + |} + | + |workflow w { + | Int 
p = 2 + 2 + | call t + | call t as u { + | input: f="abc", p=p + | } + |}""".stripMargin "format to console properly" in { val actual = new SyntaxFormatter(AnsiSyntaxHighlighter).format(namespace) diff --git a/wdl/model/draft2/src/test/scala/wdl/TaskSpec.scala b/wdl/model/draft2/src/test/scala/wdl/TaskSpec.scala index aee550334ee..45fab30c9a4 100644 --- a/wdl/model/draft2/src/test/scala/wdl/TaskSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/TaskSpec.scala @@ -21,12 +21,17 @@ class TaskSpec extends WdlTest { s"have a task with name 'wc'" in { wcTask.name shouldEqual "wc" wcTask.declarations.map(_.toWdlString) shouldEqual Vector("File in_file") - wcTask.instantiateCommand( - wcTask.inputsFromMap( - Map("wc.in_file" -> WomSingleFile("/path/to/file")) - ), - NoFunctions - ).toTry.get.head.commandString shouldEqual "cat /path/to/file | wc -l" + wcTask + .instantiateCommand( + wcTask.inputsFromMap( + Map("wc.in_file" -> WomSingleFile("/path/to/file")) + ), + NoFunctions + ) + .toTry + .get + .head + .commandString shouldEqual "cat /path/to/file | wc -l" wcTask.outputs.size shouldEqual 1 wcTask.outputs.head.unqualifiedName shouldEqual "count" wcTask.outputs.head.womType shouldEqual WomIntegerType @@ -38,12 +43,17 @@ class TaskSpec extends WdlTest { "String pattern", "File in_file" ) - cgrepTask.instantiateCommand( - cgrepTask.inputsFromMap( - Map("cgrep.pattern" -> WomString("^...$"), "cgrep.in_file" -> WomSingleFile("/path/to/file")) - ), - NoFunctions - ).toTry.get.head.commandString shouldEqual "grep '^...$' /path/to/file | wc -l" + cgrepTask + .instantiateCommand( + cgrepTask.inputsFromMap( + Map("cgrep.pattern" -> WomString("^...$"), "cgrep.in_file" -> WomSingleFile("/path/to/file")) + ), + NoFunctions + ) + .toTry + .get + .head + .commandString shouldEqual "grep '^...$' /path/to/file | wc -l" cgrepTask.outputs.size shouldEqual 1 cgrepTask.outputs.head.unqualifiedName shouldEqual "count" cgrepTask.outputs.head.womType shouldEqual WomIntegerType @@ -72,7 +82,12 @@ class TaskSpec extends WdlTest { "param_test.e" -> WomArray(WomArrayType(WomIntegerType), Seq(0, 1, 2) map WomInteger.apply), "param_test.f" -> WomBoolean.False ) - paramTestTask.instantiateCommand(paramTestTask.inputsFromMap(inputs), NoFunctions).toTry.get.head.commandString shouldEqual "./binary a_val -p b_val c0,c1,c2 1 0\t1\t2 --false" + paramTestTask + .instantiateCommand(paramTestTask.inputsFromMap(inputs), NoFunctions) + .toTry + .get + .head + .commandString shouldEqual "./binary a_val -p b_val c0,c1,c2 1 0\t1\t2 --false" } s"instantiate command (1)" in { @@ -83,7 +98,12 @@ class TaskSpec extends WdlTest { "param_test.e" -> WomArray(WomArrayType(WomIntegerType), Seq(0, 1, 2) map WomInteger.apply), "param_test.f" -> WomBoolean.True ) - paramTestTask.instantiateCommand(paramTestTask.inputsFromMap(inputs), NoFunctions).toTry.get.head.commandString shouldEqual "./binary a_val -p b_val c0,c1,c2 9 0\t1\t2 --true" + paramTestTask + .instantiateCommand(paramTestTask.inputsFromMap(inputs), NoFunctions) + .toTry + .get + .head + .commandString shouldEqual "./binary a_val -p b_val c0,c1,c2 9 0\t1\t2 --true" } s"instantiate command (2)" in { @@ -95,7 +115,12 @@ class TaskSpec extends WdlTest { "param_test.e" -> WomArray(WomArrayType(WomIntegerType), Seq()), "param_test.f" -> WomBoolean.True ) - paramTestTask.instantiateCommand(paramTestTask.inputsFromMap(inputs), NoFunctions).toTry.get.head.commandString shouldEqual "./binary a_val -p b_val c0 1 --true" + paramTestTask + .instantiateCommand(paramTestTask.inputsFromMap(inputs), 
NoFunctions) + .toTry + .get + .head + .commandString shouldEqual "./binary a_val -p b_val c0 1 --true" } s"instantiate command (3)" in { @@ -107,11 +132,18 @@ class TaskSpec extends WdlTest { "param_test.e" -> WomArray(WomArrayType(WomIntegerType), Seq()), "param_test.f" -> WomBoolean.True ) - paramTestTask.instantiateCommand(paramTestTask.inputsFromMap(inputs), NoFunctions).toTry.get.head.commandString shouldEqual "./binary a_val -p b_val 1 --true" + paramTestTask + .instantiateCommand(paramTestTask.inputsFromMap(inputs), NoFunctions) + .toTry + .get + .head + .commandString shouldEqual "./binary a_val -p b_val 1 --true" } s"fail to instantiate command if missing a required input" in { - paramTestTask.instantiateCommand(paramTestTask.inputsFromMap(Map("param_test.a" -> WomString("a_val"))), NoFunctions).toTry match { + paramTestTask + .instantiateCommand(paramTestTask.inputsFromMap(Map("param_test.a" -> WomString("a_val"))), NoFunctions) + .toTry match { case Failure(_) => // expected case _ => fail("Expected an exception") } diff --git a/wdl/model/draft2/src/test/scala/wdl/TestFileUtil.scala b/wdl/model/draft2/src/test/scala/wdl/TestFileUtil.scala index 12f6850347b..bda7e929db2 100644 --- a/wdl/model/draft2/src/test/scala/wdl/TestFileUtil.scala +++ b/wdl/model/draft2/src/test/scala/wdl/TestFileUtil.scala @@ -14,7 +14,6 @@ trait TestFileUtil { File(file).write(contents).path } - def createFile(name: String, dir: Path, contents: String) = { + def createFile(name: String, dir: Path, contents: String) = File(dir).createDirectories()./(name).write(contents).path - } } diff --git a/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportNamespaceSpec.scala b/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportNamespaceSpec.scala index 54e41467e1b..3b49adc5175 100644 --- a/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportNamespaceSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportNamespaceSpec.scala @@ -12,65 +12,63 @@ import scala.util.Failure class ThreeStepImportNamespaceSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val psTaskWdl = """ - |task ps { - | command { - | ps - | } - | output { - | File procs = stdout() - | } - |}""".stripMargin + |task ps { + | command { + | ps + | } + | output { + | File procs = stdout() + | } + |}""".stripMargin val cgrepTaskWdl = """ - |task cgrep { - | String pattern - | String in_file - | command { - | grep '${pattern}' ${in_file} | wc -l - | } - | output { - | Int count = read_int(stdout()) - | } - |}""".stripMargin + |task cgrep { + | String pattern + | String in_file + | command { + | grep '${pattern}' ${in_file} | wc -l + | } + | output { + | Int count = read_int(stdout()) + | } + |}""".stripMargin val wcTaskWdl = """ - |task wc { - | File in_file - | command { - | cat ${in_file} | wc -l - | } - | output { - | Int count = read_int(stdout()) - | } - |}""".stripMargin + |task wc { + | File in_file + | command { + | cat ${in_file} | wc -l + | } + | output { + | Int count = read_int(stdout()) + | } + |}""".stripMargin val workflowWdl = """ - |import "ps" as ns1 - |import "cgrep" as ns2 - |import "wc" as ns3 - | - |workflow three_step { - | call ns1.ps - | call ns2.cgrep { - | input: in_file=ps.procs - | } - | call ns3.wc { - | input: in_file=ps.procs - | } - |}""".stripMargin + |import "ps" as ns1 + |import "cgrep" as ns2 + |import "wc" as ns3 + | + |workflow three_step { + | call ns1.ps + | call ns2.cgrep { + | input: in_file=ps.procs + | } + | call ns3.wc { + | input: in_file=ps.procs + | } + |}""".stripMargin - def 
resolver(importUri: String): Draft2ResolvedImportBundle = { + def resolver(importUri: String): Draft2ResolvedImportBundle = importUri match { case "ps" => Draft2ResolvedImportBundle(psTaskWdl, ResolvedImportRecord("ps")) case "cgrep" => Draft2ResolvedImportBundle(cgrepTaskWdl, ResolvedImportRecord("cgrep")) case "wc" => Draft2ResolvedImportBundle(wcTaskWdl, ResolvedImportRecord("wc")) case _ => throw new RuntimeException(s"Can't resolve $importUri") } - } val namespace = WdlNamespaceWithWorkflow.load(workflowWdl, Seq(resolver _)).get - "WDL file with imports" should "Have 0 tasks (3 tasks are in separate namespace)" in { namespace.tasks.size shouldEqual 0 } @@ -78,16 +76,14 @@ class ThreeStepImportNamespaceSpec extends AnyFlatSpec with CromwellTimeoutSpec namespace.namespaces.size shouldEqual 3 } it should "Have 3 imported WdlNamespaces with tasks 'ps', 'cgrep', and 'wc'" in { - namespace.namespaces flatMap {_.tasks} map {_.name} shouldEqual Seq("ps", "cgrep", "wc") + namespace.namespaces flatMap { _.tasks } map { _.name } shouldEqual Seq("ps", "cgrep", "wc") } it should "Throw an exception if the import resolver fails to resolve an import" in { - def badResolver(s: String): Draft2ResolvedImportBundle = { + def badResolver(s: String): Draft2ResolvedImportBundle = throw new RuntimeException(s"Can't Resolve") - } WdlNamespace.loadUsingSource(workflowWdl, None, Option(Seq(badResolver))) match { case Failure(_: ValidationException) => case x => fail(s"Expecting ValidationException to be thrown when using badResolver but got $x") } } } - diff --git a/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportSpec.scala b/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportSpec.scala index 61272990382..e2dd8cbf208 100644 --- a/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/ThreeStepImportSpec.scala @@ -12,61 +12,60 @@ import scala.util.Failure class ThreeStepImportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val psTaskWdl = """ - |task ps { - | command { - | ps - | } - | output { - | File procs = stdout() - | } - |}""".stripMargin + |task ps { + | command { + | ps + | } + | output { + | File procs = stdout() + | } + |}""".stripMargin val cgrepTaskWdl = s""" - |task cgrep { - | String pattern - | File in_file - | command { - | grep '$${pattern}' $${in_file} | wc -l - | } - | output { - | Int count = read_int(stdout()) - | } - |}""".stripMargin + |task cgrep { + | String pattern + | File in_file + | command { + | grep '$${pattern}' $${in_file} | wc -l + | } + | output { + | Int count = read_int(stdout()) + | } + |}""".stripMargin val wcTaskWdl = s""" - |task wc { - | File in_file - | command { - | cat $${in_file} | wc -l - | } - | output { - | Int count = read_int(stdout()) - | } - |}""".stripMargin + |task wc { + | File in_file + | command { + | cat $${in_file} | wc -l + | } + | output { + | Int count = read_int(stdout()) + | } + |}""".stripMargin val workflowWdl = """ - |import "ps" - |import "cgrep" - |import "wc" - | - |workflow three_step { - | call ps.ps - | call cgrep.cgrep { - | input: in_file=ps.procs - | } - | call wc.wc { - | input: in_file=ps.procs - | } - |}""".stripMargin + |import "ps" + |import "cgrep" + |import "wc" + | + |workflow three_step { + | call ps.ps + | call cgrep.cgrep { + | input: in_file=ps.procs + | } + | call wc.wc { + | input: in_file=ps.procs + | } + |}""".stripMargin - def resolver(importUri: String): Draft2ResolvedImportBundle = { + def resolver(importUri: String): Draft2ResolvedImportBundle = 
importUri match { case "ps" => Draft2ResolvedImportBundle(psTaskWdl, ResolvedImportRecord("ps")) case "cgrep" => Draft2ResolvedImportBundle(cgrepTaskWdl, ResolvedImportRecord("cgrep")) case "wc" => Draft2ResolvedImportBundle(wcTaskWdl, ResolvedImportRecord("wc")) case _ => throw new RuntimeException(s"Can't resolve $importUri") } - } val namespace = WdlNamespaceWithWorkflow.load(workflowWdl, Seq(resolver _)).get @@ -78,12 +77,11 @@ class ThreeStepImportSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc namespace.namespaces.size shouldEqual 3 } it should "Have imported namespaces with tasks named 'ps', 'cgrep' and 'wc'" in { - namespace.namespaces flatMap {_.tasks} map {_.name} shouldEqual Seq("ps", "cgrep", "wc") + namespace.namespaces flatMap { _.tasks } map { _.name } shouldEqual Seq("ps", "cgrep", "wc") } it should "Throw an exception if the import resolver fails to resolve an import" in { - def badResolver(s: String): Draft2ResolvedImportBundle = { + def badResolver(s: String): Draft2ResolvedImportBundle = throw new RuntimeException(s"Can't Resolve") - } WdlNamespace.loadUsingSource(workflowWdl, None, Option(Seq(badResolver))) match { case Failure(_: ValidationException) => diff --git a/wdl/model/draft2/src/test/scala/wdl/WdlCallSpec.scala b/wdl/model/draft2/src/test/scala/wdl/WdlCallSpec.scala index 5b943bf13be..61459adb0d8 100644 --- a/wdl/model/draft2/src/test/scala/wdl/WdlCallSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/WdlCallSpec.scala @@ -25,17 +25,25 @@ class WdlCallSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { val inputs: WorkflowCoercedInputs = SampleWdl.TaskDeclarationsWdl.workflowInputs - def outputResolver(call: WdlGraphNode, index: Option[Int]): Try[WomValue] = { + def outputResolver(call: WdlGraphNode, index: Option[Int]): Try[WomValue] = (call, index) match { - case (`callT`, Some(2)) => Success(WdlCallOutputsObject(callT, Map("o" -> WomString(s"c ${index.getOrElse(-1)}")))) - case (`callT3`, Some(2)) => Success(WdlCallOutputsObject(callT, Map("o" -> WomString(s"c ${index.getOrElse(-1)}")))) - case (`callT2`, None) => Success(WdlCallOutputsObject(callT2, Map( - "outputArray" -> WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(0), WomInteger(1), WomInteger(2))) - ))) + case (`callT`, Some(2)) => + Success(WdlCallOutputsObject(callT, Map("o" -> WomString(s"c ${index.getOrElse(-1)}")))) + case (`callT3`, Some(2)) => + Success(WdlCallOutputsObject(callT, Map("o" -> WomString(s"c ${index.getOrElse(-1)}")))) + case (`callT2`, None) => + Success( + WdlCallOutputsObject(callT2, + Map( + "outputArray" -> WomArray(WomArrayType(WomIntegerType), + Seq(WomInteger(0), WomInteger(1), WomInteger(2)) + ) + ) + ) + ) case _ => Failure(new Exception(s"no output found for call ${call.fullyQualifiedName}")) } - } - + val shardMap = Map(namespace.scatters.head -> 2) val declarations = callV.evaluateTaskInputs(inputs, NoFunctions, outputResolver, shardMap).get @@ -56,7 +64,7 @@ class WdlCallSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { WomArray(WomArrayType(WomSingleFileType), Seq(WomSingleFile("a"), WomSingleFile("b"), WomSingleFile("c"))) declarations.find(_._1.unqualifiedName == "l").get._2 shouldBe WomOptionalValue(WomString("c 2")) } - + "accumulate input evaluation errors and throw an exception" in { val wdl = """ @@ -70,7 +78,7 @@ class WdlCallSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { | call t |} """.stripMargin - + val namespace = WdlNamespace.loadUsingSource(wdl, None, None).get val callT = 
namespace.calls.find(_.unqualifiedName == "t").get val exception = the[ValidationException] thrownBy { @@ -83,30 +91,30 @@ class WdlCallSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { val wdl = s""" - |workflow x { - | call cram - | call y as shouldntBeProblematic { - | input: - | cram = cram.scram - | } - | - |} - | - |task cram { - | command { - | echo "." - | } - | output { - | String scram = "." - | } - |} - | - |task y { - | String cram - | command { - | echo "." - | } - |}""".stripMargin + |workflow x { + | call cram + | call y as shouldntBeProblematic { + | input: + | cram = cram.scram + | } + | + |} + | + |task cram { + | command { + | echo "." + | } + | output { + | String scram = "." + | } + |} + | + |task y { + | String cram + | command { + | echo "." + | } + |}""".stripMargin val namespace = WdlNamespace.loadUsingSource(wdl, None, None).get namespace.workflows.head.calls.exists(_.alias == Option("shouldntBeProblematic")) shouldBe true @@ -116,115 +124,116 @@ class WdlCallSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { val wdl = s""" - |workflow x { - | call cram - | call y as shouldntBeProblematic { - | input: - | cram = "asdf", - | slam = cram.scram - | } - | - |} - | - |task cram { - | command { - | echo "." - | } - | output { - | String scram = "." - | } - |} - | - |task y { - | String cram - | String slam - | command { - | echo "." - | } - |}""".stripMargin + |workflow x { + | call cram + | call y as shouldntBeProblematic { + | input: + | cram = "asdf", + | slam = cram.scram + | } + | + |} + | + |task cram { + | command { + | echo "." + | } + | output { + | String scram = "." + | } + |} + | + |task y { + | String cram + | String slam + | command { + | echo "." + | } + |}""".stripMargin val namespace = WdlNamespace.loadUsingSource(wdl, None, None).get namespace.workflows.head.calls.exists(_.alias == Option("shouldntBeProblematic")) shouldBe true } - + "find workflows" in { val subWorkflow = s""" - |task hello2 { - | String addressee = "hey" - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - |} - | - |workflow sub_hello { - | call hello2 - | output { - | String result = hello2.salutation - | } - |} + |task hello2 { + | String addressee = "hey" + | command { + | echo "Hello $${addressee}!" + | } + | output { + | String salutation = read_string(stdout()) + | } + |} + | + |workflow sub_hello { + | call hello2 + | output { + | String result = hello2.salutation + | } + |} """.stripMargin val wdl = s""" - |import "placeholder" as sub - | - |task hello { - | String addressee - | command { - | echo "Hello $${addressee}!" - | } - | output { - | String salutation = read_string(stdout()) - | } - |} - | - |workflow wf_hello { - | call sub.sub_hello - | call hello {input: addressee = sub_hello.result } - |} + |import "placeholder" as sub + | + |task hello { + | String addressee + | command { + | echo "Hello $${addressee}!" 
+ | } + | output { + | String salutation = read_string(stdout()) + | } + |} + | + |workflow wf_hello { + | call sub.sub_hello + | call hello {input: addressee = sub_hello.result } + |} """.stripMargin - val ns = WdlNamespaceWithWorkflow.load(wdl, Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri)))).get + val ns = WdlNamespaceWithWorkflow + .load(wdl, Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri)))) + .get ns.workflow.workflowCalls.size shouldBe 1 ns.workflow.taskCalls.size shouldBe 1 } - + "bubble up evaluation exception" in { val wdl = s""" - |task hello { - | String addressee - | command { - | echo "Hello $${addressee}!" - | } - | runtime { - | docker: "ubuntu:latest" - | } - | output { - | String salutation = read_string(stdout()) - | } - |} - | - |workflow wf_hello { - | File wf_hello_input - | File wf_hello_input2 - | String read = read_string(wf_hello_input) - | String read2 = read_string(wf_hello_input2) - | - | call hello {input: addressee = read_string(wf_hello_input) } - | call hello as hello2 {input: addressee = read } - | - | output { - | String salutation = hello.salutation - | } - |}""".stripMargin - - + |task hello { + | String addressee + | command { + | echo "Hello $${addressee}!" + | } + | runtime { + | docker: "ubuntu:latest" + | } + | output { + | String salutation = read_string(stdout()) + | } + |} + | + |workflow wf_hello { + | File wf_hello_input + | File wf_hello_input2 + | String read = read_string(wf_hello_input) + | String read2 = read_string(wf_hello_input2) + | + | call hello {input: addressee = read_string(wf_hello_input) } + | call hello as hello2 {input: addressee = read } + | + | output { + | String salutation = hello.salutation + | } + |}""".stripMargin + val functionsWithRead = new PureStandardLibraryFunctionsLike { override def readFile(path: String, sizeLimit: Int): String = { import better.files._ @@ -234,18 +243,25 @@ class WdlCallSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get val exception = intercept[ValidationException] { - ns.workflow.findCallByName("hello2").get.evaluateTaskInputs( + ns.workflow + .findCallByName("hello2") + .get + .evaluateTaskInputs( Map("wf_hello.wf_hello_input" -> WomSingleFile("/do/not/exist")), functionsWithRead - ).get + ) + .get } exception.getMessage shouldBe "Input evaluation for Call wf_hello.hello2 failed.:\naddressee:\n\tFile not found /do/not/exist" - val staticEvaluation = ns.staticDeclarationsRecursive(Map( - "wf_hello.wf_hello_input" -> WomSingleFile("/do/not/exist"), - "wf_hello.wf_hello_input2" -> WomSingleFile("/do/not/exist2") - ), functionsWithRead) - + val staticEvaluation = + ns.staticDeclarationsRecursive(Map( + "wf_hello.wf_hello_input" -> WomSingleFile("/do/not/exist"), + "wf_hello.wf_hello_input2" -> WomSingleFile("/do/not/exist2") + ), + functionsWithRead + ) + staticEvaluation.isFailure shouldBe true val exception2 = staticEvaluation.failed.get exception2.getMessage shouldBe "Could not evaluate workflow declarations:\nwf_hello.read:\n\tFile not found /do/not/exist\nwf_hello.read2:\n\tFile not found /do/not/exist2" diff --git a/wdl/model/draft2/src/test/scala/wdl/WdlTest.scala b/wdl/model/draft2/src/test/scala/wdl/WdlTest.scala index 9bef617b390..11de27dcfcb 100644 --- a/wdl/model/draft2/src/test/scala/wdl/WdlTest.scala +++ b/wdl/model/draft2/src/test/scala/wdl/WdlTest.scala @@ -11,23 +11,24 @@ import wom.ResolvedImportRecord trait WdlTest extends 
Matchers with AnyWordSpecLike { def resolver(root: File)(relPath: String): Draft2ResolvedImportBundle = Draft2ResolvedImportBundle((root / relPath).contentAsString, ResolvedImportRecord((root / relPath).pathAsString)) - def loadWdl(path: String) = loadWdlFile(currentWorkingDirectory/"wom"/"src"/"test"/"resources"/path) + def loadWdl(path: String) = loadWdlFile(currentWorkingDirectory / "wom" / "src" / "test" / "resources" / path) def loadWdlFile(wdlFile: File) = WdlNamespaceWithWorkflow.load(wdlFile.contentAsString, Seq(resolver(wdlFile / "..") _)).get def getTask(ns: WdlNamespace, name: String): WdlTask = ns.tasks.find(_.unqualifiedName == name).get - def getCall(ns: WdlNamespaceWithWorkflow, name: String): WdlTaskCall = ns.workflow.taskCalls.find(_.unqualifiedName == name) getOrElse { - fail(s"Expecting call with name '$name'") - } + def getCall(ns: WdlNamespaceWithWorkflow, name: String): WdlTaskCall = + ns.workflow.taskCalls.find(_.unqualifiedName == name) getOrElse { + fail(s"Expecting call with name '$name'") + } def getScatter(ns: WdlNamespaceWithWorkflow, index: Int): Scatter = { val scatterFqn = ns.workflow.unqualifiedName + ".$scatter_" + index - val resolution = ns.resolve(scatterFqn).collect({ case s: Scatter => s}) + val resolution = ns.resolve(scatterFqn).collect { case s: Scatter => s } resolution getOrElse { fail(s"Expecting a scatter block with FQN $scatterFqn") } } def getIf(ns: WdlNamespaceWithWorkflow, index: Int): If = { val ifFqn = ns.workflow.unqualifiedName + ".$if_" + index - val resolution = ns.resolve(ifFqn).collect({ case i: If => i }) + val resolution = ns.resolve(ifFqn).collect { case i: If => i } resolution getOrElse { fail(s"Expecting a scatter block with FQN $ifFqn") } diff --git a/wdl/model/draft2/src/test/scala/wdl/WdlWiringSpec.scala b/wdl/model/draft2/src/test/scala/wdl/WdlWiringSpec.scala index 5ab30d7d57c..44181e6b2ab 100644 --- a/wdl/model/draft2/src/test/scala/wdl/WdlWiringSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/WdlWiringSpec.scala @@ -19,7 +19,11 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val wdlFile = testDir / "test.wdl" if (!wdlFile.exists) fail(s"Expecting a 'test.wdl' file in directory 'cases/${testDir.name}'") def resolvers: Seq[Draft2ImportResolver] = - Seq((relPath: String) => Draft2ResolvedImportBundle((testDir / relPath).contentAsString, ResolvedImportRecord((testDir / relPath).pathAsString))) + Seq((relPath: String) => + Draft2ResolvedImportBundle((testDir / relPath).contentAsString, + ResolvedImportRecord((testDir / relPath).pathAsString) + ) + ) val namespace = WdlNamespaceWithWorkflow.load(File(wdlFile.path).contentAsString, resolvers).get val wdlFileRelPath = File(".").relativize(wdlFile) @@ -98,12 +102,17 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { expectedWorkflowInputsFile.write(jsObject.prettyPrint + "\n") } - expectedWorkflowInputsFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsString]] map { - case (k, v) => k -> v.value + expectedWorkflowInputsFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + .asInstanceOf[Map[String, JsString]] map { case (k, v) => + k -> v.value } } - private def expectedParents(testDir: File, namespace: WdlNamespaceWithWorkflow): Map[FullyQualifiedName, Option[FullyQualifiedName]] = { + private def expectedParents(testDir: File, + namespace: WdlNamespaceWithWorkflow + ): Map[FullyQualifiedName, Option[FullyQualifiedName]] = { val expectedParentsFile = 
testDir / "parents.expectations" if (!expectedParentsFile.exists) { @@ -120,7 +129,9 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { } } - private def expectedChildren(testDir: File, namespace: WdlNamespaceWithWorkflow): Map[FullyQualifiedName, Seq[Scope]] = { + private def expectedChildren(testDir: File, + namespace: WdlNamespaceWithWorkflow + ): Map[FullyQualifiedName, Seq[Scope]] = { val expectedChildrenFile = testDir / "children.expectations" if (!expectedChildrenFile.exists) { @@ -131,14 +142,18 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { expectedChildrenFile.write(jsObject.prettyPrint + "\n") } - expectedChildrenFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsArray]] map { - case (k, v) => - val children = v.elements.collect({ case s: JsString => s }).map(s => namespace.resolve(s.value).get) - k -> children + expectedChildrenFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + .asInstanceOf[Map[String, JsArray]] map { case (k, v) => + val children = v.elements.collect { case s: JsString => s }.map(s => namespace.resolve(s.value).get) + k -> children } } - private def expectedFullyQualifiedNames(testDir: File, namespace: WdlNamespaceWithWorkflow): Map[FullyQualifiedName, String] = { + private def expectedFullyQualifiedNames(testDir: File, + namespace: WdlNamespaceWithWorkflow + ): Map[FullyQualifiedName, String] = { val expectedFqnsAndClassFile = testDir / "fqn.expectations" if (!expectedFqnsAndClassFile.exists) { @@ -149,12 +164,17 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { expectedFqnsAndClassFile.write(jsObject.prettyPrint + "\n") } - expectedFqnsAndClassFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsString]] map { - case (k, v) => k -> v.value + expectedFqnsAndClassFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + .asInstanceOf[Map[String, JsString]] map { case (k, v) => + k -> v.value } } - private def expectedFullyQualifiedNamesWithIndexScopes(testDir: File, namespace: WdlNamespaceWithWorkflow): Map[FullyQualifiedName, String] = { + private def expectedFullyQualifiedNamesWithIndexScopes(testDir: File, + namespace: WdlNamespaceWithWorkflow + ): Map[FullyQualifiedName, String] = { val expectedFqnsAndClassFile = testDir / "fqn_index_scopes.expectations" if (!expectedFqnsAndClassFile.exists) { @@ -165,8 +185,11 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { expectedFqnsAndClassFile.write(jsObject.prettyPrint + "\n") } - expectedFqnsAndClassFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsString]] map { - case (k, v) => k -> v.value + expectedFqnsAndClassFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + .asInstanceOf[Map[String, JsString]] map { case (k, v) => + k -> v.value } } @@ -181,30 +204,39 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { expectedAncestryFile.write(jsObject.prettyPrint + "\n") } - expectedAncestryFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsArray]] map { - case (k, v) => - val expectedAncestry = v.elements.asInstanceOf[Vector[JsString]].map(n => namespace.resolve(n.value).get) - val resolvedFqn = namespace.resolve(k).get - resolvedFqn -> expectedAncestry + expectedAncestryFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + 
.asInstanceOf[Map[String, JsArray]] map { case (k, v) => + val expectedAncestry = v.elements.asInstanceOf[Vector[JsString]].map(n => namespace.resolve(n.value).get) + val resolvedFqn = namespace.resolve(k).get + resolvedFqn -> expectedAncestry } } - private def expectedUpstreamAncestry(testDir: File, namespace: WdlNamespaceWithWorkflow): Map[WdlGraphNode, Set[Scope]] = { + private def expectedUpstreamAncestry(testDir: File, + namespace: WdlNamespaceWithWorkflow + ): Map[WdlGraphNode, Set[Scope]] = { val expectedUpstreamFile = testDir / "upstreamAncestry.expectations" if (!expectedUpstreamFile.exists) { - val upstreamFqns = namespace.descendants.collect({ case n: WdlGraphNode => n }) map { node => - node.fullyQualifiedName -> JsArray(node.upstreamAncestry.toVector.map(_.fullyQualifiedName).sorted.map(JsString(_))) + val upstreamFqns = namespace.descendants.collect { case n: WdlGraphNode => n } map { node => + node.fullyQualifiedName -> JsArray( + node.upstreamAncestry.toVector.map(_.fullyQualifiedName).sorted.map(JsString(_)) + ) } val jsObject = JsObject(ListMap(upstreamFqns.toSeq.sortBy(_._1): _*)) expectedUpstreamFile.write(jsObject.prettyPrint + "\n") } - expectedUpstreamFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsArray]] map { - case (k, v) => - val expectedUpstreamAncestors = v.elements.asInstanceOf[Vector[JsString]].map(n => namespace.resolve(n.value).get).toSet - val resolvedFqn = namespace.resolve(k).get.asInstanceOf[WdlGraphNode] - resolvedFqn -> expectedUpstreamAncestors + expectedUpstreamFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + .asInstanceOf[Map[String, JsArray]] map { case (k, v) => + val expectedUpstreamAncestors = + v.elements.asInstanceOf[Vector[JsString]].map(n => namespace.resolve(n.value).get).toSet + val resolvedFqn = namespace.resolve(k).get.asInstanceOf[WdlGraphNode] + resolvedFqn -> expectedUpstreamAncestors } } @@ -212,18 +244,20 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val expectedUpstreamFile = testDir / "upstream.expectations" if (!expectedUpstreamFile.exists) { - val upstreamFqns = namespace.descendants.collect({ case n: WdlGraphNode => n }) map { node => + val upstreamFqns = namespace.descendants.collect { case n: WdlGraphNode => n } map { node => node.fullyQualifiedName -> JsArray(node.upstream.toVector.map(_.fullyQualifiedName).sorted.map(JsString(_))) } val jsObject = JsObject(ListMap(upstreamFqns.toSeq.sortBy(_._1): _*)) expectedUpstreamFile.write(jsObject.prettyPrint + "\n") } - expectedUpstreamFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsArray]] map { - case (k, v) => - val expectedUpstream = v.elements.asInstanceOf[Vector[JsString]].map(n => namespace.resolve(n.value).get).toSet - val resolvedFqn = namespace.resolve(k).get.asInstanceOf[WdlGraphNode] - resolvedFqn -> expectedUpstream + expectedUpstreamFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + .asInstanceOf[Map[String, JsArray]] map { case (k, v) => + val expectedUpstream = v.elements.asInstanceOf[Vector[JsString]].map(n => namespace.resolve(n.value).get).toSet + val resolvedFqn = namespace.resolve(k).get.asInstanceOf[WdlGraphNode] + resolvedFqn -> expectedUpstream } } @@ -231,14 +265,17 @@ class WdlWiringSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val expectedDownstreamFile = testDir / "downstream.expectations" if (!expectedDownstreamFile.exists) { - val downstreamFqns = namespace.descendants.collect({ 
case n: WdlGraphNode => n }) map { node => + val downstreamFqns = namespace.descendants.collect { case n: WdlGraphNode => n } map { node => node.fullyQualifiedName -> JsArray(node.downstream.toVector.map(_.fullyQualifiedName).sorted.map(JsString(_))) } val jsObject = JsObject(ListMap(downstreamFqns.toSeq.sortBy(_._1): _*)) expectedDownstreamFile.write(jsObject.prettyPrint + "\n") } - expectedDownstreamFile.contentAsString.parseJson.asInstanceOf[JsObject].fields.asInstanceOf[Map[String, JsArray]] map { case (k, v) => + expectedDownstreamFile.contentAsString.parseJson + .asInstanceOf[JsObject] + .fields + .asInstanceOf[Map[String, JsArray]] map { case (k, v) => val expectedDownstream = v.elements.asInstanceOf[Vector[JsString]].map(n => namespace.resolve(n.value).get).toSet val resolvedFqn = namespace.resolve(k).get.asInstanceOf[WdlGraphNode] resolvedFqn -> expectedDownstream diff --git a/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowImportsSpec.scala b/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowImportsSpec.scala index 38d84429af6..b6d7e5701b8 100644 --- a/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowImportsSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowImportsSpec.scala @@ -45,79 +45,79 @@ class WdlWorkflowImportsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M val basicWdl = s""" - |import "$echoHelloWdlFile" + |import "$echoHelloWdlFile" """.stripMargin + - """ - | - |task ls { - | command { - | ls -l - | } - | output { - | String fileList = read_string(stdout()) - | } - |} - | - |task pwd { - | command { - | pwd - | } - | output { - | String current = read_string(stdout()) - | } - |} - | - |workflow basic { - | call ls - | call pwd - | output { - | ls.fileList - | } - |} + """ + | + |task ls { + | command { + | ls -l + | } + | output { + | String fileList = read_string(stdout()) + | } + |} + | + |task pwd { + | command { + | pwd + | } + | output { + | String current = read_string(stdout()) + | } + |} + | + |workflow basic { + | call ls + | call pwd + | output { + | ls.fileList + | } + |} """.stripMargin val printNumsWdl = s""" - |import "$echoHelloWdlFile" as multilingualEcho - | + |import "$echoHelloWdlFile" as multilingualEcho + | """.stripMargin + - """ - |task ls { - | command { - | ls - | } - |} - | - | - |task print1 { - | Int x = 10 - | command { - | for i in `seq 1 ${x}` - | do - | echo $i - | done - | } - |} - | - |task print2 { - | Int x = 20 - | command { - | for i in `seq 1 ${x}` - | do - | echo $i - | done - | } - |} - | - |task print3 { - | Int x = 30 - | command { - | for i in `seq 1 ${x}` - | do - | echo $i - | done - | } - |} + """ + |task ls { + | command { + | ls + | } + |} + | + | + |task print1 { + | Int x = 10 + | command { + | for i in `seq 1 ${x}` + | do + | echo $i + | done + | } + |} + | + |task print2 { + | Int x = 20 + | command { + | for i in `seq 1 ${x}` + | do + | echo $i + | done + | } + |} + | + |task print3 { + | Int x = 30 + | command { + | for i in `seq 1 ${x}` + | do + | echo $i + | done + | } + |} """.stripMargin val snoozeWdl = @@ -176,11 +176,10 @@ class WdlWorkflowImportsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M |} """.stripMargin - val threeStepWdlWithImports = s""" - |import "$echoHelloWdlFile" as funEcho - | + |import "$echoHelloWdlFile" as funEcho + | """.stripMargin + threeStepWdl val basicWdlImportFile = addAndGetFile("basic", basicWdl) @@ -188,8 +187,7 @@ class WdlWorkflowImportsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M val snoozeWdlImportFile = addAndGetFile("snooze", snoozeWdl) 
val printNumsWdlImportFile = addAndGetFile("printNums", printNumsWdl) - def noExtension(fileName: String) = fileName.replace(".wdl","") - + def noExtension(fileName: String) = fileName.replace(".wdl", "") val imports = s""" @@ -202,41 +200,42 @@ class WdlWorkflowImportsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M val primaryWorkflow = s""" - | - |task testCaseTask { - | command { - | echo "Ruchi's birthday: 01/19" - | } - |} - | - |workflow testCases { - | call ${noExtension(basicWdlImportFile)}.ls as ls1 - | call ${noExtension(printNumsWdlImportFile)}.ls as ls2 - | - | #call ${noExtension(basicWdlImportFile)}.pwd as soBasic - | - | #call ${noExtension(printNumsWdlImportFile)}.print1 - | #call ${noExtension(printNumsWdlImportFile)}.print2 as printingFun - | - | - | call classicThreeStep.ps - | call classicThreeStep.ps as psAgain - | - | call trySleep.sleep - | call trySleep.sleep2 as sleepMore - | - | call ${noExtension(basicWdlImportFile)}.${noExtension(echoHelloWdlFile)}.inFrench - | call classicThreeStep.funEcho.inSpanish - | call classicThreeStep.funEcho.inSpanish as inPortugese - | call ${noExtension(printNumsWdlImportFile)}.multilingualEcho.inItalian - | - |} - |""".stripMargin + | + |task testCaseTask { + | command { + | echo "Ruchi's birthday: 01/19" + | } + |} + | + |workflow testCases { + | call ${noExtension(basicWdlImportFile)}.ls as ls1 + | call ${noExtension(printNumsWdlImportFile)}.ls as ls2 + | + | #call ${noExtension(basicWdlImportFile)}.pwd as soBasic + | + | #call ${noExtension(printNumsWdlImportFile)}.print1 + | #call ${noExtension(printNumsWdlImportFile)}.print2 as printingFun + | + | + | call classicThreeStep.ps + | call classicThreeStep.ps as psAgain + | + | call trySleep.sleep + | call trySleep.sleep2 as sleepMore + | + | call ${noExtension(basicWdlImportFile)}.${noExtension(echoHelloWdlFile)}.inFrench + | call classicThreeStep.funEcho.inSpanish + | call classicThreeStep.funEcho.inSpanish as inPortugese + | call ${noExtension(printNumsWdlImportFile)}.multilingualEcho.inItalian + | + |} + |""".stripMargin val wdlWithImports = imports + primaryWorkflow val namespace = { - val resolvers: Seq[Draft2ImportResolver] = Seq(WdlNamespace.directoryResolver(wdlDirectory), WdlNamespace.fileResolver) + val resolvers: Seq[Draft2ImportResolver] = + Seq(WdlNamespace.directoryResolver(wdlDirectory), WdlNamespace.fileResolver) WdlNamespaceWithWorkflow.load(wdlWithImports, resolvers).get } @@ -256,7 +255,8 @@ class WdlWorkflowImportsSpec extends AnyFlatSpec with CromwellTimeoutSpec with M } it should "import two WDL file (with clashing task names) and be able to reference all tasks by FQN" in { val clashingTaskNames = Seq(namespace.resolve(s"${noExtension(basicWdlImportFile)}.ls"), - namespace.resolve(s"${noExtension(printNumsWdlImportFile)}.ls")) + namespace.resolve(s"${noExtension(printNumsWdlImportFile)}.ls") + ) clashingTaskNames.size shouldEqual 2 } diff --git a/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowSpec.scala b/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowSpec.scala index 4b5f2811760..96d31533fd3 100644 --- a/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/WdlWorkflowSpec.scala @@ -77,82 +77,114 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers val workflowInputs = Map("main_workflow.workflow_input" -> WomString("workflow_input")) - def outputResolverForWorkflow(workflow: WdlWorkflow)(call: WdlGraphNode, index: Option[Int])= { + def outputResolverForWorkflow(workflow: 
WdlWorkflow)(call: WdlGraphNode, index: Option[Int]) = call match { // Main Task case c: WdlCall if c == workflow.findCallByName("main_task").get => - Success(WdlCallOutputsObject(c, Map( - "task_o1" -> WomString("MainTaskOutputString"), - "task_o2" -> WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(8))) - ))) + Success( + WdlCallOutputsObject(c, + Map( + "task_o1" -> WomString("MainTaskOutputString"), + "task_o2" -> WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(8))) + ) + ) + ) case c: WdlCall if c == workflow.findCallByName("main_task2").get => - Success(WdlCallOutputsObject(c, Map( - "task_o1" -> WomString("MainTask2OutputString"), - "task_o2" -> WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(16))) - ))) + Success( + WdlCallOutputsObject(c, + Map( + "task_o1" -> WomString("MainTask2OutputString"), + "task_o2" -> WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(16))) + ) + ) + ) case c: WdlCall if c == workflow.findCallByName("main_task_in_scatter").get => - Success(WdlCallOutputsObject(c, Map( - "task_o1" -> WomArray(WomArrayType(WomStringType), Seq(WomString("MainTaskOutputString"))) - ))) + Success( + WdlCallOutputsObject(c, + Map( + "task_o1" -> WomArray(WomArrayType(WomStringType), + Seq(WomString("MainTaskOutputString")) + ) + ) + ) + ) - //Sub Task + // Sub Task case c: WdlCall if c == workflow.findCallByName("sub_task").get => - Success(WdlCallOutputsObject(c, Map( - "sub_task_o1" -> WomString("SubTaskOutputString") - ))) + Success( + WdlCallOutputsObject(c, + Map( + "sub_task_o1" -> WomString("SubTaskOutputString") + ) + ) + ) case c: WdlCall if c == workflow.findCallByName("sub_task2").get => - Success(WdlCallOutputsObject(c, Map( - "sub_task_o1" -> WomString("SubTask2OutputString") - ))) + Success( + WdlCallOutputsObject(c, + Map( + "sub_task_o1" -> WomString("SubTask2OutputString") + ) + ) + ) // Workflow Task case c: WdlCall if c == workflow.findCallByName("sub_workflow").get => - Success(WdlCallOutputsObject(c, Map( - "sub_sub_workflow_sub_task_sub_task_o1" -> WomString("SubWorkflowSubTaskOutputString"), - "sub_o1" -> WomString("SubWorkflowOutputString") - ))) + Success( + WdlCallOutputsObject( + c, + Map( + "sub_sub_workflow_sub_task_sub_task_o1" -> WomString("SubWorkflowSubTaskOutputString"), + "sub_o1" -> WomString("SubWorkflowOutputString") + ) + ) + ) case c: WdlCall if c == workflow.findCallByName("sub_workflow2").get => - Success(WdlCallOutputsObject(c, Map( - "sub_sub_workflow_sub_task_sub_task_o1" -> WomString("SubWorkflow2SubTaskOutputString"), - "sub_o1" -> WomString("SubWorkflow2OutputString") - ))) + Success( + WdlCallOutputsObject( + c, + Map( + "sub_sub_workflow_sub_task_sub_task_o1" -> WomString("SubWorkflow2SubTaskOutputString"), + "sub_o1" -> WomString("SubWorkflow2OutputString") + ) + ) + ) case c: WdlCall => fail(s"No output found for call ${c.unqualifiedName}") case _ => Failure(new Exception()) } - } case class WorkflowOutputExpectation(unqualifiedName: FullyQualifiedName, womType: WomType, sourceString: String) implicit val workflowOutputEquality = new Equality[WorkflowOutput] { - override def areEqual(src: WorkflowOutput, expectation: Any): Boolean = { + override def areEqual(src: WorkflowOutput, expectation: Any): Boolean = expectation match { case output: WorkflowOutputExpectation => output.unqualifiedName == src.unqualifiedName && "main_workflow." 
+ output.unqualifiedName == src.locallyQualifiedName(src.parent.get) && - output.womType.stableName == src.womType.stableName && - output.sourceString == src.requiredExpression.toWomString + output.womType.stableName == src.womType.stableName && + output.sourceString == src.requiredExpression.toWomString case _ => false } - } } def evaluateOutputs(workflow: WdlWorkflow, knownInputs: WorkflowCoercedInputs, wdlFunctions: WdlFunctions[WomValue], outputResolver: OutputResolver, - shards: Map[Scatter, Int] = Map.empty[Scatter, Int]): Try[Map[WorkflowOutput, WomValue]] = { + shards: Map[Scatter, Int] = Map.empty[Scatter, Int] + ): Try[Map[WorkflowOutput, WomValue]] = { - val evaluatedOutputs = workflow.outputs.foldLeft(Map.empty[WorkflowOutput, Try[WomValue]])((outputMap, output) => { - val currentOutputs = outputMap collect { - case (outputName, outputValue) if outputValue.isSuccess => outputName.fullyQualifiedName -> outputValue.get - } - def knownValues = currentOutputs ++ knownInputs - val lookup = workflow.lookupFunction(knownValues, wdlFunctions, outputResolver, shards, output) - val coerced = output.requiredExpression.evaluate(lookup, wdlFunctions) flatMap output.womType.coerceRawValue - val workflowOutput = output -> coerced + val evaluatedOutputs = workflow.outputs.foldLeft(Map.empty[WorkflowOutput, Try[WomValue]]) { + (outputMap, output) => + val currentOutputs = outputMap collect { + case (outputName, outputValue) if outputValue.isSuccess => outputName.fullyQualifiedName -> outputValue.get + } + def knownValues = currentOutputs ++ knownInputs + val lookup = workflow.lookupFunction(knownValues, wdlFunctions, outputResolver, shards, output) + val coerced = output.requiredExpression.evaluate(lookup, wdlFunctions) flatMap output.womType.coerceRawValue + val workflowOutput = output -> coerced - outputMap + workflowOutput - }) + outputMap + workflowOutput + } TryUtil.sequenceMap(evaluatedOutputs, "Failed to evaluate workflow outputs.\n") } @@ -160,36 +192,52 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers def verifyOutputsForNamespace(ns: WdlNamespaceWithWorkflow, declarationExpectations: Seq[WorkflowOutputExpectation], evaluationExpectations: Map[String, WomValue], - outputResolver: OutputResolver) = { + outputResolver: OutputResolver + ) = { val outputs = ns.workflow.outputs outputs should contain theSameElementsAs declarationExpectations val evaluatedOutputs = evaluateOutputs(ns.workflow, workflowInputs, NoFunctions, outputResolver) evaluatedOutputs match { - case Success(v) => v.map { case (output, outputValue) => output.unqualifiedName -> outputValue }.toList should contain theSameElementsAs evaluationExpectations + case Success(v) => + v.map { case (output, outputValue) => + output.unqualifiedName -> outputValue + }.toList should contain theSameElementsAs evaluationExpectations case Failure(e) => fail(e) } } - - def verifyOutputs(outputString: String, declarationExpectations: Seq[WorkflowOutputExpectation], evaluationExpectations: Map[String, WomValue]) = { - val ns = WdlNamespaceWithWorkflow.load( - wdl.replace("<>", outputString), Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri)))).get - verifyOutputsForNamespace(ns, declarationExpectations, evaluationExpectations, outputResolverForWorkflow(ns.workflow)) + + def verifyOutputs(outputString: String, + declarationExpectations: Seq[WorkflowOutputExpectation], + evaluationExpectations: Map[String, WomValue] + ) = { + val ns = WdlNamespaceWithWorkflow + 
.load(wdl.replace("<>", outputString), + Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri))) + ) + .get + verifyOutputsForNamespace(ns, + declarationExpectations, + evaluationExpectations, + outputResolverForWorkflow(ns.workflow) + ) } - case class WorkflowOutputTestCase(description: String, output: String, + case class WorkflowOutputTestCase(description: String, + output: String, declarationExpectation: Seq[WorkflowOutputExpectation], - evaluatedOutputExpectation: Map[String, WomValue]) - + evaluatedOutputExpectation: Map[String, WomValue] + ) + Seq( /* WILDCARD OUTPUTS */ /* main_task.* # task wildcard main_task2.* # aliased task wildcard - + sub_task.* # sub task wildcard sub_task2.* # aliased sub task wildcard - + sub_workflow.* # sub workflow wildcard sub_workflow2.* # aliased sub workflow wildcard */ @@ -229,16 +277,16 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers Seq(WorkflowOutputExpectation("sub_task2.sub_task_o1", WomStringType, "sub_task2.sub_task_o1")), Map("sub_task2.sub_task_o1" -> WomString("SubTask2OutputString")) ), - + /* DIRECT OUTPUT REFERENCES */ /* main_task.task_o1 # task output main_task2.task_o2 # aliased task output main_task_in_scatter.task_o1 # task output in scatter - + sub_task.sub_task_o1 # sub task output sub_task2.sub_task_o1 # aliased sub task output - + sub_workflow.sub_o1 # sub workflow output sub_workflow2.sub_o1 # aliased sub workflow output */ @@ -257,15 +305,24 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers WorkflowOutputTestCase( "task output in scatter", "main_task_in_scatter.task_o1", - Seq(WorkflowOutputExpectation("main_task_in_scatter.task_o1", WomArrayType(WomStringType), "main_task_in_scatter.task_o1")), - Map("main_task_in_scatter.task_o1" -> WomArray(WomArrayType(WomStringType), Seq(WomString("MainTaskOutputString")))) + Seq( + WorkflowOutputExpectation("main_task_in_scatter.task_o1", + WomArrayType(WomStringType), + "main_task_in_scatter.task_o1" + ) + ), + Map( + "main_task_in_scatter.task_o1" -> WomArray(WomArrayType(WomStringType), + Seq(WomString("MainTaskOutputString")) + ) + ) ), WorkflowOutputTestCase( "sub task output", "sub_task.sub_task_o1", Seq(WorkflowOutputExpectation("sub_task.sub_task_o1", WomStringType, "sub_task.sub_task_o1")), Map("sub_task.sub_task_o1" -> WomString("SubTaskOutputString")) - ), + ), WorkflowOutputTestCase( "aliased sub task output", "sub_task2.sub_task_o1", @@ -289,21 +346,21 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers /* String o1 = main_task.task_o1 # task output Array[Int] o2 = main_task2.task_o2 # aliased task output - + String o3 = sub_task.sub_task_o1 # sub task output String o4 = sub_task2.sub_task_o1 # aliased sub task output - + String o5 = sub_workflow.sub_o1 # sub workflow output String o6 = sub_workflow2.sub_o1 # aliased sub workflow output - + String o7 = o1 # reference to output - String o8 = workflow_input # reference to empty input declaration + String o8 = workflow_input # reference to empty input declaration String o9 = workflow_input2 # reference to provided input declaration File o10 = workflow_input2 # coercion 1 Array[Int] o11 = main_task2.task_o2 # complex type Map[Int, String] o12 = {1: "1"} # inline declaration with complex type String o13 = o1 + " " + o3 # simple expression - + Array[String] o14 = main_task_in_scatter.task_o1 # task in scatter String? 
o15 = optionalValue # optional value */ @@ -386,7 +443,7 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers "inline declaration with complex type", "Map[Int, String] o12 = {1: \"1\"}", Seq(WorkflowOutputExpectation("o12", WomMapType(WomIntegerType, WomStringType), "{1: \"1\"}")), - Map("o12" -> WomMap(WomMapType(WomIntegerType,WomStringType), Map(WomInteger(1) -> WomString("1")))) + Map("o12" -> WomMap(WomMapType(WomIntegerType, WomStringType), Map(WomInteger(1) -> WomString("1")))) ), WorkflowOutputTestCase( "simple expression", @@ -406,7 +463,7 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers Seq(WorkflowOutputExpectation("o15", WomOptionalType(WomStringType), "optionalValue")), Map("o15" -> WomOptionalValue(WomString("optional"))) ), - + /* LEGACY SYNTAX FOLLOWED BY NEW SYNTAX */ WorkflowOutputTestCase( "support legacy syntax followed by new syntax", @@ -437,7 +494,11 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers """.stripMargin a[SyntaxError] should be thrownBy { - WdlNamespaceWithWorkflow.load(wdl.replace("<>", output), Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri)))).get + WdlNamespaceWithWorkflow + .load(wdl.replace("<>", output), + Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri))) + ) + .get } } @@ -460,7 +521,7 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers WorkflowOutputExpectation("t.o1", WomStringType, "t.o1"), WorkflowOutputExpectation("t.o2", WomStringType, "t.o2") ) - + val expectedEvaluatedOutputs = Map( "t.o1" -> WomString("o1"), "t.o2" -> WomString("o2") @@ -468,21 +529,23 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers val ns = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get - def outputResolver(call: WdlGraphNode, index: Option[Int])= { + def outputResolver(call: WdlGraphNode, index: Option[Int]) = call match { case c: WdlCall if c == ns.workflow.findCallByName("t").get => - Success(WdlCallOutputsObject(c, Map( - "o1" -> WomString("o1"), - "o2" -> WomString("o2") + Success( + WdlCallOutputsObject(c, + Map( + "o1" -> WomString("o1"), + "o2" -> WomString("o2") + ) ) - )) + ) case _ => Failure(new Exception()) } - } - + verifyOutputsForNamespace(ns, expectedDeclarations, expectedEvaluatedOutputs, outputResolver) } - + "Throw a clear error when trying to use outputs declared with the old syntax in a parent workflow" in { val subWorkflow = """ @@ -511,12 +574,16 @@ class WdlWorkflowSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers | call sub.sub_workflow |} """.stripMargin - + val exception = the[SyntaxError] thrownBy - WdlNamespaceWithWorkflow.load(parentWorkflow, Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri)))).get + WdlNamespaceWithWorkflow + .load(parentWorkflow, + Seq((uri: String) => Draft2ResolvedImportBundle(subWorkflow, ResolvedImportRecord(uri))) + ) + .get exception.getMessage shouldBe s"""Workflow sub_workflow is used as a sub workflow but has outputs declared with a deprecated syntax not compatible with sub workflows. - |To use this workflow as a sub workflow please update the workflow outputs section to the latest WDL specification. - |See https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#outputs""".stripMargin + |To use this workflow as a sub workflow please update the workflow outputs section to the latest WDL specification. 
+ |See https://github.com/broadinstitute/wdl/blob/develop/SPEC.md#outputs""".stripMargin } } } diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/DNAxTypeEvalTest.scala b/wdl/model/draft2/src/test/scala/wdl/expression/DNAxTypeEvalTest.scala index 42e1740ed17..46f776e0e18 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/DNAxTypeEvalTest.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/DNAxTypeEvalTest.scala @@ -43,31 +43,26 @@ class DNAxTypeEvalTest extends AnyFlatSpec with CromwellTimeoutSpec with Matcher |} |""".stripMargin - // Figure out the type of an expression - def evalType(expr: WdlExpression, parent: Scope) : Try[WomType] = { - expr.evaluateType(WdlNamespace.lookupType(parent), - new WdlStandardLibraryFunctionsType, - Some(parent)) - } + def evalType(expr: WdlExpression, parent: Scope): Try[WomType] = + expr.evaluateType(WdlNamespace.lookupType(parent), new WdlStandardLibraryFunctionsType, Some(parent)) it should "correctly evaluate expression types" in { val ns = WdlNamespaceWithWorkflow.load(wdlCode, Seq.empty).get val wf = ns.workflow - val call:WdlCall = wf.findCallByName("Add") match { + val call: WdlCall = wf.findCallByName("Add") match { case None => throw new Exception(s"Call Add not found in WDL file") case Some(call) => call } - val ssc:Scatter = wf.scatters.head + val ssc: Scatter = wf.scatters.head call.inputMappings.foreach { case (_, expr) => - val t:Try[WomType] = evalType(expr, ssc) + val t: Try[WomType] = evalType(expr, ssc) t should equal(Success(WomIntegerType)) } } - val wdlCode2 = """|task Copy { | File src @@ -100,7 +95,7 @@ class DNAxTypeEvalTest extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val ns = WdlNamespaceWithWorkflow.load(wdlCode2, Seq.empty).get val wf = ns.workflow - val copy2call:WdlCall = wf.findCallByName("Copy2") match { + val copy2call: WdlCall = wf.findCallByName("Copy2") match { case None => throw new Exception(s"Call Add not found in WDL file") case Some(call) => call } @@ -114,7 +109,6 @@ class DNAxTypeEvalTest extends AnyFlatSpec with CromwellTimeoutSpec with Matcher t2 should equal(Success(WomStringType)) } - val wdlCode3 = """| |task Inc { @@ -160,7 +154,7 @@ class DNAxTypeEvalTest extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val ns = WdlNamespaceWithWorkflow.load(wdlCode3, Seq.empty).get val wf = ns.workflow - val incCall:WdlCall = wf.findCallByName("Inc") match { + val incCall: WdlCall = wf.findCallByName("Inc") match { case None => throw new Exception(s"Call Add not found in WDL file") case Some(call) => call } diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala index 075d8910003..e6aac8c0f0d 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala @@ -20,74 +20,102 @@ class Draft2SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "correctly report a 2048 byte file, in bytes by default" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomSingleFile("blah"))))) { res => assert(res == WomFloat(2048d)) } + validate(readLike.size(Seq(Success(WomSingleFile("blah")))))(res => assert(res == WomFloat(2048d))) } it should "correctly report a 2048 byte file, in bytes" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomSingleFile("blah")), 
Success(WomString("B"))))) { res => assert(res == WomFloat(2048d)) } + validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("B"))))) { res => + assert(res == WomFloat(2048d)) + } } it should "correctly report a 2048 byte file, in KB" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("KB"))))) { res => assert(res == WomFloat(2d)) } + validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("KB"))))) { res => + assert(res == WomFloat(2d)) + } } it should "correctly report a 2048 byte file, in KiB" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("Ki"))))) { res => assert(res == WomFloat(2d)) } + validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("Ki"))))) { res => + assert(res == WomFloat(2d)) + } } it should "correctly report the size of a supplied, optional, 2048 byte file" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))))))) { res => assert(res == WomFloat(2048d)) } + validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))))))) { res => + assert(res == WomFloat(2048d)) + } } it should "correctly report the size of a supplied, optional optional, 2048 byte file" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomOptionalValue( - WomOptionalType(WomSingleFileType), - Option(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah")))) - ))))) { res => assert(res == WomFloat(2048d)) } + validate( + readLike.size( + Seq( + Success( + WomOptionalValue( + WomOptionalType(WomSingleFileType), + Option(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah")))) + ) + ) + ) + ) + )(res => assert(res == WomFloat(2048d))) } it should "correctly report the size of a supplied, optional, 2048 byte file, in MB" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomOptionalValue( - WomSingleFileType, Option(WomSingleFile("blah")))), Success(WomString("MB") - )))) { res => assert(res == WomFloat(0.001953125d)) } + validate( + readLike.size( + Seq(Success(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah")))), Success(WomString("MB"))) + ) + )(res => assert(res == WomFloat(0.001953125d))) } it should "correctly report that an unsupplied optional file is empty" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, None))))) { res => assert(res == WomFloat(0d)) } + validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, None))))) { res => + assert(res == WomFloat(0d)) + } } it should "correctly report that an unsupplied File?? 
is empty" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomOptionalValue(WomOptionalType(WomSingleFileType), None))))) { res => assert(res == WomFloat(0d)) } + validate(readLike.size(Seq(Success(WomOptionalValue(WomOptionalType(WomSingleFileType), None))))) { res => + assert(res == WomFloat(0d)) + } } it should "correctly report that an unsupplied optional file is empty, even in MB" in { val readLike = testFunctions(Success(2048L)) - validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, None)), Success(WomString("MB"))))) { res => assert(res == WomFloat(0d)) } + validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, None)), Success(WomString("MB"))))) { res => + assert(res == WomFloat(0d)) + } } it should "refuse to report file sizes for Ints" in { - val readLike = testFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size"))) + val readLike = + testFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size"))) val oops = readLike.size(Seq(Success(WomInteger(7)))) oops match { case Success(x) => fail(s"Expected a string to not have a file length but instead got $x") - case Failure(e) => e.getMessage should be("The 'size' method expects a 'File' or 'File?' argument but instead got Int.") + case Failure(e) => + e.getMessage should be("The 'size' method expects a 'File' or 'File?' argument but instead got Int.") } } it should "refuse to report file sizes for Int?s" in { - val readLike = testFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size"))) + val readLike = + testFunctions(Failure(new Exception("Bad result: WdlIntegers shouldn't even be tried for getting file size"))) val oops = readLike.size(Seq(Success(WomOptionalValue(WomIntegerType, None)))) oops match { case Success(x) => fail(s"Expected a string to not have a file length but instead got $x") - case Failure(e) => e.getMessage should be("The 'size' method expects a 'File' or 'File?' argument but instead got Int?.") + case Failure(e) => + e.getMessage should be("The 'size' method expects a 'File' or 'File?' 
argument but instead got Int?.") } } @@ -95,7 +123,8 @@ class Draft2SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M val readLike = testFunctions(Failure(new Exception("'size' inner exception, expect me to be passed on"))) val oops = readLike.size(Seq(Success(WomSingleFile("blah")))) oops match { - case Success(_) => fail(s"The 'size' engine function didn't return the error generated in the inner 'size' method") + case Success(_) => + fail(s"The 'size' engine function didn't return the error generated in the inner 'size' method") case Failure(e) => e.getMessage should be("'size' inner exception, expect me to be passed on") } } @@ -107,7 +136,10 @@ class Draft2SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M } object Draft2SizeFunctionSpec { - def testFunctions(sizeResult: Try[Long]): WdlStandardLibraryFunctions = WdlStandardLibraryFunctions.fromIoFunctionSet( new EmptyIoFunctionSet { - override def size(path: String): Future[Long] = Future.fromTry(sizeResult) - }, FileSizeLimitationConfig.default) + def testFunctions(sizeResult: Try[Long]): WdlStandardLibraryFunctions = WdlStandardLibraryFunctions.fromIoFunctionSet( + new EmptyIoFunctionSet { + override def size(path: String): Future[Long] = Future.fromTry(sizeResult) + }, + FileSizeLimitationConfig.default + ) } diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/FileEvaluatorSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/FileEvaluatorSpec.scala index a36d1112839..4614dfcad6a 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/FileEvaluatorSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/FileEvaluatorSpec.scala @@ -20,39 +20,61 @@ class FileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ("read_int(stdout() + 3)", Seq.empty[WomSingleFile], WomIntegerType), ("""read_int("/etc/" + read_int("somefile") + ".txt"))""", Seq(WomSingleFile("somefile")), WomIntegerType), ("""-read_int("/etc/file1")""", Seq(WomSingleFile("/etc/file1")), WomIntegerType), - ("""read_int("/etc/file1") + read_string("/bin/file2")""", Seq(WomSingleFile("/etc/file1"), WomSingleFile("/bin/file2")), WomStringType), - ("""read_int("/etc/file1") + read_string("/bin/file2") + read_string("/bin/file3") + read_string("/bin/file4") + read_string("/bin/file5")""", Seq( - WomSingleFile("/etc/file1"), - WomSingleFile("/bin/file2"), - WomSingleFile("/bin/file3"), - WomSingleFile("/bin/file4"), - WomSingleFile("/bin/file5") - ), WomStringType), + ("""read_int("/etc/file1") + read_string("/bin/file2")""", + Seq(WomSingleFile("/etc/file1"), WomSingleFile("/bin/file2")), + WomStringType + ), + ("""read_int("/etc/file1") + read_string("/bin/file2") + read_string("/bin/file3") + read_string("/bin/file4") + read_string("/bin/file5")""", + Seq( + WomSingleFile("/etc/file1"), + WomSingleFile("/bin/file2"), + WomSingleFile("/bin/file3"), + WomSingleFile("/bin/file4"), + WomSingleFile("/bin/file5") + ), + WomStringType + ), (""" "foo" + "bar" """, Seq(WomSingleFile("foobar")), WomSingleFileType), (""" "foo" + "bar" """, Seq.empty[WomSingleFile], WomStringType), (""" ["a", "b", "c"] """, - Seq(WomSingleFile("a"), WomSingleFile("b"), WomSingleFile("c")), WomArrayType(WomSingleFileType)), + Seq(WomSingleFile("a"), WomSingleFile("b"), WomSingleFile("c")), + WomArrayType(WomSingleFileType) + ), (""" ["a", "b", "c"] """, Seq.empty[WomSingleFile], WomArrayType(WomStringType)), - (""" {"a": "1", "b": "2", "c":"3"} """, Seq( - WomSingleFile("a"), - WomSingleFile("1"), - WomSingleFile("b"), - 
WomSingleFile("2"), - WomSingleFile("c"), - WomSingleFile("3") - ), WomMapType(WomSingleFileType, WomSingleFileType)), - (""" [read_string("x"), read_string("y")] """, Seq(WomSingleFile("x"), WomSingleFile("y")), WomArrayType(WomStringType)), + (""" {"a": "1", "b": "2", "c":"3"} """, + Seq( + WomSingleFile("a"), + WomSingleFile("1"), + WomSingleFile("b"), + WomSingleFile("2"), + WomSingleFile("c"), + WomSingleFile("3") + ), + WomMapType(WomSingleFileType, WomSingleFileType) + ), + (""" [read_string("x"), read_string("y")] """, + Seq(WomSingleFile("x"), WomSingleFile("y")), + WomArrayType(WomStringType) + ), (""" [fileNameAsStringInput, "${fileNameAsStringInput}.bai"] """, - Seq(WomSingleFile("sommat.bam"), WomSingleFile("sommat.bam.bai")), WomArrayType(WomSingleFileType)), + Seq(WomSingleFile("sommat.bam"), WomSingleFile("sommat.bam.bai")), + WomArrayType(WomSingleFileType) + ), (""" [ fileNameAsStringInput, mapToFileName["Chris"] ] """, - Seq(WomSingleFile("sommat.bam"), WomSingleFile("sommatStupid.bam")), WomArrayType(WomSingleFileType)), + Seq(WomSingleFile("sommat.bam"), WomSingleFile("sommatStupid.bam")), + WomArrayType(WomSingleFileType) + ), (""" {read_int("a"): read_string("x"), 4: read_string("y")} """, - Seq(WomSingleFile("a"), WomSingleFile("x"), WomSingleFile("y")), WomMapType(WomIntegerType, WomStringType)), + Seq(WomSingleFile("a"), WomSingleFile("x"), WomSingleFile("y")), + WomMapType(WomIntegerType, WomStringType) + ), (""" glob("out-*.txt") """, Seq(WomGlobFile("out-*.txt")), WomSingleFileType), (""" glob("out-*.txt")[0] """, Seq(WomGlobFile("out-*.txt")), WomSingleFileType), (""" read_tsv("my_file") """, Seq(WomSingleFile("my_file")), WomSingleFileType), (""" if read_int("i.txt") == 10 then "a.txt" else "b.txt" """, - Seq(WomSingleFile("a.txt"), WomSingleFile("b.txt"), WomSingleFile("i.txt")), WomSingleFileType), + Seq(WomSingleFile("a.txt"), WomSingleFile("b.txt"), WomSingleFile("i.txt")), + WomSingleFileType + ), (""" if "a" == "b" then "a.txt" else "b.txt" """, Seq(WomSingleFile("b.txt")), WomSingleFileType), (""" if b then read_string("t") else "nope" """, Seq(WomSingleFile("t")), WomStringType), (""" read_string(basename(fileInput, ".txt") + ".bam") """, Seq(WomSingleFile("input.bam")), WomStringType), @@ -60,7 +82,6 @@ class FileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche (""" round(size("foo.txt")) """, Seq(WomSingleFile("foo.txt")), WomIntegerType), (""" size("foo.txt", "GB") """, Seq(WomSingleFile("foo.txt")), WomIntegerType), (""" round(size("foo.txt", "GB")) """, Seq(WomSingleFile("foo.txt")), WomIntegerType) - ) val lookupFunction = Map( @@ -70,9 +91,13 @@ class FileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche "mapToFileName" -> WomMap(Map(WomString("Chris") -> WomString("sommatStupid.bam"))) ) - forAll (expressions) { (expression, files, anticipatedType) => + forAll(expressions) { (expression, files, anticipatedType) => it should s"evaluate $expression (coerced to: $anticipatedType) => $files" in { - WdlExpression.fromString(expression).evaluateFiles(lookupFunction, PureStandardLibraryFunctions, anticipatedType).get.toSet should be(files.toSet) + WdlExpression + .fromString(expression) + .evaluateFiles(lookupFunction, PureStandardLibraryFunctions, anticipatedType) + .get + .toSet should be(files.toSet) } } } diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/PureStandardLibraryFunctionsSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/PureStandardLibraryFunctionsSpec.scala index 
8d0406aa203..a7208b80341 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/PureStandardLibraryFunctionsSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/PureStandardLibraryFunctionsSpec.scala @@ -14,16 +14,22 @@ class PureStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutS behavior of "transpose" it should "transpose a 2x3 into a 3x2" in { - val inArray = WomArray(WomArrayType(WomArrayType(WomIntegerType)), List( - WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2), WomInteger(3))), - WomArray(WomArrayType(WomIntegerType), List(WomInteger(4), WomInteger(5), WomInteger(6))) - )) - - val expectedResult = WomArray(WomArrayType(WomArrayType(WomIntegerType)), List( - WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(4))), - WomArray(WomArrayType(WomIntegerType), List(WomInteger(2), WomInteger(5))), - WomArray(WomArrayType(WomIntegerType), List(WomInteger(3), WomInteger(6))) - )) + val inArray = WomArray( + WomArrayType(WomArrayType(WomIntegerType)), + List( + WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2), WomInteger(3))), + WomArray(WomArrayType(WomIntegerType), List(WomInteger(4), WomInteger(5), WomInteger(6))) + ) + ) + + val expectedResult = WomArray( + WomArrayType(WomArrayType(WomIntegerType)), + List( + WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(4))), + WomArray(WomArrayType(WomIntegerType), List(WomInteger(2), WomInteger(5))), + WomArray(WomArrayType(WomIntegerType), List(WomInteger(3), WomInteger(6))) + ) + ) PureStandardLibraryFunctions.transpose(Seq(Success(inArray))) should be(Success(expectedResult)) } @@ -47,9 +53,10 @@ class PureStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutS val ar3 = WomArray(WomArrayType(WomIntegerType), List.empty) val ar4 = WomArray(WomArrayType(WomIntegerType), List(WomInteger(6))) val aar = WomArray(WomArrayType(WomArrayType(WomIntegerType)), List(ar1, ar2, ar3, ar4)) - val flat_ar = WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2), - WomInteger(3), WomInteger(4), - WomInteger(5), WomInteger(6))) + val flat_ar = + WomArray(WomArrayType(WomIntegerType), + List(WomInteger(1), WomInteger(2), WomInteger(3), WomInteger(4), WomInteger(5), WomInteger(6)) + ) PureStandardLibraryFunctions.flatten(Seq(Success(aar))) should be(Success(flat_ar)) } @@ -57,9 +64,10 @@ class PureStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutS val sar1 = WomArray(WomArrayType(WomStringType), List(WomString("chatting"), WomString("is"))) val sar2 = WomArray(WomArrayType(WomStringType), List(WomString("great"), WomString("for"), WomString("you"))) val saar = WomArray(WomArrayType(WomArrayType(WomStringType)), List(sar1, sar2)) - val flat_sar = WomArray(WomArrayType(WomStringType), List(WomString("chatting"), WomString("is"), - WomString("great"), WomString("for"), - WomString("you"))) + val flat_sar = + WomArray(WomArrayType(WomStringType), + List(WomString("chatting"), WomString("is"), WomString("great"), WomString("for"), WomString("you")) + ) PureStandardLibraryFunctions.flatten(Seq(Success(saar))) should be(Success(flat_sar)) } @@ -72,10 +80,8 @@ class PureStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutS } it should "return errors for arguments which are not arrays" in { - val nonArrays: List[WomValue] = List(WomInteger(17), - WomString("banana"), - WomSingleFile("/tmp/bubbles")) - nonArrays.foreach{ elem => + val nonArrays: List[WomValue] = 
List(WomInteger(17), WomString("banana"), WomSingleFile("/tmp/bubbles")) + nonArrays.foreach { elem => PureStandardLibraryFunctions.flatten(Seq(Success(elem))) should be(a[Failure[_]]) } } @@ -86,16 +92,23 @@ class PureStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutS val strings = List("foo", "bar", "baz") val stringWdlValues = WomArray(WomArrayType(WomStringType), strings map WomString.apply) - val stringsExpectation = WomArray(WomArrayType(WomStringType), strings map { f => WomString("-f " + f) } ) - PureStandardLibraryFunctions.prefix(Seq(Success(WomString("-f ")), Success(stringWdlValues))) should be(Success(stringsExpectation)) + val stringsExpectation = WomArray(WomArrayType(WomStringType), strings map { f => WomString("-f " + f) }) + PureStandardLibraryFunctions.prefix(Seq(Success(WomString("-f ")), Success(stringWdlValues))) should be( + Success(stringsExpectation) + ) val noStringWdlValues = WomArray(WomArrayType(WomStringType), List.empty) - PureStandardLibraryFunctions.prefix(Seq(Success(WomString("-f ")), Success(noStringWdlValues))) should be(Success(WomArray(WomArrayType(WomStringType), Seq.empty))) + PureStandardLibraryFunctions.prefix(Seq(Success(WomString("-f ")), Success(noStringWdlValues))) should be( + Success(WomArray(WomArrayType(WomStringType), Seq.empty)) + ) val integers = List(1, 2, 3) - val integerWdlValues = WomArray(WomArrayType(WomIntegerType), integers map { i => WomInteger.apply(Integer.valueOf(i)) }) - val integersExpectation = WomArray(WomArrayType(WomStringType), integers map { i => WomString("-f " + i)}) - PureStandardLibraryFunctions.prefix(Seq(Success(WomString("-f ")), Success(integerWdlValues))) should be(Success(integersExpectation)) + val integerWdlValues = + WomArray(WomArrayType(WomIntegerType), integers map { i => WomInteger.apply(Integer.valueOf(i)) }) + val integersExpectation = WomArray(WomArrayType(WomStringType), integers map { i => WomString("-f " + i) }) + PureStandardLibraryFunctions.prefix(Seq(Success(WomString("-f ")), Success(integerWdlValues))) should be( + Success(integersExpectation) + ) } behavior of "basename" @@ -106,11 +119,15 @@ class PureStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutS ("gs://bucket/charlie.bucket", "charlie.bucket", ".wonka", "charlie.bucket") ) foreach { case (full, baseWithExtension, suffixToStrip, suffixStripped) => it should s"get the file name for $full" in { - PureStandardLibraryFunctions.basename(Seq(Success(WomString(full)))) should be(Success(WomString(baseWithExtension))) + PureStandardLibraryFunctions.basename(Seq(Success(WomString(full)))) should be( + Success(WomString(baseWithExtension)) + ) } it should s"get the file name for $full and strip the suffix '$suffixToStrip'" in { - PureStandardLibraryFunctions.basename(Seq(Success(WomString(full)), Success(WomString(suffixToStrip)))) should be(Success(WomString(suffixStripped))) + PureStandardLibraryFunctions.basename(Seq(Success(WomString(full)), Success(WomString(suffixToStrip)))) should be( + Success(WomString(suffixStripped)) + ) } } } diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/TypeEvaluatorSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/TypeEvaluatorSpec.scala index 97912908565..e5f22226fd0 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/TypeEvaluatorSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/TypeEvaluatorSpec.scala @@ -19,20 +19,23 @@ class TypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche def noLookup(String: 
String): WomType = fail("No identifiers should be looked up in this test") - def identifierLookup(String: String): WomType = { + def identifierLookup(String: String): WomType = String match { case "cgrep" => WdlCallOutputsObjectType(namespace.workflow.calls.find(_.unqualifiedName == "cgrep").get) case "ps" => WdlCallOutputsObjectType(namespace.workflow.calls.find(_.unqualifiedName == "ps").get) } - } - def identifierEval(exprStr: String): WomPrimitiveType = expr(exprStr).evaluateType(identifierLookup, new WdlStandardLibraryFunctionsType).asInstanceOf[Try[WomPrimitiveType]].get - def identifierEvalError(exprStr: String): Unit = { - expr(exprStr).evaluateType(identifierLookup, new WdlStandardLibraryFunctionsType).asInstanceOf[Try[WomPrimitive]] match { + def identifierEval(exprStr: String): WomPrimitiveType = expr(exprStr) + .evaluateType(identifierLookup, new WdlStandardLibraryFunctionsType) + .asInstanceOf[Try[WomPrimitiveType]] + .get + def identifierEvalError(exprStr: String): Unit = + expr(exprStr) + .evaluateType(identifierLookup, new WdlStandardLibraryFunctionsType) + .asInstanceOf[Try[WomPrimitive]] match { case Failure(_) => // Expected case Success(badValue) => fail(s"Operation was supposed to fail, instead I got value: $badValue") } - } private def operate(lhs: WomType, op: String, rhs: WomType): Try[WomType] = op match { case "+" => lhs.add(rhs) @@ -384,13 +387,13 @@ class TypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche (WomBooleanType, "&&", WomSingleFileType) ) - forAll (validOperations) { (lhs, op, rhs, expectedType) => + forAll(validOperations) { (lhs, op, rhs, expectedType) => it should s"validate the output type for the expression: $lhs $op $rhs = $expectedType" in { operate(lhs, op, rhs) shouldEqual Success(expectedType) } } - forAll (invalidOperations) { (lhs, op, rhs) => + forAll(invalidOperations) { (lhs, op, rhs) => it should s"not allow the expression: $lhs $op $rhs" in { operate(lhs, op, rhs) should be(a[Failure[_]]) } diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala index 42004e1e8a5..0c5a3c93227 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala @@ -26,11 +26,14 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match case "b" => WomInteger(2) case "s" => WomString("s") case "array_str" => WomArray(WomArrayType(WomStringType), Seq("foo", "bar", "baz").map(WomString)) - case "map_str_int" => WomMap(WomMapType(WomStringType, WomIntegerType), Map( - WomString("a") -> WomInteger(0), - WomString("b") -> WomInteger(1), - WomString("c") -> WomInteger(2) - )) + case "map_str_int" => + WomMap(WomMapType(WomStringType, WomIntegerType), + Map( + WomString("a") -> WomInteger(0), + WomString("b") -> WomInteger(1), + WomString("c") -> WomInteger(2) + ) + ) case "o" => WomObject(Map("key1" -> WomString("value1"), "key2" -> WomInteger(9))) case "myPair" => WomPair(WomInteger(3), WomString("hello")) case "etc_f" => WomSingleFile("/etc") @@ -38,14 +41,14 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match case "etc_s" => WomString("/etc") case "sudoers_f" => WomSingleFile("/sudoers") case "sudoers_s" => WomString("/sudoers") - case "f" => WomFloat(0.5F) + case "f" => WomFloat(0.5f) case "t" => WomBoolean(true) case "someIntAsString" => WomOptionalValue(WomString("1")) case 
"someFloatAsString" => WomOptionalValue(WomString("0.5")) case "someStr" => WomOptionalValue(WomString("someStr")) case "someInt" => WomOptionalValue(WomInteger(1)) case "someBoolean" => WomOptionalValue(WomBoolean(false)) - case "someFloat" => WomOptionalValue(WomFloat(0.5F)) + case "someFloat" => WomOptionalValue(WomFloat(0.5f)) case "someFile" => WomOptionalValue(WomSingleFile("file")) case "noneStr" => WomOptionalValue.none(WomStringType) case "noneInt" => WomOptionalValue.none(WomIntegerType) @@ -54,7 +57,6 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match case "noneFile" => WomOptionalValue.none(WomSingleFileType) } - def identifierTypeLookup(name: String): WomType = identifierLookup(name).womType class TestValueFunctions extends WdlStandardLibraryFunctions { @@ -62,7 +64,9 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match override def readFile(path: String, sizeLimit: Int): String = throw new UnsupportedOperationException() - override def writeFile(path: String, content: String): Try[WomSingleFile] = Failure(new UnsupportedOperationException()) + override def writeFile(path: String, content: String): Try[WomSingleFile] = Failure( + new UnsupportedOperationException() + ) override def stdout(params: Seq[Try[WomValue]]): Try[WomSingleFile] = Failure(new UnsupportedOperationException()) @@ -70,9 +74,13 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match override def read_json(params: Seq[Try[WomValue]]): Try[WomValue] = Failure(new UnsupportedOperationException()) - override def write_tsv(params: Seq[Try[WomValue]]): Try[WomSingleFile] = Failure(new UnsupportedOperationException()) + override def write_tsv(params: Seq[Try[WomValue]]): Try[WomSingleFile] = Failure( + new UnsupportedOperationException() + ) - override def write_json(params: Seq[Try[WomValue]]): Try[WomSingleFile] = Failure(new UnsupportedOperationException()) + override def write_json(params: Seq[Try[WomValue]]): Try[WomSingleFile] = Failure( + new UnsupportedOperationException() + ) override def size(params: Seq[Try[WomValue]]): Try[WomFloat] = Failure(new UnsupportedOperationException()) @@ -101,23 +109,23 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match def constEval(exprStr: String): WomValue = expr(exprStr).evaluate(noLookup, new TestValueFunctions()).get - def constEvalType(exprStr: String): WomType = expr(exprStr).evaluateType(identifierTypeLookup, new TestTypeFunctions).get + def constEvalType(exprStr: String): WomType = + expr(exprStr).evaluateType(identifierTypeLookup, new TestTypeFunctions).get - def constEvalError(exprStr: String): Throwable = { + def constEvalError(exprStr: String): Throwable = expr(exprStr).evaluate(noLookup, new TestValueFunctions()).asInstanceOf[Try[WomPrimitive]] match { case Failure(ex) => ex case Success(v) => fail(s"Operation was supposed to fail, instead I got value: $v") } - } - def identifierEval(exprStr: String): WomPrimitive = expr(exprStr).evaluate(identifierLookup, new TestValueFunctions()).asInstanceOf[Try[WomPrimitive]].get + def identifierEval(exprStr: String): WomPrimitive = + expr(exprStr).evaluate(identifierLookup, new TestValueFunctions()).asInstanceOf[Try[WomPrimitive]].get - def identifierEvalError(exprStr: String): Unit = { + def identifierEvalError(exprStr: String): Unit = expr(exprStr).evaluate(identifierLookup, new TestValueFunctions()).asInstanceOf[Try[WomPrimitive]] match { case Failure(_) => // Expected case Success(v) => 
fail(s"Operation was supposed to fail, instead I got value: $v") } - } val constantExpressions = Table( ("expression", "value"), @@ -227,128 +235,137 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("(1)", WomInteger(1)), // Array in pair: - ("(\"hello\", [ 1, 2, 3 ])", WomPair(WomString("hello"), WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(2), WomInteger(3))))), + ("(\"hello\", [ 1, 2, 3 ])", + WomPair(WomString("hello"), + WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(2), WomInteger(3))) + ) + ), // Map to pairs: ("""{ - | 1: (1, 2), - | 2: (2, 3) - |} - """.stripMargin, WomMap(WomMapType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), Map( - WomInteger(1) -> WomPair(WomInteger(1), WomInteger(2)), - WomInteger(2) -> WomPair(WomInteger(2), WomInteger(3)) - ))) + | 1: (1, 2), + | 2: (2, 3) + |} + """.stripMargin, + WomMap( + WomMapType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), + Map( + WomInteger(1) -> WomPair(WomInteger(1), WomInteger(2)), + WomInteger(2) -> WomPair(WomInteger(2), WomInteger(3)) + ) + ) + ) ) val badExpressions = Table( - ("expression"), + "expression", // Integers - ("1+true"), - ("1-true"), - (""" 1-"s" """), - ("1*true"), - (""" 1*"s" """), - ("1 / 0"), - ("1 / 0.0"), - ("25/0.0"), - ("1/true"), - (""" 1/"s" """), - ("1%false"), - (""" 1%"s" """), - ("1%0"), - (" 24 == false "), - (""" 1 == "s" """), - (" 24 != false "), - (""" 1 != "s" """), - (" 24 < false "), - (""" 1 < "s" """), - (" 24 <= false "), - (""" 1 <= "s" """), - ("4 > false"), - (""" 1 > "s" """), - ("4 >= false"), - (""" 1 >= "s" """), + "1+true", + "1-true", + """ 1-"s" """, + "1*true", + """ 1*"s" """, + "1 / 0", + "1 / 0.0", + "25/0.0", + "1/true", + """ 1/"s" """, + "1%false", + """ 1%"s" """, + "1%0", + " 24 == false ", + """ 1 == "s" """, + " 24 != false ", + """ 1 != "s" """, + " 24 < false ", + """ 1 < "s" """, + " 24 <= false ", + """ 1 <= "s" """, + "4 > false", + """ 1 > "s" """, + "4 >= false", + """ 1 >= "s" """, // Floats - ("1.0+true"), - ("1.0-true"), - (""" 1.0-"s" """), - ("1.0*true"), - (""" 1.0*"s" """), - ("1.0/true"), - ("1.0/0.0"), - ("1.0/0"), - (""" 1.0/"s" """), - ("10.0 % 0"), - ("10.0 % 0.0"), - ("1.0%false"), - (""" 1.0%"s" """), - ("24.0 == false "), - (""" 1.0 == "s" """), - ("24.0 != false "), - (""" 1.0 != "s" """), - ("24.0 < false "), - (""" 1.0 < "s" """), - ("24.0 <= false "), - (""" 1.0 <= "s" """), - ("4.0 > false"), - (""" 1.0 > "s" """), - ("4.0 >= false"), - (""" 1.0 >= "s" """), + "1.0+true", + "1.0-true", + """ 1.0-"s" """, + "1.0*true", + """ 1.0*"s" """, + "1.0/true", + "1.0/0.0", + "1.0/0", + """ 1.0/"s" """, + "10.0 % 0", + "10.0 % 0.0", + "1.0%false", + """ 1.0%"s" """, + "24.0 == false ", + """ 1.0 == "s" """, + "24.0 != false ", + """ 1.0 != "s" """, + "24.0 < false ", + """ 1.0 < "s" """, + "24.0 <= false ", + """ 1.0 <= "s" """, + "4.0 > false", + """ 1.0 > "s" """, + "4.0 >= false", + """ 1.0 >= "s" """, // Booleans - (""" true + "String" """), - ("true+2"), - ("true+2.3"), - ("false+true"), - ("false-5"), - ("false-6.6"), - ("true-true"), - (""" true-"s" """), - ("false * 7"), - ("false * 7.2"), - ("false*true"), - (""" false*"s" """), - ("false / 4"), - ("false/2.0"), - ("false/true"), - (""" true/"s" """), - ("true % 3"), - ("true % 3.5"), - ("false%false"), - (""" true % "s" """), - ("true == 24 "), - ("true == 24.0 "), - ("""true == "s" """), - ("true != 0 "), - ("true != 0.0 "), - ("""true != "s" """), - ("true < 3"), - ("true < 
3.0"), - ("true < 5.0"), - ("""true < "s" """), - ("true <= 4"), - ("true <= 3.0"), - ("""true <= "s" """), - ("true > 3"), - ("true > 3.0"), - ("true >= 4"), - ("true >= 4.0"), - ("""true >= "s" """), - ("false || 4"), - ("false || 4.0"), - ("""false || "s" """), - ("true && 4"), - ("true && 4.0"), - ("""true && "s" """), + """ true + "String" """, + "true+2", + "true+2.3", + "false+true", + "false-5", + "false-6.6", + "true-true", + """ true-"s" """, + "false * 7", + "false * 7.2", + "false*true", + """ false*"s" """, + "false / 4", + "false/2.0", + "false/true", + """ true/"s" """, + "true % 3", + "true % 3.5", + "false%false", + """ true % "s" """, + "true == 24 ", + "true == 24.0 ", + """true == "s" """, + "true != 0 ", + "true != 0.0 ", + """true != "s" """, + "true < 3", + "true < 3.0", + "true < 5.0", + """true < "s" """, + "true <= 4", + "true <= 3.0", + """true <= "s" """, + "true > 3", + "true > 3.0", + "true >= 4", + "true >= 4.0", + """true >= "s" """, + "false || 4", + "false || 4.0", + """false || "s" """, + "true && 4", + "true && 4.0", + """true && "s" """, // Strings - (""" "hello" + true """), - (""" "hello" == true """), - (""" "hello" != true """), - (""" "hello" < true """), - (""" "hello" > true """) + """ "hello" + true """, + """ "hello" == true """, + """ "hello" != true """, + """ "hello" < true """, + """ "hello" > true """ ) val identifierLookupExpressions = Table( @@ -413,7 +430,6 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("s == someStr", WomBoolean(false)), ("s < someStr", WomBoolean(true)), ("s > someStr", WomBoolean(false)), - ("someStr + s", WomString("someStrs")), ("someInt + s", WomString("1s")), ("someFloat + s", WomString("0.5s")), @@ -430,7 +446,6 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("a == someInt", WomBoolean(true)), ("a > someInt", WomBoolean(false)), ("a < someInt", WomBoolean(false)), - ("someIntAsString + a", WomString("11")), ("someInt + a", WomInteger(2)), ("someInt * a", WomInteger(1)), @@ -438,7 +453,6 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("someInt == a", WomBoolean(true)), ("someInt > a", WomBoolean(false)), ("someInt < a", WomBoolean(false)), - ("-someInt", WomInteger(-1)), ("+someInt", WomInteger(1)), @@ -450,7 +464,6 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("f == someFloat", WomBoolean(true)), ("f > someFloat", WomBoolean(false)), ("f < someFloat", WomBoolean(false)), - ("someFloatAsString + f", WomString("0.50.5")), ("someFloat + f", WomFloat(1)), ("someFloat * f", WomFloat(0.25)), @@ -458,7 +471,6 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("someFloat == f", WomBoolean(true)), ("someFloat > f", WomBoolean(false)), ("someFloat < f", WomBoolean(false)), - ("-someFloat", WomFloat(-0.5)), ("+someFloat", WomFloat(0.5)), @@ -468,64 +480,61 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("t < someBoolean", WomBoolean(false)), ("t && someBoolean", WomBoolean(false)), ("t || someBoolean", WomBoolean(true)), - ("someBoolean == t", WomBoolean(false)), ("someBoolean > t", WomBoolean(false)), ("someBoolean < t", WomBoolean(true)), ("someBoolean && t", WomBoolean(false)), ("someBoolean || t", WomBoolean(true)), - ("!someBoolean", WomBoolean(true)), // File ("etc_f + someStr", WomSingleFile("/etcsomeStr")), ("etc_f == someStr", WomBoolean(false)), ("etc_f == someFile", WomBoolean(false)), - 
("someFile == etc_f", WomBoolean(false)) ) val badIdentifierExpressions = Table( - ("expression"), - ("etc_f + 1"), - ("etc_f == 1"), - ("0.key3"), - ("array_str[3]"), - ("""map_str_int["d"]""") + "expression", + "etc_f + 1", + "etc_f == 1", + "0.key3", + "array_str[3]", + """map_str_int["d"]""" ) - forAll (constantExpressions) { (expression, value) => + forAll(constantExpressions) { (expression, value) => it should s"evaluate $expression into ${value.valueString} (${value.womType.stableName})" in { constEval(expression) shouldEqual value } } - forAll (constantExpressions) { (expression, value) => + forAll(constantExpressions) { (expression, value) => it should s"evaluate $expression into type ${value.womType.stableName}" in { constEvalType(expression) shouldEqual value.womType } } - forAll (identifierLookupExpressions) { (expression, value) => + forAll(identifierLookupExpressions) { (expression, value) => it should s"evaluate $expression into ${value.valueString} (${value.womType.stableName})" in { identifierEval(expression) shouldEqual value } } - forAll (identifierLookupExpressions) { (expression, value) => + forAll(identifierLookupExpressions) { (expression, value) => it should s"evaluate $expression into type ${value.womType.stableName}" in { // need to skip the object expressions because we don't know the types of sub-objects if (!expression.startsWith("o.key")) constEvalType(expression) shouldEqual value.womType } } - forAll (badExpressions) { (expression) => + forAll(badExpressions) { expression => it should s"error when evaluating: $expression" in { constEvalError(expression) } } - forAll (badIdentifierExpressions) { (expression) => + forAll(badIdentifierExpressions) { expression => it should s"error when evaluating: $expression" in { identifierEvalError(expression) } @@ -534,7 +543,9 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match "A string with special characters in it" should "convert to escape sequences when converted to WDL" in { WomString("a\nb").toWomString shouldEqual "\"a\\nb\"" WomString("a\nb\t").toWomString shouldEqual "\"a\\nb\\t\"" - WomString("be \u266f or be \u266e, just don't be \u266d").toWomString shouldEqual "\"be \\u266F or be \\u266E, just don't be \\u266D\"" + WomString( + "be \u266f or be \u266e, just don't be \u266d" + ).toWomString shouldEqual "\"be \\u266F or be \\u266E, just don't be \\u266D\"" } "Optional values" should "fail to perform addition with the + operator if the argument is None" in { @@ -552,15 +563,21 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } "Ternary if blocks" should "fail to evaluate if the condition is not a boolean" in { - constEvalError(""" if 5 + 6 then 6 + 7 else 14 * 15 """).getMessage should be("'if' expression must be given a boolean argument but got: 11") + constEvalError(""" if 5 + 6 then 6 + 7 else 14 * 15 """).getMessage should be( + "'if' expression must be given a boolean argument but got: 11" + ) } "Ternary if blocks" should "fail to evaluate if the chosen LHS expression fails to evaluate" in { - constEvalError(""" if (5 == 4 + 1) then fail() else 13 """).getClass.getSimpleName should be("NoSuchMethodException") + constEvalError(""" if (5 == 4 + 1) then fail() else 13 """).getClass.getSimpleName should be( + "NoSuchMethodException" + ) } "Ternary if blocks" should "fail to evaluate if the chosen RHS expression fails to evaluate" in { - constEvalError(""" if (5 == 6 + 1) then 13 else fail() """).getClass.getSimpleName should 
be("NoSuchMethodException") + constEvalError(""" if (5 == 6 + 1) then 13 else fail() """).getClass.getSimpleName should be( + "NoSuchMethodException" + ) } "WdlMaps" should "be coerced to their lowest common WomType" in { @@ -583,10 +600,13 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ) val exp = model.WdlExpression.fromString(str) - val expectedMap: WomMap = WomMap(WomMapType(WomStringType, WomOptionalType(WomStringType)), Map ( - WomString("i") -> WomOptionalValue(WomStringType, Some(WomString("1"))), - WomString("s") -> WomOptionalValue(WomStringType, Some(WomString("two"))) - )) + val expectedMap: WomMap = WomMap( + WomMapType(WomStringType, WomOptionalType(WomStringType)), + Map( + WomString("i") -> WomOptionalValue(WomStringType, Some(WomString("1"))), + WomString("s") -> WomOptionalValue(WomStringType, Some(WomString("two"))) + ) + ) val evaluated = exp.evaluate(lookup, NoFunctions) diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/WdlStandardLibraryFunctionsSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/WdlStandardLibraryFunctionsSpec.scala index c314a8ad80b..47d6cdfba5f 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/WdlStandardLibraryFunctionsSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/WdlStandardLibraryFunctionsSpec.scala @@ -11,7 +11,6 @@ import wom.values._ import scala.util.{Success, Try} - class WdlStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { import TableDrivenPropertyChecks._ @@ -26,7 +25,6 @@ class WdlStandardLibraryFunctionsSpec extends AnyFlatSpec with CromwellTimeoutSp ) selectionTable foreach { case (input, select_first_ifAppropriate, select_all) => - val functionInput = Seq(Success(mkWdlArray(input))) if (select_first_ifAppropriate.isDefined) { val select_first = select_first_ifAppropriate.get diff --git a/wdl/model/draft2/src/test/scala/wdl/types/WdlArrayTypeSpec.scala b/wdl/model/draft2/src/test/scala/wdl/types/WdlArrayTypeSpec.scala index 4d3dd0469a4..0de95db815b 100644 --- a/wdl/model/draft2/src/test/scala/wdl/types/WdlArrayTypeSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/types/WdlArrayTypeSpec.scala @@ -8,7 +8,7 @@ import wdl.draft2.parser.WdlParser.SyntaxError import wom.types.{WomArrayType, WomIntegerType} import wom.values.{WomArray, WomInteger} -class WdlArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { +class WdlArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val intArray = WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(2), WomInteger(3))) it should "convert WDL source code to WdlArray" in { diff --git a/wdl/model/draft2/src/test/scala/wdl/types/WdlMapTypeSpec.scala b/wdl/model/draft2/src/test/scala/wdl/types/WdlMapTypeSpec.scala index 8af3f0409bd..a05facb6da6 100644 --- a/wdl/model/draft2/src/test/scala/wdl/types/WdlMapTypeSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/types/WdlMapTypeSpec.scala @@ -1,6 +1,5 @@ package wdl.types - import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -9,15 +8,19 @@ import wdl.draft2.parser.WdlParser.SyntaxError import wom.types.{WomIntegerType, WomMapType, WomStringType} import wom.values.{WomInteger, WomMap, WomString} -class WdlMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - val stringIntMap = WomMap(WomMapType(WomStringType, WomIntegerType), Map( - WomString("a") -> WomInteger(1), - 
WomString("b") -> WomInteger(2), - WomString("c") -> WomInteger(3) - )) - +class WdlMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { + val stringIntMap = WomMap(WomMapType(WomStringType, WomIntegerType), + Map( + WomString("a") -> WomInteger(1), + WomString("b") -> WomInteger(2), + WomString("c") -> WomInteger(3) + ) + ) + it should "convert WDL source code to WdlMap" in { - WomMapType(WomStringType, WomIntegerType).fromWorkflowSource("{\"a\": 1, \"b\": 2, \"c\": 3}") shouldEqual stringIntMap + WomMapType(WomStringType, WomIntegerType).fromWorkflowSource( + "{\"a\": 1, \"b\": 2, \"c\": 3}" + ) shouldEqual stringIntMap } it should "NOT successfully convert WDL source code to WdlMap if passed a bogus AST" in { try { @@ -52,4 +55,3 @@ class WdlMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } } } - diff --git a/wdl/model/draft2/src/test/scala/wdl/types/WdlObjectTypeSpec.scala b/wdl/model/draft2/src/test/scala/wdl/types/WdlObjectTypeSpec.scala index 4b86b5b5f7c..1292d359223 100644 --- a/wdl/model/draft2/src/test/scala/wdl/types/WdlObjectTypeSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/types/WdlObjectTypeSpec.scala @@ -9,22 +9,25 @@ import wom.types.{WomMapType, WomObjectType, WomStringType} import wom.values.{WomMap, WomObject, WomString} class WdlObjectTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - val abcObject = WomObject(Map( - "a" -> WomString("one"), - "b" -> WomString("two"), - "c" -> WomString("three") - )) + val abcObject = WomObject( + Map( + "a" -> WomString("one"), + "b" -> WomString("two"), + "c" -> WomString("three") + ) + ) - val coerceableMap = WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("a") -> WomString("one"), - WomString("b") -> WomString("two"), - WomString("c") -> WomString("three")) + val coerceableMap = WomMap( + WomMapType(WomStringType, WomStringType), + Map(WomString("a") -> WomString("one"), WomString("b") -> WomString("two"), WomString("c") -> WomString("three")) ) - val nonCoerceableMap = WomMap(WomMapType(WomStringType, WomObjectType), Map( - WomString("a") -> WomObject(Map.empty), - WomString("b") -> WomObject(Map.empty), - WomString("c") -> WomObject(Map.empty)) + val nonCoerceableMap = WomMap( + WomMapType(WomStringType, WomObjectType), + Map(WomString("a") -> WomObject(Map.empty), + WomString("b") -> WomObject(Map.empty), + WomString("c") -> WomObject(Map.empty) + ) ) it should "convert WDL source code to WdlMap" in { diff --git a/wdl/model/draft2/src/test/scala/wdl/types/WdlPairTypeSpec.scala b/wdl/model/draft2/src/test/scala/wdl/types/WdlPairTypeSpec.scala index 0ea14e14819..49e2208455d 100644 --- a/wdl/model/draft2/src/test/scala/wdl/types/WdlPairTypeSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/types/WdlPairTypeSpec.scala @@ -17,26 +17,35 @@ class WdlPairTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val simplePair = WomPair(WomString("a"), WomInteger(1)) - val stringIntMap = WomMap(WomMapType(WomStringType, WomIntegerType), Map( - WomString("a") -> WomInteger(1), - WomString("b") -> WomInteger(2), - WomString("c") -> WomInteger(3) - )) - - val arrayOfPairs = WomArray(WomArrayType(WomPairType(WomStringType, WomIntegerType)), Seq( - WomPair(WomString("a"),WomInteger(1)), - WomPair(WomString("b"),WomInteger(2)), - WomPair(WomString("c"),WomInteger(3)) - )) - - val arrayOfPairsOfArrays = WomArray(WomArrayType(WomPairType(WomArrayType(WomStringType), WomArrayType(WomIntegerType))), + val stringIntMap = 
WomMap(WomMapType(WomStringType, WomIntegerType), + Map( + WomString("a") -> WomInteger(1), + WomString("b") -> WomInteger(2), + WomString("c") -> WomInteger(3) + ) + ) + + val arrayOfPairs = WomArray( + WomArrayType(WomPairType(WomStringType, WomIntegerType)), + Seq( + WomPair(WomString("a"), WomInteger(1)), + WomPair(WomString("b"), WomInteger(2)), + WomPair(WomString("c"), WomInteger(3)) + ) + ) + + val arrayOfPairsOfArrays = WomArray( + WomArrayType(WomPairType(WomArrayType(WomStringType), WomArrayType(WomIntegerType))), Seq( - WomPair(WomArray(WomArrayType(WomStringType), Seq(WomString("a"), WomString("b"))), - WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(11)))), - WomPair(WomArray(WomArrayType(WomStringType), Seq(WomString("c"), WomString("d"))), - WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(2), WomInteger(21)))), - WomPair(WomArray(WomArrayType(WomStringType), Seq(WomString("e"), WomString("f"))), - WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(3), WomInteger(31)))) + WomPair(WomArray(WomArrayType(WomStringType), Seq(WomString("a"), WomString("b"))), + WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(11))) + ), + WomPair(WomArray(WomArrayType(WomStringType), Seq(WomString("c"), WomString("d"))), + WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(2), WomInteger(21))) + ), + WomPair(WomArray(WomArrayType(WomStringType), Seq(WomString("e"), WomString("f"))), + WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(3), WomInteger(31))) + ) ) ) @@ -47,31 +56,41 @@ class WdlPairTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers WomPairType(WomStringType, WomStringType), Some(WomPair(WomString("1"), WomString("2"))) ), - ( - WomPair(WomMap(WomMapType(WomIntegerType, WomStringType), Map( - WomInteger(1) -> WomString("100"), - WomInteger(2) -> WomString("200") - )), WomString("300")), + WomPair(WomMap(WomMapType(WomIntegerType, WomStringType), + Map( + WomInteger(1) -> WomString("100"), + WomInteger(2) -> WomString("200") + ) + ), + WomString("300") + ), WomPairType(WomMapType(WomStringType, WomIntegerType), WomIntegerType), - Some(WomPair(WomMap(WomMapType(WomStringType, WomIntegerType), Map( - WomString("1") -> WomInteger(100), - WomString("2") -> WomInteger(200) - )), WomInteger(300))) + Some( + WomPair(WomMap(WomMapType(WomStringType, WomIntegerType), + Map( + WomString("1") -> WomInteger(100), + WomString("2") -> WomInteger(200) + ) + ), + WomInteger(300) + ) + ) ), - - ( - WomPair(WomMap(WomMapType(WomIntegerType, WomStringType), Map( - WomInteger(1) -> WomString("100"), - WomInteger(2) -> WomString("200") - )), WomString("300")), - WomPairType(WomArrayType(WomStringType), WomStringType), - None) - + (WomPair(WomMap(WomMapType(WomIntegerType, WomStringType), + Map( + WomInteger(1) -> WomString("100"), + WomInteger(2) -> WomString("200") + ) + ), + WomString("300") + ), + WomPairType(WomArrayType(WomStringType), WomStringType), + None + ) ) coerceables foreach { case (fromValue, toType, coercedValue) => - val notString = coercedValue map { _ => "" } getOrElse "not " val coercionDefined = coercedValue.isDefined @@ -85,7 +104,8 @@ class WdlPairTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers case (Success(actualValue), Some(expectedValue)) => actualValue should be(expectedValue) case (Success(actualValue), None) => fail(s"Coercion should have failed but instead got $actualValue") case (Failure(_), None) => // Correctly failed to coerce - case (Failure(t), Some(expectedValue)) => 
fail(s"Expected coercion to produce $expectedValue but instead got exception $t") + case (Failure(t), Some(expectedValue)) => + fail(s"Expected coercion to produce $expectedValue but instead got exception $t") } } } @@ -153,7 +173,8 @@ class WdlPairTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers |] """.stripMargin.parseJson - WomArrayType(WomPairType(WomArrayType(WomStringType), WomArrayType(WomIntegerType))).coerceRawValue(complexJsArray) match { + WomArrayType(WomPairType(WomArrayType(WomStringType), WomArrayType(WomIntegerType))) + .coerceRawValue(complexJsArray) match { case Success(array) => array shouldEqual arrayOfPairsOfArrays case Failure(f) => fail(s"exception while coercing JsObject to WdlPair: $f") } @@ -196,7 +217,9 @@ class WdlPairTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val results = WomPairType(WomStringType, WomIntegerType).coerceRawValue(invalidPair) results match { case Failure(ex) => - ex.getMessage should (startWith("Failed to coerce") and endWith("requires for Right/Left value(s) to be defined.)")) + ex.getMessage should (startWith("Failed to coerce") and endWith( + "requires for Right/Left value(s) to be defined.)" + )) case Success(_) => fail("Unexpected successful coercion to WdlPair") } } diff --git a/wdl/model/draft2/src/test/scala/wdl/types/WomArrayTypeSpec.scala b/wdl/model/draft2/src/test/scala/wdl/types/WomArrayTypeSpec.scala index 41c54345b7f..69f58ef6407 100644 --- a/wdl/model/draft2/src/test/scala/wdl/types/WomArrayTypeSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/types/WomArrayTypeSpec.scala @@ -10,22 +10,28 @@ import wom.values.{WomArray, WomValue} import scala.util.{Failure, Success} - -class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { +class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "WomArrayType" - List(WomStringType, WomArrayType(WomIntegerType), WomPairType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), WomOptionalType(WomStringType)) foreach { desiredMemberType => + List(WomStringType, + WomArrayType(WomIntegerType), + WomPairType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), + WomOptionalType(WomStringType) + ) foreach { desiredMemberType => it should s"be able to construct an empty Array[${desiredMemberType.stableName}] value" in { val desiredArrayType = WomArrayType(desiredMemberType) WdlExpression.fromString("[]").evaluate(noLookup, NoFunctions) match { - case Success(emptyArray @ WomArray(actualArrayType @ WomMaybeEmptyArrayType(actualMemberType), actualArrayValue)) => + case Success( + emptyArray @ WomArray(actualArrayType @ WomMaybeEmptyArrayType(actualMemberType), actualArrayValue) + ) => actualMemberType should be(WomNothingType) actualArrayValue should be(Seq.empty) desiredArrayType.isCoerceableFrom(actualArrayType) should be(true) desiredArrayType.coerceRawValue(emptyArray) should be(Success(WomArray(desiredArrayType, Seq.empty))) - case Success(WomArray(WomNonEmptyArrayType(_), _)) => fail("Empty arrays should not be created with an Array[_]+ type") + case Success(WomArray(WomNonEmptyArrayType(_), _)) => + fail("Empty arrays should not be created with an Array[_]+ type") case Success(other) => fail(s"Array literal somehow got evaluated as a ${other.womType} type?!?") case Failure(f) => fail("Unable to create an empty array.", f) } @@ -44,16 +50,24 @@ class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val nonEmptyLiteral = "[1, 2, 3]" - 
List(WomNonEmptyArrayType(WomIntegerType), WomOptionalType(WomNonEmptyArrayType(WomIntegerType))) foreach { targetType => - it should s"be able to coerce an array literal into ${targetType.stableName}" in { + List(WomNonEmptyArrayType(WomIntegerType), WomOptionalType(WomNonEmptyArrayType(WomIntegerType))) foreach { + targetType => + it should s"be able to coerce an array literal into ${targetType.stableName}" in { - val evaluatedLiteral: WomValue = WdlExpression.fromString(nonEmptyLiteral).evaluate(noLookup, NoFunctions).getOrElse(fail(s"Unable to evaluate non-empty literal $nonEmptyLiteral")) + val evaluatedLiteral: WomValue = WdlExpression + .fromString(nonEmptyLiteral) + .evaluate(noLookup, NoFunctions) + .getOrElse(fail(s"Unable to evaluate non-empty literal $nonEmptyLiteral")) - targetType.coerceRawValue(evaluatedLiteral) match { - case Success(womValue) => womValue.womType should be(targetType) - case Failure(e) => fail(s"Unable to coerce $evaluatedLiteral (${evaluatedLiteral.womType.stableName}) into ${targetType.stableName}", e) + targetType.coerceRawValue(evaluatedLiteral) match { + case Success(womValue) => womValue.womType should be(targetType) + case Failure(e) => + fail( + s"Unable to coerce $evaluatedLiteral (${evaluatedLiteral.womType.stableName}) into ${targetType.stableName}", + e + ) + } } - } } def noLookup(String: String): WomValue = fail("No identifiers should be looked up in this test") diff --git a/wdl/model/draft2/src/test/scala/wdl/util/StringUtilSpec.scala b/wdl/model/draft2/src/test/scala/wdl/util/StringUtilSpec.scala index 42af69a0405..7c466aca30e 100644 --- a/wdl/model/draft2/src/test/scala/wdl/util/StringUtilSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/util/StringUtilSpec.scala @@ -5,7 +5,6 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks - class StringUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { behavior of "StringUtilSpec" diff --git a/wdl/model/draft2/src/test/scala/wdl/values/WdlValueSpec.scala b/wdl/model/draft2/src/test/scala/wdl/values/WdlValueSpec.scala index 225c7b6b98f..da7d53576a8 100644 --- a/wdl/model/draft2/src/test/scala/wdl/values/WdlValueSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/values/WdlValueSpec.scala @@ -59,7 +59,8 @@ class WdlValueSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { (WdlExpression.fromString("a + b"), "a + b"), (WdlExpression.fromString("a(b, c)"), "a(b, c)"), (WdlExpression.fromString("\"a\" + \"b\""), "\"a\" + \"b\""), - (WdlExpression.fromString("a.b.c"), "a.b.c")) + (WdlExpression.fromString("a.b.c"), "a.b.c") + ) forAll(wdlExpressionRawStrings) { (womValue, rawString) => it should s"resemble a ${womValue.typeName} to/from raw string '$rawString'" in { @@ -98,41 +99,64 @@ class WdlValueSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { WomObject(Map("0" -> WomString("zero"))) ), ( - WomObject(Map( - "0" -> WomString("zero"), "1" -> WomString("one"), "2" -> WomString("two"), "3" -> WomString("three") - )), - WomObject(Map( - "0" -> WomString("zero"), "1" -> WomString("one"), "2" -> WomString("two") - )) + WomObject( + Map( + "0" -> WomString("zero"), + "1" -> WomString("one"), + "2" -> WomString("two"), + "3" -> WomString("three") + ) + ), + WomObject( + Map( + "0" -> WomString("zero"), + "1" -> WomString("one"), + "2" -> WomString("two") + ) + ) ), ( WdlCallOutputsObject(testCall, Map("0" -> WomString("zero"))), 
WdlCallOutputsObject(testCall, Map("0" -> WomString("zero"))) ), ( - WdlCallOutputsObject(testCall, Map( - "0" -> WomString("zero"), "1" -> WomString("one"), "2" -> WomString("two"), "3" -> WomString("three") - )), - WdlCallOutputsObject(testCall, Map( - "0" -> WomString("zero"), "1" -> WomString("one"), "2" -> WomString("two") - )) + WdlCallOutputsObject(testCall, + Map( + "0" -> WomString("zero"), + "1" -> WomString("one"), + "2" -> WomString("two"), + "3" -> WomString("three") + ) + ), + WdlCallOutputsObject(testCall, + Map( + "0" -> WomString("zero"), + "1" -> WomString("one"), + "2" -> WomString("two") + ) + ) ), ( WomMap(WomMapType(WomStringType, WomStringType), Map(WomString("0") -> WomString("zero"))), WomMap(WomMapType(WomStringType, WomStringType), Map(WomString("0") -> WomString("zero"))) ), ( - WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("0") -> WomString("zero"), - WomString("1") -> WomString("one"), - WomString("2") -> WomString("two"), - WomString("3") -> WomString("three") - )), - WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("0") -> WomString("zero"), - WomString("1") -> WomString("one"), - WomString("2") -> WomString("two") - )) + WomMap( + WomMapType(WomStringType, WomStringType), + Map( + WomString("0") -> WomString("zero"), + WomString("1") -> WomString("one"), + WomString("2") -> WomString("two"), + WomString("3") -> WomString("three") + ) + ), + WomMap(WomMapType(WomStringType, WomStringType), + Map( + WomString("0") -> WomString("zero"), + WomString("1") -> WomString("one"), + WomString("2") -> WomString("two") + ) + ) ), ( WomArray(WomArrayType(WomStringType), Seq(WomString("0"))), @@ -146,18 +170,38 @@ class WdlValueSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { WomArray( WomArrayType(WomArrayType(WomStringType)), Seq( - WomArray(WomArrayType(WomStringType), Seq( - WomString("a0"), WomString("a1"), WomString("a2"), WomString("a3") - )), - WomArray(WomArrayType(WomStringType), Seq( - WomString("b0"), WomString("b1"), WomString("b2"), WomString("b3") - )), - WomArray(WomArrayType(WomStringType), Seq( - WomString("c0"), WomString("c1"), WomString("c2"), WomString("c3") - )), - WomArray(WomArrayType(WomStringType), Seq( - WomString("d0"), WomString("d1"), WomString("d2"), WomString("d3") - )) + WomArray(WomArrayType(WomStringType), + Seq( + WomString("a0"), + WomString("a1"), + WomString("a2"), + WomString("a3") + ) + ), + WomArray(WomArrayType(WomStringType), + Seq( + WomString("b0"), + WomString("b1"), + WomString("b2"), + WomString("b3") + ) + ), + WomArray(WomArrayType(WomStringType), + Seq( + WomString("c0"), + WomString("c1"), + WomString("c2"), + WomString("c3") + ) + ), + WomArray(WomArrayType(WomStringType), + Seq( + WomString("d0"), + WomString("d1"), + WomString("d2"), + WomString("d3") + ) + ) ) ), WomArray( @@ -171,13 +215,12 @@ class WdlValueSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { ) ) - private def describe(womValue: WomValue): String = { + private def describe(womValue: WomValue): String = womValue match { case WdlCallOutputsObject(call, outputs) => s"WdlCallOutputsObject(${call.unqualifiedName}, ${outputs.safeMapValues(_.toWomString)})" case _ => womValue.toWomString } - } forAll(wdlValueMaxedElements) { (womValue, expected) => it should s"take max elements for ${describe(womValue)}" in { diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CallElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CallElement.scala index 
35b7f8be3cc..1d4a3e428ca 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CallElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CallElement.scala @@ -5,5 +5,6 @@ final case class CallElement(callableReference: String, alias: Option[String], afters: Vector[String], body: Option[CallBodyElement], - override val sourceLocation : Option[SourceFileLocation]) - extends LanguageElement with WorkflowGraphElement + override val sourceLocation: Option[SourceFileLocation] +) extends LanguageElement + with WorkflowGraphElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CommandPartElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CommandPartElement.scala index cb8049fb4a8..2d89d6a20ed 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CommandPartElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/CommandPartElement.scala @@ -4,5 +4,7 @@ sealed trait CommandPartElement extends TaskSectionElement object CommandPartElement { final case class StringCommandPartElement(value: String) extends CommandPartElement - final case class PlaceholderCommandPartElement(expressionElement: ExpressionElement, attributes: PlaceholderAttributeSet) extends CommandPartElement + final case class PlaceholderCommandPartElement(expressionElement: ExpressionElement, + attributes: PlaceholderAttributeSet + ) extends CommandPartElement } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/DeclarationElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/DeclarationElement.scala index 1a1bf71e8dd..ef85d3973bf 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/DeclarationElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/DeclarationElement.scala @@ -8,25 +8,35 @@ final case class DeclarationContent(typeElement: TypeElement, name: String, expr /** * A Declaration outside of an input or output block */ -final case class IntermediateValueDeclarationElement(typeElement: TypeElement, name: String, expression: ExpressionElement) extends WorkflowGraphElement with TaskSectionElement +final case class IntermediateValueDeclarationElement(typeElement: TypeElement, + name: String, + expression: ExpressionElement +) extends WorkflowGraphElement + with TaskSectionElement object IntermediateValueDeclarationElement { - def fromContent(content: DeclarationContent): IntermediateValueDeclarationElement = IntermediateValueDeclarationElement(content.typeElement, content.name, content.expression) + def fromContent(content: DeclarationContent): IntermediateValueDeclarationElement = + IntermediateValueDeclarationElement(content.typeElement, content.name, content.expression) } /** * A declaration in an output block */ -final case class OutputDeclarationElement(typeElement: TypeElement, name: String, expression: ExpressionElement) extends LanguageElement with WorkflowGraphElement +final case class OutputDeclarationElement(typeElement: TypeElement, name: String, expression: ExpressionElement) + extends LanguageElement + with WorkflowGraphElement object OutputDeclarationElement { - def fromContent(content: DeclarationContent): OutputDeclarationElement = OutputDeclarationElement(content.typeElement, content.name, content.expression) + def fromContent(content: DeclarationContent): OutputDeclarationElement = + OutputDeclarationElement(content.typeElement, content.name, content.expression) } /** * A declaration in an input block */ 
-final case class InputDeclarationElement(typeElement: TypeElement, name: String, expression: Option[ExpressionElement]) extends LanguageElement with WorkflowGraphElement +final case class InputDeclarationElement(typeElement: TypeElement, name: String, expression: Option[ExpressionElement]) + extends LanguageElement + with WorkflowGraphElement object DeclarationElement { /* Custom unapply so that elsewhere we can do things like this, and otherwise treat all declarations the same: @@ -34,10 +44,11 @@ object DeclarationElement { case DeclarationElement(typeElement, name, Some(expr)) => ... } */ - def unapply(languageElement: LanguageElement): Option[(TypeElement, String, Option[ExpressionElement])] = languageElement match { - case IntermediateValueDeclarationElement(typeElement, name, expr) => Option((typeElement, name, Option(expr))) - case OutputDeclarationElement(typeElement, name, expr) => Option((typeElement, name, Option(expr))) - case InputDeclarationElement(typeElement, name, expr) => Option((typeElement, name, expr)) - case _ => None - } + def unapply(languageElement: LanguageElement): Option[(TypeElement, String, Option[ExpressionElement])] = + languageElement match { + case IntermediateValueDeclarationElement(typeElement, name, expr) => Option((typeElement, name, Option(expr))) + case OutputDeclarationElement(typeElement, name, expr) => Option((typeElement, name, Option(expr))) + case InputDeclarationElement(typeElement, name, expr) => Option((typeElement, name, expr)) + case _ => None + } } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ExpressionElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ExpressionElement.scala index ced84d77d75..a2fc6202193 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ExpressionElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ExpressionElement.scala @@ -31,7 +31,6 @@ object ExpressionElement { override val unescape: String = codePoint.toChar.toString } - final case class KvPair(key: String, value: ExpressionElement) final case class ObjectLiteral(elements: Map[String, ExpressionElement]) extends ExpressionElement final case class ArrayLiteral(elements: Seq[ExpressionElement]) extends ExpressionElement @@ -42,6 +41,7 @@ object ExpressionElement { * Represents a unary operation (i.e. a operator symbol followed by a single argument expression) */ sealed trait UnaryOperation extends ExpressionElement { + /** * The expression which follows the unary operator. The argument to the operation. */ @@ -56,6 +56,7 @@ object ExpressionElement { * A two-argument expression. Almost certainly comes from an infix operation in WDL (eg the '+' in '7 + read_int(x)') */ sealed trait BinaryOperation extends ExpressionElement { + /** * The left-hand-side of the operation ('7' in the example above). 
*/ @@ -67,21 +68,35 @@ object ExpressionElement { def right: ExpressionElement } - final case class LogicalOr(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class LogicalAnd(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class Equals(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class NotEquals(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class LessThan(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class LessThanOrEquals(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class GreaterThan(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class GreaterThanOrEquals(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class Add(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class Subtract(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class Multiply(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class Divide(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - final case class Remainder(override val left: ExpressionElement, override val right: ExpressionElement) extends BinaryOperation - - final case class TernaryIf(condition: ExpressionElement, ifTrue: ExpressionElement, ifFalse: ExpressionElement) extends ExpressionElement + final case class LogicalOr(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class LogicalAnd(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class Equals(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class NotEquals(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class LessThan(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class LessThanOrEquals(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class GreaterThan(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class GreaterThanOrEquals(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class Add(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class Subtract(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class Multiply(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class Divide(override val left: ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + final case class Remainder(override val left: 
ExpressionElement, override val right: ExpressionElement) + extends BinaryOperation + + final case class TernaryIf(condition: ExpressionElement, ifTrue: ExpressionElement, ifFalse: ExpressionElement) + extends ExpressionElement sealed trait FunctionCallElement extends ExpressionElement // 0-param functions @@ -127,11 +142,13 @@ object ExpressionElement { def firstParam: ExpressionElement def secondParam: Option[ExpressionElement] } - final case class Size(file: ExpressionElement, unit: Option[ExpressionElement]) extends OneOrTwoParamFunctionCallElement { + final case class Size(file: ExpressionElement, unit: Option[ExpressionElement]) + extends OneOrTwoParamFunctionCallElement { override def firstParam: ExpressionElement = file override def secondParam: Option[ExpressionElement] = unit } - final case class Basename(param: ExpressionElement, suffixToRemove: Option[ExpressionElement]) extends OneOrTwoParamFunctionCallElement { + final case class Basename(param: ExpressionElement, suffixToRemove: Option[ExpressionElement]) + extends OneOrTwoParamFunctionCallElement { override def firstParam: ExpressionElement = param override def secondParam: Option[ExpressionElement] = suffixToRemove } @@ -157,7 +174,8 @@ object ExpressionElement { def arg2: ExpressionElement def arg3: ExpressionElement } - final case class Sub(input: ExpressionElement, pattern: ExpressionElement, replace: ExpressionElement) extends ThreeParamFunctionCallElement { + final case class Sub(input: ExpressionElement, pattern: ExpressionElement, replace: ExpressionElement) + extends ThreeParamFunctionCallElement { override def arg1: ExpressionElement = input override def arg2: ExpressionElement = pattern override def arg3: ExpressionElement = replace @@ -203,7 +221,8 @@ object ExpressionElement { * - But, the second element might be part of the identifier to look up (eg my_task.pair_of_pairs) OR it might * be part of a member access chain (eg pair_of_pairs.left.right). We won't know until we do the linking. */ - final case class IdentifierMemberAccess(first: String, second: String, memberAccessTail: Seq[String]) extends ExpressionElement + final case class IdentifierMemberAccess(first: String, second: String, memberAccessTail: Seq[String]) + extends ExpressionElement /** * A member access which is based on an expression rather than an identifier. 
@@ -211,7 +230,8 @@ object ExpressionElement { * eg: * (1, 2).left */ - final case class ExpressionMemberAccess(expression: ExpressionElement, memberAccessTail: NonEmptyList[String]) extends ExpressionElement + final case class ExpressionMemberAccess(expression: ExpressionElement, memberAccessTail: NonEmptyList[String]) + extends ExpressionElement /** * diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/FileElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/FileElement.scala index 65f11aded9d..ecf45a1a193 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/FileElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/FileElement.scala @@ -1,6 +1,7 @@ package wdl.model.draft3.elements final case class FileElement(imports: Seq[ImportElement], - structs: Seq[StructElement], - workflows: Seq[WorkflowDefinitionElement], - tasks: Seq[TaskDefinitionElement]) extends LanguageElement + structs: Seq[StructElement], + workflows: Seq[WorkflowDefinitionElement], + tasks: Seq[TaskDefinitionElement] +) extends LanguageElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/IfElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/IfElement.scala index 36620018803..24d851abead 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/IfElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/IfElement.scala @@ -1,4 +1,4 @@ package wdl.model.draft3.elements -final case class IfElement(conditionExpression: ExpressionElement, - graphElements: Seq[WorkflowGraphElement]) extends WorkflowGraphElement +final case class IfElement(conditionExpression: ExpressionElement, graphElements: Seq[WorkflowGraphElement]) + extends WorkflowGraphElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ImportElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ImportElement.scala index 835e81d6a92..92bf4a6289b 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ImportElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ImportElement.scala @@ -1,5 +1,4 @@ package wdl.model.draft3.elements -final case class ImportElement(importUrl: String, - namespace: Option[String], - structRenames: Map[String, String]) extends LanguageElement +final case class ImportElement(importUrl: String, namespace: Option[String], structRenames: Map[String, String]) + extends LanguageElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/InputsSectionElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/InputsSectionElement.scala index 4b060b39b4d..58b537d8446 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/InputsSectionElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/InputsSectionElement.scala @@ -1,3 +1,5 @@ package wdl.model.draft3.elements -final case class InputsSectionElement(inputDeclarations: Seq[InputDeclarationElement]) extends WorkflowBodyElement with TaskSectionElement +final case class InputsSectionElement(inputDeclarations: Seq[InputDeclarationElement]) + extends WorkflowBodyElement + with TaskSectionElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/LanguageElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/LanguageElement.scala index 8839cf8c2d3..ea29be54dcd 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/LanguageElement.scala +++ 
b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/LanguageElement.scala @@ -2,5 +2,5 @@ package wdl.model.draft3.elements import wom.SourceFileLocation trait LanguageElement { - val sourceLocation : Option[SourceFileLocation] = None + val sourceLocation: Option[SourceFileLocation] = None } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/MetaSectionElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/MetaSectionElement.scala index 0f318f10309..74020102db6 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/MetaSectionElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/MetaSectionElement.scala @@ -2,4 +2,6 @@ package wdl.model.draft3.elements import wom.callable.MetaValueElement -final case class MetaSectionElement(meta: Map[String, MetaValueElement]) extends WorkflowBodyElement with TaskSectionElement +final case class MetaSectionElement(meta: Map[String, MetaValueElement]) + extends WorkflowBodyElement + with TaskSectionElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/OutputsSectionElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/OutputsSectionElement.scala index cea7438e1f5..96b88e94cb7 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/OutputsSectionElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/OutputsSectionElement.scala @@ -1,3 +1,5 @@ package wdl.model.draft3.elements -final case class OutputsSectionElement(outputs: Seq[OutputDeclarationElement]) extends WorkflowBodyElement with TaskSectionElement +final case class OutputsSectionElement(outputs: Seq[OutputDeclarationElement]) + extends WorkflowBodyElement + with TaskSectionElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ParameterMetaSectionElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ParameterMetaSectionElement.scala index f951103b5d6..8eb74929abc 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ParameterMetaSectionElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ParameterMetaSectionElement.scala @@ -2,4 +2,6 @@ package wdl.model.draft3.elements import wom.callable.MetaValueElement -final case class ParameterMetaSectionElement(metaAttributes: Map[String, MetaValueElement]) extends WorkflowBodyElement with TaskSectionElement +final case class ParameterMetaSectionElement(metaAttributes: Map[String, MetaValueElement]) + extends WorkflowBodyElement + with TaskSectionElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/PlaceholderAttributeElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/PlaceholderAttributeElement.scala index b21f2946078..9b955dcd158 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/PlaceholderAttributeElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/PlaceholderAttributeElement.scala @@ -7,7 +7,11 @@ final case class TrueAttributeElement(value: String) extends PlaceholderAttribut final case class FalseAttributeElement(value: String) extends PlaceholderAttributeElement final case class SepAttributeElement(value: String) extends PlaceholderAttributeElement -final case class PlaceholderAttributeSet(defaultAttribute: Option[String], trueAttribute: Option[String], falseAttribute: Option[String], sepAttribute: Option[String]) +final case class PlaceholderAttributeSet(defaultAttribute: Option[String], + trueAttribute: 
Option[String], + falseAttribute: Option[String], + sepAttribute: Option[String] +) object PlaceholderAttributeSet { val empty = PlaceholderAttributeSet(None, None, None, None) diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ScatterElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ScatterElement.scala index 59a242fb6d7..d9374e4aea9 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ScatterElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/ScatterElement.scala @@ -6,11 +6,12 @@ final case class ScatterElement(scatterName: String, scatterExpression: ExpressionElement, scatterVariableName: String, graphElements: Seq[WorkflowGraphElement], - override val sourceLocation : Option[SourceFileLocation]) extends WorkflowGraphElement { + override val sourceLocation: Option[SourceFileLocation] +) extends WorkflowGraphElement { // Scatter names do not contain intrinsic information about the scatter; rather they are a sort // of hash based on the declarations's physical location in the source. - override def equals(other: scala.Any): Boolean = { + override def equals(other: scala.Any): Boolean = other match { case otherScatter: ScatterElement => this.scatterExpression == otherScatter.scatterExpression && @@ -18,7 +19,6 @@ final case class ScatterElement(scatterName: String, this.graphElements == otherScatter.graphElements case _ => false } - } // Shorthand to only include certain members for hashing purposes // https://stackoverflow.com/a/31915429/818054 diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/StringEscapeSequence.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/StringEscapeSequence.scala index d2abab6ca58..b88453d1ede 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/StringEscapeSequence.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/StringEscapeSequence.scala @@ -2,7 +2,15 @@ package wdl.model.draft3.elements import cats.syntax.validated._ import common.validation.ErrorOr.ErrorOr -import wdl.model.draft3.elements.ExpressionElement.{BackslashEscape, DoubleQuoteEscape, NewlineEscape, SingleQuoteEscape, StringEscapeSequence, TabEscape, UnicodeCharacterEscape} +import wdl.model.draft3.elements.ExpressionElement.{ + BackslashEscape, + DoubleQuoteEscape, + NewlineEscape, + SingleQuoteEscape, + StringEscapeSequence, + TabEscape, + UnicodeCharacterEscape +} object StringEscapeSequence { val Octal = "\\\\([0-7]{3})".r @@ -21,7 +29,6 @@ object StringEscapeSequence { case FourDigitUnicode(codePoint) => UnicodeCharacterEscape(BigInt(codePoint, 16).intValue).validNel case EightDigitUnicode(codePoint) => UnicodeCharacterEscape(BigInt(codePoint, 16).intValue).validNel - case _ => s"Unrecognized escape sequence '$seq'".invalidNel } } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TaskDefinitionElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TaskDefinitionElement.scala index 84a08b3715c..c952f69b468 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TaskDefinitionElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TaskDefinitionElement.scala @@ -9,4 +9,5 @@ final case class TaskDefinitionElement(name: String, runtimeSection: Option[RuntimeAttributesSectionElement], metaSection: Option[MetaSectionElement], parameterMetaSection: Option[ParameterMetaSectionElement], - override val sourceLocation : Option[SourceFileLocation]) extends 
FileBodyElement + override val sourceLocation: Option[SourceFileLocation] +) extends FileBodyElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TypeElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TypeElement.scala index 67e5598abb9..ed44c25f39d 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TypeElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/TypeElement.scala @@ -7,7 +7,7 @@ sealed trait TypeElement extends LanguageElement case class PrimitiveTypeElement(primitiveType: WomPrimitiveType) extends TypeElement case class ArrayTypeElement(inner: TypeElement) extends TypeElement -case class MapTypeElement(keyType: TypeElement, valueType: TypeElement ) extends TypeElement +case class MapTypeElement(keyType: TypeElement, valueType: TypeElement) extends TypeElement case class OptionalTypeElement(maybeType: TypeElement) extends TypeElement case class NonEmptyTypeElement(arrayType: TypeElement) extends TypeElement case class PairTypeElement(leftType: TypeElement, rightType: TypeElement) extends TypeElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/WorkflowDefinitionElement.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/WorkflowDefinitionElement.scala index d6aec431f88..de48042e9fb 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/WorkflowDefinitionElement.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/elements/WorkflowDefinitionElement.scala @@ -7,4 +7,5 @@ final case class WorkflowDefinitionElement(name: String, outputsSection: Option[OutputsSectionElement], metaSection: Option[MetaSectionElement], parameterMetaSection: Option[ParameterMetaSectionElement], - override val sourceLocation : Option[SourceFileLocation]) extends FileBodyElement + override val sourceLocation: Option[SourceFileLocation] +) extends FileBodyElement diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/GeneratedValueHandle.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/GeneratedValueHandle.scala index 034854c56d6..e9e32149cfd 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/GeneratedValueHandle.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/GeneratedValueHandle.scala @@ -8,7 +8,8 @@ sealed trait GeneratedValueHandle { } final case class GeneratedIdentifierValueHandle(linkableName: String, womType: WomType) extends GeneratedValueHandle -final case class GeneratedCallOutputValueHandle(callName: String, outputName: String, womType: WomType) extends GeneratedValueHandle { +final case class GeneratedCallOutputValueHandle(callName: String, outputName: String, womType: WomType) + extends GeneratedValueHandle { override def linkableName: String = s"$callName.$outputName" } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/LinkedGraph.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/LinkedGraph.scala index 26c0a84f693..2ca92b8bd34 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/LinkedGraph.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/LinkedGraph.scala @@ -7,7 +7,7 @@ final case class LinkedGraph(elements: Set[WorkflowGraphElement], edges: Set[LinkedGraphEdge], generatedHandles: Set[GeneratedValueHandle], consumedValueLookup: Map[UnlinkedConsumedValueHook, GeneratedValueHandle], - typeAliases: Map[String, WomType]) { -} + typeAliases: Map[String, WomType] +) {} final case class LinkedGraphEdge(upstream: 
WorkflowGraphElement, downstream: WorkflowGraphElement) diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedConsumedValueHook.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedConsumedValueHook.scala index 0a2a3a2ed06..de43a0edc52 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedConsumedValueHook.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedConsumedValueHook.scala @@ -14,7 +14,7 @@ final case class UnlinkedIdentifierHook(name: String) extends UnlinkedConsumedVa * Until we do the linking, we can't tell whether a consumed 'x.y' is a call output or a member access for 'y' on * a variable called 'x'. */ -final case class UnlinkedCallOutputOrIdentifierAndMemberAccessHook(name: String, - firstLookup: String) extends UnlinkedConsumedValueHook +final case class UnlinkedCallOutputOrIdentifierAndMemberAccessHook(name: String, firstLookup: String) + extends UnlinkedConsumedValueHook final case class UnlinkedAfterCallHook(upstreamCallName: String) extends UnlinkedConsumedValueHook diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueConsumer.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueConsumer.scala index 33104de1b3e..2fcaf0e4070 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueConsumer.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueConsumer.scala @@ -8,14 +8,14 @@ import wom.types.WomType @typeclass trait GraphElementValueConsumer[A] { - def graphElementConsumedValueHooks(a: A, - typeAliases: Map[String, WomType], - callables: Map[String, Callable]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[Set[UnlinkedConsumedValueHook]] + def graphElementConsumedValueHooks(a: A, typeAliases: Map[String, WomType], callables: Map[String, Callable])(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): ErrorOr[Set[UnlinkedConsumedValueHook]] } @typeclass trait ExpressionValueConsumer[A] { - def expressionConsumedValueHooks(a: A) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] + def expressionConsumedValueHooks(a: A)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueGenerator.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueGenerator.scala index e21e2cf2582..8e7af412f39 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueGenerator.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/UnlinkedValueGenerator.scala @@ -7,5 +7,8 @@ import wom.types.WomType @typeclass trait UnlinkedValueGenerator[A] { - def generatedValueHandles(a: A, typeAliases: Map[String, WomType], callables: Map[String, Callable]): ErrorOr[Set[GeneratedValueHandle]] + def generatedValueHandles(a: A, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + ): ErrorOr[Set[GeneratedValueHandle]] } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/FileEvaluator.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/FileEvaluator.scala index 8c9c4b4578e..62b5e662860 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/FileEvaluator.scala +++ 
b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/FileEvaluator.scala @@ -15,19 +15,22 @@ trait FileEvaluator[A <: ExpressionElement] { final def evaluateFilesNeededToEvaluate(a: A, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = valueEvaluator.evaluateValue(a, inputs, ioFunctionSet, None) match { case Valid(womValue) => FileEvaluatorUtil.findFilesToDelocalize(womValue.value, coerceTo).toSet.validNel case _ => predictFilesNeededToEvaluate(a, inputs, ioFunctionSet, coerceTo) } - } def predictFilesNeededToEvaluate(a: A, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/TypeEvaluator.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/TypeEvaluator.scala index 068e99537c1..b7910478d83 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/TypeEvaluator.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/TypeEvaluator.scala @@ -8,7 +8,7 @@ import wom.types.WomType @typeclass trait TypeEvaluator[A] { - def evaluateType(a: A, - linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] + def evaluateType(a: A, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] } diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/ValueEvaluator.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/ValueEvaluator.scala index aba476675a8..4acb2612845 100644 --- a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/ValueEvaluator.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/ValueEvaluator.scala @@ -9,6 +9,7 @@ import wom.values.WomValue @typeclass trait ValueEvaluator[A] { + /** * Evaluate a value from an A * @param a The A to evaluate @@ -20,8 +21,8 @@ trait ValueEvaluator[A] { def evaluateValue(a: A, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] } final case class ForCommandInstantiationOptions(valueMapper: WomValue => WomValue) diff --git a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/WomExpressionMaker.scala b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/WomExpressionMaker.scala index 02f5fdf8796..039b96ee768 100644 --- 
a/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/WomExpressionMaker.scala +++ b/wdl/model/draft3/src/main/scala/wdl/model/draft3/graph/expression/WomExpressionMaker.scala @@ -10,5 +10,6 @@ import wom.types.WomType trait WomExpressionMaker[A] { def makeWomExpression(a: A, typeAliases: Map[String, WomType], - consumedValueLookup: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]): ErrorOr[WomExpression] + consumedValueLookup: Map[UnlinkedConsumedValueHook, GeneratedValueHandle] + ): ErrorOr[WomExpression] } diff --git a/wdl/model/shared/src/main/scala/wdl/shared/FileSizeLimitationConfig.scala b/wdl/model/shared/src/main/scala/wdl/shared/FileSizeLimitationConfig.scala index 51af5d175da..205e149f7d4 100644 --- a/wdl/model/shared/src/main/scala/wdl/shared/FileSizeLimitationConfig.scala +++ b/wdl/model/shared/src/main/scala/wdl/shared/FileSizeLimitationConfig.scala @@ -42,19 +42,18 @@ object FileSizeLimitationConfig { lazy val fileSizeLimitationConfig: FileSizeLimitationConfig = config.as[FileSizeLimitationConfig]("input-read-limits") - implicit val configReader : ValueReader[FileSizeLimitationConfig] = ValueReader.relative{c => + implicit val configReader: ValueReader[FileSizeLimitationConfig] = ValueReader.relative { c => def f(s: String) = c.as[Int](s) new FileSizeLimitationConfig { - val readLinesLimit = f("lines") - val readBoolLimit = f("bool") - val readIntLimit = f("int") - val readFloatLimit = f("float") + val readLinesLimit = f("lines") + val readBoolLimit = f("bool") + val readIntLimit = f("int") + val readFloatLimit = f("float") val readStringLimit = f("string") - val readJsonLimit = f("json") - val readTsvLimit = f("tsv") - val readMapLimit = f("map") + val readJsonLimit = f("json") + val readTsvLimit = f("tsv") + val readMapLimit = f("map") val readObjectLimit = f("object") } } } - diff --git a/wdl/model/shared/src/main/scala/wdl/shared/model/expression/FileEvaluatorUtil.scala b/wdl/model/shared/src/main/scala/wdl/shared/model/expression/FileEvaluatorUtil.scala index cfadaed56f6..1fb4f0f12d3 100644 --- a/wdl/model/shared/src/main/scala/wdl/shared/model/expression/FileEvaluatorUtil.scala +++ b/wdl/model/shared/src/main/scala/wdl/shared/model/expression/FileEvaluatorUtil.scala @@ -11,12 +11,17 @@ object FileEvaluatorUtil { coercedValue match { case Success(f: WomFile) => Seq(f) case Success(a: WomArray) => - a.value.flatMap(findFilesToDelocalize(_, coerceTo, coerce=false)) + a.value.flatMap(findFilesToDelocalize(_, coerceTo, coerce = false)) case Success(m: WomMap) => - (m.value flatMap { case (k, v) => Seq(k, v) } flatMap(findFilesToDelocalize(_, coerceTo, coerce=false))).toSeq - case Success(WomOptionalValue(_, Some(v))) => findFilesToDelocalize(v, coerceTo, coerce=false) - case Success(WomPair(l, r)) => findFilesToDelocalize(l, coerceTo, coerce = false) ++ findFilesToDelocalize(r, coerceTo, coerce = false) - case Success(o: WomObject) => o.values.values.flatMap(inner => findFilesToDelocalize(inner, inner.womType, coerce = false)).toSeq + (m.value flatMap { case (k, v) => Seq(k, v) } flatMap (findFilesToDelocalize(_, + coerceTo, + coerce = false + ))).toSeq + case Success(WomOptionalValue(_, Some(v))) => findFilesToDelocalize(v, coerceTo, coerce = false) + case Success(WomPair(l, r)) => + findFilesToDelocalize(l, coerceTo, coerce = false) ++ findFilesToDelocalize(r, coerceTo, coerce = false) + case Success(o: WomObject) => + o.values.values.flatMap(inner => findFilesToDelocalize(inner, inner.womType, coerce = false)).toSeq case _ => Seq.empty[WomFile] } } diff 
--git a/wdl/model/shared/src/main/scala/wdl/shared/model/expression/ValueEvaluation.scala b/wdl/model/shared/src/main/scala/wdl/shared/model/expression/ValueEvaluation.scala index e8b4e72eccb..e15ac113e27 100644 --- a/wdl/model/shared/src/main/scala/wdl/shared/model/expression/ValueEvaluation.scala +++ b/wdl/model/shared/src/main/scala/wdl/shared/model/expression/ValueEvaluation.scala @@ -2,12 +2,27 @@ package wdl.shared.model.expression import spray.json.{JsArray, JsBoolean, JsNull, JsNumber, JsObject, JsString, JsValue} import wom.TsvSerializable -import wom.values.{WomArray, WomBoolean, WomFile, WomFloat, WomInteger, WomMap, WomObjectLike, WomOptionalValue, WomPair, WomString, WomValue} +import wom.values.{ + WomArray, + WomBoolean, + WomFile, + WomFloat, + WomInteger, + WomMap, + WomObjectLike, + WomOptionalValue, + WomPair, + WomString, + WomValue +} import scala.util.Try object ValueEvaluation { - def serializeWomValue[A <: WomValue with TsvSerializable](functionName: String, womValue: WomValue, defaultIfOptionalEmpty: A): Try[String] = { + def serializeWomValue[A <: WomValue with TsvSerializable](functionName: String, + womValue: WomValue, + defaultIfOptionalEmpty: A + ): Try[String] = { val wdlClass = defaultIfOptionalEmpty.getClass def castOrDefault(womValue: WomValue): A = womValue match { case WomOptionalValue(_, None) => defaultIfOptionalEmpty @@ -31,9 +46,10 @@ object ValueEvaluation { case WomArray(_, values) => JsArray(values.map(valueToJson).toVector) case WomMap(_, value) => JsObject(value map { case (k, v) => k.valueString -> valueToJson(v) }) case o: WomObjectLike => JsObject(o.values map { case (k, v) => k -> valueToJson(v) }) - case opt: WomOptionalValue => opt.value match { - case Some(inner) => valueToJson(inner) - case None => JsNull - } + case opt: WomOptionalValue => + opt.value match { + case Some(inner) => valueToJson(inner) + case None => JsNull + } } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/AstToNewExpressionElements.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/AstToNewExpressionElements.scala index b0c77565a77..52ac86b10a6 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/AstToNewExpressionElements.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/AstToNewExpressionElements.scala @@ -3,7 +3,7 @@ package wdl.transforms.biscayne.ast2wdlom import cats.syntax.validated._ import common.validation.ErrorOr.ErrorOr import wdl.model.draft3.elements.ExpressionElement -import wdl.model.draft3.elements.ExpressionElement.{Keys, AsMap, AsPairs, CollectByKey, Min, Max, Sep} +import wdl.model.draft3.elements.ExpressionElement.{AsMap, AsPairs, CollectByKey, Keys, Max, Min, Sep} import wdl.transforms.base.ast2wdlom.AstNodeToExpressionElement object AstToNewExpressionElements { @@ -12,15 +12,20 @@ object AstToNewExpressionElements { "as_map" -> AstNodeToExpressionElement.validateOneParamEngineFunction(AsMap, "as_map"), "as_pairs" -> AstNodeToExpressionElement.validateOneParamEngineFunction(AsPairs, "as_pairs"), "collect_by_key" -> AstNodeToExpressionElement.validateOneParamEngineFunction(CollectByKey, "collect_by_key"), - "min" -> AstNodeToExpressionElement.validateTwoParamEngineFunction(Min, "min"), "max" -> AstNodeToExpressionElement.validateTwoParamEngineFunction(Max, "max"), - "sep" -> AstNodeToExpressionElement.validateTwoParamEngineFunction(Sep, "sep"), - - "read_object" -> (_ => "read_object is no longer available in 
this WDL version. Consider using read_json instead".invalidNel), - "read_objects" -> (_ => "read_objects is no longer available in this WDL version. Consider using read_json instead".invalidNel), - "write_object" -> (_ => "write_object is no longer available in this WDL version. Consider using write_json instead".invalidNel), - "write_objects" -> (_ => "write_objects is no longer available in this WDL version. Consider using write_json instead".invalidNel), + "read_object" -> (_ => + "read_object is no longer available in this WDL version. Consider using read_json instead".invalidNel + ), + "read_objects" -> (_ => + "read_objects is no longer available in this WDL version. Consider using read_json instead".invalidNel + ), + "write_object" -> (_ => + "write_object is no longer available in this WDL version. Consider using write_json instead".invalidNel + ), + "write_objects" -> (_ => + "write_objects is no longer available in this WDL version. Consider using write_json instead".invalidNel + ) ) } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala index cdfbc8569b3..bf063b4ea70 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala @@ -5,7 +5,8 @@ import wdl.transforms.base.ast2wdlom.{GenericAst, GenericAstList, GenericAstNode import scala.jdk.CollectionConverters._ case class BiscayneGenericAst(ast: Ast) extends GenericAst { - override def getAttribute(attr: String): GenericAstNode = Option(ast.getAttribute(attr)).map(BiscayneGenericAstNode.apply).orNull + override def getAttribute(attr: String): GenericAstNode = + Option(ast.getAttribute(attr)).map(BiscayneGenericAstNode.apply).orNull override def getAttributes: Map[String, GenericAstNode] = ast.getAttributes.asScala.toMap collect { case (key, value) if value != null => key -> BiscayneGenericAstNode(value) } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala index 6bc3bcd94bc..1f84e5c142c 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala @@ -13,50 +13,81 @@ import wom.callable.MetaKvPair package object ast2wdlom { - val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck { a => BiscayneGenericAst(a).validNelCheck } - val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a => BiscayneGenericAstNode(a).validNelCheck } + val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck(a => BiscayneGenericAst(a).validNelCheck) + val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a => + BiscayneGenericAstNode(a).validNelCheck + } - implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = AstNodeToStaticString.astNodeToStaticStringElement() + implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = + AstNodeToStaticString.astNodeToStaticStringElement() // meta sections implicit val astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] = AstNodeToMetaKvPair.astNodeToMetaKvPair - implicit val 
astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement] = astNodeToAst andThen AstToMetaSectionElement.astToMetaSectionElement - implicit val astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] = astNodeToAst andThen AstToParameterMetaSectionElement.astToParameterMetaSectionElement + implicit val astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement] = + astNodeToAst andThen AstToMetaSectionElement.astToMetaSectionElement + implicit val astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] = + astNodeToAst andThen AstToParameterMetaSectionElement.astToParameterMetaSectionElement - implicit val astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] = AstNodeToExpressionElement.astNodeToExpressionElement(customEngineFunctionMakers = AstToNewExpressionElements.newBiscayneEngineFunctionMakers) - implicit val astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] = AstNodeToKvPair.astNodeToKvPair(astNodeToExpressionElement) + implicit val astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] = + AstNodeToExpressionElement.astNodeToExpressionElement(customEngineFunctionMakers = + AstToNewExpressionElements.newBiscayneEngineFunctionMakers + ) + implicit val astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] = + AstNodeToKvPair.astNodeToKvPair(astNodeToExpressionElement) - implicit val astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] = AstNodeToTypeElement.astNodeToTypeElement(Map.empty) + implicit val astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] = + AstNodeToTypeElement.astNodeToTypeElement(Map.empty) implicit val astToStructElement: CheckedAtoB[GenericAst, StructElement] = AstToStructElement.astToStructElement - implicit val astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement] = astNodeToAst andThen AstToImportElement.astToImportElement + implicit val astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement] = + astNodeToAst andThen AstToImportElement.astToImportElement - implicit val astNodeToInputDeclarationElement: CheckedAtoB[GenericAstNode, InputDeclarationElement] = astNodeToAst andThen AstToInputDeclarationElement.astToInputDeclarationElement - implicit val astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement] = astNodeToAst andThen AstToInputsSectionElement.astToInputsSectionElement + implicit val astNodeToInputDeclarationElement: CheckedAtoB[GenericAstNode, InputDeclarationElement] = + astNodeToAst andThen AstToInputDeclarationElement.astToInputDeclarationElement + implicit val astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement] = + astNodeToAst andThen AstToInputsSectionElement.astToInputsSectionElement - implicit val astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent] = astNodeToAst andThen AstToDeclarationContent.astToDeclarationContent - implicit val astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement] = astNodeToAst andThen AstToOutputsSectionElement.astToOutputSectionElement + implicit val astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent] = + astNodeToAst andThen AstToDeclarationContent.astToDeclarationContent + implicit val astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement] = + astNodeToAst andThen AstToOutputsSectionElement.astToOutputSectionElement val 
astToWorkflowGraphNodeElementConverterMaker = new AstToWorkflowGraphNodeElementConverterMaker() - implicit val astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] = astNodeToAst andThen astToWorkflowGraphNodeElementConverterMaker.converter - implicit val astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement] = astNodeToAst andThen AstToCallElement.astToCallElement - implicit val astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement] = astNodeToAst andThen AstToScatterElement.astToScatterElement - implicit val astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] = astNodeToAst andThen AstToIfElement.astToIfElement + implicit val astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] = + astNodeToAst andThen astToWorkflowGraphNodeElementConverterMaker.converter + implicit val astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement] = + astNodeToAst andThen AstToCallElement.astToCallElement + implicit val astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement] = + astNodeToAst andThen AstToScatterElement.astToScatterElement + implicit val astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] = + astNodeToAst andThen AstToIfElement.astToIfElement astToWorkflowGraphNodeElementConverterMaker.astNodeToScatterElement = Some(astNodeToScatterElement) astToWorkflowGraphNodeElementConverterMaker.astNodeToIfElement = Some(astNodeToIfElement) astToWorkflowGraphNodeElementConverterMaker.astNodeToCallElement = Some(astNodeToCallElement) astToWorkflowGraphNodeElementConverterMaker.astNodeToDeclarationContent = Some(astNodeToDeclarationContent) - implicit val astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] = astNodeToAst andThen AstToWorkflowBodyElement.astToWorkflowBodyElement - implicit val astToWorkflowDefinitionElement: CheckedAtoB[GenericAst, WorkflowDefinitionElement] = AstToWorkflowDefinitionElement.astToWorkflowDefinitionElement + implicit val astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] = + astNodeToAst andThen AstToWorkflowBodyElement.astToWorkflowBodyElement + implicit val astToWorkflowDefinitionElement: CheckedAtoB[GenericAst, WorkflowDefinitionElement] = + AstToWorkflowDefinitionElement.astToWorkflowDefinitionElement - implicit val astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] = astNodeToAstList andThen AstNodeToPlaceholderAttributeSet.attributeKvpConverter - implicit val astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] = AstNodeToCommandPartElement.astNodeToCommandPartElement - implicit val astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement] = astNodeToAst andThen AstToCommandSectionElement.astToCommandSectionElement - implicit val astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement] = astNodeToAst andThen AstToRuntimeAttributesSectionElement.astToRuntimeSectionElement - implicit val astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] = astNodeToAst andThen AstToTaskSectionElement.astToTaskSectionElement - implicit val astToTaskDefinitionElement: CheckedAtoB[GenericAst, TaskDefinitionElement] = AstToTaskDefinitionElement.astToTaskDefinitionElement + implicit val astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] = + astNodeToAstList andThen AstNodeToPlaceholderAttributeSet.attributeKvpConverter + implicit val 
astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] = + AstNodeToCommandPartElement.astNodeToCommandPartElement + implicit val astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement] = + astNodeToAst andThen AstToCommandSectionElement.astToCommandSectionElement + implicit val astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement] = + astNodeToAst andThen AstToRuntimeAttributesSectionElement.astToRuntimeSectionElement + implicit val astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] = + astNodeToAst andThen AstToTaskSectionElement.astToTaskSectionElement + implicit val astToTaskDefinitionElement: CheckedAtoB[GenericAst, TaskDefinitionElement] = + AstToTaskDefinitionElement.astToTaskDefinitionElement - implicit val astToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] = astNodeToAst andThen AstToFileBodyElement.astToFileBodyElement(astToWorkflowDefinitionElement, astToTaskDefinitionElement, astToStructElement) + implicit val astToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] = + astNodeToAst andThen AstToFileBodyElement.astToFileBodyElement(astToWorkflowDefinitionElement, + astToTaskDefinitionElement, + astToStructElement + ) implicit val astToFileElement: CheckedAtoB[GenericAst, FileElement] = AstToFileElement.astToFileElement implicit val fileToFileElement: CheckedAtoB[File, FileElement] = fileToAst andThen wrapAst andThen astToFileElement diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumers.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumers.scala index 99c929f9c4a..ad168067177 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumers.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumers.scala @@ -6,52 +6,64 @@ import wdl.model.draft3.graph.{ExpressionValueConsumer, UnlinkedConsumedValueHoo object BiscayneExpressionValueConsumers { implicit val keysExpressionValueConsumer: ExpressionValueConsumer[Keys] = new ExpressionValueConsumer[Keys] { - override def expressionConsumedValueHooks(a: Keys)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: Keys)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) - } } implicit val asMapExpressionValueConsumer: ExpressionValueConsumer[AsMap] = new ExpressionValueConsumer[AsMap] { - override def expressionConsumedValueHooks(a: AsMap)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: AsMap)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) - } } implicit val asPairsExpressionValueConsumer: ExpressionValueConsumer[AsPairs] = new ExpressionValueConsumer[AsPairs] { - override def expressionConsumedValueHooks(a: AsPairs)(implicit expressionValueConsumer: 
ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: AsPairs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) - } } - implicit val collectByKeyExpressionValueConsumer: ExpressionValueConsumer[CollectByKey] = new ExpressionValueConsumer[CollectByKey] { - override def expressionConsumedValueHooks(a: CollectByKey)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) + implicit val collectByKeyExpressionValueConsumer: ExpressionValueConsumer[CollectByKey] = + new ExpressionValueConsumer[CollectByKey] { + override def expressionConsumedValueHooks(a: CollectByKey)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) } - } implicit val minExpressionValueConsumer: ExpressionValueConsumer[Min] = new ExpressionValueConsumer[Min] { - override def expressionConsumedValueHooks(a: Min)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer.expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) - } + override def expressionConsumedValueHooks( + a: Min + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer + .expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) } implicit val maxExpressionValueConsumer: ExpressionValueConsumer[Max] = new ExpressionValueConsumer[Max] { - override def expressionConsumedValueHooks(a: Max)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer.expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) - } + override def expressionConsumedValueHooks( + a: Max + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer + .expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) } implicit val sepExpressionValueConsumer: ExpressionValueConsumer[Sep] = new ExpressionValueConsumer[Sep] { - override def expressionConsumedValueHooks(a: Sep)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: Sep)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer.expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) - } } - implicit val noneLiteralExpressionValueConsumer: ExpressionValueConsumer[NoneLiteralElement.type] = new ExpressionValueConsumer[NoneLiteralElement.type] { - 
override def expressionConsumedValueHooks(a: NoneLiteralElement.type)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - // None literals consume no values: - Set.empty[UnlinkedConsumedValueHook] + implicit val noneLiteralExpressionValueConsumer: ExpressionValueConsumer[NoneLiteralElement.type] = + new ExpressionValueConsumer[NoneLiteralElement.type] { + override def expressionConsumedValueHooks(a: NoneLiteralElement.type)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + // None literals consume no values: + Set.empty[UnlinkedConsumedValueHook] } - } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/consumed.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/consumed.scala index a370f42ca29..f70c3603b4a 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/consumed.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/consumed/consumed.scala @@ -14,97 +14,101 @@ import wdl.transforms.biscayne.linking.expression.consumed.BiscayneExpressionVal package object consumed { - implicit val expressionElementUnlinkedValueConsumer: ExpressionValueConsumer[ExpressionElement] = new ExpressionValueConsumer[ExpressionElement] { - override def expressionConsumedValueHooks(a: ExpressionElement)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = a match { - case _: PrimitiveLiteralExpressionElement | _: StringLiteral => Set.empty[UnlinkedConsumedValueHook] - case a: NoneLiteralElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: StringExpression => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ObjectLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: PairLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ArrayLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: MapLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Member access: - case a: IdentifierLookup => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: IdentifierMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ExpressionMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: IndexAccess => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Unary operators: - case a: UnaryNegation => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: UnaryPlus => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LogicalNot => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Binary operators: - case a: LogicalOr => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LogicalAnd => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Equals => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: NotEquals => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LessThan => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LessThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: GreaterThan => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: GreaterThanOrEquals => 
a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Add => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Subtract => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Multiply => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Divide => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Remainder => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: TernaryIf => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Engine functions: - case a: StdoutElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: StderrElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: ReadLines => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadTsv => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadMap => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadObject => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadObjects => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadJson => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadInt => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadString => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadFloat => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadBoolean => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteLines => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteTsv => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteMap => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteObject => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteObjects => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteJson => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Range => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Transpose => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Length => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Flatten => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Prefix => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: SelectFirst => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: SelectAll => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Defined => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Floor => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Ceil => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Round => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Glob => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: Size => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Basename => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: Zip => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Cross => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: Sub => a.expressionConsumedValueHooks(expressionValueConsumer) - - // New WDL biscayne expressions: - case a: Keys => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: AsMap => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: AsPairs => a.expressionConsumedValueHooks(expressionValueConsumer) - 
case a: CollectByKey => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Sep => sepExpressionValueConsumer.expressionConsumedValueHooks(a)(expressionValueConsumer) - - case a: Min => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Max => a.expressionConsumedValueHooks(expressionValueConsumer) - - case other => throw new Exception(s"Cannot generate consumed values for ExpressionElement ${other.getClass.getSimpleName}") + implicit val expressionElementUnlinkedValueConsumer: ExpressionValueConsumer[ExpressionElement] = + new ExpressionValueConsumer[ExpressionElement] { + override def expressionConsumedValueHooks(a: ExpressionElement)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = a match { + case _: PrimitiveLiteralExpressionElement | _: StringLiteral => Set.empty[UnlinkedConsumedValueHook] + case a: NoneLiteralElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: StringExpression => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ObjectLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: PairLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ArrayLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: MapLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Member access: + case a: IdentifierLookup => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: IdentifierMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ExpressionMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: IndexAccess => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Unary operators: + case a: UnaryNegation => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: UnaryPlus => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LogicalNot => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Binary operators: + case a: LogicalOr => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LogicalAnd => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Equals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: NotEquals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LessThan => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LessThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: GreaterThan => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: GreaterThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Add => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Subtract => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Multiply => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Divide => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Remainder => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: TernaryIf => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Engine functions: + case a: StdoutElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: StderrElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: ReadLines => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadTsv => a.expressionConsumedValueHooks(expressionValueConsumer) + case 
a: ReadMap => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadObject => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadObjects => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadJson => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadInt => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadString => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadFloat => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadBoolean => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteLines => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteTsv => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteMap => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteObject => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteObjects => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteJson => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Range => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Transpose => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Length => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Flatten => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Prefix => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: SelectFirst => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: SelectAll => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Defined => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Floor => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Ceil => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Round => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Glob => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: Size => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Basename => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: Zip => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Cross => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: Sub => a.expressionConsumedValueHooks(expressionValueConsumer) + + // New WDL biscayne expressions: + case a: Keys => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: AsMap => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: AsPairs => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: CollectByKey => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Sep => sepExpressionValueConsumer.expressionConsumedValueHooks(a)(expressionValueConsumer) + + case a: Min => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Max => a.expressionConsumedValueHooks(expressionValueConsumer) + + case other => + throw new Exception(s"Cannot generate consumed values for ExpressionElement ${other.getClass.getSimpleName}") + } } - } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluators.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluators.scala index 2e8fec5e815..fa968525bbc 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluators.scala +++ 
b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluators.scala @@ -8,9 +8,12 @@ import wdl.transforms.base.linking.expression.files.EngineFunctionEvaluators.two object BiscayneFileEvaluators { implicit val keysFileEvaluator: FileEvaluator[Keys] = EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator - implicit val asMapFileEvaluator: FileEvaluator[AsMap] = EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator - implicit val asPairsFileEvaluator: FileEvaluator[AsPairs] = EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator - implicit val collectByKeyFileEvaluator: FileEvaluator[CollectByKey] = EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator + implicit val asMapFileEvaluator: FileEvaluator[AsMap] = + EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator + implicit val asPairsFileEvaluator: FileEvaluator[AsPairs] = + EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator + implicit val collectByKeyFileEvaluator: FileEvaluator[CollectByKey] = + EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator implicit val sepFunctionEvaluator: FileEvaluator[Sep] = twoParameterFunctionPassthroughFileEvaluator[Sep] diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/files.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/files.scala index 00a9a89ccd6..d143e948272 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/files.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/files/files.scala @@ -21,82 +21,134 @@ package object files { implicit val expressionFileEvaluator: FileEvaluator[ExpressionElement] = new FileEvaluator[ExpressionElement] { - override def predictFilesNeededToEvaluate(a: ExpressionElement, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { - + override def predictFilesNeededToEvaluate(a: ExpressionElement, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = a match { // Literals: - case a: PrimitiveLiteralExpressionElement => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: StringLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ObjectLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: MapLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ArrayLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: PairLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: PrimitiveLiteralExpressionElement => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: StringLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ObjectLiteral => + 
a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: MapLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ArrayLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: PairLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Lookups and member accesses: - case a: IdentifierLookup => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ExpressionMemberAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: IdentifierMemberAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: IndexAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: IdentifierLookup => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ExpressionMemberAccess => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: IdentifierMemberAccess => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: IndexAccess => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Unary operators: - case a: UnaryNegation => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: UnaryPlus => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LogicalNot => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: UnaryNegation => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: UnaryPlus => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LogicalNot => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Binary operators (at some point we might want to split these into separate cases): - case a: LogicalOr => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LogicalAnd => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LogicalOr => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LogicalAnd => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Equals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: NotEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LessThan => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LessThanOrEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: GreaterThan => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: GreaterThanOrEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: 
NotEquals => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LessThan => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LessThanOrEquals => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: GreaterThan => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: GreaterThanOrEquals => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Add => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Subtract => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Multiply => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Subtract => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Multiply => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Divide => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Remainder => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Remainder => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: TernaryIf => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: TernaryIf => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Engine functions: - case StdoutElement => StdoutElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case StderrElement => StderrElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - - case a: ReadLines => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadTsv => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadObject => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadObjects => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadJson => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadInt => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadString => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadFloat => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadBoolean => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteLines => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteTsv => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, 
coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteObject => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteObjects => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteJson => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case StdoutElement => + StdoutElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case StderrElement => + StderrElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + + case a: ReadLines => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadTsv => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadMap => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadObject => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadObjects => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadJson => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadInt => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadString => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadFloat => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadBoolean => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteLines => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteTsv => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteMap => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteObject => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteObjects => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteJson => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Range => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Transpose => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Transpose => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Length => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Flatten => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Flatten => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Prefix => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: SelectFirst => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: SelectAll => 
a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Defined => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: SelectFirst => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: SelectAll => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Defined => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Floor => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Ceil => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Round => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Glob => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Size => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Basename => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Basename => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Zip => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Cross => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) @@ -105,8 +157,10 @@ package object files { case a: Keys => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: AsMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: AsPairs => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: CollectByKey => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: AsPairs => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: CollectByKey => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Sep => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Min => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) @@ -114,6 +168,5 @@ package object files { case other => s"No implementation of FileEvaluator[${other.getClass.getSimpleName}]".invalidNel } - } } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluators.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluators.scala index 173dac60f14..9a993f6b5b8 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluators.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluators.scala @@ -11,59 +11,64 @@ import wom.types._ object BiscayneTypeEvaluators { implicit val keysFunctionEvaluator: TypeEvaluator[Keys] = new TypeEvaluator[Keys] { - override def evaluateType(a: Keys, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: 
TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Keys, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomMapType(WomAnyType, WomAnyType)) flatMap { case WomMapType(keyType, _) => WomArrayType(keyType).validNel case other => s"Cannot invoke 'keys' on type '${other.stableName}'. Expected a map".invalidNel } - } } implicit val asMapFunctionEvaluator: TypeEvaluator[AsMap] = new TypeEvaluator[AsMap] { - override def evaluateType(a: AsMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: AsMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomArrayType(WomPairType(WomAnyType, WomAnyType))) flatMap { case WomArrayType(WomPairType(x: WomPrimitiveType, y)) => WomMapType(x, y).validNel - case other @ WomArrayType(WomPairType(x, _)) => s"Cannot invoke 'as_map' on type ${other.stableName}. Map keys must be primitive but got '${x.stableName}'".invalidNel + case other @ WomArrayType(WomPairType(x, _)) => + s"Cannot invoke 'as_map' on type ${other.stableName}. Map keys must be primitive but got '${x.stableName}'".invalidNel case other => s"Cannot invoke 'as_map' on type '${other.stableName}'. Expected an array of pairs".invalidNel } - } } implicit val asPairsFunctionEvaluator: TypeEvaluator[AsPairs] = new TypeEvaluator[AsPairs] { - override def evaluateType(a: AsPairs, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: AsPairs, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomMapType(WomAnyType, WomAnyType)) flatMap { case WomMapType(x, y) => WomArrayType(WomPairType(x, y)).validNel case other => s"Cannot invoke 'as_pairs' on type '${other.stableName}'. Expected a map".invalidNel } - } } implicit val collectByKeyFunctionEvaluator: TypeEvaluator[CollectByKey] = new TypeEvaluator[CollectByKey] { - override def evaluateType(a: CollectByKey, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: CollectByKey, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomArrayType(WomPairType(WomAnyType, WomAnyType))) flatMap { case WomArrayType(WomPairType(x: WomPrimitiveType, y)) => WomMapType(x, WomArrayType(y)).validNel - case other @ WomArrayType(WomPairType(x, _)) => s"Cannot invoke 'collect_by_key' on type ${other.stableName}. Map keys must be primitive but got '${x.stableName}'".invalidNel - case other => s"Cannot invoke 'collect_by_key' on type '${other.stableName}'. Expected an array of pairs".invalidNel + case other @ WomArrayType(WomPairType(x, _)) => + s"Cannot invoke 'collect_by_key' on type ${other.stableName}. 
Map keys must be primitive but got '${x.stableName}'".invalidNel + case other => + s"Cannot invoke 'collect_by_key' on type '${other.stableName}'. Expected an array of pairs".invalidNel } - } } - private def resultTypeOfIntVsFloat(functionName: String)(type1: WomType, type2: WomType): ErrorOr[WomType] = (type1, type2) match { - case (WomIntegerType, WomIntegerType) => WomIntegerType.validNel - case (WomIntegerType, WomFloatType) => WomFloatType.validNel - case (WomFloatType, WomIntegerType) => WomFloatType.validNel - case (WomFloatType, WomFloatType) => WomFloatType.validNel - case (other1, other2) => s"Cannot call '$functionName' with arguments (${other1.friendlyName}, ${other2.friendlyName}). Must be Int or Long.".invalidNel - } + private def resultTypeOfIntVsFloat(functionName: String)(type1: WomType, type2: WomType): ErrorOr[WomType] = + (type1, type2) match { + case (WomIntegerType, WomIntegerType) => WomIntegerType.validNel + case (WomIntegerType, WomFloatType) => WomFloatType.validNel + case (WomFloatType, WomIntegerType) => WomFloatType.validNel + case (WomFloatType, WomFloatType) => WomFloatType.validNel + case (other1, other2) => + s"Cannot call '$functionName' with arguments (${other1.friendlyName}, ${other2.friendlyName}). Must be Int or Float.".invalidNel + } implicit val minFunctionEvaluator: TypeEvaluator[Min] = new TypeEvaluator[Min] { - override def evaluateType(a: Min, - linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Min, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = { val type1 = expressionTypeEvaluator.evaluateType(a.arg1, linkedValues) val type2 = expressionTypeEvaluator.evaluateType(a.arg2, linkedValues) @@ -72,9 +77,9 @@ object BiscayneTypeEvaluators { } implicit val maxFunctionEvaluator: TypeEvaluator[Max] = new TypeEvaluator[Max] { - override def evaluateType(a: Max, - linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Max, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = { val type1 = expressionTypeEvaluator.evaluateType(a.arg1, linkedValues) val type2 = expressionTypeEvaluator.evaluateType(a.arg2, linkedValues) @@ -83,15 +88,16 @@ object BiscayneTypeEvaluators { } implicit val sepFunctionEvaluator: TypeEvaluator[Sep] = new TypeEvaluator[Sep] { - override def evaluateType(a: Sep, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Sep, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.arg2, linkedValues, WomArrayType(WomAnyType)) flatMap { - case WomArrayType(WomArrayType(_)) => s"Cannot invoke 'sep' on type 'Array[Array[_]]'. Expected an Array[String].".invalidNel + case WomArrayType(WomArrayType(_)) => + s"Cannot invoke 'sep' on type 'Array[Array[_]]'. 
Expected an Array[String].".invalidNel case WomArrayType(_) => WomStringType.validNel case other => s"Cannot invoke 'sep' on type '${other.stableName}'. Expected an Array[String].".invalidNel } - } } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/types.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/types.scala index 904baf2f076..0b61f7fd71c 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/types.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/types/types.scala @@ -18,9 +18,9 @@ import wdl.transforms.biscayne.linking.expression.types.BiscayneTypeEvaluators._ package object types { implicit val expressionTypeEvaluator: TypeEvaluator[ExpressionElement] = new TypeEvaluator[ExpressionElement] { - override def evaluateType(a: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit typeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { - + override def evaluateType(a: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit typeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = a match { // Literals: case a: PrimitiveLiteralExpressionElement => a.evaluateType(linkedValues)(typeEvaluator) @@ -111,8 +111,8 @@ package object types { case a: Min => a.evaluateType(linkedValues)(typeEvaluator) case a: Max => a.evaluateType(linkedValues)(typeEvaluator) - case other => s"Unable to process ${other.getClass.getSimpleName}: No evaluateType exists for that type.".invalidNel + case other => + s"Unable to process ${other.getClass.getSimpleName}: No evaluateType exists for that type.".invalidNel } - } } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluators.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluators.scala index b55afa916c2..bdb3516f787 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluators.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluators.scala @@ -9,40 +9,54 @@ import common.collections.EnhancedCollections._ import wdl.model.draft3.elements.ExpressionElement import wdl.model.draft3.elements.ExpressionElement._ import wdl.model.draft3.graph.expression.{EvaluatedValue, ForCommandInstantiationOptions, ValueEvaluator} -import wdl.transforms.base.linking.expression.values.EngineFunctionEvaluators.{processValidatedSingleValue, processTwoValidatedValues} +import wdl.transforms.base.linking.expression.values.EngineFunctionEvaluators.{ + processTwoValidatedValues, + processValidatedSingleValue +} import wom.expression.IoFunctionSet import wom.types._ -import wom.values.{WomArray, WomInteger, WomFloat, WomMap, WomOptionalValue, WomPair, WomValue, WomString} +import wom.values.{WomArray, WomFloat, WomInteger, WomMap, WomOptionalValue, WomPair, WomString, WomValue} import wom.types.coercion.defaults._ object BiscayneValueEvaluators { - implicit val noneLiteralEvaluator: ValueEvaluator[NoneLiteralElement.type] = new ValueEvaluator[ExpressionElement.NoneLiteralElement.type] { - override def evaluateValue(a: ExpressionElement.NoneLiteralElement.type, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, 
forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - EvaluatedValue( - value = WomOptionalValue(WomNothingType, None), - sideEffectFiles = Seq.empty).validNel + implicit val noneLiteralEvaluator: ValueEvaluator[NoneLiteralElement.type] = + new ValueEvaluator[ExpressionElement.NoneLiteralElement.type] { + override def evaluateValue(a: ExpressionElement.NoneLiteralElement.type, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + EvaluatedValue(value = WomOptionalValue(WomNothingType, None), sideEffectFiles = Seq.empty).validNel } - } implicit val asMapFunctionEvaluator: ValueEvaluator[AsMap] = new ValueEvaluator[AsMap] { - override def evaluateValue(a: AsMap, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - processValidatedSingleValue[WomArray, WomMap](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { + override def evaluateValue(a: AsMap, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + processValidatedSingleValue[WomArray, WomMap]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { case WomArray(WomArrayType(WomPairType(_: WomPrimitiveType, _)), values) => val validPairs: ErrorOr[List[(WomValue, WomValue)]] = values.toList traverse { case WomPair(l, r) => (l, r).validNel - case other => s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}]".invalidNel - } leftMap { - errors => NonEmptyList.fromListUnsafe(errors.toList.distinct) + case other => + s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}]".invalidNel + } leftMap { errors => + NonEmptyList.fromListUnsafe(errors.toList.distinct) } validPairs flatMap { pairs => val grouped = pairs.groupBy(_._1) val tooManyKeyErrors = grouped collect { - case (name, list) if list.length != 1 => s"keys can only appear once but ${name.toWomString} appeared ${list.size} times." + case (name, list) if list.length != 1 => + s"keys can only appear once but ${name.toWomString} appeared ${list.size} times." } if (tooManyKeyErrors.isEmpty) { - val pairs = grouped map { case (key, value) => (key -> value.head._2) } + val pairs = grouped map { case (key, value) => key -> value.head._2 } EvaluatedValue(WomMap(pairs), Seq.empty).validNel } else { @@ -50,49 +64,69 @@ object BiscayneValueEvaluators { } } - case WomArray(womType@WomArrayType(WomPairType(x, _)), _) => s"Cannot evaluate 'as_map' on type ${womType.stableName}. 
Keys must be primitive but got ${x.stableName}.".invalidNel - case other => s"Invalid call of 'as_map' on parameter of type '${other.womType.stableName}' (expected Array[Pair[X, Y]])".invalidNel - } (coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) - } + case WomArray(womType @ WomArrayType(WomPairType(x, _)), _) => + s"Cannot evaluate 'as_map' on type ${womType.stableName}. Keys must be primitive but got ${x.stableName}.".invalidNel + case other => + s"Invalid call of 'as_map' on parameter of type '${other.womType.stableName}' (expected Array[Pair[X, Y]])".invalidNel + }(coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) } implicit val keysFunctionEvaluator: ValueEvaluator[Keys] = new ValueEvaluator[Keys] { override def evaluateValue(a: Keys, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { - - processValidatedSingleValue[WomMap, WomArray](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { - case WomMap(WomMapType(keyType, _), values) => EvaluatedValue(WomArray(WomArrayType(keyType), values.keys.toList), Seq.empty).validNel - case other => s"Invalid call of 'keys' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = + processValidatedSingleValue[WomMap, WomArray]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { + case WomMap(WomMapType(keyType, _), values) => + EvaluatedValue(WomArray(WomArrayType(keyType), values.keys.toList), Seq.empty).validNel + case other => + s"Invalid call of 'keys' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel } - } } implicit val asPairsFunctionEvaluator: ValueEvaluator[AsPairs] = new ValueEvaluator[AsPairs] { - override def evaluateValue(a: AsPairs, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - processValidatedSingleValue[WomMap, WomArray](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { + override def evaluateValue(a: AsPairs, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + processValidatedSingleValue[WomMap, WomArray]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { case WomMap(WomMapType(keyType, valueType), values) => - val validPairs: List[WomPair] = values.toList map { - case (l, r) => WomPair(l, r) + val validPairs: List[WomPair] = values.toList map { case (l, r) => + WomPair(l, r) } EvaluatedValue(WomArray(WomArrayType(WomPairType(keyType, valueType)), validPairs), Seq.empty).validNel - case other => s"Invalid call of 'as_pairs' on 
parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel + case other => + s"Invalid call of 'as_pairs' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel } - } } implicit val collectByKeyFunctionEvaluator: ValueEvaluator[CollectByKey] = new ValueEvaluator[CollectByKey] { - override def evaluateValue(a: CollectByKey, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - processValidatedSingleValue[WomArray, WomMap](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { + override def evaluateValue(a: CollectByKey, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + processValidatedSingleValue[WomArray, WomMap]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { case WomArray(WomArrayType(WomPairType(_: WomPrimitiveType, _)), values) => val validPairs: ErrorOr[List[(WomValue, WomValue)]] = values.toList traverse { case WomPair(l, r) => (l, r).validNel - case other => s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}]".invalidNel + case other => + s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}]".invalidNel } validPairs flatMap { kvpairs => val grouped: Map[WomValue, WomArray] = kvpairs.groupBy(_._1).safeMapValues(v => WomArray(v.map(_._2))) @@ -100,22 +134,24 @@ object BiscayneValueEvaluators { } - case WomArray(womType@WomArrayType(WomPairType(x, _)), _) => s"Cannot evaluate 'collect_by_key' on type ${womType.stableName}. Keys must be primitive but got ${x.stableName}.".invalidNel - case other => s"Invalid call of 'collect_by_key' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel - } (coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) - } + case WomArray(womType @ WomArrayType(WomPairType(x, _)), _) => + s"Cannot evaluate 'collect_by_key' on type ${womType.stableName}. 
Keys must be primitive but got ${x.stableName}.".invalidNel + case other => + s"Invalid call of 'collect_by_key' on parameter of type '${other.womType.stableName}' (expected Array[Pair[X, Y]])".invalidNel + }(coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) } private def resultOfIntVsFloat(functionName: String, intFunc: (Int, Int) => Int, - doubleFunc: (Double, Double) => Double) - (value1: EvaluatedValue[_], value2: EvaluatedValue[_]): ErrorOr[EvaluatedValue[WomValue]] = { + doubleFunc: (Double, Double) => Double + )(value1: EvaluatedValue[_], value2: EvaluatedValue[_]): ErrorOr[EvaluatedValue[WomValue]] = { val newValue = (value1.value, value2.value) match { case (WomInteger(i1), WomInteger(i2)) => WomInteger(intFunc(i1, i2)).validNel case (WomInteger(i1), WomFloat(l2)) => WomFloat(doubleFunc(i1.doubleValue, l2)).validNel case (WomFloat(l1), WomInteger(i2)) => WomFloat(doubleFunc(l1, i2.doubleValue)).validNel case (WomFloat(l1), WomFloat(l2)) => WomFloat(doubleFunc(l1, l2)).validNel - case (other1, other2) => s"Invalid arguments to '$functionName':(${other1.typeName}, ${other2.typeName})".invalidNel + case (other1, other2) => + s"Invalid arguments to '$functionName':(${other1.typeName}, ${other2.typeName})".invalidNel } newValue map { v => EvaluatedValue(v, value1.sideEffectFiles ++ value2.sideEffectFiles) } } @@ -124,10 +160,16 @@ override def evaluateValue(a: Min, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { - val value1 = expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - val value2 = expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { + val value1 = + expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + val value2 = + expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) val intFunc = (i1: Int, i2: Int) => Math.min(i1, i2) val doubleFunc = (l1: Double, l2: Double) => Math.min(l1, l2) @@ -140,9 +182,16 @@ override def evaluateValue(a: Max, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { - val value1 = expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - val value2 = expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { + val value1 = + expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + val value2 = 
expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) val intFunc = (i1: Int, i2: Int) => Math.max(i1, i2) val doubleFunc = (l1: Double, l2: Double) => Math.max(l1, l2) @@ -155,15 +204,17 @@ object BiscayneValueEvaluators { override def evaluateValue(a: Sep, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = { - + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = processTwoValidatedValues[WomString, WomArray, WomString]( - expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator), - expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ), + expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) ) { (sepvalue, arr) => EvaluatedValue(WomString(arr.value.map(v => v.valueString).mkString(sepvalue.value)), Seq.empty).validNel } - } } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/values.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/values.scala index 49590a31ba6..338c8676906 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/values.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/linking/expression/values/values.scala @@ -24,94 +24,154 @@ package object values { override def evaluateValue(a: ExpressionElement, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = a match { // Literals: - case a: PrimitiveLiteralExpressionElement => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: NoneLiteralElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - - case a: StringLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: StringExpression => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ObjectLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: MapLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ArrayLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: PairLiteral => a.evaluateValue(inputs, ioFunctionSet, 
forCommandInstantiationOptions)(expressionValueEvaluator) + case a: PrimitiveLiteralExpressionElement => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: NoneLiteralElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + + case a: StringLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: StringExpression => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ObjectLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: MapLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ArrayLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: PairLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Lookups and member accesses: - case a: IdentifierLookup => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ExpressionMemberAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: IdentifierMemberAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: IndexAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: IdentifierLookup => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ExpressionMemberAccess => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: IdentifierMemberAccess => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: IndexAccess => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Unary operators: - case a: UnaryNegation => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: UnaryPlus => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LogicalNot => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: UnaryNegation => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: UnaryPlus => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LogicalNot => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Binary operators (at some point we might want to split these into separate cases): - case a: LogicalOr => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LogicalAnd => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Equals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: NotEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LessThan => a.evaluateValue(inputs, ioFunctionSet, 
forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LessThanOrEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: GreaterThan => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: GreaterThanOrEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LogicalOr => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LogicalAnd => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Equals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: NotEquals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LessThan => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LessThanOrEquals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: GreaterThan => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: GreaterThanOrEquals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Add => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Subtract => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Multiply => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Divide => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Remainder => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - - case a: TernaryIf => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Subtract => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Multiply => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Divide => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Remainder => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + + case a: TernaryIf => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Engine functions: - case a: StdoutElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: StderrElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - - case a: ReadLines => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadTsv => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadMap => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadObject => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadObjects => a.evaluateValue(inputs, 
ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadJson => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadInt => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadString => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadFloat => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadBoolean => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteLines => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteTsv => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteMap => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteObject => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteObjects => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteJson => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Range => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Transpose => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Length => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Flatten => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Prefix => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: SelectFirst => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: SelectAll => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Defined => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Floor => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: StdoutElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: StderrElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + + case a: ReadLines => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadTsv => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadMap => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadObject => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadObjects => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadJson => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadInt => + a.evaluateValue(inputs, ioFunctionSet, 
forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadString => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadFloat => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadBoolean => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteLines => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteTsv => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteMap => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteObject => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteObjects => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteJson => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Range => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Transpose => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Length => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Flatten => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Prefix => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: SelectFirst => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: SelectAll => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Defined => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Floor => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Ceil => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Round => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Round => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Glob => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Size => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Basename => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Basename => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Zip => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Cross => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Cross => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Sub => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Keys => 
a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: AsMap => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: AsPairs => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: CollectByKey => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: AsMap => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: AsPairs => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: CollectByKey => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Sep => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Min => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) @@ -119,6 +179,5 @@ package object values { case other => s"Unable to process ${other.toWdlV1}: No evaluateValue exists for that type in WDL 1.1".invalidNel } - } } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala index d0d7ad82f89..5e989e7d235 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala @@ -15,7 +15,7 @@ object StringParser { def convert(a: FileStringParserInput): Checked[Ast] = Try { val parser = new WdlParser() val tokens = parser.lex(a.workflowSource, a.resource) - val terminalMap = (tokens.asScala.toVector map {(_, a.workflowSource)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, a.workflowSource) }).toMap val syntaxErrorFormatter = WdlBiscayneSyntaxErrorFormatter(terminalMap) parser.parse(tokens, syntaxErrorFormatter).toAst.asInstanceOf[Ast] }.toChecked diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala index fd8c7120c5e..5946eecba25 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala @@ -13,52 +13,56 @@ case class WdlBiscayneSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSo case classicTerminal => terminalMap.get(classicTerminal) } - private def line(t: Terminal): String = getTerminal(t).map(_.split("\n")(t.getLine - 1)).getOrElse(s"Cannot highlight line. It was probably in an imported file.") + private def line(t: Terminal): String = getTerminal(t) + .map(_.split("\n")(t.getLine - 1)) + .getOrElse(s"Cannot highlight line. 
It was probably in an imported file.") - def unexpectedEof(method: String, expected: java.util.List[TerminalIdentifier], nt_rules: java.util.List[String]): String = "ERROR: Unexpected end of file" + def unexpectedEof(method: String, + expected: java.util.List[TerminalIdentifier], + nt_rules: java.util.List[String] + ): String = "ERROR: Unexpected end of file" - def excessTokens(method: String, terminal: Terminal): String = { + def excessTokens(method: String, terminal: Terminal): String = s"""ERROR: Finished parsing without consuming all tokens. | - |${pointToSource(terminal)} + |${pointToSource(terminal)} """.stripMargin - } - def unexpectedSymbol(method: String, actual: Terminal, expected: java.util.List[TerminalIdentifier], rule: String): String = { + def unexpectedSymbol(method: String, + actual: Terminal, + expected: java.util.List[TerminalIdentifier], + rule: String + ): String = { val expectedTokens = expected.asScala.map(_.string).mkString(", ") s"""ERROR: Unexpected symbol (line ${actual.getLine}, col ${actual.getColumn}) when parsing '$method'. | - |Expected $expectedTokens, got "${actual.getSourceString}". + |Expected $expectedTokens, got "${actual.getSourceString}". | - |${pointToSource(actual)} + |${pointToSource(actual)} | - |$rule + |$rule """.stripMargin } - def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = { + def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = s"""ERROR: No more tokens. Expecting ${expecting.string} | - |${pointToSource(last)} + |${pointToSource(last)} """.stripMargin - } - def invalidTerminal(method: String, invalid: Terminal): String = { + def invalidTerminal(method: String, invalid: Terminal): String = s"""ERROR: Invalid symbol ID: ${invalid.getId} (${invalid.getTerminalStr}) | - |${pointToSource(invalid)} + |${pointToSource(invalid)} """.stripMargin - } // TODO: these next two methods won't be called by the parser because there are no lists in the WDL grammar that // cause these to be triggered. Currently the parser is passing in 'null' for the value of 'last' and when that // changes, these errors can be made more helpful. 
- def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = { + def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = s"ERROR: $method requires $required items, but only found $found" - } - def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = { + def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = s"ERROR: $method requires a terminator after each element" - } } diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/wdlom2wom/package.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/wdlom2wom/package.scala index f1fd68f86eb..ef1f719a00e 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/wdlom2wom/package.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/wdlom2wom/package.scala @@ -3,7 +3,12 @@ package wdl.transforms.biscayne import common.transforms.CheckedAtoB import wdl.transforms.base.wdlom2wom.TaskDefinitionElementToWomTaskDefinition.TaskDefinitionElementToWomInputs import wdl.transforms.base.wdlom2wom.WorkflowDefinitionElementToWomWorkflowDefinition.WorkflowDefinitionConvertInputs -import wdl.transforms.base.wdlom2wom.{FileElementToWomBundle, FileElementToWomBundleInputs, TaskDefinitionElementToWomTaskDefinition, WorkflowDefinitionElementToWomWorkflowDefinition} +import wdl.transforms.base.wdlom2wom.{ + FileElementToWomBundle, + FileElementToWomBundleInputs, + TaskDefinitionElementToWomTaskDefinition, + WorkflowDefinitionElementToWomWorkflowDefinition +} import wom.callable.{CallableTaskDefinition, WorkflowDefinition} import wom.executable.WomBundle import wdl.transforms.biscayne.linking.expression.consumed._ @@ -12,7 +17,11 @@ import wdl.transforms.biscayne.linking.expression.types._ import wdl.transforms.biscayne.linking.expression.values._ package object wdlom2wom { - val taskDefinitionElementToWomTaskDefinition: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] = CheckedAtoB.fromErrorOr(TaskDefinitionElementToWomTaskDefinition.convert) - val workflowDefinitionElementToWomWorkflowDefinition: CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition] = CheckedAtoB.fromErrorOr(WorkflowDefinitionElementToWomWorkflowDefinition.convert) - val fileElementToWomBundle: CheckedAtoB[FileElementToWomBundleInputs, WomBundle] = CheckedAtoB.fromCheck(FileElementToWomBundle.convert) + val taskDefinitionElementToWomTaskDefinition: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] = + CheckedAtoB.fromErrorOr(TaskDefinitionElementToWomTaskDefinition.convert) + val workflowDefinitionElementToWomWorkflowDefinition + : CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition] = + CheckedAtoB.fromErrorOr(WorkflowDefinitionElementToWomWorkflowDefinition.convert) + val fileElementToWomBundle: CheckedAtoB[FileElementToWomBundleInputs, WomBundle] = + CheckedAtoB.fromCheck(FileElementToWomBundle.convert) } diff --git a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala index e5d7a2f9249..4fde60b1efd 100644 --- a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala +++ b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala @@ -27,14 +27,14 @@ object Ast2WdlomSpec { val parser = new WdlParser() def fromString[A](expression: String, - 
parseFunction: (util.List[WdlParser.Terminal], SyntaxErrorFormatter) => ParseTree) - (implicit converter: CheckedAtoB[GenericAstNode, A]): Checked[A] = { + parseFunction: (util.List[WdlParser.Terminal], SyntaxErrorFormatter) => ParseTree + )(implicit converter: CheckedAtoB[GenericAstNode, A]): Checked[A] = { // Add the "version development" to force the lexer into "main" mode. val versionedExpression = "version development-1.1\n" + expression // That "version development" means we'll have 2 unwanted tokens at the start of the list, so drop 'em: val tokens = parser.lex(versionedExpression, "string").asScala.drop(2).asJava - val terminalMap = (tokens.asScala.toVector map {(_, versionedExpression)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, versionedExpression) }).toMap val parseTree = parseFunction(tokens, WdlBiscayneSyntaxErrorFormatter(terminalMap)) (wrapAstNode andThen converter).run(parseTree.toAst) } @@ -42,18 +42,20 @@ object Ast2WdlomSpec { class Ast2WdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - - it should "parse a simple expression" in { val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid Add(PrimitiveLiteralExpressionElement(WomInteger(3)), PrimitiveLiteralExpressionElement(WomInteger(3))) + expr shouldBeValid Add(PrimitiveLiteralExpressionElement(WomInteger(3)), + PrimitiveLiteralExpressionElement(WomInteger(3)) + ) } it should "parse a map expression" in { val str = "{3: 3}" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid MapLiteral(Map(PrimitiveLiteralExpressionElement(WomInteger(3)) -> PrimitiveLiteralExpressionElement(WomInteger(3)))) + expr shouldBeValid MapLiteral( + Map(PrimitiveLiteralExpressionElement(WomInteger(3)) -> PrimitiveLiteralExpressionElement(WomInteger(3))) + ) } it should "parse a simple meta section" in { @@ -65,33 +67,34 @@ class Ast2WdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { it should "parse a struct element" in { val str = "struct Foo { Int five\n Int six }" val struct = fromString[StructElement](str, parser.parse_struct)(astNodeToAst andThen astToStructElement) - struct shouldBeValid StructElement("Foo", Seq( - StructEntryElement("five", PrimitiveTypeElement(WomIntegerType)), - StructEntryElement("six", PrimitiveTypeElement(WomIntegerType))) + struct shouldBeValid StructElement("Foo", + Seq(StructEntryElement("five", PrimitiveTypeElement(WomIntegerType)), + StructEntryElement("six", PrimitiveTypeElement(WomIntegerType)) + ) ) } it should "parse the new as_map function" in { val str = "as_map(some_pairs)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(AsMap(IdentifierLookup("some_pairs"))) + expr shouldBeValid (AsMap(IdentifierLookup("some_pairs"))) } it should "parse the new as_pairs function" in { val str = "as_pairs(some_map)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(AsPairs(IdentifierLookup("some_map"))) + expr shouldBeValid (AsPairs(IdentifierLookup("some_map"))) } it should "parse the new collect_by_key function" in { val str = "collect_by_key(some_map)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(CollectByKey(IdentifierLookup("some_map"))) + expr shouldBeValid (CollectByKey(IdentifierLookup("some_map"))) } it should "parse the new None keyword" in { val str = "None" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(NoneLiteralElement) + expr 
shouldBeValid NoneLiteralElement } } diff --git a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/ast2wdlom/WdlFileToWdlomSpec.scala b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/ast2wdlom/WdlFileToWdlomSpec.scala index 31e09612000..20e940dab39 100644 --- a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/ast2wdlom/WdlFileToWdlomSpec.scala +++ b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/ast2wdlom/WdlFileToWdlomSpec.scala @@ -12,7 +12,6 @@ import wom.SourceFileLocation import wom.types._ import wom.values.WomInteger - class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "WDL File to WDLOM" @@ -25,12 +24,11 @@ class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } testCases.list.filter(x => x.isRegularFile && x.extension.contains(".wdl")) foreach { testCase => - val fileName = testCase.name val testName = testCase.name.split("\\.").head val itShouldString = s"create the correct Element structure for $fileName" - val testOrIgnore: (=>Any) => Unit = if (fileName.endsWith(".ignored.wdl")) { + val testOrIgnore: (=> Any) => Unit = if (fileName.endsWith(".ignored.wdl")) { (it should itShouldString).ignore _ } else { (it should itShouldString).in _ @@ -42,7 +40,8 @@ class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match fileToFileElement.run(testCase) match { case Right(actual) => actual shouldBe expected case Left(errors) => - val formattedErrors = errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) + val formattedErrors = + errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) fail(s"Failed to create WDLOM:$formattedErrors") } @@ -57,21 +56,23 @@ object WdlFileToWdlomSpec { FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "no_input_no_output", - inputsSection = None, - graphElements = Set( - CallElement("no_inputs", Some("noi1"), Vector.empty, None, Some(SourceFileLocation(4))), - CallElement("no_inputs", None, Vector.empty, None, Some(SourceFileLocation(6))), - CallElement("no_inputs", Some("noi2"), Vector.empty, None, Some(SourceFileLocation(8))), - CallElement("no_inputs", Some("noi3"), Vector.empty, None, Some(SourceFileLocation(10))), - CallElement("no_inputs", Some("noi4"), Vector.empty, None, Some(SourceFileLocation(14))), - CallElement("no_inputs", Some("noi5"), Vector.empty, None, Some(SourceFileLocation(18))) - ), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3))) + workflows = Vector( + WorkflowDefinitionElement( + name = "no_input_no_output", + inputsSection = None, + graphElements = Set( + CallElement("no_inputs", Some("noi1"), Vector.empty, None, Some(SourceFileLocation(4))), + CallElement("no_inputs", None, Vector.empty, None, Some(SourceFileLocation(6))), + CallElement("no_inputs", Some("noi2"), Vector.empty, None, Some(SourceFileLocation(8))), + CallElement("no_inputs", Some("noi3"), Vector.empty, None, Some(SourceFileLocation(10))), + CallElement("no_inputs", Some("noi4"), Vector.empty, None, Some(SourceFileLocation(14))), + CallElement("no_inputs", Some("noi5"), Vector.empty, None, Some(SourceFileLocation(18))) + ), + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) ), tasks = Vector( TaskDefinitionElement( @@ 
-79,7 +80,8 @@ object WdlFileToWdlomSpec { inputsSection = None, declarations = Vector.empty, outputsSection = None, - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), + commandSection = + CommandSectionElement(Vector(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), runtimeSection = None, metaSection = None, parameterMetaSection = None, @@ -91,122 +93,280 @@ object WdlFileToWdlomSpec { FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "order", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "n", Some(PrimitiveLiteralExpressionElement(WomInteger(4)))), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more")))))), - graphElements = Set(CallElement("in_n_out", None, Vector.empty, Some(CallBodyElement(Vector(KvPair("total", IdentifierLookup("n")), KvPair("amount", IdentifierLookup("more"))))), Some(SourceFileLocation(19)))), - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "out", IdentifierMemberAccess("in_n_out", "out", List.empty))))), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(14))) + workflows = Vector( + WorkflowDefinitionElement( + name = "order", + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "n", + Some(PrimitiveLiteralExpressionElement(WomInteger(4))) + ), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more"))) + ) + ) + ), + graphElements = Set( + CallElement( + "in_n_out", + None, + Vector.empty, + Some( + CallBodyElement( + Vector(KvPair("total", IdentifierLookup("n")), KvPair("amount", IdentifierLookup("more"))) + ) + ), + Some(SourceFileLocation(19)) + ) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "out", + IdentifierMemberAccess("in_n_out", "out", List.empty) + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(14)) + ) ), - tasks = Vector(TaskDefinitionElement( - name = "in_n_out", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "total", None), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "amount", None)))), - declarations = Vector.empty, - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "out", Add(ReadInt(StdoutElement), PrimitiveLiteralExpressionElement(WomInteger(1))))))), - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(IdentifierLookup("total"), PlaceholderAttributeSet.empty), - StringCommandPartElement(" ") - )))), - runtimeSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)) - )) + tasks = Vector( + TaskDefinitionElement( + name = "in_n_out", + inputsSection = Some( + InputsSectionElement( + Vector(InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "total", None), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "amount", None) + ) + ) + ), + declarations = Vector.empty, + 
outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "out", + Add(ReadInt(StdoutElement), PrimitiveLiteralExpressionElement(WomInteger(1))) + ) + ) + ) + ), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo "), + PlaceholderCommandPartElement(IdentifierLookup("total"), PlaceholderAttributeSet.empty), + StringCommandPartElement(" ") + ) + ) + ) + ), + runtimeSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ) ), "afters" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "afters", - inputsSection = None, - graphElements = Set( - CallElement("foo", None, Vector.empty, Some(CallBodyElement(Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(5)))))), - Some(SourceFileLocation(4))), - CallElement("foo", Some("foo2"), Vector("foo"), Some(CallBodyElement(Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(6)))))), - Some(SourceFileLocation(5))) - ), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3))) + workflows = Vector( + WorkflowDefinitionElement( + name = "afters", + inputsSection = None, + graphElements = Set( + CallElement( + "foo", + None, + Vector.empty, + Some( + CallBodyElement( + Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(5)))) + ) + ), + Some(SourceFileLocation(4)) + ), + CallElement( + "foo", + Some("foo2"), + Vector("foo"), + Some( + CallBodyElement( + Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(6)))) + ) + ), + Some(SourceFileLocation(5)) + ) + ), + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) ), - tasks = Vector(TaskDefinitionElement( - name = "foo", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", None)))), - declarations = Vector.empty, - outputsSection = None, - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector( - StringCommandPartElement("cat \"hello "), - PlaceholderCommandPartElement(IdentifierLookup("i"), PlaceholderAttributeSet.empty), - StringCommandPartElement("\" > /tmp/helloFile") - )))), - runtimeSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(8)) - )) + tasks = Vector( + TaskDefinitionElement( + name = "foo", + inputsSection = Some( + InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", None))) + ), + declarations = Vector.empty, + outputsSection = None, + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("cat \"hello "), + PlaceholderCommandPartElement(IdentifierLookup("i"), PlaceholderAttributeSet.empty), + StringCommandPartElement("\" > /tmp/helloFile") + ) + ) + ) + ), + runtimeSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(8)) + ) + ) ), "biscayne_escaping" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "escapes", - inputsSection = None, - graphElements = Set( - 
IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"backslash",StringExpression(Vector(StringLiteral(" "), BackslashEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"n",StringExpression(Vector(StringLiteral(" "), NewlineEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"t",StringExpression(Vector(StringLiteral(" "), TabEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q1",StringExpression(Vector(StringLiteral("leading text "), DoubleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q2",StringLiteral("\"")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q3",StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q4",StringExpression(Vector(StringLiteral("leading text "), SingleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q5",StringLiteral("'")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q6",StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq1",StringExpression(Vector(StringLiteral("leading text "), DoubleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq2",StringLiteral("\"")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq3",StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq4",StringExpression(Vector(StringLiteral("leading text "), SingleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq5",StringLiteral("'")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq6",StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomStringType), - "octal_hello", - StringExpression(Vector(UnicodeCharacterEscape(104), UnicodeCharacterEscape(101), UnicodeCharacterEscape(108), UnicodeCharacterEscape(108), UnicodeCharacterEscape(111))) - ), - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomStringType), - "hex_hello", - StringExpression(Vector(UnicodeCharacterEscape(104), UnicodeCharacterEscape(101), UnicodeCharacterEscape(108), UnicodeCharacterEscape(108), UnicodeCharacterEscape(111))) + workflows = Vector( + WorkflowDefinitionElement( + name = "escapes", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "backslash", + StringExpression(Vector(StringLiteral(" "), BackslashEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "n", + StringExpression(Vector(StringLiteral(" "), NewlineEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "t", + StringExpression(Vector(StringLiteral(" "), TabEscape, StringLiteral(" "))) + ), + 
IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "q1", + StringExpression( + Vector(StringLiteral("leading text "), + DoubleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "q2", StringLiteral("\"")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "q3", + StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "q4", + StringExpression( + Vector(StringLiteral("leading text "), + SingleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "q5", StringLiteral("'")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "q6", + StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "sq1", + StringExpression( + Vector(StringLiteral("leading text "), + DoubleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "sq2", StringLiteral("\"")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "sq3", + StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "sq4", + StringExpression( + Vector(StringLiteral("leading text "), + SingleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "sq5", StringLiteral("'")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "sq6", + StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "octal_hello", + StringExpression( + Vector(UnicodeCharacterEscape(104), + UnicodeCharacterEscape(101), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(111) + ) + ) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "hex_hello", + StringExpression( + Vector(UnicodeCharacterEscape(104), + UnicodeCharacterEscape(101), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(111) + ) + ) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "unicode_hello", + StringExpression( + Vector( + UnicodeCharacterEscape(104), + UnicodeCharacterEscape(101), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(111) + ) + ) + ) ), - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomStringType), - "unicode_hello", - StringExpression(Vector( - UnicodeCharacterEscape(104), - UnicodeCharacterEscape(101), - UnicodeCharacterEscape(108), - UnicodeCharacterEscape(108), - UnicodeCharacterEscape(111) - )) - ) - ), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)) - )), - tasks = Vector.empty) + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = Vector.empty + ) ) } diff --git 
a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumersSpec.scala b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumersSpec.scala index 2e1e9a8cb52..11ca72b3a49 100644 --- a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumersSpec.scala +++ b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/consumed/BiscayneExpressionValueConsumersSpec.scala @@ -17,8 +17,8 @@ class BiscayneExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set.empty) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set.empty) } } @@ -26,8 +26,8 @@ class BiscayneExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "3 + three" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("three"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("three"))) } } @@ -35,8 +35,8 @@ class BiscayneExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "as_map(my_task.out)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) } } @@ -44,8 +44,8 @@ class BiscayneExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "as_pairs(as_map(my_task.out))" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) } } @@ -53,8 +53,8 @@ class BiscayneExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = """ sep(my_separator, ["a", "b", c]) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("my_separator"), UnlinkedIdentifierHook("c"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("my_separator"), UnlinkedIdentifierHook("c"))) } } } diff --git a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluatorSpec.scala b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluatorSpec.scala index 35ffa84ca1b..59ebfd18dc4 100644 --- a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluatorSpec.scala +++ b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/files/BiscayneFileEvaluatorSpec.scala @@ -18,8 +18,8 @@ class BiscayneFileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val 
str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomIntegerType) shouldBeValid Set.empty + expr.shouldBeValidPF { case e => + e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomIntegerType) shouldBeValid Set.empty } } @@ -27,8 +27,11 @@ class BiscayneFileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """as_pairs(read_map("my_map.txt"))""" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomArrayType(WomPairType(WomStringType, WomStringType))) shouldBeValid Set(WomSingleFile("my_map.txt")) + expr.shouldBeValidPF { case e => + e.predictFilesNeededToEvaluate(Map.empty, + NoIoFunctionSet, + WomArrayType(WomPairType(WomStringType, WomStringType)) + ) shouldBeValid Set(WomSingleFile("my_map.txt")) } } @@ -36,8 +39,10 @@ class BiscayneFileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """ sep(' ', read_lines("foo.txt")) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomStringType) shouldBeValid Set(WomSingleFile("foo.txt")) + expr.shouldBeValidPF { case e => + e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomStringType) shouldBeValid Set( + WomSingleFile("foo.txt") + ) } } } diff --git a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluatorSpec.scala b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluatorSpec.scala index a2b7d60a216..0593605bb2b 100644 --- a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluatorSpec.scala +++ b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/types/BiscayneTypeEvaluatorSpec.scala @@ -15,8 +15,8 @@ class BiscayneTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomIntegerType + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomIntegerType } } @@ -24,8 +24,8 @@ class BiscayneTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """as_map([(1,2), (3,4)])""" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomMapType(WomIntegerType, WomIntegerType) + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomMapType(WomIntegerType, WomIntegerType) } } @@ -33,8 +33,8 @@ class BiscayneTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """as_pairs({ "one": 1, "two": 2, "three": 3 })""" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomArrayType(WomPairType(WomStringType, WomIntegerType)) + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomArrayType(WomPairType(WomStringType, WomIntegerType)) } } @@ -42,8 +42,8 @@ class BiscayneTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """ sep(' ', ["a", "b", "c"]) """ val expr = fromString[ExpressionElement](str, 
parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomStringType + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomStringType } } @@ -51,8 +51,8 @@ class BiscayneTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """ sep(' ', prefix("-i ", ["a", "b", "c"])) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomStringType + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomStringType } } diff --git a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluatorSpec.scala b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluatorSpec.scala index 8cab2b627f9..53660a407d9 100644 --- a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluatorSpec.scala +++ b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/linking/expression/values/BiscayneValueEvaluatorSpec.scala @@ -23,8 +23,8 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomInteger(6), Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomInteger(6), Seq.empty) } } @@ -32,14 +32,16 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ as_map( [("x", 1), ("y", 2), ("z", 3)] ) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedMap: WomMap = WomMap(Map ( - WomString("x") -> WomInteger(1), - WomString("y") -> WomInteger(2), - WomString("z") -> WomInteger(3) - )) + val expectedMap: WomMap = WomMap( + Map( + WomString("x") -> WomInteger(1), + WomString("y") -> WomInteger(2), + WomString("z") -> WomInteger(3) + ) + ) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) } } @@ -50,14 +52,16 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expr = fromString[ExpressionElement](str, parser.parse_e) val inputs = Map("three" -> WomString("three")) - val expectedPairs: WomArray = WomArray(Seq( - WomPair(WomInteger(1), WomString("one")), - WomPair(WomInteger(2), WomString("two")), - WomPair(WomInteger(3), WomString("three")) - )) - - expr.shouldBeValidPF { - case e => e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) + val expectedPairs: WomArray = WomArray( + Seq( + WomPair(WomInteger(1), WomString("one")), + WomPair(WomInteger(2), WomString("two")), + WomPair(WomInteger(3), WomString("three")) + ) + ) + + expr.shouldBeValidPF { case e => + e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) } () } @@ -73,16 +77,18 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ as_pairs(as_map(echo_me)) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedPairs: WomArray = 
WomArray(Seq( - WomPair(WomInteger(1), WomString("one")), - WomPair(WomInteger(2), WomString("two")), - WomPair(WomInteger(3), WomString("three")) - )) + val expectedPairs: WomArray = WomArray( + Seq( + WomPair(WomInteger(1), WomString("one")), + WomPair(WomInteger(2), WomString("two")), + WomPair(WomInteger(3), WomString("three")) + ) + ) val inputs = Map("echo_me" -> expectedPairs) - expr.shouldBeValidPF { - case e => e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) } () } @@ -95,8 +101,11 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ as_map( [("x", 1), ("y", 2), ("x", 3)] ) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None).shouldBeInvalid("""Cannot evaluate 'as_map' with duplicated keys: keys can only appear once but "x" appeared 2 times.""") + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) + .shouldBeInvalid( + """Cannot evaluate 'as_map' with duplicated keys: keys can only appear once but "x" appeared 2 times.""" + ) } () } @@ -109,13 +118,15 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ collect_by_key( [("x", 1), ("y", 2), ("x", 3)] ) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedMap: WomMap = WomMap(Map( - WomString("x") -> WomArray(Seq(WomInteger(1), WomInteger(3))), - WomString("y") -> WomArray(Seq(WomInteger(2))) - )) + val expectedMap: WomMap = WomMap( + Map( + WomString("x") -> WomArray(Seq(WomInteger(1), WomInteger(3))), + WomString("y") -> WomArray(Seq(WomInteger(2))) + ) + ) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) } () } @@ -129,13 +140,16 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi ) val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedMap: WomMap = WomMap(WomMapType(WomStringType, WomOptionalType(WomStringType)), Map ( - WomString("i") -> WomOptionalValue(WomStringType, Some(WomString("1"))), - WomString("s") -> WomOptionalValue(WomStringType, Some(WomString("two"))) - )) + val expectedMap: WomMap = WomMap( + WomMapType(WomStringType, WomOptionalType(WomStringType)), + Map( + WomString("i") -> WomOptionalValue(WomStringType, Some(WomString("1"))), + WomString("s") -> WomOptionalValue(WomStringType, Some(WomString("two"))) + ) + ) - expr.shouldBeValidPF { - case e => e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) } } @@ -158,8 +172,8 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expectedEvaluation = WomString(expected) val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedEvaluation, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, 
NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedEvaluation, Seq.empty) } } } @@ -171,8 +185,8 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expectedString: WomString = WomString("a b c") - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) } } @@ -182,8 +196,8 @@ class BiscayneValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expectedString: WomString = WomString("-i a -i b -i c") - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) } } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/AstToNewExpressionElements.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/AstToNewExpressionElements.scala index edcd6204f6c..ba331383e45 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/AstToNewExpressionElements.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/AstToNewExpressionElements.scala @@ -3,7 +3,7 @@ package wdl.transforms.cascades.ast2wdlom import cats.syntax.validated._ import common.validation.ErrorOr.ErrorOr import wdl.model.draft3.elements.ExpressionElement -import wdl.model.draft3.elements.ExpressionElement.{Keys, AsMap, AsPairs, CollectByKey, Min, Max, Sep} +import wdl.model.draft3.elements.ExpressionElement.{AsMap, AsPairs, CollectByKey, Keys, Max, Min, Sep} import wdl.transforms.base.ast2wdlom.AstNodeToExpressionElement object AstToNewExpressionElements { @@ -12,15 +12,20 @@ object AstToNewExpressionElements { "as_map" -> AstNodeToExpressionElement.validateOneParamEngineFunction(AsMap, "as_map"), "as_pairs" -> AstNodeToExpressionElement.validateOneParamEngineFunction(AsPairs, "as_pairs"), "collect_by_key" -> AstNodeToExpressionElement.validateOneParamEngineFunction(CollectByKey, "collect_by_key"), - "min" -> AstNodeToExpressionElement.validateTwoParamEngineFunction(Min, "min"), "max" -> AstNodeToExpressionElement.validateTwoParamEngineFunction(Max, "max"), - "sep" -> AstNodeToExpressionElement.validateTwoParamEngineFunction(Sep, "sep"), - - "read_object" -> (_ => "read_object is no longer available in this WDL version. Consider using read_json instead".invalidNel), - "read_objects" -> (_ => "read_objects is no longer available in this WDL version. Consider using read_json instead".invalidNel), - "write_object" -> (_ => "write_object is no longer available in this WDL version. Consider using write_json instead".invalidNel), - "write_objects" -> (_ => "write_objects is no longer available in this WDL version. Consider using write_json instead".invalidNel), + "read_object" -> (_ => + "read_object is no longer available in this WDL version. Consider using read_json instead".invalidNel + ), + "read_objects" -> (_ => + "read_objects is no longer available in this WDL version. Consider using read_json instead".invalidNel + ), + "write_object" -> (_ => + "write_object is no longer available in this WDL version. 
Consider using write_json instead".invalidNel + ), + "write_objects" -> (_ => + "write_objects is no longer available in this WDL version. Consider using write_json instead".invalidNel + ) ) } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/CascadesGenericAstNode.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/CascadesGenericAstNode.scala index a503ca897be..1eef5176a24 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/CascadesGenericAstNode.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/CascadesGenericAstNode.scala @@ -5,7 +5,8 @@ import wdl.transforms.base.ast2wdlom.{GenericAst, GenericAstList, GenericAstNode import scala.jdk.CollectionConverters._ case class cascadesGenericAst(ast: Ast) extends GenericAst { - override def getAttribute(attr: String): GenericAstNode = Option(ast.getAttribute(attr)).map(cascadesGenericAstNode.apply).orNull + override def getAttribute(attr: String): GenericAstNode = + Option(ast.getAttribute(attr)).map(cascadesGenericAstNode.apply).orNull override def getAttributes: Map[String, GenericAstNode] = ast.getAttributes.asScala.toMap collect { case (key, value) if value != null => key -> cascadesGenericAstNode(value) } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/ast2wdlom.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/ast2wdlom.scala index 3bda86048ef..5ab621ed5b4 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/ast2wdlom.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/ast2wdlom/ast2wdlom.scala @@ -14,50 +14,81 @@ import wom.types.WomUnlistedDirectoryType package object ast2wdlom { - val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck { a => cascadesGenericAst(a).validNelCheck } - val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a => cascadesGenericAstNode(a).validNelCheck } + val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck(a => cascadesGenericAst(a).validNelCheck) + val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a => + cascadesGenericAstNode(a).validNelCheck + } - implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = AstNodeToStaticString.astNodeToStaticStringElement() + implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = + AstNodeToStaticString.astNodeToStaticStringElement() // meta sections implicit val astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] = AstNodeToMetaKvPair.astNodeToMetaKvPair - implicit val astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement] = astNodeToAst andThen AstToMetaSectionElement.astToMetaSectionElement - implicit val astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] = astNodeToAst andThen AstToParameterMetaSectionElement.astToParameterMetaSectionElement + implicit val astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement] = + astNodeToAst andThen AstToMetaSectionElement.astToMetaSectionElement + implicit val astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] = + astNodeToAst andThen AstToParameterMetaSectionElement.astToParameterMetaSectionElement - implicit val astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] = 
AstNodeToExpressionElement.astNodeToExpressionElement(customEngineFunctionMakers = AstToNewExpressionElements.newcascadesEngineFunctionMakers) - implicit val astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] = AstNodeToKvPair.astNodeToKvPair(astNodeToExpressionElement) + implicit val astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] = + AstNodeToExpressionElement.astNodeToExpressionElement(customEngineFunctionMakers = + AstToNewExpressionElements.newcascadesEngineFunctionMakers + ) + implicit val astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] = + AstNodeToKvPair.astNodeToKvPair(astNodeToExpressionElement) - implicit val astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] = AstNodeToTypeElement.astNodeToTypeElement(Map("Directory" -> WomUnlistedDirectoryType)) + implicit val astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] = + AstNodeToTypeElement.astNodeToTypeElement(Map("Directory" -> WomUnlistedDirectoryType)) implicit val astToStructElement: CheckedAtoB[GenericAst, StructElement] = AstToStructElement.astToStructElement - implicit val astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement] = astNodeToAst andThen AstToImportElement.astToImportElement + implicit val astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement] = + astNodeToAst andThen AstToImportElement.astToImportElement - implicit val astNodeToInputDeclarationElement: CheckedAtoB[GenericAstNode, InputDeclarationElement] = astNodeToAst andThen AstToInputDeclarationElement.astToInputDeclarationElement - implicit val astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement] = astNodeToAst andThen AstToInputsSectionElement.astToInputsSectionElement + implicit val astNodeToInputDeclarationElement: CheckedAtoB[GenericAstNode, InputDeclarationElement] = + astNodeToAst andThen AstToInputDeclarationElement.astToInputDeclarationElement + implicit val astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement] = + astNodeToAst andThen AstToInputsSectionElement.astToInputsSectionElement - implicit val astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent] = astNodeToAst andThen AstToDeclarationContent.astToDeclarationContent - implicit val astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement] = astNodeToAst andThen AstToOutputsSectionElement.astToOutputSectionElement + implicit val astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent] = + astNodeToAst andThen AstToDeclarationContent.astToDeclarationContent + implicit val astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement] = + astNodeToAst andThen AstToOutputsSectionElement.astToOutputSectionElement val astToWorkflowGraphNodeElementConverterMaker = new AstToWorkflowGraphNodeElementConverterMaker() - implicit val astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] = astNodeToAst andThen astToWorkflowGraphNodeElementConverterMaker.converter - implicit val astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement] = astNodeToAst andThen AstToCallElement.astToCallElement - implicit val astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement] = astNodeToAst andThen AstToScatterElement.astToScatterElement - implicit val astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] = astNodeToAst andThen AstToIfElement.astToIfElement + implicit val astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] = + astNodeToAst 
andThen astToWorkflowGraphNodeElementConverterMaker.converter + implicit val astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement] = + astNodeToAst andThen AstToCallElement.astToCallElement + implicit val astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement] = + astNodeToAst andThen AstToScatterElement.astToScatterElement + implicit val astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] = + astNodeToAst andThen AstToIfElement.astToIfElement astToWorkflowGraphNodeElementConverterMaker.astNodeToScatterElement = Some(astNodeToScatterElement) astToWorkflowGraphNodeElementConverterMaker.astNodeToIfElement = Some(astNodeToIfElement) astToWorkflowGraphNodeElementConverterMaker.astNodeToCallElement = Some(astNodeToCallElement) astToWorkflowGraphNodeElementConverterMaker.astNodeToDeclarationContent = Some(astNodeToDeclarationContent) - implicit val astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] = astNodeToAst andThen AstToWorkflowBodyElement.astToWorkflowBodyElement - implicit val astToWorkflowDefinitionElement: CheckedAtoB[GenericAst, WorkflowDefinitionElement] = AstToWorkflowDefinitionElement.astToWorkflowDefinitionElement + implicit val astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] = + astNodeToAst andThen AstToWorkflowBodyElement.astToWorkflowBodyElement + implicit val astToWorkflowDefinitionElement: CheckedAtoB[GenericAst, WorkflowDefinitionElement] = + AstToWorkflowDefinitionElement.astToWorkflowDefinitionElement - implicit val astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] = astNodeToAstList andThen AstNodeToPlaceholderAttributeSet.attributeKvpConverter - implicit val astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] = AstNodeToCommandPartElement.astNodeToCommandPartElement - implicit val astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement] = astNodeToAst andThen AstToCommandSectionElement.astToCommandSectionElement - implicit val astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement] = astNodeToAst andThen AstToRuntimeAttributesSectionElement.astToRuntimeSectionElement - implicit val astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] = astNodeToAst andThen AstToTaskSectionElement.astToTaskSectionElement - implicit val astToTaskDefinitionElement: CheckedAtoB[GenericAst, TaskDefinitionElement] = AstToTaskDefinitionElement.astToTaskDefinitionElement + implicit val astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] = + astNodeToAstList andThen AstNodeToPlaceholderAttributeSet.attributeKvpConverter + implicit val astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] = + AstNodeToCommandPartElement.astNodeToCommandPartElement + implicit val astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement] = + astNodeToAst andThen AstToCommandSectionElement.astToCommandSectionElement + implicit val astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement] = + astNodeToAst andThen AstToRuntimeAttributesSectionElement.astToRuntimeSectionElement + implicit val astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] = + astNodeToAst andThen AstToTaskSectionElement.astToTaskSectionElement + implicit val astToTaskDefinitionElement: CheckedAtoB[GenericAst, TaskDefinitionElement] = + 
AstToTaskDefinitionElement.astToTaskDefinitionElement - implicit val astToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] = astNodeToAst andThen AstToFileBodyElement.astToFileBodyElement(astToWorkflowDefinitionElement, astToTaskDefinitionElement, astToStructElement) + implicit val astToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] = + astNodeToAst andThen AstToFileBodyElement.astToFileBodyElement(astToWorkflowDefinitionElement, + astToTaskDefinitionElement, + astToStructElement + ) implicit val astToFileElement: CheckedAtoB[GenericAst, FileElement] = AstToFileElement.astToFileElement implicit val fileToFileElement: CheckedAtoB[File, FileElement] = fileToAst andThen wrapAst andThen astToFileElement diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumers.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumers.scala index 196005fdb84..b4e6c5be1d6 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumers.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumers.scala @@ -6,52 +6,64 @@ import wdl.model.draft3.graph.{ExpressionValueConsumer, UnlinkedConsumedValueHoo object cascadesExpressionValueConsumers { implicit val keysExpressionValueConsumer: ExpressionValueConsumer[Keys] = new ExpressionValueConsumer[Keys] { - override def expressionConsumedValueHooks(a: Keys)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: Keys)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) - } } implicit val asMapExpressionValueConsumer: ExpressionValueConsumer[AsMap] = new ExpressionValueConsumer[AsMap] { - override def expressionConsumedValueHooks(a: AsMap)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: AsMap)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) - } } implicit val asPairsExpressionValueConsumer: ExpressionValueConsumer[AsPairs] = new ExpressionValueConsumer[AsPairs] { - override def expressionConsumedValueHooks(a: AsPairs)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: AsPairs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) - } } - implicit val collectByKeyExpressionValueConsumer: ExpressionValueConsumer[CollectByKey] = new ExpressionValueConsumer[CollectByKey] { - override def expressionConsumedValueHooks(a: CollectByKey)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) + implicit val collectByKeyExpressionValueConsumer: 
ExpressionValueConsumer[CollectByKey] = + new ExpressionValueConsumer[CollectByKey] { + override def expressionConsumedValueHooks(a: CollectByKey)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + expressionValueConsumer.expressionConsumedValueHooks(a.param)(expressionValueConsumer) } - } implicit val minExpressionValueConsumer: ExpressionValueConsumer[Min] = new ExpressionValueConsumer[Min] { - override def expressionConsumedValueHooks(a: Min)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer.expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) - } + override def expressionConsumedValueHooks( + a: Min + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer + .expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) } implicit val maxExpressionValueConsumer: ExpressionValueConsumer[Max] = new ExpressionValueConsumer[Max] { - override def expressionConsumedValueHooks(a: Max)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer.expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) - } + override def expressionConsumedValueHooks( + a: Max + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer + .expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) } implicit val sepExpressionValueConsumer: ExpressionValueConsumer[Sep] = new ExpressionValueConsumer[Sep] { - override def expressionConsumedValueHooks(a: Sep)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: Sep)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.arg1)(expressionValueConsumer) ++ expressionValueConsumer.expressionConsumedValueHooks(a.arg2)(expressionValueConsumer) - } } - implicit val noneLiteralExpressionValueConsumer: ExpressionValueConsumer[NoneLiteralElement.type] = new ExpressionValueConsumer[NoneLiteralElement.type] { - override def expressionConsumedValueHooks(a: NoneLiteralElement.type)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - // None literals consume no values: - Set.empty[UnlinkedConsumedValueHook] + implicit val noneLiteralExpressionValueConsumer: ExpressionValueConsumer[NoneLiteralElement.type] = + new ExpressionValueConsumer[NoneLiteralElement.type] { + override def expressionConsumedValueHooks(a: NoneLiteralElement.type)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + // None literals consume no values: + Set.empty[UnlinkedConsumedValueHook] } - } } diff --git 
a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/consumed.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/consumed.scala index d689d2a175a..af54bbbd6a6 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/consumed.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/consumed/consumed.scala @@ -14,97 +14,101 @@ import wdl.transforms.cascades.linking.expression.consumed.cascadesExpressionVal package object consumed { - implicit val expressionElementUnlinkedValueConsumer: ExpressionValueConsumer[ExpressionElement] = new ExpressionValueConsumer[ExpressionElement] { - override def expressionConsumedValueHooks(a: ExpressionElement)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = a match { - case _: PrimitiveLiteralExpressionElement | _: StringLiteral => Set.empty[UnlinkedConsumedValueHook] - case a: NoneLiteralElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: StringExpression => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ObjectLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: PairLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ArrayLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: MapLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Member access: - case a: IdentifierLookup => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: IdentifierMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ExpressionMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: IndexAccess => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Unary operators: - case a: UnaryNegation => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: UnaryPlus => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LogicalNot => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Binary operators: - case a: LogicalOr => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LogicalAnd => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Equals => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: NotEquals => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LessThan => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: LessThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: GreaterThan => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: GreaterThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Add => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Subtract => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Multiply => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Divide => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Remainder => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: TernaryIf => a.expressionConsumedValueHooks(expressionValueConsumer) - - // Engine functions: - case a: StdoutElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: StderrElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: ReadLines 
=> a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadTsv => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadMap => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadObject => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadObjects => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadJson => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadInt => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadString => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadFloat => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: ReadBoolean => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteLines => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteTsv => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteMap => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteObject => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteObjects => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: WriteJson => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Range => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Transpose => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Length => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Flatten => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Prefix => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: SelectFirst => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: SelectAll => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Defined => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Floor => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Ceil => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Round => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Glob => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: Size => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Basename => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: Zip => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Cross => a.expressionConsumedValueHooks(expressionValueConsumer) - - case a: Sub => a.expressionConsumedValueHooks(expressionValueConsumer) - - // New WDL biscayne expressions: - case a: Keys => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: AsMap => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: AsPairs => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: CollectByKey => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Sep => sepExpressionValueConsumer.expressionConsumedValueHooks(a)(expressionValueConsumer) - - case a: Min => a.expressionConsumedValueHooks(expressionValueConsumer) - case a: Max => a.expressionConsumedValueHooks(expressionValueConsumer) - - case other => throw new Exception(s"Cannot generate consumed values for ExpressionElement ${other.getClass.getSimpleName}") + implicit val expressionElementUnlinkedValueConsumer: ExpressionValueConsumer[ExpressionElement] = + new ExpressionValueConsumer[ExpressionElement] { + override def expressionConsumedValueHooks(a: ExpressionElement)(implicit + expressionValueConsumer: 
ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = a match { + case _: PrimitiveLiteralExpressionElement | _: StringLiteral => Set.empty[UnlinkedConsumedValueHook] + case a: NoneLiteralElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: StringExpression => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ObjectLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: PairLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ArrayLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: MapLiteral => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Member access: + case a: IdentifierLookup => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: IdentifierMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ExpressionMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: IndexAccess => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Unary operators: + case a: UnaryNegation => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: UnaryPlus => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LogicalNot => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Binary operators: + case a: LogicalOr => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LogicalAnd => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Equals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: NotEquals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LessThan => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: LessThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: GreaterThan => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: GreaterThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Add => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Subtract => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Multiply => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Divide => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Remainder => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: TernaryIf => a.expressionConsumedValueHooks(expressionValueConsumer) + + // Engine functions: + case a: StdoutElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: StderrElement.type => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: ReadLines => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadTsv => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadMap => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadObject => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadObjects => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadJson => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadInt => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadString => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadFloat => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: ReadBoolean => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteLines => a.expressionConsumedValueHooks(expressionValueConsumer) + 
case a: WriteTsv => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteMap => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteObject => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteObjects => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: WriteJson => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Range => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Transpose => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Length => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Flatten => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Prefix => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: SelectFirst => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: SelectAll => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Defined => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Floor => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Ceil => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Round => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Glob => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: Size => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Basename => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: Zip => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Cross => a.expressionConsumedValueHooks(expressionValueConsumer) + + case a: Sub => a.expressionConsumedValueHooks(expressionValueConsumer) + + // New WDL biscayne expressions: + case a: Keys => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: AsMap => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: AsPairs => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: CollectByKey => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Sep => sepExpressionValueConsumer.expressionConsumedValueHooks(a)(expressionValueConsumer) + + case a: Min => a.expressionConsumedValueHooks(expressionValueConsumer) + case a: Max => a.expressionConsumedValueHooks(expressionValueConsumer) + + case other => + throw new Exception(s"Cannot generate consumed values for ExpressionElement ${other.getClass.getSimpleName}") + } } - } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluators.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluators.scala index f1e63de78cc..3b7b299a090 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluators.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluators.scala @@ -8,9 +8,12 @@ import wdl.transforms.base.linking.expression.files.EngineFunctionEvaluators.two object cascadesFileEvaluators { implicit val keysFileEvaluator: FileEvaluator[Keys] = EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator - implicit val asMapFileEvaluator: FileEvaluator[AsMap] = EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator - implicit val asPairsFileEvaluator: FileEvaluator[AsPairs] = EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator - implicit val collectByKeyFileEvaluator: FileEvaluator[CollectByKey] = 
EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator + implicit val asMapFileEvaluator: FileEvaluator[AsMap] = + EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator + implicit val asPairsFileEvaluator: FileEvaluator[AsPairs] = + EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator + implicit val collectByKeyFileEvaluator: FileEvaluator[CollectByKey] = + EngineFunctionEvaluators.singleParameterPassthroughFileEvaluator implicit val sepFunctionEvaluator: FileEvaluator[Sep] = twoParameterFunctionPassthroughFileEvaluator[Sep] diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/files.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/files.scala index 1c9da7b2d60..9693cbfaba6 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/files.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/files/files.scala @@ -21,82 +21,134 @@ package object files { implicit val expressionFileEvaluator: FileEvaluator[ExpressionElement] = new FileEvaluator[ExpressionElement] { - override def predictFilesNeededToEvaluate(a: ExpressionElement, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { - + override def predictFilesNeededToEvaluate(a: ExpressionElement, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = a match { // Literals: - case a: PrimitiveLiteralExpressionElement => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: StringLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ObjectLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: MapLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ArrayLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: PairLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: PrimitiveLiteralExpressionElement => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: StringLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ObjectLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: MapLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ArrayLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: PairLiteral => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Lookups and member accesses: - case a: IdentifierLookup => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ExpressionMemberAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, 
coerceTo)(fileEvaluator, valueEvaluator) - case a: IdentifierMemberAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: IndexAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: IdentifierLookup => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ExpressionMemberAccess => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: IdentifierMemberAccess => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: IndexAccess => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Unary operators: - case a: UnaryNegation => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: UnaryPlus => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LogicalNot => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: UnaryNegation => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: UnaryPlus => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LogicalNot => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Binary operators (at some point we might want to split these into separate cases): - case a: LogicalOr => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LogicalAnd => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LogicalOr => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LogicalAnd => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Equals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: NotEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LessThan => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: LessThanOrEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: GreaterThan => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: GreaterThanOrEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: NotEquals => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LessThan => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: LessThanOrEquals => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: GreaterThan => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: GreaterThanOrEquals => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Add => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, 
valueEvaluator) - case a: Subtract => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Multiply => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Subtract => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Multiply => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Divide => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Remainder => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Remainder => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: TernaryIf => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: TernaryIf => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) // Engine functions: - case StdoutElement => StdoutElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case StderrElement => StderrElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - - case a: ReadLines => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadTsv => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadObject => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadObjects => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadJson => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadInt => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadString => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadFloat => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: ReadBoolean => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteLines => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteTsv => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteObject => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteObjects => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: WriteJson => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case StdoutElement => + StdoutElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case StderrElement => + StderrElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + + case a: ReadLines => + 
a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadTsv => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadMap => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadObject => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadObjects => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadJson => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadInt => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadString => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadFloat => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: ReadBoolean => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteLines => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteTsv => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteMap => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteObject => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteObjects => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: WriteJson => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Range => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Transpose => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Transpose => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Length => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Flatten => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Flatten => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Prefix => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: SelectFirst => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: SelectAll => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Defined => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: SelectFirst => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: SelectAll => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Defined => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Floor => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Ceil => 
a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Round => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Glob => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Size => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: Basename => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: Basename => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Zip => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Cross => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) @@ -105,8 +157,10 @@ package object files { case a: Keys => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: AsMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: AsPairs => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) - case a: CollectByKey => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: AsPairs => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) + case a: CollectByKey => + a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Sep => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) case a: Min => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator) @@ -114,6 +168,5 @@ package object files { case other => s"No implementation of FileEvaluator[${other.getClass.getSimpleName}]".invalidNel } - } } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluators.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluators.scala index 4a2e389c25a..e169a561d53 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluators.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluators.scala @@ -11,59 +11,64 @@ import wom.types._ object cascadesTypeEvaluators { implicit val keysFunctionEvaluator: TypeEvaluator[Keys] = new TypeEvaluator[Keys] { - override def evaluateType(a: Keys, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Keys, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomMapType(WomAnyType, WomAnyType)) flatMap { case WomMapType(keyType, _) => WomArrayType(keyType).validNel case other => s"Cannot invoke 'keys' on type '${other.stableName}'. 
Expected a map".invalidNel } - } } implicit val asMapFunctionEvaluator: TypeEvaluator[AsMap] = new TypeEvaluator[AsMap] { - override def evaluateType(a: AsMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: AsMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomArrayType(WomPairType(WomAnyType, WomAnyType))) flatMap { case WomArrayType(WomPairType(x: WomPrimitiveType, y)) => WomMapType(x, y).validNel - case other @ WomArrayType(WomPairType(x, _)) => s"Cannot invoke 'as_map' on type ${other.stableName}. Map keys must be primitive but got '${x.stableName}'".invalidNel + case other @ WomArrayType(WomPairType(x, _)) => + s"Cannot invoke 'as_map' on type ${other.stableName}. Map keys must be primitive but got '${x.stableName}'".invalidNel case other => s"Cannot invoke 'as_map' on type '${other.stableName}'. Expected an array of pairs".invalidNel } - } } implicit val asPairsFunctionEvaluator: TypeEvaluator[AsPairs] = new TypeEvaluator[AsPairs] { - override def evaluateType(a: AsPairs, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: AsPairs, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomMapType(WomAnyType, WomAnyType)) flatMap { case WomMapType(x, y) => WomArrayType(WomPairType(x, y)).validNel case other => s"Cannot invoke 'as_pairs' on type '${other.stableName}'. Expected a map".invalidNel } - } } implicit val collectByKeyFunctionEvaluator: TypeEvaluator[CollectByKey] = new TypeEvaluator[CollectByKey] { - override def evaluateType(a: CollectByKey, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: CollectByKey, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomArrayType(WomPairType(WomAnyType, WomAnyType))) flatMap { case WomArrayType(WomPairType(x: WomPrimitiveType, y)) => WomMapType(x, WomArrayType(y)).validNel - case other @ WomArrayType(WomPairType(x, _)) => s"Cannot invoke 'collect_by_key' on type ${other.stableName}. Map keys must be primitive but got '${x.stableName}'".invalidNel - case other => s"Cannot invoke 'collect_by_key' on type '${other.stableName}'. Expected an array of pairs".invalidNel + case other @ WomArrayType(WomPairType(x, _)) => + s"Cannot invoke 'collect_by_key' on type ${other.stableName}. Map keys must be primitive but got '${x.stableName}'".invalidNel + case other => + s"Cannot invoke 'collect_by_key' on type '${other.stableName}'. 
Expected an array of pairs".invalidNel } - } } - private def resultTypeOfIntVsFloat(functionName: String)(type1: WomType, type2: WomType): ErrorOr[WomType] = (type1, type2) match { - case (WomIntegerType, WomIntegerType) => WomIntegerType.validNel - case (WomIntegerType, WomFloatType) => WomFloatType.validNel - case (WomFloatType, WomIntegerType) => WomFloatType.validNel - case (WomFloatType, WomFloatType) => WomFloatType.validNel - case (other1, other2) => s"Cannot call '$functionName' with arguments (${other1.friendlyName}, ${other2.friendlyName}). Must be Int or Long.".invalidNel - } + private def resultTypeOfIntVsFloat(functionName: String)(type1: WomType, type2: WomType): ErrorOr[WomType] = + (type1, type2) match { + case (WomIntegerType, WomIntegerType) => WomIntegerType.validNel + case (WomIntegerType, WomFloatType) => WomFloatType.validNel + case (WomFloatType, WomIntegerType) => WomFloatType.validNel + case (WomFloatType, WomFloatType) => WomFloatType.validNel + case (other1, other2) => + s"Cannot call '$functionName' with arguments (${other1.friendlyName}, ${other2.friendlyName}). Must be Int or Long.".invalidNel + } implicit val minFunctionEvaluator: TypeEvaluator[Min] = new TypeEvaluator[Min] { - override def evaluateType(a: Min, - linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Min, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = { val type1 = expressionTypeEvaluator.evaluateType(a.arg1, linkedValues) val type2 = expressionTypeEvaluator.evaluateType(a.arg1, linkedValues) @@ -72,9 +77,9 @@ object cascadesTypeEvaluators { } implicit val maxFunctionEvaluator: TypeEvaluator[Max] = new TypeEvaluator[Max] { - override def evaluateType(a: Max, - linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Max, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = { val type1 = expressionTypeEvaluator.evaluateType(a.arg1, linkedValues) val type2 = expressionTypeEvaluator.evaluateType(a.arg1, linkedValues) @@ -83,15 +88,16 @@ object cascadesTypeEvaluators { } implicit val sepFunctionEvaluator: TypeEvaluator[Sep] = new TypeEvaluator[Sep] { - override def evaluateType(a: Sep, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: Sep, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.arg2, linkedValues, WomArrayType(WomAnyType)) flatMap { - case WomArrayType(WomArrayType(_)) => s"Cannot invoke 'sep' on type 'Array[Array[_]]'. Expected an Array[String].".invalidNel + case WomArrayType(WomArrayType(_)) => + s"Cannot invoke 'sep' on type 'Array[Array[_]]'. Expected an Array[String].".invalidNel case WomArrayType(_) => WomStringType.validNel case other => s"Cannot invoke 'sep' on type '${other.stableName}'. 
Expected an Array[String].".invalidNel } - } } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/types.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/types.scala index 367967b91be..70e6e61a180 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/types.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/types/types.scala @@ -18,9 +18,9 @@ import wdl.transforms.biscayne.linking.expression.types.cascadesTypeEvaluators._ package object types { implicit val expressionTypeEvaluator: TypeEvaluator[ExpressionElement] = new TypeEvaluator[ExpressionElement] { - override def evaluateType(a: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit typeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { - + override def evaluateType(a: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit typeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = a match { // Literals: case a: PrimitiveLiteralExpressionElement => a.evaluateType(linkedValues)(typeEvaluator) @@ -111,8 +111,8 @@ package object types { case a: Min => a.evaluateType(linkedValues)(typeEvaluator) case a: Max => a.evaluateType(linkedValues)(typeEvaluator) - case other => s"Unable to process ${other.getClass.getSimpleName}: No evaluateType exists for that type.".invalidNel + case other => + s"Unable to process ${other.getClass.getSimpleName}: No evaluateType exists for that type.".invalidNel } - } } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluators.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluators.scala index 4a02ed221f1..788f114e181 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluators.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluators.scala @@ -9,40 +9,54 @@ import common.collections.EnhancedCollections._ import wdl.model.draft3.elements.ExpressionElement import wdl.model.draft3.elements.ExpressionElement._ import wdl.model.draft3.graph.expression.{EvaluatedValue, ForCommandInstantiationOptions, ValueEvaluator} -import wdl.transforms.base.linking.expression.values.EngineFunctionEvaluators.{processValidatedSingleValue, processTwoValidatedValues} +import wdl.transforms.base.linking.expression.values.EngineFunctionEvaluators.{ + processTwoValidatedValues, + processValidatedSingleValue +} import wom.expression.IoFunctionSet import wom.types._ -import wom.values.{WomArray, WomInteger, WomFloat, WomMap, WomOptionalValue, WomPair, WomValue, WomString} +import wom.values.{WomArray, WomFloat, WomInteger, WomMap, WomOptionalValue, WomPair, WomString, WomValue} import wom.types.coercion.defaults._ object cascadesValueEvaluators { - implicit val noneLiteralEvaluator: ValueEvaluator[NoneLiteralElement.type] = new ValueEvaluator[ExpressionElement.NoneLiteralElement.type] { - override def evaluateValue(a: ExpressionElement.NoneLiteralElement.type, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): 
ErrorOr[EvaluatedValue[_ <: WomValue]] = { - EvaluatedValue( - value = WomOptionalValue(WomNothingType, None), - sideEffectFiles = Seq.empty).validNel + implicit val noneLiteralEvaluator: ValueEvaluator[NoneLiteralElement.type] = + new ValueEvaluator[ExpressionElement.NoneLiteralElement.type] { + override def evaluateValue(a: ExpressionElement.NoneLiteralElement.type, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + EvaluatedValue(value = WomOptionalValue(WomNothingType, None), sideEffectFiles = Seq.empty).validNel } - } implicit val asMapFunctionEvaluator: ValueEvaluator[AsMap] = new ValueEvaluator[AsMap] { - override def evaluateValue(a: AsMap, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - processValidatedSingleValue[WomArray, WomMap](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { + override def evaluateValue(a: AsMap, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + processValidatedSingleValue[WomArray, WomMap]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { case WomArray(WomArrayType(WomPairType(_: WomPrimitiveType, _)), values) => val validPairs: ErrorOr[List[(WomValue, WomValue)]] = values.toList traverse { case WomPair(l, r) => (l, r).validNel - case other => s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}]".invalidNel - } leftMap { - errors => NonEmptyList.fromListUnsafe(errors.toList.distinct) + case other => + s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}".invalidNel + } leftMap { errors => + NonEmptyList.fromListUnsafe(errors.toList.distinct) } validPairs flatMap { pairs => val grouped = pairs.groupBy(_._1) val tooManyKeyErrors = grouped collect { - case (name, list) if list.length != 1 => s"keys can only appear once but ${name.toWomString} appeared ${list.size} times." + case (name, list) if list.length != 1 => + s"keys can only appear once but ${name.toWomString} appeared ${list.size} times." } if (tooManyKeyErrors.isEmpty) { - val pairs = grouped map { case (key, value) => (key -> value.head._2) } + val pairs = grouped map { case (key, value) => key -> value.head._2 } EvaluatedValue(WomMap(pairs), Seq.empty).validNel } else { @@ -50,49 +64,69 @@ object cascadesValueEvaluators { } } - case WomArray(womType@WomArrayType(WomPairType(x, _)), _) => s"Cannot evaluate 'as_map' on type ${womType.stableName}. Keys must be primitive but got ${x.stableName}.".invalidNel - case other => s"Invalid call of 'as_map' on parameter of type '${other.womType.stableName}' (expected Array[Pair[X, Y]])".invalidNel - } (coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) - } + case WomArray(womType @ WomArrayType(WomPairType(x, _)), _) => + s"Cannot evaluate 'as_map' on type ${womType.stableName}. 
Keys must be primitive but got ${x.stableName}.".invalidNel + case other => + s"Invalid call of 'as_map' on parameter of type '${other.womType.stableName}' (expected Array[Pair[X, Y]])".invalidNel + }(coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) } implicit val keysFunctionEvaluator: ValueEvaluator[Keys] = new ValueEvaluator[Keys] { override def evaluateValue(a: Keys, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { - - processValidatedSingleValue[WomMap, WomArray](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { - case WomMap(WomMapType(keyType, _), values) => EvaluatedValue(WomArray(WomArrayType(keyType), values.keys.toList), Seq.empty).validNel - case other => s"Invalid call of 'keys' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = + processValidatedSingleValue[WomMap, WomArray]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { + case WomMap(WomMapType(keyType, _), values) => + EvaluatedValue(WomArray(WomArrayType(keyType), values.keys.toList), Seq.empty).validNel + case other => + s"Invalid call of 'keys' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel } - } } implicit val asPairsFunctionEvaluator: ValueEvaluator[AsPairs] = new ValueEvaluator[AsPairs] { - override def evaluateValue(a: AsPairs, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - processValidatedSingleValue[WomMap, WomArray](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { + override def evaluateValue(a: AsPairs, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + processValidatedSingleValue[WomMap, WomArray]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { case WomMap(WomMapType(keyType, valueType), values) => - val validPairs: List[WomPair] = values.toList map { - case (l, r) => WomPair(l, r) + val validPairs: List[WomPair] = values.toList map { case (l, r) => + WomPair(l, r) } EvaluatedValue(WomArray(WomArrayType(WomPairType(keyType, valueType)), validPairs), Seq.empty).validNel - case other => s"Invalid call of 'as_pairs' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel + case other => + s"Invalid call of 'as_pairs' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel } - } } implicit val collectByKeyFunctionEvaluator: ValueEvaluator[CollectByKey] = new ValueEvaluator[CollectByKey] { - override def evaluateValue(a: CollectByKey, inputs: Map[String, 
WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - processValidatedSingleValue[WomArray, WomMap](expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator)) { + override def evaluateValue(a: CollectByKey, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + processValidatedSingleValue[WomArray, WomMap]( + expressionValueEvaluator.evaluateValue(a.param, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + ) { case WomArray(WomArrayType(WomPairType(_: WomPrimitiveType, _)), values) => val validPairs: ErrorOr[List[(WomValue, WomValue)]] = values.toList traverse { case WomPair(l, r) => (l, r).validNel - case other => s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}]".invalidNel + case other => + s"Unexpected array element. Expected a Pair[X, Y] but array contained ${other.toWomString}".invalidNel } validPairs flatMap { kvpairs => val grouped: Map[WomValue, WomArray] = kvpairs.groupBy(_._1).safeMapValues(v => WomArray(v.map(_._2))) @@ -100,22 +134,24 @@ } - case WomArray(womType@WomArrayType(WomPairType(x, _)), _) => s"Cannot evaluate 'collect_by_key' on type ${womType.stableName}. Keys must be primitive but got ${x.stableName}.".invalidNel - case other => s"Invalid call of 'collect_by_key' on parameter of type '${other.womType.stableName}' (expected Map[X, Y])".invalidNel - } (coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) - } + case WomArray(womType @ WomArrayType(WomPairType(x, _)), _) => + s"Cannot evaluate 'collect_by_key' on type ${womType.stableName}. 
Keys must be primitive but got ${x.stableName}.".invalidNel + case other => + s"Invalid call of 'collect_by_key' on parameter of type '${other.womType.stableName}' (expected Array[Pair[X, Y]])".invalidNel + }(coercer = WomArrayType(WomPairType(WomAnyType, WomAnyType))) } private def resultOfIntVsFloat(functionName: String, intFunc: (Int, Int) => Int, - doubleFunc: (Double, Double) => Double) - (value1: EvaluatedValue[_], value2: EvaluatedValue[_]): ErrorOr[EvaluatedValue[WomValue]] = { + doubleFunc: (Double, Double) => Double + )(value1: EvaluatedValue[_], value2: EvaluatedValue[_]): ErrorOr[EvaluatedValue[WomValue]] = { val newValue = (value1.value, value2.value) match { case (WomInteger(i1), WomInteger(i2)) => WomInteger(intFunc(i1, i2)).validNel case (WomInteger(i1), WomFloat(l2)) => WomFloat(doubleFunc(i1.doubleValue, l2)).validNel case (WomFloat(l1), WomInteger(i2)) => WomFloat(doubleFunc(l1, i2.doubleValue)).validNel case (WomFloat(l1), WomFloat(l2)) => WomFloat(doubleFunc(l1, l2)).validNel - case (other1, other2) => s"Invalid arguments to '$functionName':(${other1.typeName}, ${other2.typeName})".invalidNel + case (other1, other2) => + s"Invalid arguments to '$functionName':(${other1.typeName}, ${other2.typeName})".invalidNel } newValue map { v => EvaluatedValue(v, value1.sideEffectFiles ++ value2.sideEffectFiles) } } @@ -124,10 +160,16 @@ override def evaluateValue(a: Min, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { - val value1 = expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - val value2 = expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { + val value1 = + expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + val value2 = + expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) val intFunc = (i1: Int, i2: Int) => Math.min(i1, i2) val doubleFunc = (l1: Double, l2: Double) => Math.min(l1, l2) @@ -140,9 +182,16 @@ override def evaluateValue(a: Max, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { - val value1 = expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - val value2 = expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { + val value1 = + expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) + val value2 = 
expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) val intFunc = (i1: Int, i2: Int) => Math.max(i1, i2) val doubleFunc = (l1: Double, l2: Double) => Math.max(l1, l2) @@ -155,15 +204,17 @@ object cascadesValueEvaluators { override def evaluateValue(a: Sep, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = { - + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = processTwoValidatedValues[WomString, WomArray, WomString]( - expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator), - expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + expressionValueEvaluator.evaluateValue(a.arg1, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ), + expressionValueEvaluator.evaluateValue(a.arg2, inputs, ioFunctionSet, forCommandInstantiationOptions)( + expressionValueEvaluator + ) ) { (sepvalue, arr) => EvaluatedValue(WomString(arr.value.map(v => v.valueString).mkString(sepvalue.value)), Seq.empty).validNel } - } } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/values.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/values.scala index 24b21928f4b..08eb74ac19c 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/values.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/linking/expression/values/values.scala @@ -24,94 +24,154 @@ package object values { override def evaluateValue(a: ExpressionElement, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = a match { // Literals: - case a: PrimitiveLiteralExpressionElement => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: NoneLiteralElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - - case a: StringLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: StringExpression => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ObjectLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: MapLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ArrayLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: PairLiteral => a.evaluateValue(inputs, ioFunctionSet, 
forCommandInstantiationOptions)(expressionValueEvaluator) + case a: PrimitiveLiteralExpressionElement => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: NoneLiteralElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + + case a: StringLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: StringExpression => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ObjectLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: MapLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ArrayLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: PairLiteral => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Lookups and member accesses: - case a: IdentifierLookup => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ExpressionMemberAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: IdentifierMemberAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: IndexAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: IdentifierLookup => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ExpressionMemberAccess => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: IdentifierMemberAccess => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: IndexAccess => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Unary operators: - case a: UnaryNegation => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: UnaryPlus => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LogicalNot => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: UnaryNegation => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: UnaryPlus => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LogicalNot => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Binary operators (at some point we might want to split these into separate cases): - case a: LogicalOr => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LogicalAnd => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Equals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: NotEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LessThan => a.evaluateValue(inputs, ioFunctionSet, 
forCommandInstantiationOptions)(expressionValueEvaluator) - case a: LessThanOrEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: GreaterThan => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: GreaterThanOrEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LogicalOr => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LogicalAnd => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Equals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: NotEquals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LessThan => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: LessThanOrEquals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: GreaterThan => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: GreaterThanOrEquals => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Add => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Subtract => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Multiply => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Divide => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Remainder => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - - case a: TernaryIf => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Subtract => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Multiply => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Divide => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Remainder => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + + case a: TernaryIf => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) // Engine functions: - case a: StdoutElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: StderrElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - - case a: ReadLines => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadTsv => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadMap => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadObject => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadObjects => a.evaluateValue(inputs, 
ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadJson => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadInt => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadString => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadFloat => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: ReadBoolean => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteLines => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteTsv => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteMap => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteObject => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteObjects => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: WriteJson => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Range => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Transpose => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Length => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Flatten => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Prefix => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: SelectFirst => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: SelectAll => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Defined => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Floor => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: StdoutElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: StderrElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + + case a: ReadLines => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadTsv => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadMap => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadObject => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadObjects => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadJson => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadInt => + a.evaluateValue(inputs, ioFunctionSet, 
forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadString => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadFloat => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: ReadBoolean => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteLines => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteTsv => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteMap => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteObject => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteObjects => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: WriteJson => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Range => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Transpose => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Length => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Flatten => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Prefix => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: SelectFirst => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: SelectAll => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Defined => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Floor => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Ceil => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Round => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Round => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Glob => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Size => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Basename => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Basename => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Zip => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: Cross => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: Cross => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Sub => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Keys => 
a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: AsMap => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: AsPairs => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) - case a: CollectByKey => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: AsMap => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: AsPairs => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) + case a: CollectByKey => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Sep => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) case a: Min => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(expressionValueEvaluator) @@ -119,6 +179,5 @@ package object values { case other => s"Unable to process ${other.toWdlV1}: No evaluateValue exists for that type in WDL 1.1".invalidNel } - } } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/CascadesParser.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/CascadesParser.scala index ed0bd20e2e9..dd7b7aef4c7 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/CascadesParser.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/CascadesParser.scala @@ -15,7 +15,7 @@ object StringParser { def convert(a: FileStringParserInput): Checked[Ast] = Try { val parser = new WdlParser() val tokens = parser.lex(a.workflowSource, a.resource) - val terminalMap = (tokens.asScala.toVector map {(_, a.workflowSource)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, a.workflowSource) }).toMap val syntaxErrorFormatter = WdlCascadesSyntaxErrorFormatter(terminalMap) parser.parse(tokens, syntaxErrorFormatter).toAst.asInstanceOf[Ast] }.toChecked diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/WdlCascadesSyntaxErrorFormatter.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/WdlCascadesSyntaxErrorFormatter.scala index 175adf8f1af..7a24e474a79 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/WdlCascadesSyntaxErrorFormatter.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/parsing/WdlCascadesSyntaxErrorFormatter.scala @@ -13,52 +13,56 @@ case class WdlCascadesSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSo case classicTerminal => terminalMap.get(classicTerminal) } - private def line(t: Terminal): String = getTerminal(t).map(_.split("\n")(t.getLine - 1)).getOrElse(s"Cannot highlight line. It was probably in an imported file.") + private def line(t: Terminal): String = getTerminal(t) + .map(_.split("\n")(t.getLine - 1)) + .getOrElse(s"Cannot highlight line. 
It was probably in an imported file.") - def unexpectedEof(method: String, expected: java.util.List[TerminalIdentifier], nt_rules: java.util.List[String]): String = "ERROR: Unexpected end of file" + def unexpectedEof(method: String, + expected: java.util.List[TerminalIdentifier], + nt_rules: java.util.List[String] + ): String = "ERROR: Unexpected end of file" - def excessTokens(method: String, terminal: Terminal): String = { + def excessTokens(method: String, terminal: Terminal): String = s"""ERROR: Finished parsing without consuming all tokens. | - |${pointToSource(terminal)} + |${pointToSource(terminal)} """.stripMargin - } - def unexpectedSymbol(method: String, actual: Terminal, expected: java.util.List[TerminalIdentifier], rule: String): String = { + def unexpectedSymbol(method: String, + actual: Terminal, + expected: java.util.List[TerminalIdentifier], + rule: String + ): String = { val expectedTokens = expected.asScala.map(_.string).mkString(", ") s"""ERROR: Unexpected symbol (line ${actual.getLine}, col ${actual.getColumn}) when parsing '$method'. | - |Expected $expectedTokens, got "${actual.getSourceString}". + |Expected $expectedTokens, got "${actual.getSourceString}". | - |${pointToSource(actual)} + |${pointToSource(actual)} | - |$rule + |$rule """.stripMargin } - def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = { + def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = s"""ERROR: No more tokens. Expecting ${expecting.string} | - |${pointToSource(last)} + |${pointToSource(last)} """.stripMargin - } - def invalidTerminal(method: String, invalid: Terminal): String = { + def invalidTerminal(method: String, invalid: Terminal): String = s"""ERROR: Invalid symbol ID: ${invalid.getId} (${invalid.getTerminalStr}) | - |${pointToSource(invalid)} + |${pointToSource(invalid)} """.stripMargin - } // TODO: these next two methods won't be called by the parser because there are no lists in the WDL grammar that // cause these to be triggered. Currently the parser is passing in 'null' for the value of 'last' and when that // changes, these errors can be made more helpful. 
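  // A minimal wiring sketch for this formatter, assuming the same shapes as
  // StringParser.convert elsewhere in this patch (workflowSource and resource are
  // stand-ins for the real inputs):
  //
  //   val parser = new WdlParser()
  //   val tokens = parser.lex(workflowSource, resource)
  //   val terminalMap = (tokens.asScala.toVector map { (_, workflowSource) }).toMap
  //   parser.parse(tokens, WdlCascadesSyntaxErrorFormatter(terminalMap))
  //
  // On a parse failure the parser calls back into one of the methods above, and
  // pointToSource uses terminalMap to highlight the offending source line.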
- def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = { + def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = s"ERROR: $method requires $required items, but only found $found" - } - def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = { + def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = s"ERROR: $method requires a terminator after each element" - } } diff --git a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/wdlom2wom/package.scala b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/wdlom2wom/package.scala index 6f2f084867f..8933a00cb57 100644 --- a/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/wdlom2wom/package.scala +++ b/wdl/transforms/cascades/src/main/scala/wdl/transforms/cascades/wdlom2wom/package.scala @@ -3,7 +3,12 @@ package wdl.transforms.cascades import common.transforms.CheckedAtoB import wdl.transforms.base.wdlom2wom.TaskDefinitionElementToWomTaskDefinition.TaskDefinitionElementToWomInputs import wdl.transforms.base.wdlom2wom.WorkflowDefinitionElementToWomWorkflowDefinition.WorkflowDefinitionConvertInputs -import wdl.transforms.base.wdlom2wom.{FileElementToWomBundle, FileElementToWomBundleInputs, TaskDefinitionElementToWomTaskDefinition, WorkflowDefinitionElementToWomWorkflowDefinition} +import wdl.transforms.base.wdlom2wom.{ + FileElementToWomBundle, + FileElementToWomBundleInputs, + TaskDefinitionElementToWomTaskDefinition, + WorkflowDefinitionElementToWomWorkflowDefinition +} import wom.callable.{CallableTaskDefinition, WorkflowDefinition} import wom.executable.WomBundle import wdl.transforms.cascades.linking.expression.consumed._ @@ -12,7 +17,11 @@ import wdl.transforms.cascades.linking.expression.types._ import wdl.transforms.cascades.linking.expression.values._ package object wdlom2wom { - val taskDefinitionElementToWomTaskDefinition: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] = CheckedAtoB.fromErrorOr(TaskDefinitionElementToWomTaskDefinition.convert) - val workflowDefinitionElementToWomWorkflowDefinition: CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition] = CheckedAtoB.fromErrorOr(WorkflowDefinitionElementToWomWorkflowDefinition.convert) - val fileElementToWomBundle: CheckedAtoB[FileElementToWomBundleInputs, WomBundle] = CheckedAtoB.fromCheck(FileElementToWomBundle.convert) + val taskDefinitionElementToWomTaskDefinition: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] = + CheckedAtoB.fromErrorOr(TaskDefinitionElementToWomTaskDefinition.convert) + val workflowDefinitionElementToWomWorkflowDefinition + : CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition] = + CheckedAtoB.fromErrorOr(WorkflowDefinitionElementToWomWorkflowDefinition.convert) + val fileElementToWomBundle: CheckedAtoB[FileElementToWomBundleInputs, WomBundle] = + CheckedAtoB.fromCheck(FileElementToWomBundle.convert) } diff --git a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/Ast2WdlomSpec.scala b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/Ast2WdlomSpec.scala index 7633624bede..a3a9cd4fb65 100644 --- a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/Ast2WdlomSpec.scala +++ b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/Ast2WdlomSpec.scala @@ -27,14 +27,14 @@ object Ast2WdlomSpec { val parser = new WdlParser() def fromString[A](expression: String, - 
parseFunction: (util.List[WdlParser.Terminal], SyntaxErrorFormatter) => ParseTree) - (implicit converter: CheckedAtoB[GenericAstNode, A]): Checked[A] = { + parseFunction: (util.List[WdlParser.Terminal], SyntaxErrorFormatter) => ParseTree + )(implicit converter: CheckedAtoB[GenericAstNode, A]): Checked[A] = { // Add the "version development" to force the lexer into "main" mode. val versionedExpression = "version development\n" + expression // That "version development" means we'll have 2 unwanted tokens at the start of the list, so drop 'em: val tokens = parser.lex(versionedExpression, "string").asScala.drop(2).asJava - val terminalMap = (tokens.asScala.toVector map {(_, versionedExpression)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, versionedExpression) }).toMap val parseTree = parseFunction(tokens, WdlCascadesSyntaxErrorFormatter(terminalMap)) (wrapAstNode andThen converter).run(parseTree.toAst) } @@ -42,18 +42,20 @@ object Ast2WdlomSpec { class Ast2WdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - - it should "parse a simple expression" in { val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid Add(PrimitiveLiteralExpressionElement(WomInteger(3)), PrimitiveLiteralExpressionElement(WomInteger(3))) + expr shouldBeValid Add(PrimitiveLiteralExpressionElement(WomInteger(3)), + PrimitiveLiteralExpressionElement(WomInteger(3)) + ) } it should "parse a map expression" in { val str = "{3: 3}" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid MapLiteral(Map(PrimitiveLiteralExpressionElement(WomInteger(3)) -> PrimitiveLiteralExpressionElement(WomInteger(3)))) + expr shouldBeValid MapLiteral( + Map(PrimitiveLiteralExpressionElement(WomInteger(3)) -> PrimitiveLiteralExpressionElement(WomInteger(3))) + ) } it should "parse a simple meta section" in { @@ -65,33 +67,34 @@ class Ast2WdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { it should "parse a struct element" in { val str = "struct Foo { Int five\n Int six }" val struct = fromString[StructElement](str, parser.parse_struct)(astNodeToAst andThen astToStructElement) - struct shouldBeValid StructElement("Foo", Seq( - StructEntryElement("five", PrimitiveTypeElement(WomIntegerType)), - StructEntryElement("six", PrimitiveTypeElement(WomIntegerType))) + struct shouldBeValid StructElement("Foo", + Seq(StructEntryElement("five", PrimitiveTypeElement(WomIntegerType)), + StructEntryElement("six", PrimitiveTypeElement(WomIntegerType)) + ) ) } it should "parse the new as_map function" in { val str = "as_map(some_pairs)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(AsMap(IdentifierLookup("some_pairs"))) + expr shouldBeValid (AsMap(IdentifierLookup("some_pairs"))) } it should "parse the new as_pairs function" in { val str = "as_pairs(some_map)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(AsPairs(IdentifierLookup("some_map"))) + expr shouldBeValid (AsPairs(IdentifierLookup("some_map"))) } it should "parse the new collect_by_key function" in { val str = "collect_by_key(some_map)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(CollectByKey(IdentifierLookup("some_map"))) + expr shouldBeValid (CollectByKey(IdentifierLookup("some_map"))) } it should "parse the new None keyword" in { val str = "None" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(NoneLiteralElement) + expr 
shouldBeValid NoneLiteralElement } } diff --git a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/ast2wdlom/WdlFileToWdlomSpec.scala b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/ast2wdlom/WdlFileToWdlomSpec.scala index 5d2cb2991e8..0a6a6f0e08e 100644 --- a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/ast2wdlom/WdlFileToWdlomSpec.scala +++ b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/ast2wdlom/WdlFileToWdlomSpec.scala @@ -12,7 +12,6 @@ import wom.SourceFileLocation import wom.types._ import wom.values.WomInteger - class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "WDL File to WDLOM" @@ -25,12 +24,11 @@ class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } testCases.list.filter(x => x.isRegularFile && x.extension.contains(".wdl")) foreach { testCase => - val fileName = testCase.name val testName = testCase.name.split("\\.").head val itShouldString = s"create the correct Element structure for $fileName" - val testOrIgnore: (=>Any) => Unit = if (fileName.endsWith(".ignored.wdl")) { + val testOrIgnore: (=> Any) => Unit = if (fileName.endsWith(".ignored.wdl")) { (it should itShouldString).ignore _ } else { (it should itShouldString).in _ @@ -42,7 +40,8 @@ class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match fileToFileElement.run(testCase) match { case Right(actual) => actual shouldBe expected case Left(errors) => - val formattedErrors = errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) + val formattedErrors = + errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) fail(s"Failed to create WDLOM:$formattedErrors") } @@ -57,21 +56,23 @@ object WdlFileToWdlomSpec { FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "no_input_no_output", - inputsSection = None, - graphElements = Set( - CallElement("no_inputs", Some("noi1"), Vector.empty, None, Some(SourceFileLocation(7))), - CallElement("no_inputs", Some("noi5"), Vector.empty, None, Some(SourceFileLocation(21))), - CallElement("no_inputs", Some("noi2"), Vector.empty, None, Some(SourceFileLocation(11))), - CallElement("no_inputs", Some("noi3"), Vector.empty, None, Some(SourceFileLocation(13))), - CallElement("no_inputs", Some("noi4"), Vector.empty, None, Some(SourceFileLocation(17))), - CallElement("no_inputs", None, Vector.empty, None, Some(SourceFileLocation(9))) - ), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(6))) + workflows = Vector( + WorkflowDefinitionElement( + name = "no_input_no_output", + inputsSection = None, + graphElements = Set( + CallElement("no_inputs", Some("noi1"), Vector.empty, None, Some(SourceFileLocation(7))), + CallElement("no_inputs", Some("noi5"), Vector.empty, None, Some(SourceFileLocation(21))), + CallElement("no_inputs", Some("noi2"), Vector.empty, None, Some(SourceFileLocation(11))), + CallElement("no_inputs", Some("noi3"), Vector.empty, None, Some(SourceFileLocation(13))), + CallElement("no_inputs", Some("noi4"), Vector.empty, None, Some(SourceFileLocation(17))), + CallElement("no_inputs", None, Vector.empty, None, Some(SourceFileLocation(9))) + ), + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(6)) + ) ), tasks = Vector( TaskDefinitionElement( @@ 
-79,7 +80,8 @@ object WdlFileToWdlomSpec { inputsSection = None, declarations = Vector.empty, outputsSection = None, - commandSection = CommandSectionElement(List(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), + commandSection = + CommandSectionElement(List(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), runtimeSection = None, metaSection = None, parameterMetaSection = None, @@ -91,122 +93,280 @@ object WdlFileToWdlomSpec { FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "order", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "n", Some(PrimitiveLiteralExpressionElement(WomInteger(4)))), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more")))))), - graphElements = Set(CallElement("in_n_out", None, Vector.empty, Some(CallBodyElement(Vector(KvPair("total", IdentifierLookup("n")), KvPair("amount", IdentifierLookup("more"))))), Some(SourceFileLocation(19)))), - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "out", IdentifierMemberAccess("in_n_out", "out", List.empty))))), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(14))) + workflows = Vector( + WorkflowDefinitionElement( + name = "order", + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "n", + Some(PrimitiveLiteralExpressionElement(WomInteger(4))) + ), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more"))) + ) + ) + ), + graphElements = Set( + CallElement( + "in_n_out", + None, + Vector.empty, + Some( + CallBodyElement( + Vector(KvPair("total", IdentifierLookup("n")), KvPair("amount", IdentifierLookup("more"))) + ) + ), + Some(SourceFileLocation(19)) + ) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "out", + IdentifierMemberAccess("in_n_out", "out", List.empty) + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(14)) + ) ), - tasks = Vector(TaskDefinitionElement( - name = "in_n_out", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "total", None), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "amount", None)))), - declarations = Vector.empty, - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "out", Add(ReadInt(StdoutElement), PrimitiveLiteralExpressionElement(WomInteger(1))))))), - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(IdentifierLookup("total"), PlaceholderAttributeSet.empty), - StringCommandPartElement(" ") - )))), - runtimeSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)) - )) + tasks = Vector( + TaskDefinitionElement( + name = "in_n_out", + inputsSection = Some( + InputsSectionElement( + Vector(InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "total", None), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "amount", None) + ) + ) + ), + declarations = Vector.empty, + 
outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "out", + Add(ReadInt(StdoutElement), PrimitiveLiteralExpressionElement(WomInteger(1))) + ) + ) + ) + ), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo "), + PlaceholderCommandPartElement(IdentifierLookup("total"), PlaceholderAttributeSet.empty), + StringCommandPartElement(" ") + ) + ) + ) + ), + runtimeSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ) ), "afters" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "afters", - inputsSection = None, - graphElements = Set( - CallElement("foo", None, Vector.empty, Some(CallBodyElement(Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(5)))))), - Some(SourceFileLocation(4))), - CallElement("foo", Some("foo2"), Vector("foo"), Some(CallBodyElement(Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(6)))))), - Some(SourceFileLocation(5))) - ), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3))) + workflows = Vector( + WorkflowDefinitionElement( + name = "afters", + inputsSection = None, + graphElements = Set( + CallElement( + "foo", + None, + Vector.empty, + Some( + CallBodyElement( + Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(5)))) + ) + ), + Some(SourceFileLocation(4)) + ), + CallElement( + "foo", + Some("foo2"), + Vector("foo"), + Some( + CallBodyElement( + Vector(KvPair("i", ExpressionElement.PrimitiveLiteralExpressionElement(WomInteger(6)))) + ) + ), + Some(SourceFileLocation(5)) + ) + ), + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) ), - tasks = Vector(TaskDefinitionElement( - name = "foo", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", None)))), - declarations = Vector.empty, - outputsSection = None, - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector( - StringCommandPartElement("cat \"hello "), - PlaceholderCommandPartElement(IdentifierLookup("i"), PlaceholderAttributeSet.empty), - StringCommandPartElement("\" > /tmp/helloFile") - )))), - runtimeSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(8)) - )) + tasks = Vector( + TaskDefinitionElement( + name = "foo", + inputsSection = Some( + InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", None))) + ), + declarations = Vector.empty, + outputsSection = None, + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("cat \"hello "), + PlaceholderCommandPartElement(IdentifierLookup("i"), PlaceholderAttributeSet.empty), + StringCommandPartElement("\" > /tmp/helloFile") + ) + ) + ) + ), + runtimeSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(8)) + ) + ) ), "cascades_escaping" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "escapes", - inputsSection = None, - graphElements = Set( - 
IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"backslash",StringExpression(Vector(StringLiteral(" "), BackslashEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"n",StringExpression(Vector(StringLiteral(" "), NewlineEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"t",StringExpression(Vector(StringLiteral(" "), TabEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q1",StringExpression(Vector(StringLiteral("leading text "), DoubleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q2",StringLiteral("\"")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q3",StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q4",StringExpression(Vector(StringLiteral("leading text "), SingleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q5",StringLiteral("'")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"q6",StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq1",StringExpression(Vector(StringLiteral("leading text "), DoubleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq2",StringLiteral("\"")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq3",StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq4",StringExpression(Vector(StringLiteral("leading text "), SingleQuoteEscape, StringLiteral(" trailing text")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq5",StringLiteral("'")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType),"sq6",StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" ")))), - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomStringType), - "octal_hello", - StringExpression(Vector(UnicodeCharacterEscape(104), UnicodeCharacterEscape(101), UnicodeCharacterEscape(108), UnicodeCharacterEscape(108), UnicodeCharacterEscape(111))) - ), - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomStringType), - "hex_hello", - StringExpression(Vector(UnicodeCharacterEscape(104), UnicodeCharacterEscape(101), UnicodeCharacterEscape(108), UnicodeCharacterEscape(108), UnicodeCharacterEscape(111))) + workflows = Vector( + WorkflowDefinitionElement( + name = "escapes", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "backslash", + StringExpression(Vector(StringLiteral(" "), BackslashEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "n", + StringExpression(Vector(StringLiteral(" "), NewlineEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "t", + StringExpression(Vector(StringLiteral(" "), TabEscape, StringLiteral(" "))) + ), + 
IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "q1", + StringExpression( + Vector(StringLiteral("leading text "), + DoubleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "q2", StringLiteral("\"")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "q3", + StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "q4", + StringExpression( + Vector(StringLiteral("leading text "), + SingleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "q5", StringLiteral("'")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "q6", + StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "sq1", + StringExpression( + Vector(StringLiteral("leading text "), + DoubleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "sq2", StringLiteral("\"")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "sq3", + StringExpression(Vector(StringLiteral(" "), DoubleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "sq4", + StringExpression( + Vector(StringLiteral("leading text "), + SingleQuoteEscape, + StringLiteral(" trailing text") + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "sq5", StringLiteral("'")), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "sq6", + StringExpression(Vector(StringLiteral(" "), SingleQuoteEscape, StringLiteral(" "))) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "octal_hello", + StringExpression( + Vector(UnicodeCharacterEscape(104), + UnicodeCharacterEscape(101), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(111) + ) + ) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "hex_hello", + StringExpression( + Vector(UnicodeCharacterEscape(104), + UnicodeCharacterEscape(101), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(111) + ) + ) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "unicode_hello", + StringExpression( + Vector( + UnicodeCharacterEscape(104), + UnicodeCharacterEscape(101), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(108), + UnicodeCharacterEscape(111) + ) + ) + ) ), - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomStringType), - "unicode_hello", - StringExpression(Vector( - UnicodeCharacterEscape(104), - UnicodeCharacterEscape(101), - UnicodeCharacterEscape(108), - UnicodeCharacterEscape(108), - UnicodeCharacterEscape(111) - )) - ) - ), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)) - )), - tasks = Vector.empty) + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = Vector.empty + ) ) } diff --git 
a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumersSpec.scala b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumersSpec.scala index 4276d882b46..65a03812c25 100644 --- a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumersSpec.scala +++ b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/consumed/CascadesExpressionValueConsumersSpec.scala @@ -17,8 +17,8 @@ class CascadesExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set.empty) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set.empty) } } @@ -26,8 +26,8 @@ class CascadesExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "3 + three" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("three"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("three"))) } } @@ -35,8 +35,8 @@ class CascadesExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "as_map(my_task.out)" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) } } @@ -44,8 +44,8 @@ class CascadesExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = "as_pairs(as_map(my_task.out))" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedCallOutputOrIdentifierAndMemberAccessHook("my_task", "out"))) } } @@ -53,8 +53,8 @@ class CascadesExpressionValueConsumersSpec extends AnyFlatSpec with CromwellTime val str = """ sep(my_separator, ["a", "b", c]) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("my_separator"), UnlinkedIdentifierHook("c"))) + expr.shouldBeValidPF { case e => + e.expressionConsumedValueHooks should be(Set(UnlinkedIdentifierHook("my_separator"), UnlinkedIdentifierHook("c"))) } } } diff --git a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluatorSpec.scala b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluatorSpec.scala index a65bd73c8a9..6e0a828855e 100644 --- a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluatorSpec.scala +++ b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/files/CascadesFileEvaluatorSpec.scala @@ -18,8 +18,8 @@ class CascadesFileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val 
str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomIntegerType) shouldBeValid Set.empty + expr.shouldBeValidPF { case e => + e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomIntegerType) shouldBeValid Set.empty } } @@ -27,8 +27,11 @@ class CascadesFileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """as_pairs(read_map("my_map.txt"))""" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomArrayType(WomPairType(WomStringType, WomStringType))) shouldBeValid Set(WomSingleFile("my_map.txt")) + expr.shouldBeValidPF { case e => + e.predictFilesNeededToEvaluate(Map.empty, + NoIoFunctionSet, + WomArrayType(WomPairType(WomStringType, WomStringType)) + ) shouldBeValid Set(WomSingleFile("my_map.txt")) } } @@ -36,8 +39,10 @@ class CascadesFileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """ sep(' ', read_lines("foo.txt")) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomStringType) shouldBeValid Set(WomSingleFile("foo.txt")) + expr.shouldBeValidPF { case e => + e.predictFilesNeededToEvaluate(Map.empty, NoIoFunctionSet, WomStringType) shouldBeValid Set( + WomSingleFile("foo.txt") + ) } } } diff --git a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluatorSpec.scala b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluatorSpec.scala index b6944e4485a..96c3c51e937 100644 --- a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluatorSpec.scala +++ b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/types/CascadesTypeEvaluatorSpec.scala @@ -15,8 +15,8 @@ class CascadesTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomIntegerType + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomIntegerType } } @@ -24,8 +24,8 @@ class CascadesTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """as_map([(1,2), (3,4)])""" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomMapType(WomIntegerType, WomIntegerType) + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomMapType(WomIntegerType, WomIntegerType) } } @@ -33,8 +33,8 @@ class CascadesTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """as_pairs({ "one": 1, "two": 2, "three": 3 })""" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomArrayType(WomPairType(WomStringType, WomIntegerType)) + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomArrayType(WomPairType(WomStringType, WomIntegerType)) } } @@ -42,8 +42,8 @@ class CascadesTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """ sep(' ', ["a", "b", "c"]) """ val expr = fromString[ExpressionElement](str, 
parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomStringType + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomStringType } } @@ -51,8 +51,8 @@ class CascadesTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val str = """ sep(' ', prefix("-i ", ["a", "b", "c"])) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateType(Map.empty) shouldBeValid WomStringType + expr.shouldBeValidPF { case e => + e.evaluateType(Map.empty) shouldBeValid WomStringType } } diff --git a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluatorSpec.scala b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluatorSpec.scala index 7055da1c1ae..6dbad32d7d0 100644 --- a/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluatorSpec.scala +++ b/wdl/transforms/cascades/src/test/scala/wdl/transforms/cascades/linking/expression/values/CascadesValueEvaluatorSpec.scala @@ -23,8 +23,8 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = "3 + 3" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomInteger(6), Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomInteger(6), Seq.empty) } } @@ -32,14 +32,16 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ as_map( [("x", 1), ("y", 2), ("z", 3)] ) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedMap: WomMap = WomMap(Map ( - WomString("x") -> WomInteger(1), - WomString("y") -> WomInteger(2), - WomString("z") -> WomInteger(3) - )) + val expectedMap: WomMap = WomMap( + Map( + WomString("x") -> WomInteger(1), + WomString("y") -> WomInteger(2), + WomString("z") -> WomInteger(3) + ) + ) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) } } @@ -50,14 +52,16 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expr = fromString[ExpressionElement](str, parser.parse_e) val inputs = Map("three" -> WomString("three")) - val expectedPairs: WomArray = WomArray(Seq( - WomPair(WomInteger(1), WomString("one")), - WomPair(WomInteger(2), WomString("two")), - WomPair(WomInteger(3), WomString("three")) - )) - - expr.shouldBeValidPF { - case e => e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) + val expectedPairs: WomArray = WomArray( + Seq( + WomPair(WomInteger(1), WomString("one")), + WomPair(WomInteger(2), WomString("two")), + WomPair(WomInteger(3), WomString("three")) + ) + ) + + expr.shouldBeValidPF { case e => + e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) } () } @@ -73,16 +77,18 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ as_pairs(as_map(echo_me)) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedPairs: WomArray = 
WomArray(Seq( - WomPair(WomInteger(1), WomString("one")), - WomPair(WomInteger(2), WomString("two")), - WomPair(WomInteger(3), WomString("three")) - )) + val expectedPairs: WomArray = WomArray( + Seq( + WomPair(WomInteger(1), WomString("one")), + WomPair(WomInteger(2), WomString("two")), + WomPair(WomInteger(3), WomString("three")) + ) + ) val inputs = Map("echo_me" -> expectedPairs) - expr.shouldBeValidPF { - case e => e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedPairs, Seq.empty) } () } @@ -95,8 +101,11 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ as_map( [("x", 1), ("y", 2), ("x", 3)] ) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None).shouldBeInvalid("""Cannot evaluate 'as_map' with duplicated keys: keys can only appear once but "x" appeared 2 times.""") + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) + .shouldBeInvalid( + """Cannot evaluate 'as_map' with duplicated keys: keys can only appear once but "x" appeared 2 times.""" + ) } () } @@ -109,13 +118,15 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val str = """ collect_by_key( [("x", 1), ("y", 2), ("x", 3)] ) """ val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedMap: WomMap = WomMap(Map( - WomString("x") -> WomArray(Seq(WomInteger(1), WomInteger(3))), - WomString("y") -> WomArray(Seq(WomInteger(2))) - )) + val expectedMap: WomMap = WomMap( + Map( + WomString("x") -> WomArray(Seq(WomInteger(1), WomInteger(3))), + WomString("y") -> WomArray(Seq(WomInteger(2))) + ) + ) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) } () } @@ -129,13 +140,16 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi ) val expr = fromString[ExpressionElement](str, parser.parse_e) - val expectedMap: WomMap = WomMap(WomMapType(WomStringType, WomOptionalType(WomStringType)), Map ( - WomString("i") -> WomOptionalValue(WomStringType, Some(WomString("1"))), - WomString("s") -> WomOptionalValue(WomStringType, Some(WomString("two"))) - )) + val expectedMap: WomMap = WomMap( + WomMapType(WomStringType, WomOptionalType(WomStringType)), + Map( + WomString("i") -> WomOptionalValue(WomStringType, Some(WomString("1"))), + WomString("s") -> WomOptionalValue(WomStringType, Some(WomString("two"))) + ) + ) - expr.shouldBeValidPF { - case e => e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedMap, Seq.empty) } } @@ -158,8 +172,8 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expectedEvaluation = WomString(expected) val expr = fromString[ExpressionElement](str, parser.parse_e) - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedEvaluation, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, 
NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedEvaluation, Seq.empty) } } } @@ -171,8 +185,8 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expectedString: WomString = WomString("a b c") - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) } } @@ -182,8 +196,8 @@ class CascadesValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec wi val expectedString: WomString = WomString("-i a -i b -i c") - expr.shouldBeValidPF { - case e => e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) + expr.shouldBeValidPF { case e => + e.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(expectedString, Seq.empty) } } } diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomBundleMakers.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomBundleMakers.scala index b74e3bbbb34..e2bcb384617 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomBundleMakers.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomBundleMakers.scala @@ -5,7 +5,7 @@ import cats.syntax.traverse._ import cats.instances.list._ import common.Checked import common.validation.ErrorOr.ErrorOr -import wdl.draft2.model.{WdlNamespace, WdlNamespaceWithWorkflow, WdlNamespaceWithoutWorkflow} +import wdl.draft2.model.{WdlNamespace, WdlNamespaceWithoutWorkflow, WdlNamespaceWithWorkflow} import wom.callable.{TaskDefinition, WorkflowDefinition} import wom.executable.WomBundle import wom.transforms.WomWorkflowDefinitionMaker.ops._ @@ -16,7 +16,8 @@ object WdlDraft2WomBundleMakers { implicit val wdlDraft2NamespaceWomBundleMaker: WomBundleMaker[WdlNamespace] = new WomBundleMaker[WdlNamespace] { override def toWomBundle(from: WdlNamespace): Checked[WomBundle] = { - val workflowsValidation: ErrorOr[List[WorkflowDefinition]] = from.workflows.toList.traverse(_.toWomWorkflowDefinition(isASubworkflow = false)) + val workflowsValidation: ErrorOr[List[WorkflowDefinition]] = + from.workflows.toList.traverse(_.toWomWorkflowDefinition(isASubworkflow = false)) val tasksValidation: ErrorOr[List[TaskDefinition]] = from.tasks.toList.traverse(_.toWomTaskDefinition) val errorOr = (workflowsValidation, tasksValidation) mapN { (workflows, tasks) => @@ -24,16 +25,21 @@ object WdlDraft2WomBundleMakers { if (workflows.size == 1) { workflows.headOption } else None - WomBundle(primary, (tasks ++ workflows).map(c => c.name -> c).toMap, Map.empty, from.resolvedImportRecords) } + WomBundle(primary, (tasks ++ workflows).map(c => c.name -> c).toMap, Map.empty, from.resolvedImportRecords) + } errorOr.toEither } } - implicit val wdlDraft2NamespaceWithWorkflowWomBundleMaker: WomBundleMaker[WdlNamespaceWithWorkflow] = new WomBundleMaker[WdlNamespaceWithWorkflow] { - override def toWomBundle(a: WdlNamespaceWithWorkflow): Checked[WomBundle] = wdlDraft2NamespaceWomBundleMaker.toWomBundle(a) - } + implicit val wdlDraft2NamespaceWithWorkflowWomBundleMaker: WomBundleMaker[WdlNamespaceWithWorkflow] = + new WomBundleMaker[WdlNamespaceWithWorkflow] { + override def toWomBundle(a: WdlNamespaceWithWorkflow): Checked[WomBundle] = + wdlDraft2NamespaceWomBundleMaker.toWomBundle(a) 
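+      // Note: both the with-workflow and without-workflow instances delegate to
+      // wdlDraft2NamespaceWomBundleMaker above, so for a hypothetical
+      // ns: WdlNamespaceWithWorkflow the result is the same Checked[WomBundle] either
+      // way; the wrappers presumably exist only to satisfy implicit resolution at the
+      // more specific types.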
+ } - implicit val wdlDraft2NamespaceWithoutWorkflowWomBundleMaker: WomBundleMaker[WdlNamespaceWithoutWorkflow] = new WomBundleMaker[WdlNamespaceWithoutWorkflow] { - override def toWomBundle(a: WdlNamespaceWithoutWorkflow): Checked[WomBundle] = wdlDraft2NamespaceWomBundleMaker.toWomBundle(a) - } + implicit val wdlDraft2NamespaceWithoutWorkflowWomBundleMaker: WomBundleMaker[WdlNamespaceWithoutWorkflow] = + new WomBundleMaker[WdlNamespaceWithoutWorkflow] { + override def toWomBundle(a: WdlNamespaceWithoutWorkflow): Checked[WomBundle] = + wdlDraft2NamespaceWomBundleMaker.toWomBundle(a) + } } diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala index 442c938c834..323ab7cd859 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala @@ -11,16 +11,31 @@ import wdl.draft2.parser.WdlParser.Terminal import wom.SourceFileLocation import wom.callable.Callable import wom.graph.CallNode._ -import wom.callable.Callable.{InputDefinition, OverridableInputDefinitionWithDefault, OptionalInputDefinition, RequiredInputDefinition} +import wom.callable.Callable.{ + InputDefinition, + OptionalInputDefinition, + OverridableInputDefinitionWithDefault, + RequiredInputDefinition +} import wom.transforms.WomCallableMaker.ops._ import wom.graph.CallNode.{InputDefinitionFold, InputDefinitionPointer} import wom.graph.GraphNodePort.OutputPort -import wom.graph.expression.{AnonymousExpressionNode, ExpressionNode, PlainAnonymousExpressionNode, TaskCallInputExpressionNode} +import wom.graph.expression.{ + AnonymousExpressionNode, + ExpressionNode, + PlainAnonymousExpressionNode, + TaskCallInputExpressionNode +} import wom.graph._ import wom.transforms.WomCallNodeMaker object WdlDraft2WomCallNodeMaker extends WomCallNodeMaker[WdlCall] { - override def toWomCallNode(wdlCall: WdlCall, localLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean, inASubworkflow: Boolean): ErrorOr[CallNode.CallNodeAndNewNodes] = { + override def toWomCallNode(wdlCall: WdlCall, + localLookup: Map[String, GraphNodePort.OutputPort], + outerLookup: Map[String, GraphNodePort.OutputPort], + preserveIndexForOuterLookups: Boolean, + inASubworkflow: Boolean + ): ErrorOr[CallNode.CallNodeAndNewNodes] = { import common.validation.ErrorOr._ @@ -28,68 +43,80 @@ object WdlDraft2WomCallNodeMaker extends WomCallNodeMaker[WdlCall] { // A validation that all inputs to the call were actually wanted: def allInputsWereWantedValidation(callable: Callable): ErrorOr[Unit] = { - val callableExpectedInputs = callable.inputs.collect { - case r: RequiredInputDefinition => r - case o: OptionalInputDefinition => o - // Draft 2 values are non-overridable if they have upstream dependencies, so filter for those: - case id: OverridableInputDefinitionWithDefault if id.default.inputs.isEmpty => id - }.map(_.localName.value) + val callableExpectedInputs = callable.inputs + .collect { + case r: RequiredInputDefinition => r + case o: OptionalInputDefinition => o + // Draft 2 values are non-overridable if they have upstream dependencies, so filter for those: + case id: OverridableInputDefinitionWithDefault if id.default.inputs.isEmpty => id + } + .map(_.localName.value) val 
unexpectedInputs: Option[NonEmptyList[String]] = NonEmptyList.fromList(wdlCall.inputMappings.toList collect { case (inputName, _) if !callableExpectedInputs.contains(inputName) => inputName }) unexpectedInputs match { case None => ().validNel - case Some(unexpectedInputsNel) => (unexpectedInputsNel map { unexpectedInput => - s"Invalid call to '${callable.name}': Didn't expect the input '$unexpectedInput'. Check that this input is declared in the task or workflow. Note that intermediate values (declarations with values that depend on previous values) cannot be overridden." - }).invalid + case Some(unexpectedInputsNel) => + (unexpectedInputsNel map { unexpectedInput => + s"Invalid call to '${callable.name}': Didn't expect the input '$unexpectedInput'. Check that this input is declared in the task or workflow. Note that intermediate values (declarations with values that depend on previous values) cannot be overridden." + }).invalid } } /* - * Each input mapping gets its own ExpressionNode: - * - * call my_task { input: - * input1 = "hi!" -> ExpressionNode with no input port - * input3 = other_task.out + 2 -> ExpressionNode with an input port pointing to the output port of other_task.out - * } + * Each input mapping gets its own ExpressionNode: + * + * call my_task { input: + * input1 = "hi!" -> ExpressionNode with no input port + * input3 = other_task.out + 2 -> ExpressionNode with an input port pointing to the output port of other_task.out + * } */ def expressionNodeMappings: ErrorOr[Map[LocalName, AnonymousExpressionNode]] = { val precomputedOgins: Map[String, OutputPort] = outerLookup collect { - case (name, port) if !localLookup.contains(name) => name -> OuterGraphInputNode(WomIdentifier(name), port, preserveIndexForOuterLookups).singleOutputPort + case (name, port) if !localLookup.contains(name) => + name -> OuterGraphInputNode(WomIdentifier(name), port, preserveIndexForOuterLookups).singleOutputPort } val newLocalLookup = localLookup ++ precomputedOgins - wdlCall.inputMappings traverse { - case (inputName, wdlExpression) => - val identifier = wdlCall.womIdentifier.combine(inputName) - val constructor = wdlCall match { - case _: WdlTaskCall => TaskCallInputExpressionNode.apply _ - case _ => PlainAnonymousExpressionNode.apply _ - } - - WdlWomExpression.toAnonymousExpressionNode(identifier, WdlWomExpression(wdlExpression, wdlCall), newLocalLookup, Map.empty, preserveIndexForOuterLookups, wdlCall, constructor) map { - LocalName(inputName) -> _ - } + wdlCall.inputMappings traverse { case (inputName, wdlExpression) => + val identifier = wdlCall.womIdentifier.combine(inputName) + val constructor = wdlCall match { + case _: WdlTaskCall => TaskCallInputExpressionNode.apply _ + case _ => PlainAnonymousExpressionNode.apply _ + } + + WdlWomExpression.toAnonymousExpressionNode(identifier, + WdlWomExpression(wdlExpression, wdlCall), + newLocalLookup, + Map.empty, + preserveIndexForOuterLookups, + wdlCall, + constructor + ) map { + LocalName(inputName) -> _ + } } } /* - * Fold over the input definitions and - * 1) assign each input definition its InputDefinitionPointer - * 2) if necessary, create a graph input node and assign its output port to the input definition - * - * The InputDefinitionFold accumulates the input definition mappings, the create graph input nodes, and the expression nodes. 
+ * Fold over the input definitions and + * 1) assign each input definition its InputDefinitionPointer + * 2) if necessary, create a graph input node and assign its output port to the input definition + * + * The InputDefinitionFold accumulates the input definition mappings, the created graph input nodes, and the expression nodes. */ - def foldInputDefinitions(expressionNodes: Map[LocalName, ExpressionNode], callable: Callable): InputDefinitionFold = { + def foldInputDefinitions(expressionNodes: Map[LocalName, ExpressionNode], + callable: Callable + ): InputDefinitionFold = { // Updates the fold with a new graph input node. Happens when an optional or required undefined input without an // expression node mapping is found - def withGraphInputNode(inputDefinition: InputDefinition, graphInputNode: ExternalGraphInputNode) = { + def withGraphInputNode(inputDefinition: InputDefinition, graphInputNode: ExternalGraphInputNode) = InputDefinitionFold( - mappings = List(inputDefinition -> Coproduct[InputDefinitionPointer](graphInputNode.singleOutputPort: OutputPort)), + mappings = + List(inputDefinition -> Coproduct[InputDefinitionPointer](graphInputNode.singleOutputPort: OutputPort)), callInputPorts = Set(callNodeBuilder.makeInputPort(inputDefinition, graphInputNode.singleOutputPort)), newGraphInputNodes = Set(graphInputNode) ) - } callable.inputs.foldMap { // If there is an input mapping for this input definition, use that @@ -103,25 +130,29 @@ object WdlDraft2WomCallNodeMaker extends WomCallNodeMaker[WdlCall] { ) // No input mapping, either not an input or in a subworkflow: use the default expression - case withDefault@OverridableInputDefinitionWithDefault(_, _, expression, _, _) if inASubworkflow || expression.inputs.nonEmpty => + case withDefault @ OverridableInputDefinitionWithDefault(_, _, expression, _, _) + if inASubworkflow || expression.inputs.nonEmpty => InputDefinitionFold( mappings = List(withDefault -> Coproduct[InputDefinitionPointer](expression)) ) // No input mapping and in a top-level workflow: add an input with a default - case withDefault@OverridableInputDefinitionWithDefault(n, womType, expression, _, _) => + case withDefault @ OverridableInputDefinitionWithDefault(n, womType, expression, _, _) => val identifier = wdlCall.womIdentifier.combine(n) - withGraphInputNode(withDefault, OptionalGraphInputNodeWithDefault(identifier, womType, expression, identifier.fullyQualifiedName.value)) + withGraphInputNode( + withDefault, + OptionalGraphInputNodeWithDefault(identifier, womType, expression, identifier.fullyQualifiedName.value) + ) // No input mapping, required and we don't have a default value, create a new RequiredGraphInputNode // so that it can be satisfied via workflow inputs - case required@RequiredInputDefinition(n, womType, _, _) => + case required @ RequiredInputDefinition(n, womType, _, _) => val identifier = wdlCall.womIdentifier.combine(n) withGraphInputNode(required, RequiredGraphInputNode(identifier, womType, identifier.fullyQualifiedName.value)) // No input mapping, no default value but optional, create a OptionalGraphInputNode // so that it can be satisfied via workflow inputs - case optional@OptionalInputDefinition(n, womType, _, _) => + case optional @ OptionalInputDefinition(n, womType, _, _) => val identifier = wdlCall.womIdentifier.combine(n) withGraphInputNode(optional, OptionalGraphInputNode(identifier, womType, identifier.fullyQualifiedName.value)) @@ -139,11 +170,13 @@ object WdlDraft2WomCallNodeMaker extends WomCallNodeMaker[WdlCall] { // Figure out the line
number by looking at the AST val t: Terminal = AstTools.findTerminals(wdlCall.ast).head - val callNodeAndNewNodes = callNodeBuilder.build(wdlCall.womIdentifier, - callable, - foldInputDefinitions(mappings, callable).copy(usedOuterGraphInputNodes = usedOgins), - Set.empty, - Some(SourceFileLocation(t.getLine))) + val callNodeAndNewNodes = callNodeBuilder.build( + wdlCall.womIdentifier, + callable, + foldInputDefinitions(mappings, callable).copy(usedOuterGraphInputNodes = usedOgins), + Set.empty, + Some(SourceFileLocation(t.getLine)) + ) // If the created node is a `TaskCallNode` the created input expressions should be `TaskCallInputExpressionNode`s // and should be assigned a reference to the `TaskCallNode`. This is used in the `WorkflowExecutionActor` to diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCommandTaskDefinitionMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCommandTaskDefinitionMaker.scala index f23069a1f5b..9d59abfe142 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCommandTaskDefinitionMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCommandTaskDefinitionMaker.scala @@ -5,13 +5,12 @@ import common.validation.ErrorOr.ErrorOr import wdl.draft2.model.{AstTools, WdlTask, WdlWomExpression} import wdl.draft2.parser.WdlParser.Terminal import wom.SourceFileLocation -import wom.callable.Callable.{OverridableInputDefinitionWithDefault, OptionalInputDefinition, RequiredInputDefinition} +import wom.callable.Callable.{OptionalInputDefinition, OverridableInputDefinitionWithDefault, RequiredInputDefinition} import wom.callable.{Callable, CallableTaskDefinition, CommandTaskDefinition, MetaValueElement} import wom.graph.LocalName import wom.transforms.WomCommandTaskDefinitionMaker import wom.types.WomOptionalType - object WdlDraft2WomCommandTaskDefinitionMaker extends WomCommandTaskDefinitionMaker[WdlTask] { override def toWomTaskDefinition(wdlTask: WdlTask): ErrorOr[CommandTaskDefinition] = { @@ -22,7 +21,10 @@ object WdlDraft2WomCommandTaskDefinitionMaker extends WomCommandTaskDefinitionMa case d if d.expression.isEmpty && d.womType.isInstanceOf[WomOptionalType] => OptionalInputDefinition(LocalName(d.unqualifiedName), d.womType.asInstanceOf[WomOptionalType]) case d if d.expression.nonEmpty => - OverridableInputDefinitionWithDefault(LocalName(d.unqualifiedName), d.womType, WdlWomExpression(d.expression.get, wdlTask)) + OverridableInputDefinitionWithDefault(LocalName(d.unqualifiedName), + d.womType, + WdlWomExpression(d.expression.get, wdlTask) + ) }).toList // Figure out the start line of the workflow in the source file @@ -30,12 +32,10 @@ object WdlDraft2WomCommandTaskDefinitionMaker extends WomCommandTaskDefinitionMa // Draft-2 only support string values. 
It does not support composite values, or // anything - def stringifyMetaValues(meta: Map[String, String]): Map[String, MetaValueElement] = { - meta map { - case (key, value) => - key -> MetaValueElement.MetaValueElementString(value) + def stringifyMetaValues(meta: Map[String, String]): Map[String, MetaValueElement] = + meta map { case (key, value) => + key -> MetaValueElement.MetaValueElementString(value) } - } CallableTaskDefinition( name = wdlTask.fullyQualifiedName, diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala index f4303e8ebe5..f26463aa723 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala @@ -14,39 +14,59 @@ import wom.transforms.WomGraphMaker.ops._ import wom.types.WomBooleanType object WdlDraft2WomConditionalNodeMaker extends WomConditionalNodeMaker[If] { + /** * @param preserveIndexForOuterLookups When we're evaluating the condition boolean, should we preserve scatter index if we have to use the outerLookup? */ - override def toWomConditionalNode(ifBlock: If, localLookup: Map[String, OutputPort], outerLookup: Map[String, OutputPort], preserveIndexForOuterLookups: Boolean, inASubworkflow: Boolean): ErrorOr[ConditionalNodeWithNewNodes] = { + override def toWomConditionalNode(ifBlock: If, + localLookup: Map[String, OutputPort], + outerLookup: Map[String, OutputPort], + preserveIndexForOuterLookups: Boolean, + inASubworkflow: Boolean + ): ErrorOr[ConditionalNodeWithNewNodes] = { /* - * Why? Imagine that we're building three nested levels of a innerGraph. - * - Say we're building the middle layer. - * - We have a set of OutputPorts in the outer layer that we can make OGINs to if we need them. - * - We know that the inner graph might want to make use of those output ports, but we don't know which. - * - So, we can make OGINs at this layer for all possible OutputPorts in the outer graph and let the inner graph - * use however many of them it needs. - */ - val possiblyNeededNestedOgins: Map[String, OuterGraphInputNode] = outerLookup filterNot { case (name, _) => localLookup.contains(name) } map { case (name, outerPort) => + * Why? Imagine that we're building three nested levels of an innerGraph. + * - Say we're building the middle layer. + * - We have a set of OutputPorts in the outer layer that we can make OGINs to if we need them. + * - We know that the inner graph might want to make use of those output ports, but we don't know which.
+ */ + val possiblyNeededNestedOgins: Map[String, OuterGraphInputNode] = outerLookup filterNot { case (name, _) => + localLookup.contains(name) + } map { case (name, outerPort) => /* - * preserveIndexForOuterLookups indicates us whether or not nodes in the outerLookup are in the same scatter inn graph as this node - * preserveIndexForOuterLookups = false means the outerLookup nodes are outside a scatter containing this conditional node - * preserveIndexForOuterLookups = true means the above predicate does not hold - * - * When creating OGINs from those outer lookup nodes for the inner graph we want to make sure we set their preserveScatterIndex to preserveIndexForOuterLookups - * because they effectively represent the outer lookup nodes inside the conditional. So whether the index must be preserved depends on whether this - * conditional node has been asked to "preserveIndexForOuterLookups". + * preserveIndexForOuterLookups indicates whether or not nodes in the outerLookup are in the same scatter inner graph as this node + * preserveIndexForOuterLookups = false means the outerLookup nodes are outside a scatter containing this conditional node + * preserveIndexForOuterLookups = true means the above predicate does not hold + * + * When creating OGINs from those outer lookup nodes for the inner graph we want to make sure we set their preserveScatterIndex to preserveIndexForOuterLookups + * because they effectively represent the outer lookup nodes inside the conditional. So whether the index must be preserved depends on whether this + * conditional node has been asked to "preserveIndexForOuterLookups". */ name -> OuterGraphInputNode(WomIdentifier(name), outerPort, preserveScatterIndex = preserveIndexForOuterLookups) } - val possiblyNeededNestedOginPorts: Map[String, OutputPort] = possiblyNeededNestedOgins map { case (name: String, ogin: OuterGraphInputNode) => name -> ogin.singleOutputPort } + val possiblyNeededNestedOginPorts: Map[String, OutputPort] = possiblyNeededNestedOgins map { + case (name: String, ogin: OuterGraphInputNode) => name -> ogin.singleOutputPort + } val ifConditionExpression = WdlWomExpression(ifBlock.condition, ifBlock) val ifConditionGraphInputExpressionValidation = WdlWomExpression.toAnonymousExpressionNode( - WomIdentifier("conditional"), ifConditionExpression, localLookup ++ possiblyNeededNestedOginPorts, Map.empty, preserveIndexForOuterLookups, ifBlock, PlainAnonymousExpressionNode.apply) - val ifConditionTypeValidation = ifConditionExpression.evaluateType((localLookup ++ outerLookup).map { case (k, v) => k -> v.womType }) flatMap { + WomIdentifier("conditional"), + ifConditionExpression, + localLookup ++ possiblyNeededNestedOginPorts, + Map.empty, + preserveIndexForOuterLookups, + ifBlock, + PlainAnonymousExpressionNode.apply + ) + val ifConditionTypeValidation = ifConditionExpression.evaluateType((localLookup ++ outerLookup).map { case (k, v) => + k -> v.womType + }) flatMap { case coerceable if WomBooleanType.isCoerceableFrom(coerceable) => Valid(()) - case other => s"An if block must be given a boolean expression but instead got '${ifBlock.condition.toWomString}' (a ${other.stableName})".invalidNel + case other => + s"An if block must be given a boolean expression but instead got '${ifBlock.condition.toWomString}' (a ${other.stableName})".invalidNel } val innerGraphValidation: ErrorOr[Graph] = (ifBlock: Scope).toWomGraph( @@ -56,8 +76,9 @@ object WdlDraft2WomConditionalNodeMaker extends WomConditionalNodeMaker[If] { inASubworkflow = inASubworkflow ) -
(ifConditionGraphInputExpressionValidation, ifConditionTypeValidation, innerGraphValidation) mapN { (ifConditionGraphInputExpression, _, innerGraph) => - ConditionalNode.wireInConditional(innerGraph, ifConditionGraphInputExpression) + (ifConditionGraphInputExpressionValidation, ifConditionTypeValidation, innerGraphValidation) mapN { + (ifConditionGraphInputExpression, _, innerGraph) => + ConditionalNode.wireInConditional(innerGraph, ifConditionGraphInputExpression) } } } diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomExecutableMakers.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomExecutableMakers.scala index 9d68dc33528..3d545ee95b5 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomExecutableMakers.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomExecutableMakers.scala @@ -1,7 +1,7 @@ package wdl.transforms.draft2.wdlom2wom import common.Checked -import wdl.draft2.model.{WdlNamespace, WdlNamespaceWithWorkflow, WdlNamespaceWithoutWorkflow} +import wdl.draft2.model.{WdlNamespace, WdlNamespaceWithoutWorkflow, WdlNamespaceWithWorkflow} import wdl.transforms.draft2.wdlom2wom.WdlDraft2WomBundleMakers._ import wdl.shared.transforms.wdlom2wom.WdlSharedInputParsing import wom.core.WorkflowJson @@ -12,20 +12,33 @@ import wom.transforms.WomBundleMaker.ops._ object WdlDraft2WomExecutableMakers { implicit val namespaceWomExecutableMaker: WomExecutableMaker[WdlNamespace] = new WomExecutableMaker[WdlNamespace] { - override def toWomExecutable(a: WdlNamespace, inputs: Option[WorkflowJson], ioFunctions: IoFunctionSet, strictValidation: Boolean): Checked[Executable] = { + override def toWomExecutable(a: WdlNamespace, + inputs: Option[WorkflowJson], + ioFunctions: IoFunctionSet, + strictValidation: Boolean + ): Checked[Executable] = a.toWomBundle flatMap { bundle => WdlSharedInputParsing.buildWomExecutable(bundle, inputs, ioFunctions, strictValidation) } - } } - implicit val namespaceWithWorkflowWomExecutableMaker: WomExecutableMaker[WdlNamespaceWithWorkflow] = new WomExecutableMaker[WdlNamespaceWithWorkflow] { - override def toWomExecutable(a: WdlNamespaceWithWorkflow, inputs: Option[WorkflowJson], ioFunctions: IoFunctionSet, strictValidation: Boolean): Checked[Executable] = - namespaceWomExecutableMaker.toWomExecutable(a, inputs, ioFunctions, strictValidation) - } + implicit val namespaceWithWorkflowWomExecutableMaker: WomExecutableMaker[WdlNamespaceWithWorkflow] = + new WomExecutableMaker[WdlNamespaceWithWorkflow] { + override def toWomExecutable(a: WdlNamespaceWithWorkflow, + inputs: Option[WorkflowJson], + ioFunctions: IoFunctionSet, + strictValidation: Boolean + ): Checked[Executable] = + namespaceWomExecutableMaker.toWomExecutable(a, inputs, ioFunctions, strictValidation) + } - implicit val namespaceWithoutWorkflowWomExecutableMaker: WomExecutableMaker[WdlNamespaceWithoutWorkflow] = new WomExecutableMaker[WdlNamespaceWithoutWorkflow] { - override def toWomExecutable(a: WdlNamespaceWithoutWorkflow, inputs: Option[WorkflowJson], ioFunctions: IoFunctionSet, strictValidation: Boolean): Checked[Executable] = - namespaceWomExecutableMaker.toWomExecutable(a, inputs, ioFunctions, strictValidation) - } + implicit val namespaceWithoutWorkflowWomExecutableMaker: WomExecutableMaker[WdlNamespaceWithoutWorkflow] = + new WomExecutableMaker[WdlNamespaceWithoutWorkflow] { + override def toWomExecutable(a: WdlNamespaceWithoutWorkflow, + inputs: 
Option[WorkflowJson], + ioFunctions: IoFunctionSet, + strictValidation: Boolean + ): Checked[Executable] = + namespaceWomExecutableMaker.toWomExecutable(a, inputs, ioFunctions, strictValidation) + } } diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomGraphMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomGraphMaker.scala index dc6a66627ff..3f2e837dffe 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomGraphMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomGraphMaker.scala @@ -24,7 +24,12 @@ object WdlDraft2WomGraphMaker extends WomGraphMaker[Scope] { * - Adds in any extras from 'includeGraphNode' which the call knows should also be in the Graph (eg the Scatter variable's InputGraphNode which this method doesn't know exists) * - Builds them all together into a Graph */ - override def toWomGraph(scope: Scope, includeGraphNodes: Set[GraphNode], outerLookup: Map[String, OutputPort], preserveIndexForOuterLookups: Boolean, inASubworkflow: Boolean): ErrorOr[Graph] = { + override def toWomGraph(scope: Scope, + includeGraphNodes: Set[GraphNode], + outerLookup: Map[String, OutputPort], + preserveIndexForOuterLookups: Boolean, + inASubworkflow: Boolean + ): ErrorOr[Graph] = { final case class FoldState(nodes: Set[GraphNode], availableInputs: Map[String, GraphNodePort.OutputPort]) @@ -36,7 +41,13 @@ object WdlDraft2WomGraphMaker extends WomGraphMaker[Scope] { ) def foldFunction(acc: Checked[FoldState], node: WdlGraphNode): Checked[FoldState] = acc flatMap { goodAcc => - buildNode(goodAcc, node).leftMap(errors => errors.map(s"Unable to build WOM node for ${node.getClass.getSimpleName} '${node.womIdentifier.localName.value}': " + _)).toEither + buildNode(goodAcc, node) + .leftMap(errors => + errors.map( + s"Unable to build WOM node for ${node.getClass.getSimpleName} '${node.womIdentifier.localName.value}': " + _ + ) + ) + .toEither } def foldInGeneratedNodeAndNewInputs(acc: FoldState)(gnani: GeneratedNodeAndNewNodes): FoldState = { @@ -66,37 +77,49 @@ object WdlDraft2WomGraphMaker extends WomGraphMaker[Scope] { def buildNode(acc: FoldState, node: WdlGraphNode): ErrorOr[FoldState] = node match { case wdlCall: WdlCall => - wdlCall.toWomCallNode(acc.availableInputs, outerLookup, preserveIndexForOuterLookups, inASubworkflow) map { cnani: CallNodeAndNewNodes => - foldInGeneratedNodeAndNewInputs(acc)(cnani) + wdlCall.toWomCallNode(acc.availableInputs, outerLookup, preserveIndexForOuterLookups, inASubworkflow) map { + cnani: CallNodeAndNewNodes => + foldInGeneratedNodeAndNewInputs(acc)(cnani) } - case decl: DeclarationInterface => Declaration.buildWdlDeclarationNode(decl, acc.availableInputs, outerLookup, preserveIndexForOuterLookups) map { wdlDeclNode => - // As with GeneratedNodeAndNewInputs, we might have made some new OuterGraphInputNodes to build this DeclarationNode, so - // make sure they get included: - val declNode = wdlDeclNode.toGraphNode - val newOgins: Set[OuterGraphInputNode] = declNode.upstreamOuterGraphInputNodes - val newOginOutputs = newOgins.map(_.nameToPortMapping) - - // Add the output port, but only if the value isn't already available as an input from somewhere else - val availableOutputPort: Option[(String, OutputPort)] = if(!acc.availableInputs.contains(wdlDeclNode.localName)) { - Option(wdlDeclNode.localName -> wdlDeclNode.singleOutputPort) - } else { - None + case decl: DeclarationInterface => + 
Declaration.buildWdlDeclarationNode(decl, acc.availableInputs, outerLookup, preserveIndexForOuterLookups) map { + wdlDeclNode => + // As with GeneratedNodeAndNewInputs, we might have made some new OuterGraphInputNodes to build this DeclarationNode, so + // make sure they get included: + val declNode = wdlDeclNode.toGraphNode + val newOgins: Set[OuterGraphInputNode] = declNode.upstreamOuterGraphInputNodes + val newOginOutputs = newOgins.map(_.nameToPortMapping) + + // Add the output port, but only if the value isn't already available as an input from somewhere else + val availableOutputPort: Option[(String, OutputPort)] = + if (!acc.availableInputs.contains(wdlDeclNode.localName)) { + Option(wdlDeclNode.localName -> wdlDeclNode.singleOutputPort) + } else { + None + } + + FoldState(acc.nodes + declNode ++ newOgins, acc.availableInputs ++ newOginOutputs ++ availableOutputPort) } - FoldState(acc.nodes + declNode ++ newOgins, acc.availableInputs ++ newOginOutputs ++ availableOutputPort) - } - case scatter: Scatter => - scatter.toWomScatterNode(acc.availableInputs, outerLookup, preserveIndexForOuterLookups, inASubworkflow) map { foldInGeneratedNodeAndNewInputs(acc)(_) } + scatter.toWomScatterNode(acc.availableInputs, outerLookup, preserveIndexForOuterLookups, inASubworkflow) map { + foldInGeneratedNodeAndNewInputs(acc)(_) + } case ifBlock: If => - ifBlock.toWomConditionalNode(acc.availableInputs, outerLookup, preserveIndexForOuterLookups, inASubworkflow) map { foldInGeneratedNodeAndNewInputs(acc)(_) } + ifBlock.toWomConditionalNode(acc.availableInputs, + outerLookup, + preserveIndexForOuterLookups, + inASubworkflow + ) map { foldInGeneratedNodeAndNewInputs(acc)(_) } case _ => s"Cannot process WdlGraphNodes of type ${node.getClass.getSimpleName} yet!".invalidNel } val nodeList = scope.childGraphNodesSorted - val nodeAccumulator: Checked[FoldState] = nodeList flatMap { _.foldLeft[Checked[FoldState]](Right(initialFoldState))(foldFunction) } + val nodeAccumulator: Checked[FoldState] = nodeList flatMap { + _.foldLeft[Checked[FoldState]](Right(initialFoldState))(foldFunction) + } def outerLinkInputs(nodes: Set[GraphNode]): Set[OuterGraphInputNode] = nodes flatMap { // NB: this curious type annotation just gives intelliJ a hand: diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomScatterNodeMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomScatterNodeMaker.scala index f1f018f2307..cc7ee098774 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomScatterNodeMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomScatterNodeMaker.scala @@ -17,32 +17,47 @@ object WdlDraft2WomScatterNodeMaker extends WomScatterNodeMaker[Scatter] { localLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean, - inASubworkflow: Boolean): ErrorOr[ScatterNodeWithNewNodes] = { + inASubworkflow: Boolean + ): ErrorOr[ScatterNodeWithNewNodes] = { /* - * Why? Imagine that we're building three nested levels of a innerGraph. - * - Say we're building the middle layer. - * - We have a set of OutputPorts in the outer layer that we can make OGINs to if we need them. - * - We know that the inner graph might want to make use of those output ports, but we don't know which. 
- * - So, we can make OGINs at this layer for all possible OutputPorts in the outer graph and let the inner graph - * use however many of them it needs. - */ - val possiblyNeededNestedOgins: Map[String, OuterGraphInputNode] = outerLookup filterNot { case (name, _) => localLookup.contains(name) } map { case (name, outerPort) => + * Why? Imagine that we're building three nested levels of an innerGraph. + * - Say we're building the middle layer. + * - We have a set of OutputPorts in the outer layer that we can make OGINs to if we need them. + * - We know that the inner graph might want to make use of those output ports, but we don't know which. + * - So, we can make OGINs at this layer for all possible OutputPorts in the outer graph and let the inner graph + * use however many of them it needs. + */ + val possiblyNeededNestedOgins: Map[String, OuterGraphInputNode] = outerLookup filterNot { case (name, _) => + localLookup.contains(name) + } map { case (name, outerPort) => /* - * preserveScatterIndex = false because in the absence of support of nested scatters, - * the index should never be preserved when for nodes coming from outside the scatter. + * preserveScatterIndex = false because in the absence of support of nested scatters, + * the index should never be preserved for nodes coming from outside the scatter. */ name -> OuterGraphInputNode(WomIdentifier(name), outerPort, preserveScatterIndex = false) } - val possiblyNeededNestedOginPorts: Map[String, OutputPort] = possiblyNeededNestedOgins map { case (name: String, ogin: OuterGraphInputNode) => name -> ogin.singleOutputPort } + val possiblyNeededNestedOginPorts: Map[String, OutputPort] = possiblyNeededNestedOgins map { + case (name: String, ogin: OuterGraphInputNode) => name -> ogin.singleOutputPort + } // Convert the scatter collection WdlExpression to a WdlWomExpression val scatterCollectionExpression = WdlWomExpression(scatter.collection, scatter) // Generate an ExpressionNode from the WdlWomExpression val scatterCollectionExpressionNode = - WdlWomExpression.toAnonymousExpressionNode(WomIdentifier(scatter.item), scatterCollectionExpression, localLookup ++ possiblyNeededNestedOginPorts, Map.empty, preserveIndexForOuterLookups, scatter, PlainAnonymousExpressionNode.apply) + WdlWomExpression.toAnonymousExpressionNode( + WomIdentifier(scatter.item), + scatterCollectionExpression, + localLookup ++ possiblyNeededNestedOginPorts, + Map.empty, + preserveIndexForOuterLookups, + scatter, + PlainAnonymousExpressionNode.apply + ) // Validate the collection evaluates to a traversable type - val scatterItemTypeValidation = scatterCollectionExpression.evaluateType((localLookup ++ outerLookup).map { case (k, v) => k -> v.womType }) flatMap { + val scatterItemTypeValidation = scatterCollectionExpression.evaluateType((localLookup ++ outerLookup).map { + case (k, v) => k -> v.womType + }) flatMap { case WomArrayType(itemType) => Valid(itemType) // Covers maps because this is a custom unapply (see WdlArrayType) case other => s"Cannot scatter over a non-traversable type ${other.stableName}".invalidNel } @@ -52,7 +67,11 @@ object WdlDraft2WomScatterNodeMaker extends WomScatterNodeMaker[Scatter] { expressionNode <- scatterCollectionExpressionNode // Graph input node for the scatter variable in the inner graph.
Note that the type is the array's member type womInnerGraphScatterVariableInput = ScatterVariableNode(WomIdentifier(scatter.item), expressionNode, itemType) - g <- (scatter: Scope).toWomGraph(Set(womInnerGraphScatterVariableInput), localLookup ++ possiblyNeededNestedOginPorts, preserveIndexForOuterLookups = false, inASubworkflow) + g <- (scatter: Scope).toWomGraph(Set(womInnerGraphScatterVariableInput), + localLookup ++ possiblyNeededNestedOginPorts, + preserveIndexForOuterLookups = false, + inASubworkflow + ) } yield ScatterNode.scatterOverGraph(g, womInnerGraphScatterVariableInput) } diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomWorkflowDefinitionMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomWorkflowDefinitionMaker.scala index 6f52e1a8d7e..08fc5349212 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomWorkflowDefinitionMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomWorkflowDefinitionMaker.scala @@ -9,26 +9,31 @@ import wom.transforms.WomGraphMaker.ops._ import wom.SourceFileLocation object WdlDraft2WomWorkflowDefinitionMaker extends WomWorkflowDefinitionMaker[WdlWorkflow] { - override def toWomWorkflowDefinition(wdlWorkflow: WdlWorkflow, isASubworkflow: Boolean): ErrorOr[WorkflowDefinition] = { + override def toWomWorkflowDefinition(wdlWorkflow: WdlWorkflow, + isASubworkflow: Boolean + ): ErrorOr[WorkflowDefinition] = { // NB: We don't allow "OuterGraphInputNode"s when building this (the Map is empty), so preserveScatterForExternalLookups isn't ever actually used. // Figure out the start line of the workflow in the source file val t: Terminal = AstTools.findTerminals(wdlWorkflow.ast).head - def stringifyMetaValues(meta: Map[String, String]): Map[String, MetaValueElement] = { - meta map { - case (key, value) => - key -> MetaValueElement.MetaValueElementString(value) + def stringifyMetaValues(meta: Map[String, String]): Map[String, MetaValueElement] = + meta map { case (key, value) => + key -> MetaValueElement.MetaValueElementString(value) } - } - (wdlWorkflow: Scope).toWomGraph(Set.empty, Map.empty, preserveIndexForOuterLookups = true, isASubworkflow: Boolean) map { wg => + (wdlWorkflow: Scope).toWomGraph(Set.empty, + Map.empty, + preserveIndexForOuterLookups = true, + isASubworkflow: Boolean + ) map { wg => WorkflowDefinition( wdlWorkflow.fullyQualifiedName, wg, stringifyMetaValues(wdlWorkflow.meta), stringifyMetaValues(wdlWorkflow.parameterMeta), - Some(SourceFileLocation(t.getLine))) + Some(SourceFileLocation(t.getLine)) + ) } } } diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/Draft2ReadFileLimitsSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/Draft2ReadFileLimitsSpec.scala index d1d744aeab1..7bae5243f0b 100644 --- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/Draft2ReadFileLimitsSpec.scala +++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/Draft2ReadFileLimitsSpec.scala @@ -11,7 +11,7 @@ import scala.concurrent.Future class Draft2ReadFileLimitsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "ReadLikeFunctions Size Limit Draft 2" - + it should "pass correct size limits to the ioFunctions for read_lines" in { new WdlWomExpression(WdlExpression.fromString("""read_lines("blah")"""), null) .evaluateValue(Map.empty, ioFunctionTester(1, "")) @@ -75,9 +75,13 @@ class Draft2ReadFileLimitsSpec 
extends AnyFlatSpec with CromwellTimeoutSpec with object Draft2ReadFileLimitsSpec { def ioFunctionTester(expectedMaxBytes: Int, result: String) = new EmptyIoFunctionSet { - override def readFile(path: String, maxBytes: Option[Int] = None, failOnOverflow: Boolean = false) = { + override def readFile(path: String, maxBytes: Option[Int] = None, failOnOverflow: Boolean = false) = if (maxBytes.contains(expectedMaxBytes)) Future.successful(result) - else Future.failed(new Exception(s"readFile was called with a max bytes value of ${maxBytes.getOrElse("No value")} but was expecting $expectedMaxBytes")) - } + else + Future.failed( + new Exception( + s"readFile was called with a max bytes value of ${maxBytes.getOrElse("No value")} but was expecting $expectedMaxBytes" + ) + ) } } diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlAliasWomSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlAliasWomSpec.scala index 1c70f49b600..b3e1873e193 100644 --- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlAliasWomSpec.scala +++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlAliasWomSpec.scala @@ -34,22 +34,29 @@ class WdlAliasWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers conditionalTestGraph match { case Valid(g) => validateGraph(g) - case Invalid(errors) => fail(s"Unable to build wom version of conditional foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}") + case Invalid(errors) => + fail(s"Unable to build wom version of conditional foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}") } def validateGraph(workflowGraph: Graph) = { val inputNodes: Set[ExternalGraphInputNode] = workflowGraph.nodes.filterByType[ExternalGraphInputNode] inputNodes.map(_.localName) should be(Set("foo1.i", "foo2.i")) - inputNodes.map(_.identifier.fullyQualifiedName.value) should be(Set("conditional_test.foo1.i", "conditional_test.foo2.i")) + inputNodes.map(_.identifier.fullyQualifiedName.value) should be( + Set("conditional_test.foo1.i", "conditional_test.foo2.i") + ) val callNodes: Set[CallNode] = workflowGraph.nodes.filterByType[CallNode] callNodes.map(_.localName) should be(Set("foo1", "foo2")) - callNodes.map(_.identifier.fullyQualifiedName.value) should be(Set("conditional_test.foo1", "conditional_test.foo2")) + callNodes.map(_.identifier.fullyQualifiedName.value) should be( + Set("conditional_test.foo1", "conditional_test.foo2") + ) val outputNodes: Set[GraphOutputNode] = workflowGraph.nodes.filterByType[GraphOutputNode] outputNodes.map(_.localName) should be(Set("foo1.out", "foo2.out")) - outputNodes.map(_.identifier.fullyQualifiedName.value) should be(Set("conditional_test.foo1.out", "conditional_test.foo2.out")) + outputNodes.map(_.identifier.fullyQualifiedName.value) should be( + Set("conditional_test.foo1.out", "conditional_test.foo2.out") + ) workflowGraph.nodes.size should be(6) } diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlConditionalWomSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlConditionalWomSpec.scala index 820bd3bd607..12c5f4e57a3 100644 --- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlConditionalWomSpec.scala +++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlConditionalWomSpec.scala @@ -39,66 +39,92 @@ class WdlConditionalWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma conditionalTestGraph match { case Valid(g) => validateGraph(g) - case Invalid(errors) => fail(s"Unable to build wom version 
of conditional foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}") + case Invalid(errors) => + fail(s"Unable to build wom version of conditional foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}") } def validateGraph(workflowGraph: Graph) = { case class OuterGraphValidations(conditionalNode: ConditionalNode, foo_i_inputNode: GraphInputNode) def validateOuterGraph: OuterGraphValidations = { - val conditionalNode = workflowGraph.nodes.firstByType[ConditionalNode].getOrElse(fail("Resulting graph did not contain a ConditionalNode")) + val conditionalNode = workflowGraph.nodes + .firstByType[ConditionalNode] + .getOrElse(fail("Resulting graph did not contain a ConditionalNode")) val inputNodes: Set[GraphInputNode] = workflowGraph.nodes.filterByType[GraphInputNode] - val b_inputNode = inputNodes.find(_.localName == "b").getOrElse(fail("Resulting graph did not contain the 'b' GraphInputNode")) + val b_inputNode = + inputNodes.find(_.localName == "b").getOrElse(fail("Resulting graph did not contain the 'b' GraphInputNode")) b_inputNode.womType should be(WomBooleanType) - val foo_i_inputNode = inputNodes.find(_.localName == "foo.i").getOrElse(fail("Resulting graph did not contain the 'foo.i' GraphInputNode")) + val foo_i_inputNode = inputNodes + .find(_.localName == "foo.i") + .getOrElse(fail("Resulting graph did not contain the 'foo.i' GraphInputNode")) foo_i_inputNode.womType should be(WomIntegerType) - val foo_out_output = workflowGraph.nodes.collectFirst { - case gon: GraphOutputNode if gon.localName == "foo.out" => gon - }.getOrElse(fail("Resulting graph did not contain the 'foo.out' GraphOutputNode")) + val foo_out_output = workflowGraph.nodes + .collectFirst { + case gon: GraphOutputNode if gon.localName == "foo.out" => gon + } + .getOrElse(fail("Resulting graph did not contain the 'foo.out' GraphOutputNode")) foo_out_output.womType should be(WomOptionalType(WomStringType)) foo_out_output.identifier.fullyQualifiedName.value shouldBe "conditional_test.foo.out" - - val expressionNode = workflowGraph.nodes.collectFirst { - case expr: ExpressionNode if expr.localName == "conditional" => expr - }.getOrElse(fail("Resulting graph did not contain the 'conditional' ExpressionNode")) - workflowGraph.nodes should be(Set(conditionalNode, foo_i_inputNode, b_inputNode, foo_out_output, expressionNode)) + val expressionNode = workflowGraph.nodes + .collectFirst { + case expr: ExpressionNode if expr.localName == "conditional" => expr + } + .getOrElse(fail("Resulting graph did not contain the 'conditional' ExpressionNode")) + + workflowGraph.nodes should be( + Set(conditionalNode, foo_i_inputNode, b_inputNode, foo_out_output, expressionNode) + ) OuterGraphValidations(conditionalNode, foo_i_inputNode) } case class InnerGraphValidations(foo_out_innerOutput: GraphOutputNode) def validateInnerGraph(validatedOuterGraph: OuterGraphValidations): InnerGraphValidations = { - val foo_i_innerInput = validatedOuterGraph.conditionalNode.innerGraph.nodes.collectFirst { - case gin: ExternalGraphInputNode if gin.identifier.fullyQualifiedName.value == "conditional_test.foo.i" => gin - }.getOrElse(fail("Conditional inner graph did not contain a GraphInputNode 'foo.i'")) - - val foo_callNode = validatedOuterGraph.conditionalNode.innerGraph.nodes.collectFirst { - case c: CommandCallNode if c.localName == "foo" => c - }.getOrElse(fail("Conditional inner graph did not contain a call to 'foo'")) + val foo_i_innerInput = validatedOuterGraph.conditionalNode.innerGraph.nodes + .collectFirst { + case gin: 
ExternalGraphInputNode if gin.identifier.fullyQualifiedName.value == "conditional_test.foo.i" => + gin + } + .getOrElse(fail("Conditional inner graph did not contain a GraphInputNode 'foo.i'")) + + val foo_callNode = validatedOuterGraph.conditionalNode.innerGraph.nodes + .collectFirst { + case c: CommandCallNode if c.localName == "foo" => c + } + .getOrElse(fail("Conditional inner graph did not contain a call to 'foo'")) foo_callNode.identifier.fullyQualifiedName.value shouldBe "conditional_test.foo" - val foo_out_innerOutput = validatedOuterGraph.conditionalNode.innerGraph.nodes.collectFirst { - case gon: GraphOutputNode if gon.localName == "foo.out" => gon - }.getOrElse(fail("Conditional inner graph did not contain a GraphOutputNode 'foo.out'")) + val foo_out_innerOutput = validatedOuterGraph.conditionalNode.innerGraph.nodes + .collectFirst { + case gon: GraphOutputNode if gon.localName == "foo.out" => gon + } + .getOrElse(fail("Conditional inner graph did not contain a GraphOutputNode 'foo.out'")) - validatedOuterGraph.conditionalNode.innerGraph.nodes should be(Set(foo_i_innerInput, foo_callNode, foo_out_innerOutput)) + validatedOuterGraph.conditionalNode.innerGraph.nodes should be( + Set(foo_i_innerInput, foo_callNode, foo_out_innerOutput) + ) InnerGraphValidations(foo_out_innerOutput) } - def validateConnections(validatedOuterGraph: OuterGraphValidations, validatedInnerGraph: InnerGraphValidations) = { + def validateConnections(validatedOuterGraph: OuterGraphValidations, validatedInnerGraph: InnerGraphValidations) = // The ConditionalNode's output port is correctly associated with the inner graph's GraphOutputNode: validatedOuterGraph.conditionalNode.conditionalOutputPorts.toList match { case (port @ ConditionalOutputPort(outputToGather, _)) :: Nil => port.name should be("foo.out") port.womType should be(WomOptionalType(WomStringType)) outputToGather eq validatedInnerGraph.foo_out_innerOutput should be(true) - case other => fail("Expected exactly one output to be gathered in this conditional but got:" + other.mkString("\n", "\n", "\n")) + case other => + fail( + "Expected exactly one output to be gathered in this conditional but got:" + other.mkString("\n", + "\n", + "\n" + ) + ) } - } val outer = validateOuterGraph val inner = validateInnerGraph(outer) @@ -106,5 +132,4 @@ class WdlConditionalWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma } } - } diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlInputValidationSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlInputValidationSpec.scala index 6e5bb2cdb20..cec503bf3de 100644 --- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlInputValidationSpec.scala +++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlInputValidationSpec.scala @@ -22,7 +22,12 @@ import wom.transforms.WomWorkflowDefinitionMaker.ops._ import wom.types._ import wom.values._ -class WdlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with BeforeAndAfterAll with TableDrivenPropertyChecks { +class WdlInputValidationSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with BeforeAndAfterAll + with TableDrivenPropertyChecks { behavior of "WDL Wom executable" @@ -47,23 +52,41 @@ class WdlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M """.stripMargin val namespace = WdlNamespace.loadUsingSource(wdlWorkflow, None, None).get.asInstanceOf[WdlNamespaceWithWorkflow] - val graph = 
namespace.workflow.toWomWorkflowDefinition(isASubworkflow = false) + val graph = namespace.workflow + .toWomWorkflowDefinition(isASubworkflow = false) .valueOr(errors => fail(s"Failed to build a wom definition: ${errors.toList.mkString(", ")}")) .graph - val w1OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.w1").getOrElse(fail("Failed to find an input node for w1")).singleOutputPort - val w2OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.w2").getOrElse(fail("Failed to find an input node for w2")).singleOutputPort - val t1OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.t.t1").getOrElse(fail("Failed to find an input node for t1")).singleOutputPort - val t2OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.t.t2").getOrElse(fail("Failed to find an input node for t2")).singleOutputPort - val u1OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.u.t1").getOrElse(fail("Failed to find an input node for u1")).singleOutputPort - val u2OutputPort = graph.externalInputNodes.find(_.fullyQualifiedName == "w.u.t2").getOrElse(fail("Failed to find an input node for u2")).singleOutputPort + val w1OutputPort = graph.externalInputNodes + .find(_.fullyQualifiedName == "w.w1") + .getOrElse(fail("Failed to find an input node for w1")) + .singleOutputPort + val w2OutputPort = graph.externalInputNodes + .find(_.fullyQualifiedName == "w.w2") + .getOrElse(fail("Failed to find an input node for w2")) + .singleOutputPort + val t1OutputPort = graph.externalInputNodes + .find(_.fullyQualifiedName == "w.t.t1") + .getOrElse(fail("Failed to find an input node for t1")) + .singleOutputPort + val t2OutputPort = graph.externalInputNodes + .find(_.fullyQualifiedName == "w.t.t2") + .getOrElse(fail("Failed to find an input node for t2")) + .singleOutputPort + val u1OutputPort = graph.externalInputNodes + .find(_.fullyQualifiedName == "w.u.t1") + .getOrElse(fail("Failed to find an input node for u1")) + .singleOutputPort + val u2OutputPort = graph.externalInputNodes + .find(_.fullyQualifiedName == "w.u.t2") + .getOrElse(fail("Failed to find an input node for u2")) + .singleOutputPort - def validate(inputFile: String): Checked[ResolvedExecutableInputs] = { + def validate(inputFile: String): Checked[ResolvedExecutableInputs] = namespace.toWomExecutable(Option(inputFile), NoIoFunctionSet, strictValidation = true) match { case Left(errors) => Left(errors) case Right(e) => e.resolvedExecutableInputs.validNelCheck } - } val validations: TableFor3[String, String, Checked[ResolvedExecutableInputs]] = Table( ("test name", "inputs JSON", "input set"), @@ -112,18 +135,22 @@ class WdlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M |{ |} """.stripMargin, - NonEmptyList.fromListUnsafe(List( - "Required workflow input 'w.t.t1' not specified", - "Required workflow input 'w.u.t1' not specified", - "Required workflow input 'w.w1' not specified" - )).asLeft[ResolvedExecutableInputs] + NonEmptyList + .fromListUnsafe( + List( + "Required workflow input 'w.t.t1' not specified", + "Required workflow input 'w.u.t1' not specified", + "Required workflow input 'w.w1' not specified" + ) + ) + .asLeft[ResolvedExecutableInputs] ) ) /* * Note that we create the graph twice: - * once in namespace.workflow.toWomWorkflowDefinition for the expectations - * once in namespace.toWomExecutable to validate the actual inputs + * once in namespace.workflow.toWomWorkflowDefinition for the expectations + * once in namespace.toWomExecutable to 
validate the actual inputs * So the output ports in the map won't be object matches. We just check the name equality. */ private def validateInexactPortEquality(actual: ResolvedExecutableInputs, expected: ResolvedExecutableInputs) = { @@ -135,13 +162,14 @@ class WdlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M } forAll(validations) { (name, inputSource, expectation) => - it should s"validate $name" in { (validate(inputSource), expectation) match { - case (Left(actualError), Left(expectedError)) => actualError.toList.toSet -- expectedError.toList.toSet should be(Set.empty) + case (Left(actualError), Left(expectedError)) => + actualError.toList.toSet -- expectedError.toList.toSet should be(Set.empty) case (Left(actualError), _) => fail(s"Expected success but got errors: ${actualError.toList.mkString(", ")}") case (Right(actualInputs), Right(expectedInputs)) => validateInexactPortEquality(actualInputs, expectedInputs) - case (_, Left(expectedError)) => fail(s"Expected errors: '${expectedError.toList.mkString(", ")}' but got success ") + case (_, Left(expectedError)) => + fail(s"Expected errors: '${expectedError.toList.mkString(", ")}' but got success ") } } } diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNamespaceWomSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNamespaceWomSpec.scala index 8761e936ff3..d61a8a336ab 100644 --- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNamespaceWomSpec.scala +++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNamespaceWomSpec.scala @@ -64,7 +64,8 @@ class WdlNamespaceWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val workflowGraph = wom3Step match { case Valid(g) => g - case Invalid(errors) => fail(s"Unable to build wom version of 3step from WDL: ${errors.toList.mkString("\n", "\n", "\n")}") + case Invalid(errors) => + fail(s"Unable to build wom version of 3step from WDL: ${errors.toList.mkString("\n", "\n", "\n")}") } val graphInputNodes = workflowGraph.nodes collect { case gin: ExternalGraphInputNode => gin } @@ -73,17 +74,27 @@ class WdlNamespaceWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc patternInputNode.localName should be("cgrep.pattern") patternInputNode.fullyQualifiedName should be("three_step.cgrep.pattern") - workflowGraph.nodes collect { case gon: ExpressionBasedGraphOutputNode => gon.localName } should be(Set("wc.count", "cgrep.count", "ps.procs")) + workflowGraph.nodes collect { case gon: ExpressionBasedGraphOutputNode => gon.localName } should be( + Set("wc.count", "cgrep.count", "ps.procs") + ) - val ps: CommandCallNode = workflowGraph.nodes.collectFirst({ case ps: CommandCallNode if ps.localName == "ps" => ps }).get - val cgrep: CommandCallNode = workflowGraph.nodes.collectFirst({ case cgrep: CommandCallNode if cgrep.localName == "cgrep" => cgrep }).get - val cgrepInFileExpression = { - workflowGraph.nodes.collectFirst({ case cgrepInFile: ExpressionNode if cgrepInFile.localName == "cgrep.in_file" => cgrepInFile }).get - } - val wc: CommandCallNode = workflowGraph.nodes.collectFirst({ case wc: CommandCallNode if wc.localName == "wc" => wc }).get - val wcInFileExpression = { - workflowGraph.nodes.collectFirst({ case wcInFile: ExpressionNode if wcInFile.localName == "wc.in_file" => wcInFile }).get - } + val ps: CommandCallNode = workflowGraph.nodes.collectFirst { + case ps: CommandCallNode if ps.localName == "ps" => ps + }.get + val cgrep: CommandCallNode = workflowGraph.nodes.collectFirst { + 
+      case cgrep: CommandCallNode if cgrep.localName == "cgrep" => cgrep
+    }.get
+    val cgrepInFileExpression =
+      workflowGraph.nodes.collectFirst {
+        case cgrepInFile: ExpressionNode if cgrepInFile.localName == "cgrep.in_file" => cgrepInFile
+      }.get
+    val wc: CommandCallNode = workflowGraph.nodes.collectFirst {
+      case wc: CommandCallNode if wc.localName == "wc" => wc
+    }.get
+    val wcInFileExpression =
+      workflowGraph.nodes.collectFirst {
+        case wcInFile: ExpressionNode if wcInFile.localName == "wc.in_file" => wcInFile
+      }.get
 
     workflowGraph.nodes.filterByType[CallNode] should be(Set(ps, cgrep, wc))
     ps.inputPorts.map(_.name) should be(Set.empty)
@@ -102,10 +113,15 @@ class WdlNamespaceWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc
     val inFileMapping = cgrepInputs(cgrepFileInputDef)
     inFileMapping.select[OutputPort].isDefined shouldBe true
     // This should be less ugly when we can access a string value from a womexpression
-    inFileMapping.select[OutputPort].get
-      .graphNode.asInstanceOf[ExpressionNode]
-      .womExpression.asInstanceOf[WdlWomExpression]
-      .wdlExpression.valueString shouldBe "ps.procs"
+    inFileMapping
+      .select[OutputPort]
+      .get
+      .graphNode
+      .asInstanceOf[ExpressionNode]
+      .womExpression
+      .asInstanceOf[WdlWomExpression]
+      .wdlExpression
+      .valueString shouldBe "ps.procs"
 
     val cgrepPatternInputDef = cgrep.callable.inputs.find(_.name == "pattern").get
     cgrepInputs(cgrepPatternInputDef).select[OutputPort].get eq patternInputNode.singleOutputPort shouldBe true
diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNestedConditionalWomSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNestedConditionalWomSpec.scala
index 0eeedc5506c..e980c03542a 100644
--- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNestedConditionalWomSpec.scala
+++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlNestedConditionalWomSpec.scala
@@ -35,7 +35,6 @@ class WdlNestedConditionalWomSpec extends AnyFlatSpec with CromwellTimeoutSpec w
     ("same lookups in if condition, scatter collection, and task calls", sameLookupsInConditionsAndInnerTaskCalls)
   )
 
-
   /*
     NB these tests go as far as "can I make a WOM Graph out of this WDL. We don't do anything to check that the WOM Graph is correct"
 
@@ -51,12 +50,15 @@ class WdlNestedConditionalWomSpec extends AnyFlatSpec with CromwellTimeoutSpec w
 
       // Run each WDL through 20 times because the topological ordering of WdlGraphNodes is sometimes non-deterministic
      // and the order of operations has been known to change whether a bug is expressed or not.
-      val errors = (0 until 20).toList.as(mkTestGraph) collect {
-        case i @ Invalid(_) => i.e.toList
+      val errors = (0 until 20).toList.as(mkTestGraph) collect { case i @ Invalid(_) =>
+        i.e.toList
       }
 
      if (errors.nonEmpty) {
-        fail(s"Unable to build wom version of nested_lookups from WDL ${errors.size * 5}% of the time. First failure was: ${errors.head.mkString("\n", "\n", "\n")}")
+        fail(
+          s"Unable to build wom version of nested_lookups from WDL ${errors.size * 5}% of the time. First failure was: ${errors.head
+            .mkString("\n", "\n", "\n")}"
+        )
      }
    }
diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala
index 1a2c586e7fd..c302cb87b4a 100644
--- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala
+++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala
@@ -39,28 +39,36 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche
 
     scatterTestGraph match {
      case Valid(g) => validateGraph(g)
-      case Invalid(errors) => fail(s"Unable to build wom version of scatter foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
+      case Invalid(errors) =>
+        fail(s"Unable to build wom version of scatter foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
     }
 
     def validateGraph(workflowGraph: Graph) = {
      case class OuterGraphValidations(scatterNode: ScatterNode, xs_inputNode: GraphInputNode)
      def validateOuterGraph: OuterGraphValidations = {
-        val scatterNode = workflowGraph.nodes.firstByType[ScatterNode].getOrElse(fail("Resulting graph did not contain a ScatterNode"))
+        val scatterNode =
+          workflowGraph.nodes.firstByType[ScatterNode].getOrElse(fail("Resulting graph did not contain a ScatterNode"))
 
-        val xs_inputNode = workflowGraph.nodes.collectFirst {
-          case gin: GraphInputNode if gin.localName == "xs" => gin
-        }.getOrElse(fail("Resulting graph did not contain the 'xs' GraphInputNode"))
+        val xs_inputNode = workflowGraph.nodes
+          .collectFirst {
+            case gin: GraphInputNode if gin.localName == "xs" => gin
+          }
+          .getOrElse(fail("Resulting graph did not contain the 'xs' GraphInputNode"))
 
-        val scatterExpressionNode = workflowGraph.nodes.collectFirst {
-          case expr: ExpressionNode if expr.localName == "x" => expr
-        }.getOrElse(fail("Resulting graph did not contain the 'x' ExpressionNode"))
+        val scatterExpressionNode = workflowGraph.nodes
+          .collectFirst {
+            case expr: ExpressionNode if expr.localName == "x" => expr
+          }
+          .getOrElse(fail("Resulting graph did not contain the 'x' ExpressionNode"))
 
        scatterNode.inputPorts.map(_.upstream) shouldBe Set(scatterExpressionNode.singleOutputPort)
 
-        val foo_out_output = workflowGraph.nodes.collectFirst {
-          case gon: GraphOutputNode if gon.localName == "foo.out" => gon
-        }.getOrElse(fail("Resulting graph did not contain the 'foo.out' GraphOutputNode"))
+        val foo_out_output = workflowGraph.nodes
+          .collectFirst {
+            case gon: GraphOutputNode if gon.localName == "foo.out" => gon
+          }
+          .getOrElse(fail("Resulting graph did not contain the 'foo.out' GraphOutputNode"))
 
        foo_out_output.womType should be(WomArrayType(WomStringType))
        foo_out_output.identifier.fullyQualifiedName.value shouldBe "scatter_test.foo.out"
@@ -70,34 +78,49 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche
 
      case class InnerGraphValidations(x_scatterCollectionInput: GraphInputNode, foo_out_innerOutput: GraphOutputNode)
      def validateInnerGraph(validatedOuterGraph: OuterGraphValidations): InnerGraphValidations = {
-        val x_scatterCollectionInput = validatedOuterGraph.scatterNode.innerGraph.nodes.collectFirst {
-          case gin: GraphInputNode if gin.localName == "x" => gin
-        }.getOrElse(fail("Scatter inner graph did not contain a GraphInputNode 'x'"))
-
-        val foo_callNode = validatedOuterGraph.scatterNode.innerGraph.nodes.collectFirst {
-          case c: CommandCallNode if c.localName == "foo" => c
-        }.getOrElse(fail("Scatter inner graph did not contain a call to 'foo'"))
+        val x_scatterCollectionInput = validatedOuterGraph.scatterNode.innerGraph.nodes
+          .collectFirst {
+            case gin: GraphInputNode if gin.localName == "x" => gin
+          }
+          .getOrElse(fail("Scatter inner graph did not contain a GraphInputNode 'x'"))
+
+        val foo_callNode = validatedOuterGraph.scatterNode.innerGraph.nodes
+          .collectFirst {
+            case c: CommandCallNode if c.localName == "foo" => c
+          }
+          .getOrElse(fail("Scatter inner graph did not contain a call to 'foo'"))
        foo_callNode.identifier.fullyQualifiedName.value shouldBe "scatter_test.foo"
 
-        val foo_out_innerOutput = validatedOuterGraph.scatterNode.innerGraph.nodes.collectFirst {
-          case gon: GraphOutputNode if gon.localName == "foo.out" => gon
-        }.getOrElse(fail("Scatter inner graph did not contain a GraphOutputNode 'foo.out'"))
-
-        val foo_out_i_expressionNode = validatedOuterGraph.scatterNode.innerGraph.nodes.collectFirst {
-          case expr: ExpressionNode if expr.localName == "foo.i" => expr
-        }.getOrElse(fail("Scatter inner graph did not contain a ExpressionNode 'scatter_test.foo.i'"))
-
-        validatedOuterGraph.scatterNode.innerGraph.nodes should be(Set(x_scatterCollectionInput, foo_callNode, foo_out_innerOutput, foo_out_i_expressionNode))
+        val foo_out_innerOutput = validatedOuterGraph.scatterNode.innerGraph.nodes
+          .collectFirst {
+            case gon: GraphOutputNode if gon.localName == "foo.out" => gon
+          }
+          .getOrElse(fail("Scatter inner graph did not contain a GraphOutputNode 'foo.out'"))
+
+        val foo_out_i_expressionNode = validatedOuterGraph.scatterNode.innerGraph.nodes
+          .collectFirst {
+            case expr: ExpressionNode if expr.localName == "foo.i" => expr
+          }
+          .getOrElse(fail("Scatter inner graph did not contain a ExpressionNode 'scatter_test.foo.i'"))
+
+        validatedOuterGraph.scatterNode.innerGraph.nodes should be(
+          Set(x_scatterCollectionInput, foo_callNode, foo_out_innerOutput, foo_out_i_expressionNode)
+        )
        InnerGraphValidations(x_scatterCollectionInput, foo_out_innerOutput)
      }
 
-      def validateConnections(validatedOuterGraph: OuterGraphValidations, validatedInnerGraph: InnerGraphValidations) = {
+      def validateConnections(validatedOuterGraph: OuterGraphValidations,
+                              validatedInnerGraph: InnerGraphValidations
+      ) = {
        // The scatter collection links to its predecessor
-        validatedOuterGraph.scatterNode.scatterCollectionExpressionNodes.head.inputPorts.map(_.upstream.graphNode) should be(Set(validatedOuterGraph.xs_inputNode))
+        validatedOuterGraph.scatterNode.scatterCollectionExpressionNodes.head.inputPorts
+          .map(_.upstream.graphNode) should be(Set(validatedOuterGraph.xs_inputNode))
 
        // The ScatterNode's "scatter variable mapping" links to the innerGraph's scatter variable input Node:
-        validatedOuterGraph.scatterNode.scatterVariableInnerGraphInputNodes.head eq validatedInnerGraph.x_scatterCollectionInput should be(true)
+        validatedOuterGraph.scatterNode.scatterVariableInnerGraphInputNodes.head eq validatedInnerGraph.x_scatterCollectionInput should be(
+          true
+        )
 
        // The ScatterNode's output port links to the inner graph's GraphOutputNode:
        validatedOuterGraph.scatterNode.outputMapping.toList match {
@@ -105,7 +128,10 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche
            port.name should be("foo.out")
            womType should be(WomArrayType(WomStringType))
            outputToGather eq validatedInnerGraph.foo_out_innerOutput should be(true)
-          case other => fail("Expected exactly one output to be gathered in this scatter but got:" + other.mkString("\n", "\n", "\n"))
+          case other =>
+            fail(
+              "Expected exactly one output to be gathered in this scatter but got:" + other.mkString("\n", "\n", "\n")
+            )
        }
      }
 
@@ -140,7 +166,8 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche
 
     scatterTestGraph match {
      case Valid(g) => validateGraph(g)
-      case Invalid(errors) => fail(s"Unable to build wom version of scatter foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
+      case Invalid(errors) =>
+        fail(s"Unable to build wom version of scatter foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
     }
 
     def validateGraph(workflowGraph: Graph) = {
@@ -149,10 +176,11 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche
      workflowGraph.nodes.size should be(7)
 
      // Find that scatter:
-      workflowGraph.nodes.collectFirst {
-        case s: ScatterNode => s
-      }.getOrElse(fail("Resulting graph did not contain a ScatterNode"))
-
+      workflowGraph.nodes
+        .collectFirst { case s: ScatterNode =>
+          s
+        }
+        .getOrElse(fail("Resulting graph did not contain a ScatterNode"))
     }
   }
 
@@ -182,31 +210,38 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche
 
     scatterTestGraph match {
      case Valid(g) => validateGraph(g)
-      case Invalid(errors) => fail(s"Unable to build wom version of scatter foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
+      case Invalid(errors) =>
+        fail(s"Unable to build wom version of scatter foo from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
     }
 
     def validateGraph(workflowGraph: Graph) = {
      // Find the inputs:
      val inputNodes = workflowGraph.nodes.filterByType[RequiredGraphInputNode]
-      inputNodes.map {_.localName} should be(Set("foo.j"))
-      inputNodes.map {_.identifier.fullyQualifiedName.value} should be(Set("scatter_test.foo.j"))
+      inputNodes.map(_.localName) should be(Set("foo.j"))
+      inputNodes.map(_.identifier.fullyQualifiedName.value) should be(Set("scatter_test.foo.j"))
 
      // Find that scatter:
-      val scatterNode = workflowGraph.nodes.collectFirst {
-        case s: ScatterNode => s
-      }.getOrElse(fail("Resulting graph did not contain a ScatterNode"))
+      val scatterNode = workflowGraph.nodes
+        .collectFirst { case s: ScatterNode =>
+          s
+        }
+        .getOrElse(fail("Resulting graph did not contain a ScatterNode"))
 
-      val scatterInnerInputs: Set[ExternalGraphInputNode] = scatterNode.innerGraph.nodes.filterByType[ExternalGraphInputNode]
-      scatterInnerInputs map {_.identifier.fullyQualifiedName.value} should be(Set("scatter_test.foo.j"))
-      val scatterInnerItemInput: Set[OuterGraphInputNode] = scatterNode.innerGraph.nodes.filterByType[OuterGraphInputNode]
-      scatterInnerItemInput map {_.localName} should be(Set("s"))
+      val scatterInnerInputs: Set[ExternalGraphInputNode] =
+        scatterNode.innerGraph.nodes.filterByType[ExternalGraphInputNode]
+      scatterInnerInputs map { _.identifier.fullyQualifiedName.value } should be(Set("scatter_test.foo.j"))
+      val scatterInnerItemInput: Set[OuterGraphInputNode] =
+        scatterNode.innerGraph.nodes.filterByType[OuterGraphInputNode]
+      scatterInnerItemInput map { _.localName } should be(Set("s"))
 
      // Find the outputs:
-      val outputNodes = workflowGraph.nodes.collect {
-        case output: GraphOutputNode => output
+      val outputNodes = workflowGraph.nodes.collect { case output: GraphOutputNode =>
+        output
       }
-      outputNodes map { on => (on.localName, on.womType) } should be(Set(("foo.int_out", WomArrayType(WomIntegerType)), ("foo.str_out", WomArrayType(WomStringType))))
+      outputNodes map { on => (on.localName, on.womType) } should be(
+        Set(("foo.int_out", WomArrayType(WomIntegerType)), ("foo.str_out", WomArrayType(WomStringType)))
+      )
     }
   }
diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlSubworkflowWomSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlSubworkflowWomSpec.scala
index fffe841b1be..d41d38c0733 100644
--- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlSubworkflowWomSpec.scala
+++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlSubworkflowWomSpec.scala
@@ -50,19 +50,21 @@ class WdlSubworkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma
        |}
      """.stripMargin
 
-    def innerResolver: Draft2ImportResolver = str => Draft2ResolvedImportBundle(innerWdl, ResolvedImportRecord(str))
-    val namespace = WdlNamespace.loadUsingSource(
-      workflowSource = outerWdl,
-      resource = None,
-      importResolver = Some(Seq(innerResolver))).get.asInstanceOf[WdlNamespaceWithWorkflow]
+    val namespace = WdlNamespace
+      .loadUsingSource(workflowSource = outerWdl, resource = None, importResolver = Some(Seq(innerResolver)))
+      .get
+      .asInstanceOf[WdlNamespaceWithWorkflow]
 
     val outerWorkflowGraph = namespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).map(_.graph)
     outerWorkflowGraph match {
      case Valid(g) => validateOuter(g)
-      case Invalid(errors) => fail(s"Unable to build wom version of workflow with subworkflow from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
+      case Invalid(errors) =>
+        fail(
+          s"Unable to build wom version of workflow with subworkflow from WDL: ${errors.toList.mkString("\n", "\n", "\n")}"
+        )
     }
 
     def validateOuter(workflowGraph: Graph) = {
@@ -75,7 +77,9 @@ class WdlSubworkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma
      val innerCall = calls.head.asInstanceOf[WorkflowCallNode]
      innerCall.localName should be("inner")
      innerCall.identifier.fullyQualifiedName.value should be("outer.inner")
-      innerCall.upstream.head.asInstanceOf[ExpressionNode].inputPorts.map(_.upstream.graphNode) should be(Set(workflowGraph.inputNodes.head))
+      innerCall.upstream.head.asInstanceOf[ExpressionNode].inputPorts.map(_.upstream.graphNode) should be(
+        Set(workflowGraph.inputNodes.head)
+      )
 
      // One output, "out"
      workflowGraph.outputNodes.map(_.localName) should be(Set("out"))
@@ -92,7 +96,9 @@ class WdlSubworkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma
      calls.map(_.localName) should be(Set("foo"))
 
      val fooCall = calls.head.asInstanceOf[CommandCallNode]
-      fooCall.upstream.head.asInstanceOf[ExpressionNode].inputPorts.map(_.upstream.graphNode) should be(Set(innerGraph.inputNodes.head))
+      fooCall.upstream.head.asInstanceOf[ExpressionNode].inputPorts.map(_.upstream.graphNode) should be(
+        Set(innerGraph.inputNodes.head)
+      )
 
      innerGraph.outputNodes.map(_.localName) should be(Set("out", "x"))
      innerGraph.outputNodes.map(_.identifier.fullyQualifiedName.value) should be(Set("inner.out", "inner.x"))
@@ -141,26 +147,30 @@ class WdlSubworkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma
        |}
      """.stripMargin
 
-    def innerResolver: Draft2ImportResolver = str => Draft2ResolvedImportBundle(innerWdl, ResolvedImportRecord(str))
-    val namespace = WdlNamespace.loadUsingSource(
-      workflowSource = outerWdl,
-      resource = None,
-      importResolver = Some(Seq(innerResolver))).get.asInstanceOf[WdlNamespaceWithWorkflow]
+    val namespace = WdlNamespace
+      .loadUsingSource(workflowSource = outerWdl, resource = None, importResolver = Some(Seq(innerResolver)))
+      .get
+      .asInstanceOf[WdlNamespaceWithWorkflow]
 
     val outerWorkflowGraph = namespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).map(_.graph)
     outerWorkflowGraph match {
      case Valid(g) => validateOuter(g)
-      case Invalid(errors) => fail(s"Unable to build wom version of workflow with subworkflow from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
+      case Invalid(errors) =>
+        fail(
+          s"Unable to build wom version of workflow with subworkflow from WDL: ${errors.toList.mkString("\n", "\n", "\n")}"
+        )
     }
 
     def validateOuter(workflowGraph: Graph) = {
      workflowGraph.inputNodes.map(_.localName) should be(Set("xs"))
 
      val scatter = workflowGraph.scatters.head
-      val scatterCollectionNode = workflowGraph.nodes.collectFirst({ case e: ExpressionNode if e.localName == "x" => e }).get
+      val scatterCollectionNode = workflowGraph.nodes.collectFirst {
+        case e: ExpressionNode if e.localName == "x" => e
+      }.get
      scatter.upstream should be(Set(scatterCollectionNode))
      scatter.outputPorts.map(_.name) should be(Set("inner.out"))
      scatter.outputPorts.head.womType should be(WomArrayType(WomStringType))
@@ -206,16 +216,19 @@ class WdlSubworkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma
 
     def innerResolver: Draft2ImportResolver = str => Draft2ResolvedImportBundle(innerWdl, ResolvedImportRecord(str))
 
-    val namespace = WdlNamespace.loadUsingSource(
-      workflowSource = outerWdl,
-      resource = None,
-      importResolver = Some(Seq(innerResolver))).get.asInstanceOf[WdlNamespaceWithWorkflow]
+    val namespace = WdlNamespace
+      .loadUsingSource(workflowSource = outerWdl, resource = None, importResolver = Some(Seq(innerResolver)))
+      .get
+      .asInstanceOf[WdlNamespaceWithWorkflow]
 
     val outerWorkflowGraph = namespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).map(_.graph)
     outerWorkflowGraph match {
      case Valid(g) => validateOuter(g)
-      case Invalid(errors) => fail(s"Unable to build wom version of workflow with subworkflow from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
+      case Invalid(errors) =>
+        fail(
+          s"Unable to build wom version of workflow with subworkflow from WDL: ${errors.toList.mkString("\n", "\n", "\n")}"
+        )
     }
 
     def validateOuter(workflowGraph: Graph) = {
@@ -250,7 +263,9 @@ class WdlSubworkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma
      calls.map(_.localName) should be(Set("foo"))
 
      val fooCall = calls.head.asInstanceOf[CommandCallNode]
-      fooCall.upstream.head.asInstanceOf[ExpressionNode].inputPorts.map(_.upstream.graphNode) should be(Set(innerGraph.inputNodes.head))
+      fooCall.upstream.head.asInstanceOf[ExpressionNode].inputPorts.map(_.upstream.graphNode) should be(
+        Set(innerGraph.inputNodes.head)
+      )
 
      innerGraph.outputNodes.map(_.localName) should be(Set("out"))
      innerGraph.outputNodes.map(_.identifier.fullyQualifiedName.value) should be(Set("twin.out"))
diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlWomExpressionsAsInputsSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlWomExpressionsAsInputsSpec.scala
index a0d85b969ff..53c750ebf6f 100644
--- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlWomExpressionsAsInputsSpec.scala
+++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlWomExpressionsAsInputsSpec.scala
@@ -35,7 +35,6 @@ object WdlWomExpressionsAsInputsSpec {
    """.stripMargin
 }
 
-
 class WdlWomExpressionsAsInputsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers {
   behavior of "WdlWomExpressionsAsInputs"
 
@@ -47,7 +46,9 @@ class WdlWomExpressionsAsInputsSpec extends AnyFlatSpec with CromwellTimeoutSpec
      case Invalid(errors) => fail(s"Unable to build wom graph from WDL: ${errors.toList.mkString("\n", "\n", "\n")}")
     }
 
-    val callNodes = workflowGraph.nodes.toList collect { case callNode: CommandCallNode => callNode.localName -> callNode } toMap
+    val callNodes = workflowGraph.nodes.toList collect { case callNode: CommandCallNode =>
+      callNode.localName -> callNode
+    } toMap
 
     val a = callNodes("a")
     val b = callNodes("b")
@@ -56,9 +57,8 @@ class WdlWomExpressionsAsInputsSpec extends AnyFlatSpec with CromwellTimeoutSpec
     val cInputExpressionNode = c.inputPorts.map(_.upstream).head.graphNode.asInstanceOf[ExpressionNode]
     cInputExpressionNode.inputPorts.map(_.upstream) shouldBe a.outputPorts ++ b.outputPorts
 
-    val inputExpression = c.inputDefinitionMappings
-      .head._2.select[OutputPort].get
-      .graphNode.asInstanceOf[ExpressionNode]
+    val inputExpression =
+      c.inputDefinitionMappings.head._2.select[OutputPort].get.graphNode.asInstanceOf[ExpressionNode]
 
     List("a", "b") foreach { x =>
      val connectedInputPort = inputExpression.inputMapping(s"$x.int_out")
diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala
index 7b0405b473a..1ca48a996bb 100644
--- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala
+++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala
@@ -5,7 +5,8 @@ import wdl.transforms.base.ast2wdlom.{GenericAst, GenericAstList, GenericAstNode
 import scala.jdk.CollectionConverters._
 
 case class Draft3GenericAst(ast: Ast) extends GenericAst {
-  override def getAttribute(attr: String): GenericAstNode = Option(ast.getAttribute(attr)).map(Draft3GenericAstNode.apply).orNull
+  override def getAttribute(attr: String): GenericAstNode =
+    Option(ast.getAttribute(attr)).map(Draft3GenericAstNode.apply).orNull
   override def getAttributes: Map[String, GenericAstNode] = ast.getAttributes.asScala.toMap collect {
    case (key, value) if value != null => key -> Draft3GenericAstNode(value)
   }
diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala
index 7765086a848..eb92bc4855f 100644
--- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala
+++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala
@@ -7,56 +7,117 @@ import common.validation.Checked._
 import wdl.draft3.parser.WdlParser.{Ast, AstNode}
 import wdl.model.draft3.elements.ExpressionElement.KvPair
 import wdl.model.draft3.elements._
-import wdl.transforms.base.ast2wdlom.{AstNodeToCommandPartElement, AstNodeToExpressionElement, AstNodeToKvPair, AstNodeToMetaKvPair, AstNodeToPlaceholderAttributeSet, AstNodeToStaticString, AstNodeToTypeElement, AstToCallElement, AstToCommandSectionElement, AstToDeclarationContent, AstToFileBodyElement, AstToFileElement, AstToIfElement, AstToImportElement, AstToInputDeclarationElement, AstToInputsSectionElement, AstToMetaSectionElement, AstToOutputsSectionElement, AstToParameterMetaSectionElement, AstToRuntimeAttributesSectionElement, AstToScatterElement, AstToStructElement, AstToTaskDefinitionElement, AstToTaskSectionElement, AstToWorkflowBodyElement, AstToWorkflowDefinitionElement, AstToWorkflowGraphNodeElementConverterMaker, GenericAst, GenericAstNode, astNodeToAst, astNodeToAstList}
+import wdl.transforms.base.ast2wdlom.{
+  astNodeToAst,
+  astNodeToAstList,
+  AstNodeToCommandPartElement,
+  AstNodeToExpressionElement,
+  AstNodeToKvPair,
+  AstNodeToMetaKvPair,
+  AstNodeToPlaceholderAttributeSet,
+  AstNodeToStaticString,
+  AstNodeToTypeElement,
+  AstToCallElement,
+  AstToCommandSectionElement,
+  AstToDeclarationContent,
+  AstToFileBodyElement,
+  AstToFileElement,
+  AstToIfElement,
+  AstToImportElement,
+  AstToInputDeclarationElement,
+  AstToInputsSectionElement,
+  AstToMetaSectionElement,
+  AstToOutputsSectionElement,
+  AstToParameterMetaSectionElement,
+  AstToRuntimeAttributesSectionElement,
+  AstToScatterElement,
+  AstToStructElement,
+  AstToTaskDefinitionElement,
+  AstToTaskSectionElement,
+  AstToWorkflowBodyElement,
+  AstToWorkflowDefinitionElement,
+  AstToWorkflowGraphNodeElementConverterMaker,
+  GenericAst,
+  GenericAstNode
+}
 import wdl.draft3.transforms.parsing.fileToAst
 import wom.callable.MetaKvPair
 
 package object ast2wdlom {
 
-  val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck { a => Draft3GenericAst(a).validNelCheck }
-  val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a => Draft3GenericAstNode(a).validNelCheck }
+  val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck(a => Draft3GenericAst(a).validNelCheck)
+  val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a =>
+    Draft3GenericAstNode(a).validNelCheck
+  }
 
-  implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = AstNodeToStaticString.astNodeToStaticStringElement()
+  implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] =
+    AstNodeToStaticString.astNodeToStaticStringElement()
 
   // meta sections
   implicit val astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] = AstNodeToMetaKvPair.astNodeToMetaKvPair
-  implicit val astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement] = astNodeToAst andThen AstToMetaSectionElement.astToMetaSectionElement
-  implicit val astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] = astNodeToAst andThen AstToParameterMetaSectionElement.astToParameterMetaSectionElement
+  implicit val astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement] =
+    astNodeToAst andThen AstToMetaSectionElement.astToMetaSectionElement
+  implicit val astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] =
+    astNodeToAst andThen AstToParameterMetaSectionElement.astToParameterMetaSectionElement
 
-  implicit val astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] = AstNodeToExpressionElement.astNodeToExpressionElement(customEngineFunctionMakers = Map.empty)
-  implicit val astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] = AstNodeToKvPair.astNodeToKvPair(astNodeToExpressionElement)
+  implicit val astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] =
+    AstNodeToExpressionElement.astNodeToExpressionElement(customEngineFunctionMakers = Map.empty)
+  implicit val astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] =
+    AstNodeToKvPair.astNodeToKvPair(astNodeToExpressionElement)
 
-  implicit val astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] = AstNodeToTypeElement.astNodeToTypeElement(Map.empty)
+  implicit val astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] =
+    AstNodeToTypeElement.astNodeToTypeElement(Map.empty)
   implicit val astToStructElement: CheckedAtoB[GenericAst, StructElement] = AstToStructElement.astToStructElement
-  implicit val astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement] = astNodeToAst andThen AstToImportElement.astToImportElement
+  implicit val astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement] =
+    astNodeToAst andThen AstToImportElement.astToImportElement
 
-  implicit val astNodeToInputDeclarationElement: CheckedAtoB[GenericAstNode, InputDeclarationElement] = astNodeToAst andThen AstToInputDeclarationElement.astToInputDeclarationElement
-  implicit val astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement] = astNodeToAst andThen AstToInputsSectionElement.astToInputsSectionElement
+  implicit val astNodeToInputDeclarationElement: CheckedAtoB[GenericAstNode, InputDeclarationElement] =
+    astNodeToAst andThen AstToInputDeclarationElement.astToInputDeclarationElement
+  implicit val astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement] =
+    astNodeToAst andThen AstToInputsSectionElement.astToInputsSectionElement
 
-  implicit val astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent] = astNodeToAst andThen AstToDeclarationContent.astToDeclarationContent
-  implicit val astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement] = astNodeToAst andThen AstToOutputsSectionElement.astToOutputSectionElement
+  implicit val astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent] =
+    astNodeToAst andThen AstToDeclarationContent.astToDeclarationContent
+  implicit val astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement] =
+    astNodeToAst andThen AstToOutputsSectionElement.astToOutputSectionElement
 
   val astToWorkflowGraphNodeElementConverterMaker = new AstToWorkflowGraphNodeElementConverterMaker()
-  implicit val astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] = astNodeToAst andThen astToWorkflowGraphNodeElementConverterMaker.converter
-  implicit val astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement] = astNodeToAst andThen AstToCallElement.astToCallElement
-  implicit val astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement] = astNodeToAst andThen AstToScatterElement.astToScatterElement
-  implicit val astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] = astNodeToAst andThen AstToIfElement.astToIfElement
+  implicit val astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] =
+    astNodeToAst andThen astToWorkflowGraphNodeElementConverterMaker.converter
+  implicit val astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement] =
+    astNodeToAst andThen AstToCallElement.astToCallElement
+  implicit val astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement] =
+    astNodeToAst andThen AstToScatterElement.astToScatterElement
+  implicit val astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] =
+    astNodeToAst andThen AstToIfElement.astToIfElement
   astToWorkflowGraphNodeElementConverterMaker.astNodeToScatterElement = Some(astNodeToScatterElement)
   astToWorkflowGraphNodeElementConverterMaker.astNodeToIfElement = Some(astNodeToIfElement)
   astToWorkflowGraphNodeElementConverterMaker.astNodeToCallElement = Some(astNodeToCallElement)
   astToWorkflowGraphNodeElementConverterMaker.astNodeToDeclarationContent = Some(astNodeToDeclarationContent)
 
-  implicit val astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] = astNodeToAst andThen AstToWorkflowBodyElement.astToWorkflowBodyElement
-  implicit val astToWorkflowDefinitionElement: CheckedAtoB[GenericAst, WorkflowDefinitionElement] = AstToWorkflowDefinitionElement.astToWorkflowDefinitionElement
+  implicit val astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] =
+    astNodeToAst andThen AstToWorkflowBodyElement.astToWorkflowBodyElement
+  implicit val astToWorkflowDefinitionElement: CheckedAtoB[GenericAst, WorkflowDefinitionElement] =
+    AstToWorkflowDefinitionElement.astToWorkflowDefinitionElement
 
-  implicit val astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] = astNodeToAstList andThen AstNodeToPlaceholderAttributeSet.attributeKvpConverter
-  implicit val astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] = AstNodeToCommandPartElement.astNodeToCommandPartElement
-  implicit val astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement] = astNodeToAst andThen AstToCommandSectionElement.astToCommandSectionElement
-  implicit val astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement] = astNodeToAst andThen AstToRuntimeAttributesSectionElement.astToRuntimeSectionElement
-  implicit val astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] = astNodeToAst andThen AstToTaskSectionElement.astToTaskSectionElement
-  implicit val astToTaskDefinitionElement: CheckedAtoB[GenericAst, TaskDefinitionElement] = AstToTaskDefinitionElement.astToTaskDefinitionElement
+  implicit val astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] =
+    astNodeToAstList andThen AstNodeToPlaceholderAttributeSet.attributeKvpConverter
+  implicit val astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] =
+    AstNodeToCommandPartElement.astNodeToCommandPartElement
+  implicit val astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement] =
+    astNodeToAst andThen AstToCommandSectionElement.astToCommandSectionElement
+  implicit val astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement] =
+    astNodeToAst andThen AstToRuntimeAttributesSectionElement.astToRuntimeSectionElement
+  implicit val astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] =
+    astNodeToAst andThen AstToTaskSectionElement.astToTaskSectionElement
+  implicit val astToTaskDefinitionElement: CheckedAtoB[GenericAst, TaskDefinitionElement] =
+    AstToTaskDefinitionElement.astToTaskDefinitionElement
 
-  implicit val astToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] = astNodeToAst andThen AstToFileBodyElement.astToFileBodyElement(astToWorkflowDefinitionElement, astToTaskDefinitionElement, astToStructElement)
+  implicit val astToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] =
+    astNodeToAst andThen AstToFileBodyElement.astToFileBodyElement(astToWorkflowDefinitionElement,
+                                                                   astToTaskDefinitionElement,
+                                                                   astToStructElement
+    )
 
   implicit val astToFileElement: CheckedAtoB[GenericAst, FileElement] = AstToFileElement.astToFileElement
   implicit val fileToFileElement: CheckedAtoB[File, FileElement] = fileToAst andThen wrapAst andThen astToFileElement
diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/consumed/package.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/consumed/package.scala
index d63620c4f1f..909937274e4 100644
--- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/consumed/package.scala
+++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/consumed/package.scala
@@ -13,87 +13,91 @@ import wdl.transforms.base.linking.expression.consumed.UnaryOperatorEvaluators._
 
 package object consumed {
 
-  implicit val expressionElementUnlinkedValueConsumer: ExpressionValueConsumer[ExpressionElement] = new ExpressionValueConsumer[ExpressionElement] {
-    override def expressionConsumedValueHooks(a: ExpressionElement)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = a match {
-      case _: PrimitiveLiteralExpressionElement | _: StringLiteral => Set.empty[UnlinkedConsumedValueHook]
-      case a: StringExpression => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ObjectLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: PairLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ArrayLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: MapLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
+  implicit val expressionElementUnlinkedValueConsumer: ExpressionValueConsumer[ExpressionElement] =
+    new ExpressionValueConsumer[ExpressionElement] {
+      override def expressionConsumedValueHooks(a: ExpressionElement)(implicit
+        expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]
+      ): Set[UnlinkedConsumedValueHook] = a match {
+        case _: PrimitiveLiteralExpressionElement | _: StringLiteral => Set.empty[UnlinkedConsumedValueHook]
+        case a: StringExpression => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ObjectLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: PairLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ArrayLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: MapLiteral => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      // Member access:
-      case a: IdentifierLookup => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: IdentifierMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ExpressionMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: IndexAccess => a.expressionConsumedValueHooks(expressionValueConsumer)
+        // Member access:
+        case a: IdentifierLookup => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: IdentifierMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ExpressionMemberAccess => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: IndexAccess => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      // Unary operators:
-      case a: UnaryNegation => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: UnaryPlus => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: LogicalNot => a.expressionConsumedValueHooks(expressionValueConsumer)
+        // Unary operators:
+        case a: UnaryNegation => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: UnaryPlus => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: LogicalNot => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      // Binary operators:
-      case a: LogicalOr => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: LogicalAnd => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Equals => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: NotEquals => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: LessThan => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: LessThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: GreaterThan => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: GreaterThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Add => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Subtract => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Multiply => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Divide => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Remainder => a.expressionConsumedValueHooks(expressionValueConsumer)
+        // Binary operators:
+        case a: LogicalOr => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: LogicalAnd => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Equals => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: NotEquals => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: LessThan => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: LessThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: GreaterThan => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: GreaterThanOrEquals => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Add => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Subtract => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Multiply => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Divide => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Remainder => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      case a: TernaryIf => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: TernaryIf => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      // Engine functions:
-      case a: StdoutElement.type => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: StderrElement.type => a.expressionConsumedValueHooks(expressionValueConsumer)
+        // Engine functions:
+        case a: StdoutElement.type => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: StderrElement.type => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      case a: ReadLines => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadTsv => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadMap => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadObject => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadObjects => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadJson => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadInt => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadString => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadFloat => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: ReadBoolean => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: WriteLines => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: WriteTsv => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: WriteMap => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: WriteObject => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: WriteObjects => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: WriteJson => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Range => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Transpose => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Length => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Flatten => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Prefix => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: SelectFirst => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: SelectAll => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Defined => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Floor => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Ceil => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Round => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Glob => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadLines => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadTsv => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadMap => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadObject => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadObjects => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadJson => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadInt => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadString => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadFloat => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: ReadBoolean => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: WriteLines => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: WriteTsv => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: WriteMap => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: WriteObject => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: WriteObjects => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: WriteJson => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Range => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Transpose => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Length => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Flatten => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Prefix => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: SelectFirst => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: SelectAll => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Defined => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Floor => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Ceil => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Round => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Glob => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      case a: Size => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Basename => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Size => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Basename => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      case a: Zip => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Cross => a.expressionConsumedValueHooks(expressionValueConsumer)
-      case a: Sep => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Zip => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Cross => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Sep => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      case a: Sub => a.expressionConsumedValueHooks(expressionValueConsumer)
+        case a: Sub => a.expressionConsumedValueHooks(expressionValueConsumer)
 
-      // TODO fill in other expression types
-      case other => throw new Exception(s"Cannot generate consumed values for ExpressionElement ${other.getClass.getSimpleName}")
+        // TODO fill in other expression types
+        case other =>
+          throw new Exception(s"Cannot generate consumed values for ExpressionElement ${other.getClass.getSimpleName}")
+      }
     }
-  }
 }
diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/files/package.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/files/package.scala
index 027fa6efbd2..89d77fd4759 100644
--- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/files/package.scala
+++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/files/package.scala
@@ -20,82 +20,134 @@ package object files {
 
   implicit val expressionFileEvaluator: FileEvaluator[ExpressionElement] = new FileEvaluator[ExpressionElement] {
 
-    override def predictFilesNeededToEvaluate(a: ExpressionElement, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType)
-                                             (implicit fileEvaluator: FileEvaluator[ExpressionElement],
-                                              valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = {
-
+    override def predictFilesNeededToEvaluate(a: ExpressionElement,
+                                              inputs: Map[String, WomValue],
+                                              ioFunctionSet: IoFunctionSet,
+                                              coerceTo: WomType
+    )(implicit
+      fileEvaluator: FileEvaluator[ExpressionElement],
+      valueEvaluator: ValueEvaluator[ExpressionElement]
+    ): ErrorOr[Set[WomFile]] =
      a match {
        // Literals:
-        case a: PrimitiveLiteralExpressionElement => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: StringLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ObjectLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: MapLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ArrayLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: PairLiteral => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: PrimitiveLiteralExpressionElement =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: StringLiteral =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ObjectLiteral =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: MapLiteral =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ArrayLiteral =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: PairLiteral =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
        // Lookups and member accesses:
-        case a: IdentifierLookup => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ExpressionMemberAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: IdentifierMemberAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: IndexAccess => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: IdentifierLookup =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ExpressionMemberAccess =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: IdentifierMemberAccess =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: IndexAccess =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
        // Unary operators:
-        case a: UnaryNegation => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: UnaryPlus => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: LogicalNot => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: UnaryNegation =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: UnaryPlus =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: LogicalNot =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
        // Binary operators (at some point we might want to split these into separate cases):
-        case a: LogicalOr => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: LogicalAnd => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: LogicalOr =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: LogicalAnd =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Equals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: NotEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: LessThan => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: LessThanOrEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: GreaterThan => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: GreaterThanOrEquals => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: NotEquals =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: LessThan =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: LessThanOrEquals =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: GreaterThan =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: GreaterThanOrEquals =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Add => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: Subtract => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: Multiply => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: Subtract =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: Multiply =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Divide => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: Remainder => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: Remainder =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
-        case a: TernaryIf => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: TernaryIf =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
        // Engine functions:
-        case StdoutElement => StdoutElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case StderrElement => StderrElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case StdoutElement =>
+          StdoutElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case StderrElement =>
+          StderrElement.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
-        case a: ReadLines => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadTsv => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadObject => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadObjects => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadJson => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadInt => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadString => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadFloat => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: ReadBoolean => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: WriteLines => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: WriteTsv => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: WriteMap => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: WriteObject => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: WriteObjects => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: WriteJson => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadLines =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadTsv =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadMap =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadObject =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadObjects =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadJson =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadInt =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadString =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadFloat =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: ReadBoolean =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: WriteLines =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: WriteTsv =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: WriteMap =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: WriteObject =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: WriteObjects =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: WriteJson =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Range => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: Transpose => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: Transpose =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Length => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: Flatten => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: Flatten =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Prefix => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: SelectFirst => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: SelectAll => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: Defined => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: SelectFirst =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: SelectAll =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: Defined =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Floor => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Ceil => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Round => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Glob => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
        case a: Size => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
-        case a: Basename => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
+        case a: Basename =>
+          a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
 
        case a: Zip => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
        case a: Cross => a.predictFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)(fileEvaluator, valueEvaluator)
@@ -104,6 +156,5 @@ package object files {
 
        case other => s"No implementation of FileEvaluator[${other.getClass.getSimpleName}]".invalidNel
      }
-    }
   }
 }
diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/types/package.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/types/package.scala
index 7da33f63449..4651a0d4c18 100644
--- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/types/package.scala
+++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/types/package.scala
@@ -17,9 +17,9 @@ import wom.types.WomType
 package object types {
 
   implicit val expressionTypeEvaluator: TypeEvaluator[ExpressionElement] = new TypeEvaluator[ExpressionElement] {
-    override def evaluateType(a: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit typeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
-
+    override def evaluateType(a: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit typeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
      a match {
        // Literals:
        case a: PrimitiveLiteralExpressionElement => a.evaluateType(linkedValues)(typeEvaluator)
@@ -99,8 +99,8 @@ package object types {
        case a: StdoutElement.type => a.evaluateType(linkedValues)(typeEvaluator)
        case a: StderrElement.type => a.evaluateType(linkedValues)(typeEvaluator)
 
-        case other => s"Unable to process ${other.getClass.getSimpleName}: No evaluateType exists for that type.".invalidNel
+        case other =>
+          s"Unable to process ${other.getClass.getSimpleName}: No evaluateType exists for that type.".invalidNel
      }
-    }
   }
 }
diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/values/package.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/values/package.scala
index b8da81699e4..65487a16fd8 100644
--- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/values/package.scala
+++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/linking/expression/values/package.scala
@@ -23,23 +23,27 @@ package object values {
     override def evaluateValue(a: ExpressionElement,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
-
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
      a match {
        // Literals:
-        case a: PrimitiveLiteralExpressionElement => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
+        case a: PrimitiveLiteralExpressionElement =>
+          a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: StringLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
-        case a: StringExpression => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
+        case a: StringExpression =>
+          a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: ObjectLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: MapLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: ArrayLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: PairLiteral => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
 
        // Lookups and member accesses:
-        case a: IdentifierLookup => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
-        case a: ExpressionMemberAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
-        case a: IdentifierMemberAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
+        case a: IdentifierLookup =>
+          a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
+        case a: ExpressionMemberAccess =>
+          a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
+        case a: IdentifierMemberAccess =>
+          a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: IndexAccess => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
 
        // Unary operators:
@@ -53,9 +57,11 @@ package object values {
        case a: Equals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: NotEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: LessThan => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
-        case a: LessThanOrEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
+        case a: LessThanOrEquals =>
+          a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: GreaterThan => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
-        case a: GreaterThanOrEquals => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
+        case a: GreaterThanOrEquals =>
+          a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: Add => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: Subtract => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
        case a: Multiply => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator)
@@ -65,8 +71,10 @@
        case a:
TernaryIf => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator) // Engine functions: - case a: StdoutElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator) - case a: StderrElement.type => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator) + case a: StdoutElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator) + case a: StderrElement.type => + a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator) case a: ReadLines => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator) case a: ReadTsv => a.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)(valueEvaluator) @@ -107,6 +115,5 @@ package object values { case other => s"Unable to process ${other.toWdlV1}: No evaluateValue exists for that type in WDL 1.0".invalidNel } - } } } diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala index 6f732eda085..450f9f1d0c0 100644 --- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala +++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala @@ -16,7 +16,7 @@ object StringParser { def convert(a: FileStringParserInput): Checked[Ast] = Try { val parser = new WdlParser() val tokens = parser.lex(a.workflowSource, a.resource) - val terminalMap = (tokens.asScala.toVector map {(_, a.workflowSource)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, a.workflowSource) }).toMap val syntaxErrorFormatter = WdlDraft3SyntaxErrorFormatter(terminalMap) parser.parse(tokens, syntaxErrorFormatter).toAst.asInstanceOf[Ast] }.toChecked diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala index acba6e0fb6f..13ebf1f353a 100644 --- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala +++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala @@ -12,52 +12,56 @@ case class WdlDraft3SyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSour case classicTerminal => terminalMap.get(classicTerminal) } - private def line(t: Terminal): String = getTerminal(t).map(_.split("\n")(t.getLine - 1)).getOrElse(s"Cannot highlight line. It was probably in an imported file.") + private def line(t: Terminal): String = getTerminal(t) + .map(_.split("\n")(t.getLine - 1)) + .getOrElse(s"Cannot highlight line. It was probably in an imported file.") - def unexpectedEof(method: String, expected: java.util.List[TerminalIdentifier], nt_rules: java.util.List[String]): String = "ERROR: Unexpected end of file" + def unexpectedEof(method: String, + expected: java.util.List[TerminalIdentifier], + nt_rules: java.util.List[String] + ): String = "ERROR: Unexpected end of file" - def excessTokens(method: String, terminal: Terminal): String = { + def excessTokens(method: String, terminal: Terminal): String = s"""ERROR: Finished parsing without consuming all tokens. 
| - |${pointToSource(terminal)} + |${pointToSource(terminal)} """.stripMargin - } - def unexpectedSymbol(method: String, actual: Terminal, expected: java.util.List[TerminalIdentifier], rule: String): String = { + def unexpectedSymbol(method: String, + actual: Terminal, + expected: java.util.List[TerminalIdentifier], + rule: String + ): String = { val expectedTokens = expected.asScala.map(_.string).mkString(", ") s"""ERROR: Unexpected symbol (line ${actual.getLine}, col ${actual.getColumn}) when parsing '$method'. | - |Expected $expectedTokens, got "${actual.getSourceString}". + |Expected $expectedTokens, got "${actual.getSourceString}". | - |${pointToSource(actual)} + |${pointToSource(actual)} | - |$rule + |$rule """.stripMargin } - def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = { + def noMoreTokens(method: String, expecting: TerminalIdentifier, last: Terminal): String = s"""ERROR: No more tokens. Expecting ${expecting.string} | - |${pointToSource(last)} + |${pointToSource(last)} """.stripMargin - } - def invalidTerminal(method: String, invalid: Terminal): String = { + def invalidTerminal(method: String, invalid: Terminal): String = s"""ERROR: Invalid symbol ID: ${invalid.getId} (${invalid.getTerminalStr}) | - |${pointToSource(invalid)} + |${pointToSource(invalid)} """.stripMargin - } // TODO: these next two methods won't be called by the parser because there are no lists in the WDL grammar that // cause these to be triggered. Currently the parser is passing in 'null' for the value of 'last' and when that // changes, these errors can be made more helpful. - def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = { + def missingListItems(method: String, required: Int, found: Int, last: Terminal): String = s"ERROR: $method requires $required items, but only found $found" - } - def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = { + def missingTerminator(method: String, terminal: TerminalIdentifier, last: Terminal): String = s"ERROR: $method requires a terminator after each element" - } } diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/wdlom2wom/package.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/wdlom2wom/package.scala index e7fae573d4b..477e31e394e 100644 --- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/wdlom2wom/package.scala +++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/wdlom2wom/package.scala @@ -3,7 +3,12 @@ package wdl.draft3.transforms import common.transforms.CheckedAtoB import wdl.transforms.base.wdlom2wom.TaskDefinitionElementToWomTaskDefinition.TaskDefinitionElementToWomInputs import wdl.transforms.base.wdlom2wom.WorkflowDefinitionElementToWomWorkflowDefinition.WorkflowDefinitionConvertInputs -import wdl.transforms.base.wdlom2wom.{FileElementToWomBundle, FileElementToWomBundleInputs, TaskDefinitionElementToWomTaskDefinition, WorkflowDefinitionElementToWomWorkflowDefinition} +import wdl.transforms.base.wdlom2wom.{ + FileElementToWomBundle, + FileElementToWomBundleInputs, + TaskDefinitionElementToWomTaskDefinition, + WorkflowDefinitionElementToWomWorkflowDefinition +} import wom.callable.{CallableTaskDefinition, WorkflowDefinition} import wom.executable.WomBundle import wdl.draft3.transforms.linking.expression.consumed._ @@ -12,7 +17,11 @@ import wdl.draft3.transforms.linking.expression.types._ import wdl.draft3.transforms.linking.expression.values._ package object wdlom2wom { - val 
taskDefinitionElementToWomTaskDefinition: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] = CheckedAtoB.fromErrorOr(TaskDefinitionElementToWomTaskDefinition.convert) - val workflowDefinitionElementToWomWorkflowDefinition: CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition] = CheckedAtoB.fromErrorOr(WorkflowDefinitionElementToWomWorkflowDefinition.convert) - val fileElementToWomBundle: CheckedAtoB[FileElementToWomBundleInputs, WomBundle] = CheckedAtoB.fromCheck(FileElementToWomBundle.convert) + val taskDefinitionElementToWomTaskDefinition: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] = + CheckedAtoB.fromErrorOr(TaskDefinitionElementToWomTaskDefinition.convert) + val workflowDefinitionElementToWomWorkflowDefinition + : CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition] = + CheckedAtoB.fromErrorOr(WorkflowDefinitionElementToWomWorkflowDefinition.convert) + val fileElementToWomBundle: CheckedAtoB[FileElementToWomBundleInputs, WomBundle] = + CheckedAtoB.fromCheck(FileElementToWomBundle.convert) } diff --git a/wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala b/wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala index 99731de6aad..0e9cda8c2cf 100644 --- a/wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala @@ -2,27 +2,57 @@ import cats.data.Validated._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers._ import wdl.model.draft3.elements.ExpressionElement._ -import wdl.model.draft3.elements.{InputDeclarationElement, InputsSectionElement, IntermediateValueDeclarationElement, OutputDeclarationElement, OutputsSectionElement, PrimitiveTypeElement} +import wdl.model.draft3.elements.{ + InputDeclarationElement, + InputsSectionElement, + IntermediateValueDeclarationElement, + OutputDeclarationElement, + OutputsSectionElement, + PrimitiveTypeElement +} import wom.types.{WomSingleFileType, WomStringType} import wdl.transforms.base.ast2wdlom.AstToWorkflowDefinitionElement._ - -class AstToWorkflowDefinitionElementSpec extends AnyFlatSpec{ +class AstToWorkflowDefinitionElementSpec extends AnyFlatSpec { behavior of "Check Stdouts and Stderrs" - val mockInputSectionStdout = Option(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "i", Some(StdoutElement))))) - val mockInputSectionStderr = Option(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "i", Some(StderrElement))))) - val mockInputSectionNonStd = Option(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more")))))) - - - val mockIntermediatesStdout = Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "y", StdoutElement)) - val mockIntermediatesStderr = Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "y", StderrElement)) - val mockIntermediatesNonStd = Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "here", StringLiteral("here"))) + val mockInputSectionStdout = Option( + InputsSectionElement( + Vector(InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "i", Some(StdoutElement))) + ) + ) + val mockInputSectionStderr = Option( + InputsSectionElement( + Vector(InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), 
"i", Some(StderrElement))) + ) + ) + val mockInputSectionNonStd = Option( + InputsSectionElement( + Vector(InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more")))) + ) + ) - val mockOutputSectionStdout = Option(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "s", StdoutElement)))) - val mockOutputSectionStderr = Option(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "s", StderrElement)))) - val mockOutputSectionNonStd = Option(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", StringLiteral("more"))))) + val mockIntermediatesStdout = Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "y", StdoutElement) + ) + val mockIntermediatesStderr = Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "y", StderrElement) + ) + val mockIntermediatesNonStd = Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "here", StringLiteral("here")) + ) + val mockOutputSectionStdout = Option( + OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "s", StdoutElement))) + ) + val mockOutputSectionStderr = Option( + OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "s", StderrElement))) + ) + val mockOutputSectionNonStd = Option( + OutputsSectionElement( + Vector(OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", StringLiteral("more"))) + ) + ) it should "return an error when there is an stdout in input section" in { val testInputs = checkDisallowedInputElement(mockInputSectionStdout, StdoutElement, "stdout") @@ -75,14 +105,20 @@ class AstToWorkflowDefinitionElementSpec extends AnyFlatSpec{ val testIntermediates = checkDisallowedIntermediates(mockIntermediatesStdout, StdoutElement, "stdout") testIntermediates match { case Valid(_) => fail("Intermediate section contained stdout. Should have failed.") - case Invalid(e) => e.head should be("Workflow cannot have stdout expression at intermediate declaration section at workflow-level.") + case Invalid(e) => + e.head should be( + "Workflow cannot have stdout expression at intermediate declaration section at workflow-level." + ) } } it should "return an error when there is an stderr at intermediate declaration section" in { val testIntermediates = checkDisallowedIntermediates(mockIntermediatesStderr, StderrElement, "stderr") testIntermediates match { case Valid(_) => fail("Intermediate section contained stderr. Should have failed.") - case Invalid(e) => e.head should be("Workflow cannot have stderr expression at intermediate declaration section at workflow-level.") + case Invalid(e) => + e.head should be( + "Workflow cannot have stderr expression at intermediate declaration section at workflow-level." 
+ ) } } @@ -90,8 +126,9 @@ class AstToWorkflowDefinitionElementSpec extends AnyFlatSpec{ val testIntermediates = checkDisallowedIntermediates(mockIntermediatesNonStd, StdoutElement, "non-stdout/stderr") testIntermediates match { case Valid(_) => // No action - case Invalid(_) => fail("Check shouldn't have returned error as intermediate section had non-stdout/stderr intermediates.") + case Invalid(_) => + fail("Check shouldn't have returned error as intermediate section had non-stdout/stderr intermediates.") } } -} \ No newline at end of file +} diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala index 035d4e6efa6..7765cac73bc 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala @@ -23,13 +23,13 @@ class Ast2WdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val parser = new WdlParser() def fromString[A](expression: String, - parseFunction: (util.List[WdlParser.Terminal], SyntaxErrorFormatter) => ParseTree) - (implicit converter: CheckedAtoB[GenericAstNode, A]): Checked[A] = { + parseFunction: (util.List[WdlParser.Terminal], SyntaxErrorFormatter) => ParseTree + )(implicit converter: CheckedAtoB[GenericAstNode, A]): Checked[A] = { // Add the "version 1.0" to force the lexer into "main" mode. val versionedExpression = "version 1.0\n" + expression // That "version 1.0" means we'll have 2 unwanted tokens at the start of the list, so drop 'em: val tokens = parser.lex(versionedExpression, "string").asScala.drop(2).asJava - val terminalMap = (tokens.asScala.toVector map {(_, expression)}).toMap + val terminalMap = (tokens.asScala.toVector map { (_, expression) }).toMap val parseTree = parseFunction(tokens, WdlDraft3SyntaxErrorFormatter(terminalMap)) (wrapAstNode andThen converter).run(parseTree.toAst) } @@ -55,6 +55,6 @@ class Ast2WdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { it should "parse the (biscayne) None keyword as a plain old identifier" in { val str = "None" val expr = fromString[ExpressionElement](str, parser.parse_e) - expr shouldBeValid(IdentifierLookup("None")) + expr shouldBeValid (IdentifierLookup("None")) } } diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/ExpressionSet.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/ExpressionSet.scala index dd2a1cd13ab..ee3c178565b 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/ExpressionSet.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/ExpressionSet.scala @@ -10,7 +10,9 @@ object ExpressionSet { val floatLiteral = PrimitiveLiteralExpressionElement(WomFloat(5.5)) val booleanLiteral = PrimitiveLiteralExpressionElement(WomBoolean(true)) - val stringPlaceholderExpression = StringExpression(Vector(StringLiteral("h"), StringPlaceholder(IdentifierLookup("s")), StringLiteral("o"))) + val stringPlaceholderExpression = StringExpression( + Vector(StringLiteral("h"), StringPlaceholder(IdentifierLookup("s")), StringLiteral("o")) + ) val addExpression = Add(intLiteral, intLiteral) val subtractExpression = Subtract(intLiteral, intLiteral) @@ -65,16 +67,20 @@ object ExpressionSet { val chainPairAccess = ExpressionMemberAccess(pairExpression, NonEmptyList("a", List("b", "c", "d", "e", "f", "g"))) val 
arrayOfIs = ArrayLiteral(Vector(intLiteral, Multiply(intLiteral, intLiteral), UnaryNegation(intLiteral))) - val objectLiteralExpression = ObjectLiteral(Map( - "a" -> intLiteral, - "b" -> stringLiteral, - "c" -> pairExpression - )) - val mapLiteralExpression = MapLiteral(Map( - StringLiteral("a") -> intLiteral, - StringLiteral("b") -> addExpression, - StringLiteral("c") -> subtractExpression - )) + val objectLiteralExpression = ObjectLiteral( + Map( + "a" -> intLiteral, + "b" -> stringLiteral, + "c" -> pairExpression + ) + ) + val mapLiteralExpression = MapLiteral( + Map( + StringLiteral("a") -> intLiteral, + StringLiteral("b") -> addExpression, + StringLiteral("c") -> subtractExpression + ) + ) val ternaryIfExpression = TernaryIf(booleanLiteral, intLiteral, intLiteral) val zippedExpression = Zip(IdentifierLookup("is"), IdentifierLookup("is")) diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/WdlFileToWdlomSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/WdlFileToWdlomSpec.scala index 3485bdfa8a3..450e1bebd5d 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/WdlFileToWdlomSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/WdlFileToWdlomSpec.scala @@ -14,7 +14,6 @@ import wom.values.{WomBoolean, WomFloat, WomInteger} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "WDL File to WDLOM" @@ -27,12 +26,11 @@ class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } testCases.list.filter(x => x.isRegularFile && x.extension.contains(".wdl")) foreach { testCase => - val fileName = testCase.name val testName = testCase.name.split("\\.").head val itShouldString = s"create the correct Element structure for $fileName" - val testOrIgnore: (=>Any) => Unit = if (fileName.endsWith(".ignored.wdl")) { + val testOrIgnore: (=> Any) => Unit = if (fileName.endsWith(".ignored.wdl")) { (it should itShouldString).ignore _ } else { (it should itShouldString).in _ @@ -44,7 +42,8 @@ class WdlFileToWdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match fileToFileElement.run(testCase) match { case Right(actual) => actual shouldBe expected case Left(errors) => - val formattedErrors = errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) + val formattedErrors = + errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) fail(s"Failed to create WDLOM:$formattedErrors") } @@ -59,134 +58,210 @@ object WdlFileToWdlomSpec { FileElement( imports = List.empty, structs = Vector.empty, - workflows = List(WorkflowDefinitionElement( - name = "empty", - inputsSection = None, - graphElements = Set.empty, - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)))), - tasks = List.empty), + workflows = List( + WorkflowDefinitionElement( + name = "empty", + inputsSection = None, + graphElements = Set.empty, + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = List.empty + ), "struct_definition" -> FileElement( imports = Vector(), - structs = Vector(StructElement( - name = "FooStruct", - entries = Vector( - StructEntryElement( - "simple", - PrimitiveTypeElement(WomIntegerType)), - StructEntryElement( - 
"complex", - PairTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), MapTypeElement(PrimitiveTypeElement(WomStringType), PrimitiveTypeElement(WomBooleanType))) + structs = Vector( + StructElement( + name = "FooStruct", + entries = Vector( + StructEntryElement("simple", PrimitiveTypeElement(WomIntegerType)), + StructEntryElement( + "complex", + PairTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + MapTypeElement(PrimitiveTypeElement(WomStringType), PrimitiveTypeElement(WomBooleanType)) + ) + ) ) ) - )), - workflows = Vector(WorkflowDefinitionElement( - name = "struct_definition", - inputsSection = None, - graphElements = Set(), - outputsSection = Some(OutputsSectionElement(Vector(OutputDeclarationElement( - typeElement = TypeAliasElement("FooStruct"), - name = "myFoo", - expression = ObjectLiteral(Map( - "simple" -> PrimitiveLiteralExpressionElement(WomInteger(5)), - "complex" -> PairLiteral(ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(5)))), MapLiteral(Map(StringLiteral("t") -> PrimitiveLiteralExpressionElement(WomBoolean(true))))) - )) - )))), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(8)))), + ), + workflows = Vector( + WorkflowDefinitionElement( + name = "struct_definition", + inputsSection = None, + graphElements = Set(), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement( + typeElement = TypeAliasElement("FooStruct"), + name = "myFoo", + expression = ObjectLiteral( + Map( + "simple" -> PrimitiveLiteralExpressionElement(WomInteger(5)), + "complex" -> PairLiteral( + ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(5)))), + MapLiteral(Map(StringLiteral("t") -> PrimitiveLiteralExpressionElement(WomBoolean(true)))) + ) + ) + ) + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(8)) + ) + ), tasks = Vector.empty ), "input_types" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "input_types", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", None), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "s", None), - InputDeclarationElement(PrimitiveTypeElement(WomFloatType), "float", None), - InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), "b", None), - InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "file", None), - InputDeclarationElement(ObjectTypeElement, "o", None), - InputDeclarationElement(OptionalTypeElement(PrimitiveTypeElement(WomIntegerType)), "maybe_i", None), - InputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType)), "array_s", None), - InputDeclarationElement(MapTypeElement(PrimitiveTypeElement(WomIntegerType), PrimitiveTypeElement(WomStringType)), "map_is", None), - InputDeclarationElement( - ArrayTypeElement( - OptionalTypeElement( - PairTypeElement(PrimitiveTypeElement(WomStringType), PrimitiveTypeElement(WomIntegerType)))), - "lotsa_nesting_array", None) - ))), - graphElements = Set.empty, - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)))), - tasks = Vector.empty), + workflows = Vector( + WorkflowDefinitionElement( + name = "input_types", + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", None), + 
InputDeclarationElement(PrimitiveTypeElement(WomStringType), "s", None), + InputDeclarationElement(PrimitiveTypeElement(WomFloatType), "float", None), + InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), "b", None), + InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "file", None), + InputDeclarationElement(ObjectTypeElement, "o", None), + InputDeclarationElement(OptionalTypeElement(PrimitiveTypeElement(WomIntegerType)), "maybe_i", None), + InputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType)), "array_s", None), + InputDeclarationElement(MapTypeElement(PrimitiveTypeElement(WomIntegerType), + PrimitiveTypeElement(WomStringType) + ), + "map_is", + None + ), + InputDeclarationElement( + ArrayTypeElement( + OptionalTypeElement( + PairTypeElement(PrimitiveTypeElement(WomStringType), PrimitiveTypeElement(WomIntegerType)) + ) + ), + "lotsa_nesting_array", + None + ) + ) + ) + ), + graphElements = Set.empty, + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = Vector.empty + ), "input_values" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "input_values", - inputsSection = Some(InputsSectionElement( - inputDeclarations = Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", Some(intLiteral)), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "s", Some(stringLiteral)), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "placeholder", Some(stringPlaceholderExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "placeholder2", Some(stringPlaceholderExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomFloatType), "f", Some(floatLiteral)), - InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), "b", Some(booleanLiteral)) - ) - )), - graphElements = Set.empty, - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)))), - tasks = List.empty), + workflows = Vector( + WorkflowDefinitionElement( + name = "input_values", + inputsSection = Some( + InputsSectionElement( + inputDeclarations = Vector( + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", Some(intLiteral)), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "s", Some(stringLiteral)), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), + "placeholder", + Some(stringPlaceholderExpression) + ), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), + "placeholder2", + Some(stringPlaceholderExpression) + ), + InputDeclarationElement(PrimitiveTypeElement(WomFloatType), "f", Some(floatLiteral)), + InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), "b", Some(booleanLiteral)) + ) + ) + ), + graphElements = Set.empty, + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = List.empty + ), "input_expressions" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "input_expressions", - inputsSection = Some(InputsSectionElement( - inputDeclarations = Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "ten", Some(addExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "zero", Some(subtractExpression)), - 
InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "twentyfive", Some(multiplyExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "one", Some(divideExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "zeroagain", Some(remainderExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "tenagain", Some(tenVariableLookup)), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "pair_expression_member_access", Some(pairExpressionMemberAccess)), - InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), "unary_expressions", Some(unaryExpressions)), - InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), "comparisons", Some(comparisonExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "variableLookupMemberAccesses", Some(chainIdentifierAccess)), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "expressionMemberAccesses", Some(chainPairAccess)), - InputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "is", Some(arrayOfIs)), - InputDeclarationElement(ObjectTypeElement, "object_literal", Some(objectLiteralExpression)), - InputDeclarationElement( - MapTypeElement(PrimitiveTypeElement(WomStringType), PrimitiveTypeElement(WomIntegerType)), - "map_literal", - Some(mapLiteralExpression) - ), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "ternaryIf", Some(ternaryIfExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "string_read", Some(ReadString(StdoutElement))), - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "zipped", Some(zippedExpression)), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "subbed", Some(subbedExpression)) - ) - )), - graphElements = Set.empty, - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(4)))), - tasks = Vector.empty), + workflows = Vector( + WorkflowDefinitionElement( + name = "input_expressions", + inputsSection = Some( + InputsSectionElement( + inputDeclarations = Vector( + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "ten", Some(addExpression)), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "zero", Some(subtractExpression)), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "twentyfive", Some(multiplyExpression)), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "one", Some(divideExpression)), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "zeroagain", Some(remainderExpression)), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "tenagain", Some(tenVariableLookup)), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "pair_expression_member_access", + Some(pairExpressionMemberAccess) + ), + InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), + "unary_expressions", + Some(unaryExpressions) + ), + InputDeclarationElement(PrimitiveTypeElement(WomBooleanType), + "comparisons", + Some(comparisonExpression) + ), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "variableLookupMemberAccesses", + Some(chainIdentifierAccess) + ), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "expressionMemberAccesses", + Some(chainPairAccess) + ), + InputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "is", + Some(arrayOfIs) + ), + InputDeclarationElement(ObjectTypeElement, "object_literal", 
Some(objectLiteralExpression)), + InputDeclarationElement( + MapTypeElement(PrimitiveTypeElement(WomStringType), PrimitiveTypeElement(WomIntegerType)), + "map_literal", + Some(mapLiteralExpression) + ), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "ternaryIf", Some(ternaryIfExpression)), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), + "string_read", + Some(ReadString(StdoutElement)) + ), + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "zipped", Some(zippedExpression)), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "subbed", Some(subbedExpression)) + ) + ) + ), + graphElements = Set.empty, + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(4)) + ) + ), + tasks = Vector.empty + ), "passthrough_workflow" -> FileElement( imports = Vector(), @@ -194,35 +269,77 @@ object WdlFileToWdlomSpec { workflows = Vector( WorkflowDefinitionElement( name = "foo", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "x", None) - ))), - graphElements = Set(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "y", IdentifierLookup("x"))), - outputsSection = Some(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "z", IdentifierLookup("y"))))), + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "x", None) + ) + ) + ), + graphElements = Set( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "y", IdentifierLookup("x")) + ), + outputsSection = Some( + OutputsSectionElement( + Vector(OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "z", IdentifierLookup("y"))) + ) + ), metaSection = None, parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)))), - tasks = Vector()), + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = Vector() + ), "scatter_var_member_access" -> FileElement( imports = Vector.empty, structs = Vector.empty, workflows = Vector( - WorkflowDefinitionElement("scatter_var_member_access", None, - Set(IntermediateValueDeclarationElement( - ArrayTypeElement(PairTypeElement(PrimitiveTypeElement(WomIntegerType), PrimitiveTypeElement(WomIntegerType))), - "pairs", - ArrayLiteral(Vector( - PairLiteral(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(2))), - PairLiteral(PrimitiveLiteralExpressionElement(WomInteger(3)), PrimitiveLiteralExpressionElement(WomInteger(4))), - PairLiteral(PrimitiveLiteralExpressionElement(WomInteger(5)), PrimitiveLiteralExpressionElement(WomInteger(6)))))), - ScatterElement("ScatterAt5_12", IdentifierLookup("pairs"), "p", - Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "x", IdentifierMemberAccess("p", "left", Vector()))), None)), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)))), - tasks = Vector.empty), + WorkflowDefinitionElement( + "scatter_var_member_access", + None, + Set( + IntermediateValueDeclarationElement( + ArrayTypeElement( + PairTypeElement(PrimitiveTypeElement(WomIntegerType), PrimitiveTypeElement(WomIntegerType)) + ), + "pairs", + ArrayLiteral( + Vector( + PairLiteral(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(2)) + ), + 
PairLiteral(PrimitiveLiteralExpressionElement(WomInteger(3)), + PrimitiveLiteralExpressionElement(WomInteger(4)) + ), + PairLiteral(PrimitiveLiteralExpressionElement(WomInteger(5)), + PrimitiveLiteralExpressionElement(WomInteger(6)) + ) + ) + ) + ), + ScatterElement( + "ScatterAt5_12", + IdentifierLookup("pairs"), + "p", + Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "x", + IdentifierMemberAccess("p", "left", Vector()) + ) + ), + None + ) + ), + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = Vector.empty + ), "nested_conditionals" -> FileElement( imports = Vector.empty, @@ -232,12 +349,34 @@ object WdlFileToWdlomSpec { "Test", None, Set( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "a", PrimitiveLiteralExpressionElement(WomInteger(5))), - IfElement(PrimitiveLiteralExpressionElement(WomBoolean(true)), Vector( - IfElement(PrimitiveLiteralExpressionElement(WomBoolean(true)), Vector( - IfElement(PrimitiveLiteralExpressionElement(WomBoolean(true)), Vector( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "b", PrimitiveLiteralExpressionElement(WomInteger(5))))))))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "c", SelectFirst(ArrayLiteral(Vector(IdentifierLookup("a"), IdentifierLookup("b"))))) + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "a", + PrimitiveLiteralExpressionElement(WomInteger(5)) + ), + IfElement( + PrimitiveLiteralExpressionElement(WomBoolean(true)), + Vector( + IfElement( + PrimitiveLiteralExpressionElement(WomBoolean(true)), + Vector( + IfElement( + PrimitiveLiteralExpressionElement(WomBoolean(true)), + Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "b", + PrimitiveLiteralExpressionElement(WomInteger(5)) + ) + ) + ) + ) + ) + ) + ), + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomIntegerType), + "c", + SelectFirst(ArrayLiteral(Vector(IdentifierLookup("a"), IdentifierLookup("b")))) + ) ), None, None, @@ -260,81 +399,165 @@ object WdlFileToWdlomSpec { IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "a", intLiteral), IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "x", IdentifierLookup("a")) ), - outputsSection = Some(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "z", IdentifierLookup("y"))))), + outputsSection = Some( + OutputsSectionElement( + Vector(OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "z", IdentifierLookup("y"))) + ) + ), metaSection = None, parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(5)))), - tasks = Vector()), + sourceLocation = Some(SourceFileLocation(5)) + ) + ), + tasks = Vector() + ), "simple_first_test" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "order", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "n", Some(PrimitiveLiteralExpressionElement(WomInteger(4)))), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more")))))), - graphElements = Set(CallElement("in_n_out", None, Vector.empty, Some(CallBodyElement(Vector(KvPair("total", IdentifierLookup("n")), KvPair("amount", IdentifierLookup("more"))))), 
Some(SourceFileLocation(19)))), - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "out", IdentifierMemberAccess("in_n_out", "out", List.empty))))), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(14)))), - tasks = Vector(TaskDefinitionElement( - name = "in_n_out", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "total", None), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "amount", None)))), - declarations = Vector.empty, - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "out", Add(ReadInt(StdoutElement), PrimitiveLiteralExpressionElement(WomInteger(1))))))), - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(IdentifierLookup("total"), PlaceholderAttributeSet.empty), - StringCommandPartElement(" ") - )))), - runtimeSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)) - ))), + workflows = Vector( + WorkflowDefinitionElement( + name = "order", + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "n", + Some(PrimitiveLiteralExpressionElement(WomInteger(4))) + ), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more"))) + ) + ) + ), + graphElements = Set( + CallElement( + "in_n_out", + None, + Vector.empty, + Some( + CallBodyElement( + Vector(KvPair("total", IdentifierLookup("n")), KvPair("amount", IdentifierLookup("more"))) + ) + ), + Some(SourceFileLocation(19)) + ) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "out", + IdentifierMemberAccess("in_n_out", "out", List.empty) + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(14)) + ) + ), + tasks = Vector( + TaskDefinitionElement( + name = "in_n_out", + inputsSection = Some( + InputsSectionElement( + Vector(InputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "total", None), + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "amount", None) + ) + ) + ), + declarations = Vector.empty, + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "out", + Add(ReadInt(StdoutElement), PrimitiveLiteralExpressionElement(WomInteger(1))) + ) + ) + ) + ), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo "), + PlaceholderCommandPartElement(IdentifierLookup("total"), PlaceholderAttributeSet.empty), + StringCommandPartElement(" ") + ) + ) + ) + ), + runtimeSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ) + ), "static_value_workflow" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "foo", - inputsSection = None, - graphElements = Set.empty, - outputsSection = Some(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "y", PrimitiveLiteralExpressionElement(WomInteger(3)))))), - metaSection = None, - parameterMetaSection = None, - sourceLocation = 
Some(SourceFileLocation(3)))), - tasks = Vector.empty), + workflows = Vector( + WorkflowDefinitionElement( + name = "foo", + inputsSection = None, + graphElements = Set.empty, + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "y", + PrimitiveLiteralExpressionElement(WomInteger(3)) + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), + tasks = Vector.empty + ), "standalone_task" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement("standalone_task", None, Set.empty, - None, None, None, - Some(SourceFileLocation(3)))), + workflows = Vector( + WorkflowDefinitionElement("standalone_task", None, Set.empty, None, None, None, Some(SourceFileLocation(3))) + ), tasks = Vector( TaskDefinitionElement( name = "standalone", - inputsSection = Some(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomStringType), "bar", None)))), + inputsSection = Some( + InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomStringType), "bar", None))) + ), declarations = Vector.empty, - outputsSection = Some(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "out", IdentifierLookup("bar"))))), - commandSection = CommandSectionElement(Vector( - CommandSectionLine(Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(IdentifierLookup("bar"), PlaceholderAttributeSet.empty) - )) - )), - runtimeSection = Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("someFakeDockerRuntime"))))), + outputsSection = Some( + OutputsSectionElement( + Vector(OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "out", IdentifierLookup("bar"))) + ) + ), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo "), + PlaceholderCommandPartElement(IdentifierLookup("bar"), PlaceholderAttributeSet.empty) + ) + ) + ) + ), + runtimeSection = + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("someFakeDockerRuntime"))))), metaSection = None, parameterMetaSection = None, sourceLocation = Some(SourceFileLocation(5)) - )) + ) + ) ), "task_with_metas" -> FileElement( @@ -347,32 +570,50 @@ object WdlFileToWdlomSpec { inputsSection = Some(InputsSectionElement(Vector.empty)), declarations = Vector.empty, outputsSection = Some(OutputsSectionElement(Vector.empty)), - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector( - StringCommandPartElement("echo Hello World ") - )))), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo Hello World ") + ) + ) + ) + ), runtimeSection = None, - metaSection = Some(MetaSectionElement( - Map("author" -> MetaValueElementString("John Doe"), - "email" -> MetaValueElementString("john.doe@yahoo.com")) - )), - parameterMetaSection = Some(ParameterMetaSectionElement( - Map("a" -> MetaValueElementString("just an integer"), - "b" -> MetaValueElementString("an important parameter"), - "x" -> MetaValueElementArray(Vector(MetaValueElementString("A"), - MetaValueElementString("B"), - MetaValueElementString("C"))), - "y" -> MetaValueElementArray(Vector(MetaValueElementInteger(1), - MetaValueElementInteger(2), - MetaValueElementInteger(3))), - "yf" -> MetaValueElementArray(Vector(MetaValueElementFloat(1.1), - 
MetaValueElementFloat(2.9), - MetaValueElementFloat(3.14))), - "z" -> MetaValueElementObject(Map("k1" -> MetaValueElementInteger(1), - "k2" -> MetaValueElementInteger(2), - "k3" -> MetaValueElementInteger(3))) - ))), + metaSection = Some( + MetaSectionElement( + Map("author" -> MetaValueElementString("John Doe"), + "email" -> MetaValueElementString("john.doe@yahoo.com") + ) + ) + ), + parameterMetaSection = Some( + ParameterMetaSectionElement( + Map( + "a" -> MetaValueElementString("just an integer"), + "b" -> MetaValueElementString("an important parameter"), + "x" -> MetaValueElementArray( + Vector(MetaValueElementString("A"), MetaValueElementString("B"), MetaValueElementString("C")) + ), + "y" -> MetaValueElementArray( + Vector(MetaValueElementInteger(1), MetaValueElementInteger(2), MetaValueElementInteger(3)) + ), + "yf" -> MetaValueElementArray( + Vector(MetaValueElementFloat(1.1), MetaValueElementFloat(2.9), MetaValueElementFloat(3.14)) + ), + "z" -> MetaValueElementObject( + Map("k1" -> MetaValueElementInteger(1), + "k2" -> MetaValueElementInteger(2), + "k3" -> MetaValueElementInteger(3) + ) + ) + ) + ) + ), sourceLocation = Some(SourceFileLocation(3)) - ))), + ) + ) + ), "task_with_metas2" -> FileElement( imports = Vector.empty, @@ -386,48 +627,58 @@ object WdlFileToWdlomSpec { outputsSection = Some(OutputsSectionElement(Vector.empty)), commandSection = CommandSectionElement(List.empty), runtimeSection = None, - metaSection = Some(MetaSectionElement( - Map("author" -> MetaValueElementString("John Doe"), + metaSection = Some( + MetaSectionElement( + Map( + "author" -> MetaValueElementString("John Doe"), "email" -> MetaValueElementString("john.doe@yahoo.com"), "b" -> MetaValueElementBoolean(true), "zipcode" -> MetaValueElementInteger(94043), "f" -> MetaValueElementFloat(1.3), - "numbers" -> MetaValueElementArray(Vector(MetaValueElementInteger(1), - MetaValueElementInteger(2), - MetaValueElementInteger(3))), + "numbers" -> MetaValueElementArray( + Vector(MetaValueElementInteger(1), MetaValueElementInteger(2), MetaValueElementInteger(3)) + ), "extras" -> MetaValueElementObject( - Map( "house" -> MetaValueElementString("With porch"), - "cat" -> MetaValueElementString("Lucy"))) - ))), + Map("house" -> MetaValueElementString("With porch"), "cat" -> MetaValueElementString("Lucy")) + ) + ) + ) + ), parameterMetaSection = None, sourceLocation = Some(SourceFileLocation(3)) - ))), + ) + ) + ), "no_input_no_output_workflow" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "no_input_no_output", - inputsSection = None, - graphElements = Set( - CallElement("no_inputs", Some("noi1"), Vector.empty, None, Some(SourceFileLocation(4))), - CallElement("no_inputs", None, Vector.empty, None, Some(SourceFileLocation(6))), - CallElement("no_inputs", Some("noi2"), Vector.empty, None, Some(SourceFileLocation(8))), - CallElement("no_inputs", Some("noi3"), Vector.empty, None, Some(SourceFileLocation(10))), - CallElement("no_inputs", Some("noi4"), Vector.empty, None, Some(SourceFileLocation(14))), - CallElement("no_inputs", Some("noi5"), Vector.empty, None, Some(SourceFileLocation(18))) - ), - outputsSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)))), + workflows = Vector( + WorkflowDefinitionElement( + name = "no_input_no_output", + inputsSection = None, + graphElements = Set( + CallElement("no_inputs", Some("noi1"), Vector.empty, None, Some(SourceFileLocation(4))), + 
CallElement("no_inputs", None, Vector.empty, None, Some(SourceFileLocation(6))), + CallElement("no_inputs", Some("noi2"), Vector.empty, None, Some(SourceFileLocation(8))), + CallElement("no_inputs", Some("noi3"), Vector.empty, None, Some(SourceFileLocation(10))), + CallElement("no_inputs", Some("noi4"), Vector.empty, None, Some(SourceFileLocation(14))), + CallElement("no_inputs", Some("noi5"), Vector.empty, None, Some(SourceFileLocation(18))) + ), + outputsSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), tasks = Vector( TaskDefinitionElement( name = "no_inputs", inputsSection = None, declarations = Vector.empty, outputsSection = None, - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), + commandSection = + CommandSectionElement(Vector(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), runtimeSection = None, metaSection = None, parameterMetaSection = None, @@ -441,297 +692,464 @@ object WdlFileToWdlomSpec { structs = Vector( StructElement( name = "A", - entries = Vector( - StructEntryElement("i", PrimitiveTypeElement(WomIntegerType)), - StructEntryElement("f", PrimitiveTypeElement(WomFloatType))) + entries = Vector(StructEntryElement("i", PrimitiveTypeElement(WomIntegerType)), + StructEntryElement("f", PrimitiveTypeElement(WomFloatType)) + ) ), StructElement( name = "B", entries = Vector( StructEntryElement("a", TypeAliasElement("A")), StructEntryElement("i", PrimitiveTypeElement(WomIntegerType)), - StructEntryElement("f", PrimitiveTypeElement(WomFloatType))) - )), - workflows = Vector(WorkflowDefinitionElement( - name = "nested_struct", - inputsSection = None, - graphElements = Set( - IntermediateValueDeclarationElement( - TypeAliasElement("B"), - "b", - ObjectLiteral(Map( - "a" -> ObjectLiteral(Map( - "i" -> PrimitiveLiteralExpressionElement(WomInteger(5)), - "f" -> PrimitiveLiteralExpressionElement(WomFloat(5.5)))), - "i" -> PrimitiveLiteralExpressionElement(WomInteger(6)), - "f" -> PrimitiveLiteralExpressionElement(WomFloat(6.6)))))), - outputsSection = Some(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomFloatType), "f", IdentifierMemberAccess("b", "a", Vector("f")))))), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(14))) + StructEntryElement("f", PrimitiveTypeElement(WomFloatType)) + ) + ) + ), + workflows = Vector( + WorkflowDefinitionElement( + name = "nested_struct", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement( + TypeAliasElement("B"), + "b", + ObjectLiteral( + Map( + "a" -> ObjectLiteral( + Map("i" -> PrimitiveLiteralExpressionElement(WomInteger(5)), + "f" -> PrimitiveLiteralExpressionElement(WomFloat(5.5)) + ) + ), + "i" -> PrimitiveLiteralExpressionElement(WomInteger(6)), + "f" -> PrimitiveLiteralExpressionElement(WomFloat(6.6)) + ) + ) + ) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomFloatType), + "f", + IdentifierMemberAccess("b", "a", Vector("f")) + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(14)) + ) ), tasks = Vector() ), "simple_scatter" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "simple_scatter", - inputsSection = None, - graphElements = Set( - 
IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "indices", ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(2)), PrimitiveLiteralExpressionElement(WomInteger(3))))), - ScatterElement( - scatterName = "ScatterAt6_11", - scatterExpression = IdentifierLookup("indices"), - scatterVariableName = "i", - graphElements = Vector( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "j", Add(IdentifierLookup("i"), PrimitiveLiteralExpressionElement(WomInteger(10)))) - ), - sourceLocation = None - ) - ), - outputsSection = Some( - OutputsSectionElement(Vector(OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "js", IdentifierLookup("j")))) - ), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3))) + workflows = Vector( + WorkflowDefinitionElement( + name = "simple_scatter", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement( + ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "indices", + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(2)), + PrimitiveLiteralExpressionElement(WomInteger(3)) + ) + ) + ), + ScatterElement( + scatterName = "ScatterAt6_11", + scatterExpression = IdentifierLookup("indices"), + scatterVariableName = "i", + graphElements = Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "j", + Add(IdentifierLookup("i"), + PrimitiveLiteralExpressionElement(WomInteger(10)) + ) + ) + ), + sourceLocation = None + ) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "js", + IdentifierLookup("j") + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) ), tasks = Vector.empty ), "ogin_scatter" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "ogin_scatter", - inputsSection = None, - graphElements = Set( - IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "indices", ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(2)), PrimitiveLiteralExpressionElement(WomInteger(3))))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "ogin_me", PrimitiveLiteralExpressionElement(WomInteger(10))), - ScatterElement( - scatterName = "ScatterAt8_11", - scatterExpression = IdentifierLookup("indices"), - scatterVariableName = "i", - graphElements = Vector( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "j", Add(IdentifierLookup("i"), IdentifierLookup("ogin_me"))) - ), - sourceLocation = None - ) - ), - outputsSection = Some( - OutputsSectionElement(Vector(OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "js", IdentifierLookup("j")))) - ), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3))) + workflows = Vector( + WorkflowDefinitionElement( + name = "ogin_scatter", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement( + ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "indices", + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + 
PrimitiveLiteralExpressionElement(WomInteger(2)), + PrimitiveLiteralExpressionElement(WomInteger(3)) + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "ogin_me", + PrimitiveLiteralExpressionElement(WomInteger(10)) + ), + ScatterElement( + scatterName = "ScatterAt8_11", + scatterExpression = IdentifierLookup("indices"), + scatterVariableName = "i", + graphElements = Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "j", + Add(IdentifierLookup("i"), IdentifierLookup("ogin_me")) + ) + ), + sourceLocation = None + ) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "js", + IdentifierLookup("j") + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) ), tasks = Vector.empty ), "nested_scatter" -> FileElement( Vector(), Vector(), - Vector(WorkflowDefinitionElement( - "nested_scatter", - None, - Set( - IntermediateValueDeclarationElement( - ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), - "indices", - ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(2)), PrimitiveLiteralExpressionElement(WomInteger(3)))) - ), - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomIntegerType), - "y", - PrimitiveLiteralExpressionElement(WomInteger(55)) - ), - ScatterElement( - scatterName = "ScatterAt8_11", - IdentifierLookup("indices"), - "a", - Vector( - ScatterElement( - scatterName = "ScatterAt9_13", - IdentifierLookup("indices"), "b", - Vector( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "x", Add(IdentifierLookup("a"), IdentifierLookup("b"))), - ScatterElement( - scatterName = "ScatterAt11_15", - IdentifierLookup("indices"), - "c", - Vector( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "j", Add(Add(Add(IdentifierLookup("a"), IdentifierLookup("b")), IdentifierLookup("c")), IdentifierLookup("x")))), - None), - ScatterElement( - scatterName = "ScatterAt14_15", - IdentifierLookup("j"), - "d", - Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "k", Add(IdentifierLookup("d"), IdentifierLookup("y")))), - None - ) - ), - sourceLocation = None + Vector( + WorkflowDefinitionElement( + "nested_scatter", + None, + Set( + IntermediateValueDeclarationElement( + ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "indices", + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(2)), + PrimitiveLiteralExpressionElement(WomInteger(3)) + ) ) ), - None - ) - ), - Some( - OutputsSectionElement(Vector(OutputDeclarationElement(ArrayTypeElement(ArrayTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)))), "ks", IdentifierLookup("k")))) - ), - None, - None, - Some(SourceFileLocation(3))) + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomIntegerType), + "y", + PrimitiveLiteralExpressionElement(WomInteger(55)) + ), + ScatterElement( + scatterName = "ScatterAt8_11", + IdentifierLookup("indices"), + "a", + Vector( + ScatterElement( + scatterName = "ScatterAt9_13", + IdentifierLookup("indices"), + "b", + Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "x", + Add(IdentifierLookup("a"), IdentifierLookup("b")) + ), + ScatterElement( + scatterName = "ScatterAt11_15", + 
IdentifierLookup("indices"), + "c", + Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "j", + Add(Add(Add(IdentifierLookup("a"), IdentifierLookup("b")), + IdentifierLookup("c") + ), + IdentifierLookup("x") + ) + ) + ), + None + ), + ScatterElement( + scatterName = "ScatterAt14_15", + IdentifierLookup("j"), + "d", + Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "k", + Add(IdentifierLookup("d"), IdentifierLookup("y")) + ) + ), + None + ) + ), + sourceLocation = None + ) + ), + None + ) + ), + Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement( + ArrayTypeElement(ArrayTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)))), + "ks", + IdentifierLookup("k") + ) + ) + ) + ), + None, + None, + Some(SourceFileLocation(3)) + ) ), Vector() ), "two_level_scatter" -> FileElement( - Vector(), - Vector(), - Vector(WorkflowDefinitionElement( - "two_level_scatter", - None, - Set( - IntermediateValueDeclarationElement( - ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), - "indices", - ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(2)), PrimitiveLiteralExpressionElement(WomInteger(3)))) - ), - ScatterElement( - scatterName = "ScatterAt8_11", - IdentifierLookup("indices"), - "a", - Vector( - ScatterElement( - scatterName = "ScatterAt9_13", - IdentifierLookup("indices"), "b", - Vector( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "x", Add(IdentifierLookup("a"), IdentifierLookup("b")))), - sourceLocation = None + Vector(), + Vector(), + Vector( + WorkflowDefinitionElement( + "two_level_scatter", + None, + Set( + IntermediateValueDeclarationElement( + ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "indices", + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(2)), + PrimitiveLiteralExpressionElement(WomInteger(3)) + ) ) ), - None - ) - ), - None, - None, - None, - Some(SourceFileLocation(3))) + ScatterElement( + scatterName = "ScatterAt8_11", + IdentifierLookup("indices"), + "a", + Vector( + ScatterElement( + scatterName = "ScatterAt9_13", + IdentifierLookup("indices"), + "b", + Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "x", + Add(IdentifierLookup("a"), IdentifierLookup("b")) + ) + ), + sourceLocation = None + ) + ), + None + ) + ), + None, + None, + None, + Some(SourceFileLocation(3)) + ) ), Vector() ), "simple_conditional" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "simple_conditional", - inputsSection = None, - graphElements = Set( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), "bool", PrimitiveLiteralExpressionElement(WomBoolean(true))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", PrimitiveLiteralExpressionElement(WomInteger(5))), - IfElement( - conditionExpression = IdentifierLookup("bool"), - graphElements = Vector( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), "j", Add(IdentifierLookup("i"), PrimitiveLiteralExpressionElement(WomInteger(10)))) + workflows = Vector( + WorkflowDefinitionElement( + name = "simple_conditional", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), + "bool", + 
PrimitiveLiteralExpressionElement(WomBoolean(true)) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "i", + PrimitiveLiteralExpressionElement(WomInteger(5)) + ), + IfElement( + conditionExpression = IdentifierLookup("bool"), + graphElements = Vector( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "j", + Add(IdentifierLookup("i"), + PrimitiveLiteralExpressionElement(WomInteger(10)) + ) + ) + ) ) - ) - ), - outputsSection = Some( - OutputsSectionElement(Vector(OutputDeclarationElement(OptionalTypeElement(PrimitiveTypeElement(WomIntegerType)), "j_maybe", IdentifierLookup("j")))) - ), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(5))) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(OptionalTypeElement(PrimitiveTypeElement(WomIntegerType)), + "j_maybe", + IdentifierLookup("j") + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(5)) + ) ), tasks = Vector.empty ), "lots_of_nesting" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "lots_of_nesting", - inputsSection = None, - graphElements = Set( - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), "b0", PrimitiveLiteralExpressionElement(WomBoolean(true))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), "b1", PrimitiveLiteralExpressionElement(WomBoolean(true))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), "b2", PrimitiveLiteralExpressionElement(WomBoolean(true))), - IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "i0s", Range(PrimitiveLiteralExpressionElement(WomInteger(2)))), - IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "i1s", Range(PrimitiveLiteralExpressionElement(WomInteger(2)))), - IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "i2s", Range(PrimitiveLiteralExpressionElement(WomInteger(2)))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "s0", StringLiteral("hello")), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "s1", StringLiteral("world")), - IfElement( - conditionExpression = IdentifierLookup("b0"), - graphElements = Vector( - ScatterElement( - scatterName = "ScatterAt17_13", - scatterExpression = IdentifierLookup("i0s"), - scatterVariableName = "i0", - graphElements = Vector( - IfElement( - conditionExpression = IdentifierLookup("b1"), - graphElements = Vector( - ScatterElement( - scatterName = "ScatterAt19_17", - scatterExpression = IdentifierLookup("i1s"), - scatterVariableName = "i1", - graphElements = Vector( - IfElement( - conditionExpression = IdentifierLookup("b2"), - graphElements = Vector( - ScatterElement( - "ScatterAt21_21", - IdentifierLookup("i2s"), - "i2", - Vector( - IntermediateValueDeclarationElement( - PrimitiveTypeElement(WomStringType), - "s", - Add(IdentifierLookup("s0"), IdentifierLookup("s1")) - ) - ), - None + workflows = Vector( + WorkflowDefinitionElement( + name = "lots_of_nesting", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), + "b0", + PrimitiveLiteralExpressionElement(WomBoolean(true)) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), + "b1", 
+ PrimitiveLiteralExpressionElement(WomBoolean(true)) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomBooleanType), + "b2", + PrimitiveLiteralExpressionElement(WomBoolean(true)) + ), + IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "i0s", + Range(PrimitiveLiteralExpressionElement(WomInteger(2))) + ), + IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "i1s", + Range(PrimitiveLiteralExpressionElement(WomInteger(2))) + ), + IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "i2s", + Range(PrimitiveLiteralExpressionElement(WomInteger(2))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "s0", StringLiteral("hello")), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "s1", StringLiteral("world")), + IfElement( + conditionExpression = IdentifierLookup("b0"), + graphElements = Vector( + ScatterElement( + scatterName = "ScatterAt17_13", + scatterExpression = IdentifierLookup("i0s"), + scatterVariableName = "i0", + graphElements = Vector( + IfElement( + conditionExpression = IdentifierLookup("b1"), + graphElements = Vector( + ScatterElement( + scatterName = "ScatterAt19_17", + scatterExpression = IdentifierLookup("i1s"), + scatterVariableName = "i1", + graphElements = Vector( + IfElement( + conditionExpression = IdentifierLookup("b2"), + graphElements = Vector( + ScatterElement( + "ScatterAt21_21", + IdentifierLookup("i2s"), + "i2", + Vector( + IntermediateValueDeclarationElement( + PrimitiveTypeElement(WomStringType), + "s", + Add(IdentifierLookup("s0"), IdentifierLookup("s1")) + ) + ), + None + ) ) ) - ) - ), - sourceLocation = None + ), + sourceLocation = None + ) ) ) - ) - ), - sourceLocation = None + ), + sourceLocation = None + ) ) ) - ) - ), - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement( - typeElement = OptionalTypeElement(ArrayTypeElement(OptionalTypeElement(ArrayTypeElement(OptionalTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType))))))), - name = "s_out", - expression = IdentifierLookup("s"))))), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3))) + ), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement( + typeElement = OptionalTypeElement( + ArrayTypeElement( + OptionalTypeElement( + ArrayTypeElement(OptionalTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType)))) + ) + ) + ), + name = "s_out", + expression = IdentifierLookup("s") + ) + ) + ) + ), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) ), tasks = Vector.empty ), "simple_task" -> FileElement( - imports = Vector.empty, - structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement("simple_task", None, Set.empty, - None, None, None, - Some(SourceFileLocation(3)))), - tasks = Vector( - TaskDefinitionElement( - name = "simple", - inputsSection = None, - declarations = Vector.empty, - outputsSection = None, - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), - runtimeSection = None, - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(5)))) + imports = Vector.empty, + structs = Vector.empty, + workflows = Vector( + WorkflowDefinitionElement("simple_task", None, Set.empty, None, None, None, 
Some(SourceFileLocation(3))) + ), + tasks = Vector( + TaskDefinitionElement( + name = "simple", + inputsSection = None, + declarations = Vector.empty, + outputsSection = None, + commandSection = + CommandSectionElement(Vector(CommandSectionLine(Vector(StringCommandPartElement("echo Hello World "))))), + runtimeSection = None, + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(5)) + ) + ) ), "default_input_overrides" -> null, "nio_file" -> FileElement( @@ -741,80 +1159,199 @@ object WdlFileToWdlomSpec { tasks = Vector( TaskDefinitionElement( name = "nio_file", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "f", None), - InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "g", Some(IdentifierLookup("f"))), - InputDeclarationElement(OptionalTypeElement(PrimitiveTypeElement(WomSingleFileType)), "h", None) - ))), + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "f", None), + InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "g", Some(IdentifierLookup("f"))), + InputDeclarationElement(OptionalTypeElement(PrimitiveTypeElement(WomSingleFileType)), "h", None) + ) + ) + ), declarations = Vector.empty, - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "i", PrimitiveLiteralExpressionElement(WomInteger(5))) - ))), - commandSection = CommandSectionElement(Vector(CommandSectionLine(Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(IdentifierLookup("f"), PlaceholderAttributeSet(None,None,None,None)), - StringCommandPartElement(" | cut -c 1-5") - )))), - runtimeSection = Some(RuntimeAttributesSectionElement(Vector(KvPair("docker",StringLiteral("ubuntu:latest"))))), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "i", + PrimitiveLiteralExpressionElement(WomInteger(5)) + ) + ) + ) + ), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo "), + PlaceholderCommandPartElement(IdentifierLookup("f"), PlaceholderAttributeSet(None, None, None, None)), + StringCommandPartElement(" | cut -c 1-5") + ) + ) + ) + ), + runtimeSection = + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), metaSection = None, - parameterMetaSection = Some(ParameterMetaSectionElement(Map( - "f" -> MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))), - "g" -> MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))), - "h" -> MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))) - ))), + parameterMetaSection = Some( + ParameterMetaSectionElement( + Map( + "f" -> MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))), + "g" -> MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))), + "h" -> MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))) + ) + ) + ), sourceLocation = Some(SourceFileLocation(3)) ) ) - ), + ), "taskless_engine_functions" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - name = "taskless_engine_functions", - inputsSection = None, - graphElements = Set( - 
IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "ints", ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(2))))), - IntermediateValueDeclarationElement(NonEmptyTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType))), "definitelyInts", ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(2))))), - IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType)), "strings", ArrayLiteral(Vector(StringLiteral("a"), StringLiteral("b")))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "filepath", StringLiteral("gs://not/a/real/file.txt")), - IntermediateValueDeclarationElement(ArrayTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType))), "matrix", - ArrayLiteral(Vector( - ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(0)))), - ArrayLiteral(Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), PrimitiveLiteralExpressionElement(WomInteger(0)))) - )) + workflows = Vector( + WorkflowDefinitionElement( + name = "taskless_engine_functions", + inputsSection = None, + graphElements = Set( + IntermediateValueDeclarationElement( + ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "ints", + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(2)) + ) + ) + ), + IntermediateValueDeclarationElement( + NonEmptyTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType))), + "definitelyInts", + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(2)) + ) + ) + ), + IntermediateValueDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType)), + "strings", + ArrayLiteral(Vector(StringLiteral("a"), StringLiteral("b"))) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), + "filepath", + StringLiteral("gs://not/a/real/file.txt") + ), + IntermediateValueDeclarationElement( + ArrayTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType))), + "matrix", + ArrayLiteral( + Vector( + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(0)) + ) + ), + ArrayLiteral( + Vector(PrimitiveLiteralExpressionElement(WomInteger(1)), + PrimitiveLiteralExpressionElement(WomInteger(0)) + ) + ) + ) + ) + ), + IntermediateValueDeclarationElement( + ArrayTypeElement( + MapTypeElement(PrimitiveTypeElement(WomIntegerType), PrimitiveTypeElement(WomStringType)) + ), + "list_of_maps", + ArrayLiteral( + Vector( + MapLiteral( + Map(PrimitiveLiteralExpressionElement(WomInteger(1)) -> StringLiteral("one"), + PrimitiveLiteralExpressionElement(WomInteger(2)) -> StringLiteral("two") + ) + ), + MapLiteral( + Map( + PrimitiveLiteralExpressionElement(WomInteger(11)) -> StringLiteral("eleven"), + PrimitiveLiteralExpressionElement(WomInteger(22)) -> StringLiteral("twenty-two") + ) + ) + ) + ) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomFloatType), + "f", + PrimitiveLiteralExpressionElement(WomFloat(1.024)) + ) ), - IntermediateValueDeclarationElement(ArrayTypeElement(MapTypeElement(PrimitiveTypeElement(WomIntegerType), PrimitiveTypeElement(WomStringType))), "list_of_maps", - ArrayLiteral(Vector( - 
MapLiteral(Map(PrimitiveLiteralExpressionElement(WomInteger(1)) -> StringLiteral("one"), PrimitiveLiteralExpressionElement(WomInteger(2)) -> StringLiteral("two"))), - MapLiteral(Map(PrimitiveLiteralExpressionElement(WomInteger(11)) -> StringLiteral("eleven"), PrimitiveLiteralExpressionElement(WomInteger(22)) -> StringLiteral("twenty-two"))) - )) + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement( + ArrayTypeElement( + PairTypeElement(PrimitiveTypeElement(WomIntegerType), PrimitiveTypeElement(WomStringType)) + ), + "int_cross_string", + Cross(IdentifierLookup("ints"), IdentifierLookup("strings")) + ), + OutputDeclarationElement(ArrayTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType))), + "transposed_matrix", + Transpose(IdentifierLookup("matrix")) + ), + OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "flattened_matrix", + Flatten(IdentifierLookup("matrix")) + ), + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "matrix_length", + Length(IdentifierLookup("matrix")) + ), + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "flattened_matrix_length", + Length(IdentifierLookup("flattened_matrix")) + ), + OutputDeclarationElement( + ArrayTypeElement( + PairTypeElement(PrimitiveTypeElement(WomIntegerType), PrimitiveTypeElement(WomStringType)) + ), + "flattened_map", + Flatten(IdentifierLookup("list_of_maps")) + ), + OutputDeclarationElement(PrimitiveTypeElement(WomStringType), + "file_basename", + Basename(IdentifierLookup("filepath"), None) + ), + OutputDeclarationElement(PrimitiveTypeElement(WomStringType), + "file_basename_extensionless", + Basename(IdentifierLookup("filepath"), Some(StringLiteral(".txt"))) + ), + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "f_floor", Floor(IdentifierLookup("f"))), + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), + "f_ceiling", + Ceil(IdentifierLookup("f")) + ), + OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "f_round", Round(IdentifierLookup("f"))), + OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), + "m1", + IndexAccess(IdentifierLookup("matrix"), + PrimitiveLiteralExpressionElement(WomInteger(1)) + ) + ), + OutputDeclarationElement( + PrimitiveTypeElement(WomIntegerType), + "m2", + IndexAccess(IndexAccess(IdentifierLookup("matrix"), PrimitiveLiteralExpressionElement(WomInteger(1))), + PrimitiveLiteralExpressionElement(WomInteger(1)) + ) + ) + ) + ) ), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomFloatType), "f", PrimitiveLiteralExpressionElement(WomFloat(1.024))) - ), - outputsSection = Some( - OutputsSectionElement(Vector( - OutputDeclarationElement(ArrayTypeElement(PairTypeElement(PrimitiveTypeElement(WomIntegerType),PrimitiveTypeElement(WomStringType))), "int_cross_string", Cross(IdentifierLookup("ints"),IdentifierLookup("strings"))), - OutputDeclarationElement(ArrayTypeElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType))), "transposed_matrix", Transpose(IdentifierLookup("matrix"))), - OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "flattened_matrix", Flatten(IdentifierLookup("matrix"))), - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "matrix_length", Length(IdentifierLookup("matrix"))), - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "flattened_matrix_length", Length(IdentifierLookup("flattened_matrix"))), - 
OutputDeclarationElement(ArrayTypeElement(PairTypeElement(PrimitiveTypeElement(WomIntegerType),PrimitiveTypeElement(WomStringType))), "flattened_map", Flatten(IdentifierLookup("list_of_maps"))), - OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "file_basename", Basename(IdentifierLookup("filepath"),None)), - OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "file_basename_extensionless", Basename(IdentifierLookup("filepath"),Some(StringLiteral(".txt")))), - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "f_floor",Floor(IdentifierLookup("f"))), - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "f_ceiling",Ceil(IdentifierLookup("f"))), - OutputDeclarationElement(PrimitiveTypeElement(WomIntegerType), "f_round",Round(IdentifierLookup("f"))), - OutputDeclarationElement( - ArrayTypeElement(PrimitiveTypeElement(WomIntegerType)), "m1", IndexAccess(IdentifierLookup("matrix"),PrimitiveLiteralExpressionElement(WomInteger(1)))), - OutputDeclarationElement( - PrimitiveTypeElement(WomIntegerType),"m2", IndexAccess( - IndexAccess(IdentifierLookup("matrix"),PrimitiveLiteralExpressionElement(WomInteger(1))),PrimitiveLiteralExpressionElement(WomInteger(1)))) - )) - ), - metaSection = None, - parameterMetaSection = None, - sourceLocation = Some(SourceFileLocation(3)) - )), + metaSection = None, + parameterMetaSection = None, + sourceLocation = Some(SourceFileLocation(3)) + ) + ), tasks = Vector.empty ), "command_syntaxes" -> FileElement( @@ -824,61 +1361,112 @@ object WdlFileToWdlomSpec { tasks = Vector( TaskDefinitionElement( name = "a", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "world1", Some(StringExpression(Vector(StringLiteral("wo"), StringPlaceholder(IdentifierLookup("rld")))))), - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "world2", Some(StringExpression(Vector(StringLiteral("wo"), StringPlaceholder(IdentifierLookup("rld")))))) - ))), + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement( + PrimitiveTypeElement(WomStringType), + "world1", + Some(StringExpression(Vector(StringLiteral("wo"), StringPlaceholder(IdentifierLookup("rld"))))) + ), + InputDeclarationElement( + PrimitiveTypeElement(WomStringType), + "world2", + Some(StringExpression(Vector(StringLiteral("wo"), StringPlaceholder(IdentifierLookup("rld"))))) + ) + ) + ) + ), declarations = Vector( IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "rld", StringLiteral("rld")) ), - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "out", ReadString(StdoutElement)) - ))), - commandSection = CommandSectionElement(Vector( - CommandSectionLine(Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(StringLiteral("hello"), PlaceholderAttributeSet.empty), - StringCommandPartElement(" "), - PlaceholderCommandPartElement(IdentifierLookup("world1"), PlaceholderAttributeSet.empty) - )), - CommandSectionLine(Vector( - StringCommandPartElement("echo goodbye "), - PlaceholderCommandPartElement(IdentifierLookup("world2"), PlaceholderAttributeSet.empty) - )), - CommandSectionLine(Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(IdentifierLookup("world1"), - PlaceholderAttributeSet(Some("foo"), Some("--yes"), Some("--no"), Some(", "))) - )) - )), - runtimeSection = Some(RuntimeAttributesSectionElement(Vector( - KvPair("docker", 
StringLiteral("ubuntu:latest")) - ))), + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "out", ReadString(StdoutElement)) + ) + ) + ), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo "), + PlaceholderCommandPartElement(StringLiteral("hello"), PlaceholderAttributeSet.empty), + StringCommandPartElement(" "), + PlaceholderCommandPartElement(IdentifierLookup("world1"), PlaceholderAttributeSet.empty) + ) + ), + CommandSectionLine( + Vector( + StringCommandPartElement("echo goodbye "), + PlaceholderCommandPartElement(IdentifierLookup("world2"), PlaceholderAttributeSet.empty) + ) + ), + CommandSectionLine( + Vector( + StringCommandPartElement("echo "), + PlaceholderCommandPartElement(IdentifierLookup("world1"), + PlaceholderAttributeSet(Some("foo"), + Some("--yes"), + Some("--no"), + Some(", ") + ) + ) + ) + ) + ) + ), + runtimeSection = Some( + RuntimeAttributesSectionElement( + Vector( + KvPair("docker", StringLiteral("ubuntu:latest")) + ) + ) + ), metaSection = None, parameterMetaSection = None, sourceLocation = Some(SourceFileLocation(3)) ), TaskDefinitionElement( name = "b", - inputsSection = Some(InputsSectionElement(Vector( - InputDeclarationElement(PrimitiveTypeElement(WomStringType), "world", Some(StringLiteral("world"))) - ))), + inputsSection = Some( + InputsSectionElement( + Vector( + InputDeclarationElement(PrimitiveTypeElement(WomStringType), "world", Some(StringLiteral("world"))) + ) + ) + ), declarations = Vector.empty, - outputsSection = Some(OutputsSectionElement(Vector( - OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "out", ReadString(StdoutElement)) - ))), - commandSection = CommandSectionElement(Vector( - CommandSectionLine(Vector( - StringCommandPartElement("echo hello ${world}") - )), - CommandSectionLine(Vector( - StringCommandPartElement("echo goodbye "), - PlaceholderCommandPartElement(IdentifierLookup("world"), PlaceholderAttributeSet.empty) + outputsSection = Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "out", ReadString(StdoutElement)) + ) + ) + ), + commandSection = CommandSectionElement( + Vector( + CommandSectionLine( + Vector( + StringCommandPartElement("echo hello ${world}") + ) + ), + CommandSectionLine( + Vector( + StringCommandPartElement("echo goodbye "), + PlaceholderCommandPartElement(IdentifierLookup("world"), PlaceholderAttributeSet.empty) + ) + ) + ) + ), + runtimeSection = Some( + RuntimeAttributesSectionElement( + Vector( + KvPair("docker", StringLiteral("ubuntu:latest")) + ) ) - ))), - runtimeSection = Some(RuntimeAttributesSectionElement(Vector( - KvPair("docker", StringLiteral("ubuntu:latest")) - ))), + ), metaSection = None, parameterMetaSection = None, sourceLocation = Some(SourceFileLocation(22)) @@ -888,177 +1476,272 @@ object WdlFileToWdlomSpec { "gap_in_command" -> FileElement( imports = Vector.empty, structs = Vector.empty, - workflows = Vector(WorkflowDefinitionElement( - "my_workflow", - None, - Set(CallElement("my_task", None, Vector.empty, None, Some(SourceFileLocation(4)))), - None, - None, - None, - Some(SourceFileLocation(3)) - )), - tasks = Vector(TaskDefinitionElement( - "my_task", - None, - Vector(), - Some(OutputsSectionElement( - Vector(OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType)),"lines",ReadLines(StdoutElement))) - )), - 
CommandSectionElement(Vector(CommandSectionLine(Vector(StringCommandPartElement(""" echo "hi""""))), CommandSectionLine(Vector()), CommandSectionLine(Vector(StringCommandPartElement(""" echo "bye""""))))), - None, - None, - None, - Some(SourceFileLocation(7)))) + workflows = Vector( + WorkflowDefinitionElement( + "my_workflow", + None, + Set(CallElement("my_task", None, Vector.empty, None, Some(SourceFileLocation(4)))), + None, + None, + None, + Some(SourceFileLocation(3)) + ) + ), + tasks = Vector( + TaskDefinitionElement( + "my_task", + None, + Vector(), + Some( + OutputsSectionElement( + Vector( + OutputDeclarationElement(ArrayTypeElement(PrimitiveTypeElement(WomStringType)), + "lines", + ReadLines(StdoutElement) + ) + ) + ) + ), + CommandSectionElement( + Vector( + CommandSectionLine(Vector(StringCommandPartElement(""" echo "hi""""))), + CommandSectionLine(Vector()), + CommandSectionLine(Vector(StringCommandPartElement(""" echo "bye""""))) + ) + ), + None, + None, + None, + Some(SourceFileLocation(7)) + ) + ) ), "same_named_inputs_priority" -> FileElement( Vector(), Vector(), - Vector(WorkflowDefinitionElement( - "same_named_inputs_priority", - None, - Set( - CallElement("echo", Some("b"), Vector.empty, Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("2")))))), Some(SourceFileLocation(10))), - CallElement("echo", Some("a"), Vector.empty, Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("1")))))), Some(SourceFileLocation(6))), - IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "out", StringLiteral("hello")), - CallElement("echo", Some("d"), Vector.empty, Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("4")))))), Some(SourceFileLocation(18))), - CallElement("echo", Some("c"), Vector.empty, Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("3")))))), Some(SourceFileLocation(14))) - ), - None, - None, - None, - Some(SourceFileLocation(4)) - ) - ), - Vector(TaskDefinitionElement( - "echo", - Some(InputsSectionElement( - Vector(InputDeclarationElement(PrimitiveTypeElement(WomStringType),"out",None)))), - Vector(), - Some(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomStringType),"result",IdentifierLookup("out"))))), - CommandSectionElement( - Vector(CommandSectionLine( - Vector( - StringCommandPartElement("echo "), - PlaceholderCommandPartElement(IdentifierLookup("out"), - PlaceholderAttributeSet(None,None,None,None))))) + Vector( + WorkflowDefinitionElement( + "same_named_inputs_priority", + None, + Set( + CallElement( + "echo", + Some("b"), + Vector.empty, + Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("2")))))), + Some(SourceFileLocation(10)) + ), + CallElement( + "echo", + Some("a"), + Vector.empty, + Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("1")))))), + Some(SourceFileLocation(6)) + ), + IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "out", StringLiteral("hello")), + CallElement( + "echo", + Some("d"), + Vector.empty, + Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("4")))))), + Some(SourceFileLocation(18)) + ), + CallElement( + "echo", + Some("c"), + Vector.empty, + Some(CallBodyElement(Vector(KvPair("out", Add(IdentifierLookup("out"), StringLiteral("3")))))), + Some(SourceFileLocation(14)) + ) + ), + None, + None, + None, + 
Some(SourceFileLocation(4)) + ) ), - Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), None, None, Some(SourceFileLocation(23)))) + Vector( + TaskDefinitionElement( + "echo", + Some(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomStringType), "out", None)))), + Vector(), + Some( + OutputsSectionElement( + Vector(OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "result", IdentifierLookup("out"))) + ) + ), + CommandSectionElement( + Vector( + CommandSectionLine( + Vector(StringCommandPartElement("echo "), + PlaceholderCommandPartElement(IdentifierLookup("out"), + PlaceholderAttributeSet(None, None, None, None) + ) + ) + ) + ) + ), + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), + None, + None, + Some(SourceFileLocation(23)) + ) + ) ), "cmd_whitespace_spaces" -> FileElement( Vector.empty, Vector.empty, - Vector(WorkflowDefinitionElement( - "Test", - None, - Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(10)))), - None, - None, - None, - Some(SourceFileLocation(8))) + Vector( + WorkflowDefinitionElement( + "Test", + None, + Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(10)))), + None, + None, + None, + Some(SourceFileLocation(8)) + ) ), - Vector(TaskDefinitionElement( - "Echo", - None, - Vector(), - None, - CommandSectionElement(List( - CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with spaces\""))))), - Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), None, None, - Some(SourceFileLocation(14))) + Vector( + TaskDefinitionElement( + "Echo", + None, + Vector(), + None, + CommandSectionElement( + List(CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with spaces\"")))) + ), + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), + None, + None, + Some(SourceFileLocation(14)) + ) ) ), "cmd_whitespace_none" -> FileElement( Vector.empty, Vector.empty, - Vector(WorkflowDefinitionElement( - "Test", - None, - Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), - None, - None, - None, - Some(SourceFileLocation(3))) + Vector( + WorkflowDefinitionElement( + "Test", + None, + Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), + None, + None, + None, + Some(SourceFileLocation(3)) + ) ), - Vector(TaskDefinitionElement( - "Echo", - None, - Vector(), - None, - CommandSectionElement(List( - CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with nothing\""))))), - Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), None, None, - Some(SourceFileLocation(9))) + Vector( + TaskDefinitionElement( + "Echo", + None, + Vector(), + None, + CommandSectionElement( + List(CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with nothing\"")))) + ), + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), + None, + None, + Some(SourceFileLocation(9)) + ) ) ), "cmd_whitespace_tabs" -> FileElement( Vector.empty, Vector.empty, - Vector(WorkflowDefinitionElement( - "Test", - None, - Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), - None, - None, - None, - Some(SourceFileLocation(3))) + Vector( + WorkflowDefinitionElement( + "Test", + None, + 
Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), + None, + None, + None, + Some(SourceFileLocation(3)) + ) ), - Vector(TaskDefinitionElement( - "Echo", - None, - Vector(), - None, - CommandSectionElement(List( - CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with tabs\""))))), - Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), None, None, - Some(SourceFileLocation(9))) + Vector( + TaskDefinitionElement( + "Echo", + None, + Vector(), + None, + CommandSectionElement( + List(CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with tabs\"")))) + ), + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), + None, + None, + Some(SourceFileLocation(9)) + ) ) ), "cmd_strip_common_tabs" -> FileElement( Vector.empty, Vector.empty, - Vector(WorkflowDefinitionElement( - "Test", - None, - Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), - None, - None, - None, - Some(SourceFileLocation(3))) + Vector( + WorkflowDefinitionElement( + "Test", + None, + Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), + None, + None, + None, + Some(SourceFileLocation(3)) + ) ), - Vector(TaskDefinitionElement( - "Echo", - None, - Vector(), - None, - CommandSectionElement(List( - CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with tabs\""))), - CommandSectionLine(Vector(StringCommandPartElement("\t\techo \"I am prefixed with even more tabs\""))))), - Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), None, None, - Some(SourceFileLocation(9))) + Vector( + TaskDefinitionElement( + "Echo", + None, + Vector(), + None, + CommandSectionElement( + List( + CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with tabs\""))), + CommandSectionLine(Vector(StringCommandPartElement("\t\techo \"I am prefixed with even more tabs\""))) + ) + ), + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), + None, + None, + Some(SourceFileLocation(9)) + ) ) ), "cmd_strip_common_spaces" -> FileElement( Vector.empty, Vector.empty, - Vector(WorkflowDefinitionElement( - "Test", - None, - Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), - None, - None, - None, - Some(SourceFileLocation(3))) + Vector( + WorkflowDefinitionElement( + "Test", + None, + Set(CallElement("Echo", Some("echo"), Vector.empty, None, Some(SourceFileLocation(5)))), + None, + None, + None, + Some(SourceFileLocation(3)) + ) ), - Vector(TaskDefinitionElement( - "Echo", - None, - Vector(), - None, - CommandSectionElement(List( - CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with spaces\""))), - CommandSectionLine(Vector(StringCommandPartElement(" echo \"I am prefixed with even more spaces\""))))), - Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", StringLiteral("ubuntu:latest"))))), None, None, - Some(SourceFileLocation(9))) + Vector( + TaskDefinitionElement( + "Echo", + None, + Vector(), + None, + CommandSectionElement( + List( + CommandSectionLine(Vector(StringCommandPartElement("echo \"I am prefixed with spaces\""))), + CommandSectionLine(Vector(StringCommandPartElement(" echo \"I am prefixed with even more spaces\""))) + ) + ), + Some(RuntimeAttributesSectionElement(Vector(KvPair("docker", 
StringLiteral("ubuntu:latest"))))), + None, + None, + Some(SourceFileLocation(9)) + ) ) ), "string_escaping" -> FileElement( @@ -1066,7 +1749,9 @@ object WdlFileToWdlomSpec { Vector(), Vector( WorkflowDefinitionElement( - "escapes", None, Set( + "escapes", + None, + Set( IntermediateValueDeclarationElement( PrimitiveTypeElement(WomStringType), "backslash", @@ -1168,9 +1853,12 @@ object WdlFileToWdlomSpec { StringLiteral(" \' ") ) ), - None, None, None, + None, + None, + None, Some(SourceFileLocation(3)) - )), + ) + ), Vector.empty ) ) diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessTypeEvaluatorSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessTypeEvaluatorSpec.scala index 82826e39139..06bbbc57725 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessTypeEvaluatorSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessTypeEvaluatorSpec.scala @@ -13,7 +13,6 @@ import wdl.model.draft3.graph.expression.TypeEvaluator.ops._ import wom.types._ import wom.values.WomInteger - class MemberAccessTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "member access type evaluator" @@ -47,13 +46,17 @@ class MemberAccessTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec ), memberAccessTail = NonEmptyList("left", List("right", "right", "left")) ) - nestedPairLookup.evaluateType(Map.empty) shouldBeValid WomPairType(WomIntegerType, WomIntegerType) + nestedPairLookup.evaluateType(Map.empty) shouldBeValid WomPairType(WomIntegerType, WomIntegerType) } it should "evaluate a nested member access on a call output" in { val callOutputLookup: ExpressionElement = IdentifierMemberAccess("t", "out", List("left", "right")) val linkedValues = Map[UnlinkedConsumedValueHook, GeneratedValueHandle]( - UnlinkedCallOutputOrIdentifierAndMemberAccessHook("t", "out") -> GeneratedCallOutputValueHandle("t", "out", WomPairType(WomPairType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), WomStringType)) + UnlinkedCallOutputOrIdentifierAndMemberAccessHook("t", "out") -> GeneratedCallOutputValueHandle( + "t", + "out", + WomPairType(WomPairType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), WomStringType) + ) ) callOutputLookup.evaluateType(linkedValues) shouldBeValid WomPairType(WomIntegerType, WomIntegerType) @@ -64,7 +67,13 @@ class MemberAccessTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec val linkedValues = Map[UnlinkedConsumedValueHook, GeneratedValueHandle]( UnlinkedCallOutputOrIdentifierAndMemberAccessHook("t", "out") -> GeneratedIdentifierValueHandle( linkableName = "t", - womType = WomCompositeType(Map("out" -> WomPairType(WomPairType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), WomIntegerType))) + womType = WomCompositeType( + Map( + "out" -> WomPairType(WomPairType(WomIntegerType, WomPairType(WomIntegerType, WomIntegerType)), + WomIntegerType + ) + ) + ) ) ) @@ -86,7 +95,10 @@ class MemberAccessTypeEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec it should "evaluate an identifier lookup" in { val identifierLookup: ExpressionElement = IdentifierLookup("foo") val linkedValues = Map[UnlinkedConsumedValueHook, GeneratedValueHandle]( - UnlinkedIdentifierHook("foo") -> GeneratedIdentifierValueHandle(linkableName = "foo", womType = WomPairType(WomStringType, WomStringType)) + UnlinkedIdentifierHook("foo") -> 
GeneratedIdentifierValueHandle(linkableName = "foo", + womType = + WomPairType(WomStringType, WomStringType) + ) ) identifierLookup.evaluateType(linkedValues) shouldBeValid WomPairType(WomStringType, WomStringType) diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessValueEvaluatorSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessValueEvaluatorSpec.scala index 272a85d38c6..3f90af6c35d 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessValueEvaluatorSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/MemberAccessValueEvaluatorSpec.scala @@ -13,7 +13,6 @@ import wdl.model.draft3.graph.expression.ValueEvaluator.ops._ import wom.expression.NoIoFunctionSet import wom.values.{WomInteger, WomObject, WomPair, WomString} - class MemberAccessValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val fiveLiteral = PrimitiveLiteralExpressionElement(WomInteger(5)) @@ -48,7 +47,10 @@ class MemberAccessValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpe ), memberAccessTail = NonEmptyList("left", List("right", "right", "left")) ) - nestedPairLookup.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomPair(womFive, womSix), Seq.empty) + nestedPairLookup.evaluateValue(Map.empty, NoIoFunctionSet, None) shouldBeValid EvaluatedValue( + WomPair(womFive, womSix), + Seq.empty + ) } it should "evaluate a nested member access on a call output" in { @@ -57,7 +59,9 @@ class MemberAccessValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpe "t.out" -> WomPair(WomPair(womFive, WomPair(womFive, womSix)), womFive) ) - callOutputLookup.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomPair(womFive, womSix), Seq.empty) + callOutputLookup.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomPair(womFive, womSix), + Seq.empty + ) } it should "evaluate a nested member access on an object" in { @@ -66,14 +70,18 @@ class MemberAccessValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpe "t" -> WomObject(Map("out" -> WomPair(WomPair(womFive, WomPair(womFive, womSix)), womFive))) ) - objectLookup.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomPair(womFive, womSix), Seq.empty) + objectLookup.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomPair(womFive, womSix), + Seq.empty + ) } it should "evaluate an identifier lookup" in { val identifierLookup: ExpressionElement = IdentifierLookup("foo") val inputs = Map("foo" -> WomString("foo")) - identifierLookup.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomString("foo"), Seq.empty) + identifierLookup.evaluateValue(inputs, NoIoFunctionSet, None) shouldBeValid EvaluatedValue(WomString("foo"), + Seq.empty + ) } } diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/TernaryIfEvaluatorSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/TernaryIfEvaluatorSpec.scala index 98b2f071b0a..0c9a4323dcc 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/TernaryIfEvaluatorSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/TernaryIfEvaluatorSpec.scala @@ -16,7 +16,6 @@ import wom.expression.NoIoFunctionSet import wom.types.{WomFloatType, WomIntegerType, WomType} import wom.values.{WomBoolean, 
WomFloat, WomInteger, WomValue} - /** * Checks that the draft3 value evaluators for ExpressionElements are wired to forward values through to the appropriate * underlying methods on WomValue. @@ -43,7 +42,10 @@ class TernaryIfEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with M // Test name, input expression, output type. val typeTests: List[(String, ExpressionElement, WomType)] = List( ("if true then 5 else 6", TernaryIf(trueLiteral, fiveLiteral, sixLiteral), WomIntegerType), - ("if false then 5.5 else 6", TernaryIf(falseLiteral, PrimitiveLiteralExpressionElement(WomFloat(5.5)), sixLiteral), WomFloatType) + ("if false then 5.5 else 6", + TernaryIf(falseLiteral, PrimitiveLiteralExpressionElement(WomFloat(5.5)), sixLiteral), + WomFloatType + ) ) valueTests foreach { case (name, expression, expected) => diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/UnaryAndBinaryOperatorsEvaluatorSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/UnaryAndBinaryOperatorsEvaluatorSpec.scala index 76c38f021cc..656e6ae21be 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/UnaryAndBinaryOperatorsEvaluatorSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/UnaryAndBinaryOperatorsEvaluatorSpec.scala @@ -15,7 +15,6 @@ import wom.expression.NoIoFunctionSet import wom.types.{WomBooleanType, WomIntegerType, WomType} import wom.values.{WomBoolean, WomInteger, WomValue} - /** * Checks that the draft3 value evaluators for ExpressionElements are wired to forward values through to the appropriate * underlying methods on WomValue. @@ -41,22 +40,18 @@ class UnaryAndBinaryOperatorsEvaluatorSpec extends AnyFlatSpec with CromwellTime ("-(+(-5))", UnaryNegation(UnaryPlus(UnaryNegation(fiveLiteral))), WomInteger(-5), WomIntegerType), ("!true", LogicalNot(trueLiteral), WomBoolean(false), WomBooleanType), ("!(!true)", LogicalNot(LogicalNot(trueLiteral)), WomBoolean(true), WomBooleanType), - ("true || false", LogicalOr(trueLiteral, falseLiteral), WomBoolean(true), WomBooleanType), ("true && false", LogicalAnd(trueLiteral, falseLiteral), WomBoolean(false), WomBooleanType), ("true == false", Equals(trueLiteral, falseLiteral), WomBoolean(false), WomBooleanType), ("true != false", NotEquals(trueLiteral, falseLiteral), WomBoolean(true), WomBooleanType), - ("5 < 6", LessThan(fiveLiteral, sixLiteral), WomBoolean(true), WomBooleanType), ("5 < 5", LessThan(fiveLiteral, fiveLiteral), WomBoolean(false), WomBooleanType), ("5 <= 6", LessThanOrEquals(fiveLiteral, sixLiteral), WomBoolean(true), WomBooleanType), ("5 <= 5", LessThanOrEquals(fiveLiteral, fiveLiteral), WomBoolean(true), WomBooleanType), - ("6 > 5", GreaterThan(sixLiteral, fiveLiteral), WomBoolean(true), WomBooleanType), ("5 > 5", GreaterThan(fiveLiteral, fiveLiteral), WomBoolean(false), WomBooleanType), ("6 >= 6", GreaterThanOrEquals(sixLiteral, fiveLiteral), WomBoolean(true), WomBooleanType), ("5 >= 5", GreaterThanOrEquals(fiveLiteral, fiveLiteral), WomBoolean(true), WomBooleanType), - ("5 + 5", Add(fiveLiteral, fiveLiteral), WomInteger(10), WomIntegerType), ("5 - 5", Subtract(fiveLiteral, fiveLiteral), WomInteger(0), WomIntegerType), ("5 * 5", Multiply(fiveLiteral, fiveLiteral), WomInteger(25), WomIntegerType), diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/consumed/ValueConsumerSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/consumed/ValueConsumerSpec.scala index 
f2aac51435e..51a352009e3 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/consumed/ValueConsumerSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/consumed/ValueConsumerSpec.scala @@ -10,7 +10,6 @@ import wdl.model.draft3.graph.ExpressionValueConsumer.ops._ import wdl.model.draft3.graph.UnlinkedIdentifierHook import wom.values.WomInteger - class ValueConsumerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "the glob value consumer" should "find consumed lookup 'x'" in { val expr: ExpressionElement = Glob(IdentifierLookup("x")) @@ -18,7 +17,8 @@ class ValueConsumerSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche } "the read_string value consumer" should "find consumed lookup 'x' in read_string(x[0])" in { - val expr: ExpressionElement = ReadString(IndexAccess(IdentifierLookup("x"), PrimitiveLiteralExpressionElement(WomInteger(0)))) + val expr: ExpressionElement = + ReadString(IndexAccess(IdentifierLookup("x"), PrimitiveLiteralExpressionElement(WomInteger(0)))) expr.expressionConsumedValueHooks shouldBe Set(UnlinkedIdentifierHook("x")) } } diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3ReadFileLimitsSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3ReadFileLimitsSpec.scala index 11b842f23ed..b856db1c3d7 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3ReadFileLimitsSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3ReadFileLimitsSpec.scala @@ -13,15 +13,14 @@ import wom.values.WomSingleFile import scala.concurrent.Future - class Draft3ReadFileLimitsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "ReadLikeFunctions Size Limit Draft 3" - + it should "pass correct size limits to the ioFunctions for read_lines" in { - ReadLines(PrimitiveLiteralExpressionElement(WomSingleFile("blah"))) - .evaluateValue(Map.empty, ioFunctionTester(1, ""), None) - .valueOr(errors => fail(errors.toList.mkString(", "))) + ReadLines(PrimitiveLiteralExpressionElement(WomSingleFile("blah"))) + .evaluateValue(Map.empty, ioFunctionTester(1, ""), None) + .valueOr(errors => fail(errors.toList.mkString(", "))) } it should "pass correct size limits to the ioFunctions for read_bool" in { @@ -82,9 +81,13 @@ class Draft3ReadFileLimitsSpec extends AnyFlatSpec with CromwellTimeoutSpec with object Draft3ReadFileLimitsSpec { def ioFunctionTester(expectedMaxBytes: Int, result: String) = new EmptyIoFunctionSet { - override def readFile(path: String, maxBytes: Option[Int] = None, failOnOverflow: Boolean = false) = { + override def readFile(path: String, maxBytes: Option[Int] = None, failOnOverflow: Boolean = false) = if (maxBytes.contains(expectedMaxBytes)) Future.successful(result) - else Future.failed(new Exception(s"readFile was called with a max bytes value of ${maxBytes.getOrElse("No value")} but was expecting $expectedMaxBytes")) - } + else + Future.failed( + new Exception( + s"readFile was called with a max bytes value of ${maxBytes.getOrElse("No value")} but was expecting $expectedMaxBytes" + ) + ) } } diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala index 41c767d3a80..8fc05adbe0e 100644 --- 
a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala @@ -18,32 +18,48 @@ import wom.values._ import scala.concurrent.Future import scala.util.{Failure, Success, Try} - class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "ReadLikeFunctions.size" it should "correctly report a 2048 byte file, in bytes by default" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), None).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { - res => assert(res.value == WomFloat(2048d)) + validate( + Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), None).evaluateValue(Map.empty, + testFunctions(Success(2048L)), + None + ) + ) { res => + assert(res.value == WomFloat(2048d)) } } it should "correctly report a 2048 byte file, in bytes" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("B")))).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { - res => assert(res.value == WomFloat(2048d)) + validate( + Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), + Some(PrimitiveLiteralExpressionElement(WomString("B"))) + ).evaluateValue(Map.empty, testFunctions(Success(2048L)), None) + ) { res => + assert(res.value == WomFloat(2048d)) } } it should "correctly report a 2048 byte file, in KB" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("KB")))).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { - res => assert(res.value == WomFloat(2.0d)) + validate( + Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), + Some(PrimitiveLiteralExpressionElement(WomString("KB"))) + ).evaluateValue(Map.empty, testFunctions(Success(2048L)), None) + ) { res => + assert(res.value == WomFloat(2.0d)) } } it should "correctly report a 2048 byte file, in KiB" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("KiB")))).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { - res => assert(res.value == WomFloat(2d)) + validate( + Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), + Some(PrimitiveLiteralExpressionElement(WomString("KiB"))) + ).evaluateValue(Map.empty, testFunctions(Success(2048L)), None) + ) { res => + assert(res.value == WomFloat(2d)) } } @@ -56,7 +72,9 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M } it should "correctly report the size of a supplied, optional optional, 2048 byte file" in { - val value = WomOptionalValue(WomOptionalType(WomSingleFileType), Option(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))))) + val value = WomOptionalValue(WomOptionalType(WomSingleFileType), + Option(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah")))) + ) validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d)) @@ -66,53 +84,90 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "correctly report the size of a supplied, optional, 2048 byte file, in MB" in { val value = WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))) - 
validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { - res => assert(res.value == WomFloat(0.001953125d)) + validate( + Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))) + .evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None) + ) { res => + assert(res.value == WomFloat(0.001953125d)) } } it should "correctly report that an unsupplied optional file is empty" in { val value = WomOptionalValue(WomSingleFileType, None) - validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), None)) { - res => assert(res.value == WomFloat(0d)) + validate( + Size(IdentifierLookup("x"), None).evaluateValue( + Map("x" -> value), + testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), + None + ) + ) { res => + assert(res.value == WomFloat(0d)) } } it should "correctly report that an unsupplied File?? is empty" in { - val value = WomOptionalValue(WomOptionalType(WomSingleFileType), Option(WomOptionalValue(WomSingleFileType, None))) - - validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), None)) { - res => assert(res.value == WomFloat(0d)) - } + val value = WomOptionalValue(WomOptionalType(WomSingleFileType), Option(WomOptionalValue(WomSingleFileType, None))) + + validate( + Size(IdentifierLookup("x"), None).evaluateValue( + Map("x" -> value), + testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), + None + ) + ) { res => + assert(res.value == WomFloat(0d)) + } } it should "correctly report that an unsupplied optional file is empty, even in MB" in { val value = WomOptionalValue(WomSingleFileType, None) - validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), None)) { - res => assert(res.value == WomFloat(0d)) + validate( + Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue( + Map("x" -> value), + testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), + None + ) + ) { res => + assert(res.value == WomFloat(0d)) } } it should "correctly report the size of an array of files, in GiB" in { val value = WomArray(Seq(WomSingleFile("blah"), WomSingleFile("blah"))) - validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("GiB")))).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { - res => assert(res.value == WomFloat(2048d * 2 / 1024 / 1024 / 1024)) + validate( + Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("GiB")))) + .evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None) + ) { res => + assert(res.value == WomFloat(2048d * 2 / 1024 / 1024 / 1024)) } } it should "correctly report that the size of an array of unsupplied optional files is empty, in MB" in { val value = WomArray(Seq(WomOptionalValue(WomSingleFileType, None), WomOptionalValue(WomSingleFileType, None))) - validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), None)) { - res => 
assert(res.value == WomFloat(0d)) + validate( + Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue( + Map("x" -> value), + testFunctions(Failure(new Exception("Bad call to size on an empty optional"))), + None + ) + ) { res => + assert(res.value == WomFloat(0d)) } } it should "correctly report the size of a mixed Array[File?] - some supplied and some not" in { - val value = WomArray(Seq(WomOptionalValue(WomSingleFileType, Some(WomSingleFile("blah"))), WomOptionalValue(WomSingleFileType, None), WomOptionalValue(WomSingleFileType, Some(WomSingleFile("blah"))), WomOptionalValue(WomSingleFileType, None))) + val value = WomArray( + Seq( + WomOptionalValue(WomSingleFileType, Some(WomSingleFile("blah"))), + WomOptionalValue(WomSingleFileType, None), + WomOptionalValue(WomSingleFileType, Some(WomSingleFile("blah"))), + WomOptionalValue(WomSingleFileType, None) + ) + ) validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d * 2)) @@ -121,25 +176,37 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "refuse to report file sizes for Ints" in { val value = WomInteger(55) - val oops = Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an Int"))), None) + val oops = Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))) + .evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an Int"))), None) oops match { case Valid(x) => fail(s"Expected an Integer to not have a file length but instead got ${x.value.toWomString}") - case Invalid(e) => e.head should be("The 'size' method expects a 'File', 'File?', 'Array[File]' or Array[File?] argument but instead got Int.") + case Invalid(e) => + e.head should be( + "The 'size' method expects a 'File', 'File?', 'Array[File]' or Array[File?] argument but instead got Int." + ) } } it should "refuse to report file sizes for Int?s" in { val value = WomOptionalValue(WomIntegerType, Some(WomInteger(55))) - val oops = Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an Int?"))), None) + val oops = Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))) + .evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("Bad call to size on an Int?"))), None) oops match { case Valid(x) => fail(s"Expected an Int? to not have a file length but instead got ${x.value.toWomString}") - case Invalid(e) => e.head should be("The 'size' method expects a 'File', 'File?', 'Array[File]' or Array[File?] argument but instead got Int?.") + case Invalid(e) => + e.head should be( + "The 'size' method expects a 'File', 'File?', 'Array[File]' or Array[File?] argument but instead got Int?." 
+ ) } } it should "pass on underlying size reading errors" in { val value = WomOptionalValue(WomSingleFileType, Some(WomSingleFile("blah"))) - val oops = Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Failure(new Exception("'size' inner exception, expect me to be passed on"))), None) + val oops = Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue( + Map("x" -> value), + testFunctions(Failure(new Exception("'size' inner exception, expect me to be passed on"))), + None + ) oops match { case Valid(x) => fail(s"Expected an Int? to not have a file length but instead got ${x.value.toWomString}") case Invalid(e) => e.head should be("'size' inner exception, expect me to be passed on") diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/linking/expression/files/FileEvaluatorSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/linking/expression/files/FileEvaluatorSpec.scala index 63fbd05ba03..0a9dd40d206 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/linking/expression/files/FileEvaluatorSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/linking/expression/files/FileEvaluatorSpec.scala @@ -12,7 +12,6 @@ import wom.expression.NoIoFunctionSet import wom.types.{WomCompositeType, WomSingleFileType} import wom.values.WomSingleFile - class FileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "FileEvaluator[ExpressionElement]" @@ -21,7 +20,10 @@ class FileEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche val expressionElement: ExpressionElement = ObjectLiteral(Map("a_file" -> StringLiteral("moo.txt"))) val structType = WomCompositeType(Map("a_file" -> WomSingleFileType)) - val evaluatedFiles = expressionElement.predictFilesNeededToEvaluate(inputs = Map.empty, ioFunctionSet = NoIoFunctionSet, coerceTo = structType) + val evaluatedFiles = expressionElement.predictFilesNeededToEvaluate(inputs = Map.empty, + ioFunctionSet = NoIoFunctionSet, + coerceTo = structType + ) evaluatedFiles shouldBeValid Set(WomSingleFile("moo.txt")) } } diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wdl/WdlomToWdlFileSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wdl/WdlomToWdlFileSpec.scala index 2cd9579db0c..88cef1394db 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wdl/WdlomToWdlFileSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wdl/WdlomToWdlFileSpec.scala @@ -7,7 +7,7 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import wdl.draft3.transforms.ast2wdlom._ -import wdl.draft3.transforms.parsing.{FileStringParserInput, fileToAst, stringToAst} +import wdl.draft3.transforms.parsing.{fileToAst, stringToAst, FileStringParserInput} import wdl.model.draft3.elements._ import wdl.transforms.base.wdlom2wdl.WdlWriter.ops._ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl.fileElementWriter @@ -20,41 +20,39 @@ class WdlomToWdlFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match assert(testFiles.nonEmpty) - - private def stripLocationFromGraphElement(ge : WorkflowGraphElement) : WorkflowGraphElement = { + private def stripLocationFromGraphElement(ge: WorkflowGraphElement): WorkflowGraphElement = ge match { case se: ScatterElement => 
se.copy(sourceLocation = None) case ce: CallElement => ce.copy(sourceLocation = None) case _ => ge } - } - private def stripLocations(wf: WorkflowDefinitionElement) : WorkflowDefinitionElement = { + private def stripLocations(wf: WorkflowDefinitionElement): WorkflowDefinitionElement = wf.copy(graphElements = wf.graphElements.map(stripLocationFromGraphElement)) - } // Remove the source file information, where it exists. // // Re-printing does not preserve line numbers at the moment. - private def stripSourceLocations(fe : FileElement) : FileElement = { - FileElement(fe.imports, - fe.structs, - fe.workflows.map { case wf => - val wf2 = stripLocations(wf) - wf2.copy(sourceLocation = None) - }.toSeq, - fe.tasks.map { case task => task.copy(sourceLocation = None) }.toSeq) - } + private def stripSourceLocations(fe: FileElement): FileElement = + FileElement( + fe.imports, + fe.structs, + fe.workflows.map { case wf => + val wf2 = stripLocations(wf) + wf2.copy(sourceLocation = None) + }.toSeq, + fe.tasks.map { case task => task.copy(sourceLocation = None) }.toSeq + ) testFiles.foreach { file => - it should s"write a file that re-evaluates to the same case classes for ${file.name}" in { val model: Checked[FileElement] = (fileToAst andThen wrapAst andThen astToFileElement).run(file) - model match { + model match { case Right(wdlModel) => - - val newModel = (stringToAst andThen wrapAst andThen astToFileElement).run(FileStringParserInput(wdlModel.toWdlV1, file.name)) + val newModel = (stringToAst andThen wrapAst andThen astToFileElement).run( + FileStringParserInput(wdlModel.toWdlV1, file.name) + ) // Scala case class deep equality is so nice here newModel.map(stripSourceLocations) shouldEqual model.map(stripSourceLocations) diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wom/WdlFileToWomSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wom/WdlFileToWomSpec.scala index b8a8e2a7a32..d9eb867dd85 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wom/WdlFileToWomSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/wdlom2wom/WdlFileToWomSpec.scala @@ -33,24 +33,35 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher } testCases.list.filter(x => x.isRegularFile && x.extension.contains(".wdl")) foreach { testCase => - val fileName = testCase.name val testName = testCase.name.split("\\.").head val itShouldString = s"create a valid WOM object for $fileName" - val testOrIgnore: (=>Any) => Unit = if (testCase.name.endsWith(".ignored.wdl") || testCase.name.endsWith(".nowom.wdl")) { - (it should itShouldString).ignore _ - } else { - (it should itShouldString).in _ - } + val testOrIgnore: (=> Any) => Unit = + if (testCase.name.endsWith(".ignored.wdl") || testCase.name.endsWith(".nowom.wdl")) { + (it should itShouldString).ignore _ + } else { + (it should itShouldString).in _ + } testOrIgnore { - val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map(fe => FileElementToWomBundleInputs(fe, "{}", convertNestedScatterToSubworkflow = true, List.empty, List.empty, workflowDefinitionElementToWomWorkflowDefinition, taskDefinitionElementToWomTaskDefinition)) andThen fileElementToWomBundle + val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map(fe => + FileElementToWomBundleInputs( + fe, + "{}", + convertNestedScatterToSubworkflow = true, + List.empty, + List.empty, + 
workflowDefinitionElementToWomWorkflowDefinition, + taskDefinitionElementToWomTaskDefinition + ) + ) andThen fileElementToWomBundle converter.run(testCase) match { case Right(bundle) => validators(testName).apply(bundle) case Left(errors) => - val formattedErrors = errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) + val formattedErrors = + errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) fail(s"Failed to create WOM bundle: $formattedErrors") } } @@ -65,7 +76,17 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher // } // it should "be able to leave nested scatters intact" in { - val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map(fe => FileElementToWomBundleInputs(fe, "{}", convertNestedScatterToSubworkflow = false, List.empty, List.empty, workflowDefinitionElementToWomWorkflowDefinition, taskDefinitionElementToWomTaskDefinition)) andThen fileElementToWomBundle + val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map(fe => + FileElementToWomBundleInputs( + fe, + "{}", + convertNestedScatterToSubworkflow = false, + List.empty, + List.empty, + workflowDefinitionElementToWomWorkflowDefinition, + taskDefinitionElementToWomTaskDefinition + ) + ) andThen fileElementToWomBundle val twoLevelScatterFile = File("wdl/transforms/draft3/src/test/cases/two_level_scatter.wdl") @@ -75,27 +96,37 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val graph = wf.innerGraph // get the top scatter node - graph.scatters.size shouldBe(1) - val topScatter : ScatterNode = graph.scatters.toVector.head + graph.scatters.size shouldBe 1 + val topScatter: ScatterNode = graph.scatters.toVector.head val wfCalls = graph.allNodes.filterByType[WorkflowCallNode] // don't generate any sub-workflows - wfCalls.size shouldBe(0) + wfCalls.size shouldBe 0 // there should be one scatter inside the top scatter val innerGraph = topScatter.innerGraph - innerGraph.scatters.size shouldBe(1) + innerGraph.scatters.size shouldBe 1 Succeeded case Left(errors) => - val formattedErrors = errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) + val formattedErrors = + errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) fail(s"Failed to create WOM bundle: $formattedErrors") } } - it should "split a nested scatter into a toplevel scatter, and a bottom sub-workflow" in { - val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map(fe => FileElementToWomBundleInputs(fe, "{}", convertNestedScatterToSubworkflow = true, List.empty, List.empty, workflowDefinitionElementToWomWorkflowDefinition, taskDefinitionElementToWomTaskDefinition)) andThen fileElementToWomBundle + val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map(fe => + FileElementToWomBundleInputs( + fe, + "{}", + convertNestedScatterToSubworkflow = true, + List.empty, + List.empty, + workflowDefinitionElementToWomWorkflowDefinition, + taskDefinitionElementToWomTaskDefinition + ) + ) andThen fileElementToWomBundle val twoLevelScatterFile = File("wdl/transforms/draft3/src/test/cases/two_level_scatter.wdl") @@ -105,19 +136,19 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val graph = wf.innerGraph // There should be just one scatter. 
- graph.scatters.size shouldBe(1) + graph.scatters.size shouldBe 1 val wfCalls = graph.allNodes.filterByType[WorkflowCallNode] // There should be a call to a generated sub-workflow in the graph - wfCalls.size shouldBe(1) + wfCalls.size shouldBe 1 Succeeded case Left(errors) => - val formattedErrors = errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) + val formattedErrors = + errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) fail(s"Failed to create WOM bundle: $formattedErrors") } } - private val validators: Map[String, WomBundle => Assertion] = Map( "declaration_chain" -> anyWomWillDo, "empty_workflow" -> anyWomWillDo, @@ -161,9 +192,11 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher private def anyWomWillDo(b: WomBundle): Assertion = Succeeded private def validateStructDefinitionWom(b: WomBundle): Assertion = { - val wfDef: WorkflowDefinition = (b.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).head + val wfDef: WorkflowDefinition = + (b.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).head b.typeAliases.keySet shouldBe Set("FooStruct") - val structOutputType = (wfDef.graph.outputNodes.map(_.womType).filterByType[WomCompositeType]: Set[WomCompositeType]).head + val structOutputType = + (wfDef.graph.outputNodes.map(_.womType).filterByType[WomCompositeType]: Set[WomCompositeType]).head structOutputType.typeMap shouldBe Map( "simple" -> WomIntegerType, @@ -172,27 +205,44 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher } private def validateTaskDefinitionWom(b: WomBundle): Assertion = { - val taskDef: CallableTaskDefinition = (b.allCallables.values.toSet.filterByType[CallableTaskDefinition]: Set[CallableTaskDefinition]).head + val taskDef: CallableTaskDefinition = + (b.allCallables.values.toSet.filterByType[CallableTaskDefinition]: Set[CallableTaskDefinition]).head taskDef.name shouldBe "simple" - taskDef.commandTemplate(Map.empty) shouldBe List(WdlomWomStringCommandPart(StringCommandPartElement("echo Hello World "))) + taskDef.commandTemplate(Map.empty) shouldBe List( + WdlomWomStringCommandPart(StringCommandPartElement("echo Hello World ")) + ) } private def validateCommandSyntaxes(b: WomBundle): Assertion = { b.allCallables.size should be(2) - b.allCallables.get("a")match { + b.allCallables.get("a") match { case Some(taskA) => - taskA.inputs.filter(_.isInstanceOf[FixedInputDefinitionWithDefault]).map(_.name).toSet should be(Set("rld", "__world1", "__world2")) - taskA.inputs.filter(_.isInstanceOf[OptionalInputDefinition]).map(_.name).toSet should be(Set("world1", "world2")) + taskA.inputs.filter(_.isInstanceOf[FixedInputDefinitionWithDefault]).map(_.name).toSet should be( + Set("rld", "__world1", "__world2") + ) + taskA.inputs.filter(_.isInstanceOf[OptionalInputDefinition]).map(_.name).toSet should be( + Set("world1", "world2") + ) taskA.inputs.map(_.name).toSet should be(Set("rld", "__world1", "__world2", "world1", "world2")) taskA.outputs.map(_.name).toSet should be(Set("out")) - taskA.asInstanceOf[CallableTaskDefinition].runtimeAttributes.attributes("docker").asInstanceOf[WdlomWomExpression].expressionElement should be(StringLiteral("ubuntu:latest")) + taskA + .asInstanceOf[CallableTaskDefinition] + .runtimeAttributes + .attributes("docker") + .asInstanceOf[WdlomWomExpression] + .expressionElement should be(StringLiteral("ubuntu:latest")) case None => 
fail("Expected a task called 'a'") } b.allCallables.get("b") match { case Some(taskB) => taskB.inputs.map(_.name) should be(Seq("world")) taskB.outputs.map(_.name) should be(Seq("out")) - taskB.asInstanceOf[CallableTaskDefinition].runtimeAttributes.attributes("docker").asInstanceOf[WdlomWomExpression].expressionElement should be(StringLiteral("ubuntu:latest")) + taskB + .asInstanceOf[CallableTaskDefinition] + .runtimeAttributes + .attributes("docker") + .asInstanceOf[WdlomWomExpression] + .expressionElement should be(StringLiteral("ubuntu:latest")) case None => fail("Expected a task called 'b'") } } @@ -203,11 +253,17 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher case None => fail("No callable found 'nio_file'") case Some(nioFileTask) => // Plain old input: - nioFileTask.inputs.find(_.name == "f").get.parameterMeta should be(Some(MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))))) + nioFileTask.inputs.find(_.name == "f").get.parameterMeta should be( + Some(MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true)))) + ) // Input based on upstream: - nioFileTask.inputs.find(_.name == "g").get.parameterMeta should be(Some(MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))))) + nioFileTask.inputs.find(_.name == "g").get.parameterMeta should be( + Some(MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true)))) + ) // Optional input: - nioFileTask.inputs.find(_.name == "h").get.parameterMeta should be(Some(MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true))))) + nioFileTask.inputs.find(_.name == "h").get.parameterMeta should be( + Some(MetaValueElementObject(Map("localization_optional" -> MetaValueElementBoolean(true)))) + ) } } @@ -231,16 +287,20 @@ class WdlFileToWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher private def validateMetaSection(b: WomBundle): Assertion = { val task = b.primaryCallable.get.asInstanceOf[CallableTaskDefinition] - task.meta should be (Map("author" -> MetaValueElementString("John Doe"), - "email" -> MetaValueElementString("john.doe@yahoo.com"), - "b" -> MetaValueElementBoolean(true), - "zipcode" -> MetaValueElementInteger(94043), - "f" -> MetaValueElementFloat(1.3), - "numbers" -> MetaValueElementArray(Vector(MetaValueElementInteger(1), - MetaValueElementInteger(2), - MetaValueElementInteger(3))), - "extras" -> MetaValueElementObject(Map("house" -> MetaValueElementString("With porch"), - "cat" -> MetaValueElementString("Lucy"))) - )) + task.meta should be( + Map( + "author" -> MetaValueElementString("John Doe"), + "email" -> MetaValueElementString("john.doe@yahoo.com"), + "b" -> MetaValueElementBoolean(true), + "zipcode" -> MetaValueElementInteger(94043), + "f" -> MetaValueElementFloat(1.3), + "numbers" -> MetaValueElementArray( + Vector(MetaValueElementInteger(1), MetaValueElementInteger(2), MetaValueElementInteger(3)) + ), + "extras" -> MetaValueElementObject( + Map("house" -> MetaValueElementString("With porch"), "cat" -> MetaValueElementString("Lucy")) + ) + ) + ) } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToCommandPartElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToCommandPartElement.scala index 20dc56cd2bc..ef98ac49d8f 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToCommandPartElement.scala +++ 
b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToCommandPartElement.scala @@ -9,15 +9,21 @@ import wdl.model.draft3.elements.{CommandPartElement, ExpressionElement, Placeho import wdl.model.draft3.elements.CommandPartElement.{PlaceholderCommandPartElement, StringCommandPartElement} object AstNodeToCommandPartElement { - def astNodeToCommandPartElement(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement], - astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] - ): CheckedAtoB[GenericAstNode, CommandPartElement] = CheckedAtoB.fromErrorOr { a: GenericAstNode => a match { - case t: GenericTerminal => astNodeToString(t).toValidated map StringCommandPartElement - case a: GenericAst => - val expressionElementV: ErrorOr[ExpressionElement] = a.getAttributeAs[ExpressionElement]("expr").toValidated - val attributesV: ErrorOr[PlaceholderAttributeSet] = a.getAttributeAs[PlaceholderAttributeSet]("attributes").toValidated + def astNodeToCommandPartElement(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement], + astNodeToPlaceholderAttributeSet: CheckedAtoB[GenericAstNode, PlaceholderAttributeSet] + ): CheckedAtoB[GenericAstNode, CommandPartElement] = CheckedAtoB.fromErrorOr { a: GenericAstNode => + a match { + case t: GenericTerminal => astNodeToString(t).toValidated map StringCommandPartElement + case a: GenericAst => + val expressionElementV: ErrorOr[ExpressionElement] = a.getAttributeAs[ExpressionElement]("expr").toValidated + val attributesV: ErrorOr[PlaceholderAttributeSet] = + a.getAttributeAs[PlaceholderAttributeSet]("attributes").toValidated - (expressionElementV, attributesV) mapN { (expressionElement, attributes) => PlaceholderCommandPartElement(expressionElement, attributes) } - case other => s"Conversion for $other not supported".invalidNel - }} + (expressionElementV, attributesV) mapN { (expressionElement, attributes) => + PlaceholderCommandPartElement(expressionElement, attributes) + } + case other => s"Conversion for $other not supported".invalidNel + } + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToExpressionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToExpressionElement.scala index e7f5a24d2ee..2a773fd4e23 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToExpressionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToExpressionElement.scala @@ -18,32 +18,39 @@ object AstNodeToExpressionElement { type EngineFunctionMaker = Vector[ExpressionElement] => ErrorOr[ExpressionElement] - def astNodeToExpressionElement(customEngineFunctionMakers: Map[String, EngineFunctionMaker]): CheckedAtoB[GenericAstNode, ExpressionElement] = { + def astNodeToExpressionElement( + customEngineFunctionMakers: Map[String, EngineFunctionMaker] + ): CheckedAtoB[GenericAstNode, ExpressionElement] = CheckedAtoB.fromErrorOr("parse expression")(convert(customEngineFunctionMakers) _) - } - protected def convert(customEngineFunctionMakers: Map[String, EngineFunctionMaker])(ast: GenericAstNode): ErrorOr[ExpressionElement] = { + protected def convert( + customEngineFunctionMakers: Map[String, EngineFunctionMaker] + )(ast: GenericAstNode): ErrorOr[ExpressionElement] = { implicit val recursiveConverter = CheckedAtoB.fromErrorOr(convert(customEngineFunctionMakers) _) implicit val recursiveKvConverter = 
AstNodeToKvPair.astNodeToKvPair ast match { - case t: GenericTerminal if asPrimitive.isDefinedAt((t.getTerminalStr, t.getSourceString)) => asPrimitive((t.getTerminalStr, t.getSourceString)).map(PrimitiveLiteralExpressionElement) + case t: GenericTerminal if asPrimitive.isDefinedAt((t.getTerminalStr, t.getSourceString)) => + asPrimitive((t.getTerminalStr, t.getSourceString)).map(PrimitiveLiteralExpressionElement) case t: GenericTerminal if t.getTerminalStr == "identifier" => IdentifierLookup(t.getSourceString).validNel case t: GenericTerminal if t.getTerminalStr == "none" => NoneLiteralElement.validNel case a: GenericAst if a.getName == "StringLiteral" => handleStringLiteral(a) case a: GenericAst if lhsRhsOperators.contains(a.getName) => useValidatedLhsAndRhs(a, lhsRhsOperators(a.getName)) - case a: GenericAst if unaryOperators.contains(a.getName) => a.getAttributeAs[ExpressionElement]("expression").map(unaryOperators(a.getName)).toValidated - case a: GenericAst if a.getName == "TupleLiteral" => (a.getAttributeAsVector[ExpressionElement]("values") flatMap { - case pair if pair.length == 2 => PairLiteral(pair.head, pair(1)).validNelCheck - case singleton if singleton.length == 1 => singleton.head.validNelCheck - case more => s"No WDL support for ${more.size}-tuples in draft 3".invalidNelCheck - }).toValidated - case a: GenericAst if a.getName == "ArrayLiteral" => a.getAttributeAsVector[ExpressionElement]("values").toValidated.map(ArrayLiteral) - case a: GenericAst if a.getName == "ArrayOrMapLookup" => { + case a: GenericAst if unaryOperators.contains(a.getName) => + a.getAttributeAs[ExpressionElement]("expression").map(unaryOperators(a.getName)).toValidated + case a: GenericAst if a.getName == "TupleLiteral" => + (a.getAttributeAsVector[ExpressionElement]("values") flatMap { + case pair if pair.length == 2 => PairLiteral(pair.head, pair(1)).validNelCheck + case singleton if singleton.length == 1 => singleton.head.validNelCheck + case more => s"No WDL support for ${more.size}-tuples in draft 3".invalidNelCheck + }).toValidated + case a: GenericAst if a.getName == "ArrayLiteral" => + a.getAttributeAsVector[ExpressionElement]("values").toValidated.map(ArrayLiteral) + case a: GenericAst if a.getName == "ArrayOrMapLookup" => (a.getAttributeAs[ExpressionElement]("lhs").toValidated: ErrorOr[ExpressionElement], - a.getAttributeAs[ExpressionElement]("rhs").toValidated: ErrorOr[ExpressionElement]) mapN IndexAccess - } + a.getAttributeAs[ExpressionElement]("rhs").toValidated: ErrorOr[ExpressionElement] + ) mapN IndexAccess case a: GenericAst if a.getName == "MemberAccess" => handleMemberAccess(a) case a: GenericAst if a.getName == "ObjectLiteral" => (for { @@ -69,10 +76,13 @@ object AstNodeToExpressionElement { val conditionValidation: ErrorOr[ExpressionElement] = a.getAttributeAs[ExpressionElement]("cond").toValidated val ifTrueValidation: ErrorOr[ExpressionElement] = a.getAttributeAs[ExpressionElement]("iftrue").toValidated val ifFalseValidation: ErrorOr[ExpressionElement] = a.getAttributeAs[ExpressionElement]("iffalse").toValidated - (conditionValidation, ifTrueValidation, ifFalseValidation) mapN { (cond, ifTrue, ifFalse) => TernaryIf(cond, ifTrue, ifFalse) } + (conditionValidation, ifTrueValidation, ifFalseValidation) mapN { (cond, ifTrue, ifFalse) => + TernaryIf(cond, ifTrue, ifFalse) + } case a: GenericAst if a.getName == "FunctionCall" => val functionNameValidation: ErrorOr[String] = a.getAttributeAs[String]("name").toValidated - val argsValidation: ErrorOr[Vector[ExpressionElement]] = 
a.getAttributeAsVector[ExpressionElement]("params").toValidated + val argsValidation: ErrorOr[Vector[ExpressionElement]] = + a.getAttributeAsVector[ExpressionElement]("params").toValidated val allEngineFunctionMakers = engineFunctionMakers ++ customEngineFunctionMakers (functionNameValidation, argsValidation) flatMapN { @@ -80,9 +90,10 @@ object AstNodeToExpressionElement { case (other, _) => s"Unknown engine function: '$other'".invalidNel } - - case unknownTerminal: GenericTerminal => s"No rule available to create ExpressionElement from terminal: ${unknownTerminal.getTerminalStr} ${unknownTerminal.getSourceString}".invalidNel - case unknownAst: GenericAst => s"No rule available to create ExpressionElement from Ast: ${unknownAst.getName}".invalidNel + case unknownTerminal: GenericTerminal => + s"No rule available to create ExpressionElement from terminal: ${unknownTerminal.getTerminalStr} ${unknownTerminal.getSourceString}".invalidNel + case unknownAst: GenericAst => + s"No rule available to create ExpressionElement from Ast: ${unknownAst.getName}".invalidNel } } @@ -104,14 +115,19 @@ object AstNodeToExpressionElement { "Remainder" -> Remainder.apply ) - private def useValidatedLhsAndRhs(a: GenericAst, combiner: BinaryOperatorElementMaker) - (implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[ExpressionElement] = { + private def useValidatedLhsAndRhs(a: GenericAst, combiner: BinaryOperatorElementMaker)(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] + ): ErrorOr[ExpressionElement] = { val lhsValidation: ErrorOr[ExpressionElement] = - a.getAttributeAs[ExpressionElement]("lhs").toValidated.contextualizeErrors(s"read left hand side of ${a.getName} expression") + a.getAttributeAs[ExpressionElement]("lhs") + .toValidated + .contextualizeErrors(s"read left hand side of ${a.getName} expression") val rhsValidation: ErrorOr[ExpressionElement] = - a.getAttributeAs[ExpressionElement]("rhs").toValidated.contextualizeErrors(s"read right-hand side of ${a.getName} expression") + a.getAttributeAs[ExpressionElement]("rhs") + .toValidated + .contextualizeErrors(s"read right-hand side of ${a.getName} expression") - (lhsValidation, rhsValidation) mapN { combiner } + (lhsValidation, rhsValidation) mapN combiner } private type UnaryOperatorElementMaker = ExpressionElement => ExpressionElement @@ -176,25 +192,28 @@ object AstNodeToExpressionElement { "sub" -> validateThreeParamEngineFunction(Sub, "sub") ) - private def validateNoParamEngineFunction(element: ExpressionElement, functionName: String) - (params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = + private def validateNoParamEngineFunction(element: ExpressionElement, functionName: String)( + params: Vector[ExpressionElement] + ): ErrorOr[ExpressionElement] = if (params.isEmpty) { element.validNel } else { s"Function $functionName expects 0 arguments but got ${params.size}".invalidNel } - def validateOneParamEngineFunction(elementMaker: ExpressionElement => ExpressionElement, functionName: String) - (params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = + def validateOneParamEngineFunction(elementMaker: ExpressionElement => ExpressionElement, functionName: String)( + params: Vector[ExpressionElement] + ): ErrorOr[ExpressionElement] = if (params.size == 1) { elementMaker.apply(params.head).validNel } else { s"Function $functionName expects exactly 1 argument but got ${params.size}".invalidNel } - private def 
validateOneOrTwoParamEngineFunction(elementMaker: (ExpressionElement, Option[ExpressionElement]) => ExpressionElement, - functionName: String) - (params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = + private def validateOneOrTwoParamEngineFunction( + elementMaker: (ExpressionElement, Option[ExpressionElement]) => ExpressionElement, + functionName: String + )(params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = if (params.size == 1) { elementMaker(params.head, None).validNel } else if (params.size == 2) { @@ -203,30 +222,35 @@ object AstNodeToExpressionElement { s"Function $functionName expects 1 or 2 arguments but got ${params.size}".invalidNel } - def validateTwoParamEngineFunction(elementMaker: (ExpressionElement, ExpressionElement) => ExpressionElement, functionName: String) - (params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = + def validateTwoParamEngineFunction(elementMaker: (ExpressionElement, ExpressionElement) => ExpressionElement, + functionName: String + )(params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = if (params.size == 2) { elementMaker.apply(params.head, params(1)).validNel } else { s"Function $functionName expects exactly 2 arguments but got ${params.size}".invalidNel } - private def validateThreeParamEngineFunction(elementMaker: (ExpressionElement, ExpressionElement, ExpressionElement) => ExpressionElement, functionName: String) - (params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = + private def validateThreeParamEngineFunction( + elementMaker: (ExpressionElement, ExpressionElement, ExpressionElement) => ExpressionElement, + functionName: String + )(params: Vector[ExpressionElement]): ErrorOr[ExpressionElement] = if (params.size == 3) { elementMaker.apply(params.head, params(1), params(2)).validNel } else { s"Function $functionName expects exactly 3 arguments but got ${params.size}".invalidNel } - private def handleMemberAccess(ast: GenericAst) - (implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[ExpressionElement] = { + private def handleMemberAccess( + ast: GenericAst + )(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[ExpressionElement] = { // Internal simplify method: // If the left-hand side is another member access, we can simplify them together. 
// If not, we can build a new member access: def simplify(leftExpression: ExpressionElement, suffix: String): ExpressionElement = leftExpression match { - case ExpressionMemberAccess(lefterExpression, tail) => ExpressionMemberAccess(lefterExpression, NonEmptyList(tail.head, tail.tail :+ suffix)) + case ExpressionMemberAccess(lefterExpression, tail) => + ExpressionMemberAccess(lefterExpression, NonEmptyList(tail.head, tail.tail :+ suffix)) case IdentifierMemberAccess(first, second, tail) => IdentifierMemberAccess(first, second, tail :+ suffix) case IdentifierLookup(identifier) => IdentifierMemberAccess(identifier, suffix, Vector.empty) case _ => ExpressionMemberAccess(leftExpression, NonEmptyList(suffix, List.empty)) @@ -241,10 +265,12 @@ object AstNodeToExpressionElement { } - private def handleStringLiteral(ast: GenericAst) - (implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[ExpressionElement] = { + private def handleStringLiteral( + ast: GenericAst + )(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[ExpressionElement] = { - implicit val astNodeToStringPiece: CheckedAtoB[GenericAstNode, StringPiece] = AstNodeToStringPiece.astNodeToStringPiece(Some(astNodeToExpressionElement)) + implicit val astNodeToStringPiece: CheckedAtoB[GenericAstNode, StringPiece] = + AstNodeToStringPiece.astNodeToStringPiece(Some(astNodeToExpressionElement)) ast.getAttributeAsVector[StringPiece]("pieces").toValidated map { pieces => if (pieces.isEmpty) { StringLiteral("") diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToKvPair.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToKvPair.scala index e26b1194cd3..886fa34735c 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToKvPair.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToKvPair.scala @@ -9,11 +9,12 @@ import wdl.model.draft3.elements.ExpressionElement import wdl.model.draft3.elements.ExpressionElement.KvPair object AstNodeToKvPair { - def astNodeToKvPair(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): CheckedAtoB[GenericAstNode, KvPair] = CheckedAtoB.fromErrorOr { + def astNodeToKvPair(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] + ): CheckedAtoB[GenericAstNode, KvPair] = CheckedAtoB.fromErrorOr { case a: GenericAst if a.getName == "ObjectKV" || a.getName == "MapLiteralKv" || a.getName == "RuntimeAttribute" => val keyValidation: ErrorOr[String] = a.getAttributeAs[String]("key").toValidated - val valueValidation: ErrorOr[ExpressionElement] = { val validation = a.getAttributeAs[ExpressionElement]("value").toValidated val forKeyContext: Option[String] = keyValidation.map(k => s"read value for key '$k'").toOption diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToMetaKvPair.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToMetaKvPair.scala index c0c3fe90d1f..a306c4ac19d 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToMetaKvPair.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToMetaKvPair.scala @@ -13,12 +13,14 @@ import wom.callable.{MetaKvPair, MetaValueElement} import scala.util.{Failure, Try} object AstNodeToMetaKvPair { - def astNodeToMetaKvPair(implicit astNodeToStaticString: 
CheckedAtoB[GenericAstNode, StaticString]): CheckedAtoB[GenericAstNode, MetaKvPair] = { + def astNodeToMetaKvPair(implicit + astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] + ): CheckedAtoB[GenericAstNode, MetaKvPair] = CheckedAtoB.fromErrorOr("convert AstNode to MetaKvPair")(convertMetaKvPair) - } - private def convertMetaKvPair(astNode: GenericAstNode) - (implicit astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString]): ErrorOr[MetaKvPair] = astNode match { + private def convertMetaKvPair( + astNode: GenericAstNode + )(implicit astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString]): ErrorOr[MetaKvPair] = astNode match { case a: GenericAst if a.getName == "MetaKvPair" => val keyValidation: ErrorOr[String] = a.getAttributeAs[String]("key").toValidated val valueValidation: ErrorOr[MetaValueElement] = convertMetaValue(a.getAttribute("value")) @@ -27,8 +29,9 @@ object AstNodeToMetaKvPair { case other => s"Expected Ast of type 'MetaKvPair' but got $other".invalidNel } - private def convertMetaValue(astNode: GenericAstNode) - (implicit astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString]): ErrorOr[MetaValueElement] = { + private def convertMetaValue( + astNode: GenericAstNode + )(implicit astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString]): ErrorOr[MetaValueElement] = { implicit val recursiveKvPairConversion = CheckedAtoB.fromErrorOr(convertMetaKvPair _) astNode match { // This is a primitive type, one of {null, boolean, float, int, string}. @@ -42,10 +45,13 @@ object AstNodeToMetaKvPair { case (name, other) => s"No conversion defined for Ast ($name, $other) to MetaValueElement".invalidNel } - case a: GenericAst if a.getName == "StaticString" => astNodeToStaticString(a).toValidated map { staticString => MetaValueElementString.apply(staticString.value) } + case a: GenericAst if a.getName == "StaticString" => + astNodeToStaticString(a).toValidated map { staticString => MetaValueElementString.apply(staticString.value) } case a: GenericAst if a.getName == "MetaArray" => - a.getAttributeAsVectorF[MetaValueElement]("values")(convertMetaValue(_).toEither).toValidated.map(MetaValueElementArray) + a.getAttributeAsVectorF[MetaValueElement]("values")(convertMetaValue(_).toEither) + .toValidated + .map(MetaValueElementArray) case a: GenericAst if a.getName == "MetaObject" => (for { diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToPlaceholderAttributeSet.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToPlaceholderAttributeSet.scala index b7e359bb042..26d30b9bef9 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToPlaceholderAttributeSet.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToPlaceholderAttributeSet.scala @@ -16,7 +16,9 @@ import wdl.model.draft3.elements._ object AstNodeToPlaceholderAttributeSet { - def attributeKvpConverter(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): CheckedAtoB[GenericAstList, PlaceholderAttributeSet] = { + def attributeKvpConverter(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] + ): CheckedAtoB[GenericAstList, PlaceholderAttributeSet] = { val singleElement = astNodeToAst andThen CheckedAtoB.fromErrorOr(convertAttributeKvp _) def convertAll(as: GenericAstList): ErrorOr[Vector[PlaceholderAttributeElement]] = @@ -26,40 +28,51 @@ object AstNodeToPlaceholderAttributeSet { } private 
def validAttributeSet(a: Vector[PlaceholderAttributeElement]): Checked[PlaceholderAttributeSet] = { - def foldFunction(acc: (PlaceholderAttributeSet, List[String]), next: PlaceholderAttributeElement): (PlaceholderAttributeSet, List[String]) = { + def foldFunction(acc: (PlaceholderAttributeSet, List[String]), + next: PlaceholderAttributeElement + ): (PlaceholderAttributeSet, List[String]) = (acc, next) match { - case ((PlaceholderAttributeSet(None, t, f, s), errors), DefaultAttributeElement(d)) => (PlaceholderAttributeSet(Option(d), t, f, s), errors) - case ((pas @ PlaceholderAttributeSet(Some(d1), _, _, _), errors), DefaultAttributeElement(d2)) => (pas, errors :+ s"""Cannot supply 'default="$d2"' because it is already supplied as 'default="$d1"'""") + case ((PlaceholderAttributeSet(None, t, f, s), errors), DefaultAttributeElement(d)) => + (PlaceholderAttributeSet(Option(d), t, f, s), errors) + case ((pas @ PlaceholderAttributeSet(Some(d1), _, _, _), errors), DefaultAttributeElement(d2)) => + (pas, errors :+ s"""Cannot supply 'default="$d2"' because it is already supplied as 'default="$d1"'""") - case ((PlaceholderAttributeSet(d, None, f, s), errors), TrueAttributeElement(t)) => (PlaceholderAttributeSet(d, Option(t), f, s), errors) - case ((pas @ PlaceholderAttributeSet(_, Some(t1), _, _), errors), TrueAttributeElement(t2)) => (pas, errors :+ s"""Cannot supply 'true="$t2"' because it is already supplied as 'true="$t1"'""") + case ((PlaceholderAttributeSet(d, None, f, s), errors), TrueAttributeElement(t)) => + (PlaceholderAttributeSet(d, Option(t), f, s), errors) + case ((pas @ PlaceholderAttributeSet(_, Some(t1), _, _), errors), TrueAttributeElement(t2)) => + (pas, errors :+ s"""Cannot supply 'true="$t2"' because it is already supplied as 'true="$t1"'""") - case ((PlaceholderAttributeSet(d, t, None, s), errors), FalseAttributeElement(f)) => (PlaceholderAttributeSet(d, t, Option(f), s), errors) - case ((pas @ PlaceholderAttributeSet(_, _, Some(f1), _), errors), FalseAttributeElement(f2)) => (pas, errors :+ s"""Cannot supply 'false="$f2"' because it is already supplied as 'false="$f1"'""") + case ((PlaceholderAttributeSet(d, t, None, s), errors), FalseAttributeElement(f)) => + (PlaceholderAttributeSet(d, t, Option(f), s), errors) + case ((pas @ PlaceholderAttributeSet(_, _, Some(f1), _), errors), FalseAttributeElement(f2)) => + (pas, errors :+ s"""Cannot supply 'false="$f2"' because it is already supplied as 'false="$f1"'""") - case ((PlaceholderAttributeSet(d, t, f, None), errors), SepAttributeElement(s)) => (PlaceholderAttributeSet(d, t, f, Option(s)), errors) - case ((pas @ PlaceholderAttributeSet(_, _, _, Some(s1)), errors), SepAttributeElement(s2)) => (pas, errors :+ s"""Cannot supply 'sep="$s2"' because it is already supplied as 'sep="$s1"'""") + case ((PlaceholderAttributeSet(d, t, f, None), errors), SepAttributeElement(s)) => + (PlaceholderAttributeSet(d, t, f, Option(s)), errors) + case ((pas @ PlaceholderAttributeSet(_, _, _, Some(s1)), errors), SepAttributeElement(s2)) => + (pas, errors :+ s"""Cannot supply 'sep="$s2"' because it is already supplied as 'sep="$s1"'""") } - } val folded = a.foldLeft((PlaceholderAttributeSet.empty, List.empty[String]))(foldFunction) folded match { case (_, nel) if nel.nonEmpty => Left(NonEmptyList.fromListUnsafe(nel)) - case (PlaceholderAttributeSet(_, Some(t), None, _), _) => s"""Cannot specify 'true="$t"' without also having a 'false="..."' attribute""".invalidNelCheck - case (PlaceholderAttributeSet(_, None, Some(f), _), _) => s"""Cannot specify 
'false="$f"' without also having a 'true="..."' attribute""".invalidNelCheck + case (PlaceholderAttributeSet(_, Some(t), None, _), _) => + s"""Cannot specify 'true="$t"' without also having a 'false="..."' attribute""".invalidNelCheck + case (PlaceholderAttributeSet(_, None, Some(f), _), _) => + s"""Cannot specify 'false="$f"' without also having a 'true="..."' attribute""".invalidNelCheck case (valid, _) => valid.validNelCheck } } - private def convertAttributeKvp(a: GenericAst)(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[PlaceholderAttributeElement] = { - (placeholderAttributeConstructor(a), - placeholderAttributeValue(a)) mapN { (constructor, value) => + private def convertAttributeKvp(a: GenericAst)(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] + ): ErrorOr[PlaceholderAttributeElement] = + (placeholderAttributeConstructor(a), placeholderAttributeValue(a)) mapN { (constructor, value) => constructor.apply(value) } - } - private def placeholderAttributeConstructor(kvpAst: GenericAst): ErrorOr[String => PlaceholderAttributeElement] = { + private def placeholderAttributeConstructor(kvpAst: GenericAst): ErrorOr[String => PlaceholderAttributeElement] = kvpAst.getAttributeAs[String]("key").toValidated.flatMap { case "sep" => (SepAttributeElement.apply _).validNel case "default" => (DefaultAttributeElement.apply _).validNel @@ -67,17 +80,18 @@ object AstNodeToPlaceholderAttributeSet { case "false" => (FalseAttributeElement.apply _).validNel case other => s"Invalid placeholder attribute: $other".invalidNel } - } - private def placeholderAttributeValue(kvpAst: GenericAst)(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[String] = { + private def placeholderAttributeValue( + kvpAst: GenericAst + )(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): ErrorOr[String] = kvpAst.getAttributeAs[ExpressionElement]("value").toValidated.flatMap { case StringLiteral(literalValue) => literalValue.validNel - case StringExpression(pieces) if pieces.length == 1 => pieces.head match { - case StringLiteral(literalValue) => literalValue.validNel - case other => s"Cannot use $other as a placeholder attribute. Must be a primitive literal".invalidNel - } + case StringExpression(pieces) if pieces.length == 1 => + pieces.head match { + case StringLiteral(literalValue) => literalValue.validNel + case other => s"Cannot use $other as a placeholder attribute. Must be a primitive literal".invalidNel + } case PrimitiveLiteralExpressionElement(primitive) => primitive.valueString.validNel case other => s"Cannot use $other as a placeholder attribute. 
Must be a primitive literal".invalidNel } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToStaticString.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToStaticString.scala index fb3a9ba661a..06e8b640a33 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToStaticString.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToStaticString.scala @@ -7,39 +7,45 @@ import wdl.model.draft3.elements.ExpressionElement.{StringEscapeSequence, String import wdl.model.draft3.elements._ object AstNodeToStringPiece { - def astNodeToStringPiece(expressionProcessor: Option[CheckedAtoB[GenericAstNode, ExpressionElement]]): CheckedAtoB[GenericAstNode, StringPiece] = CheckedAtoB.fromCheck("read string piece") { - - case simple: GenericTerminal if simple.getTerminalStr == "string" => StringLiteral(simple.getSourceString).validNelCheck - case escape: GenericTerminal if escape.getTerminalStr == "escape" => StringEscapeSequence.parseEscapeSequence(escape.getSourceString).toEither + def astNodeToStringPiece( + expressionProcessor: Option[CheckedAtoB[GenericAstNode, ExpressionElement]] + ): CheckedAtoB[GenericAstNode, StringPiece] = CheckedAtoB.fromCheck("read string piece") { + + case simple: GenericTerminal if simple.getTerminalStr == "string" => + StringLiteral(simple.getSourceString).validNelCheck + case escape: GenericTerminal if escape.getTerminalStr == "escape" => + StringEscapeSequence.parseEscapeSequence(escape.getSourceString).toEither case expr: GenericAst if expr.getName == "ExpressionPlaceholder" => expressionProcessor match { - case Some(processor) => expr.getAttributeAs[ExpressionElement]("expr")(processor) map StringPlaceholder: Checked[StringPlaceholder] + case Some(processor) => + expr.getAttributeAs[ExpressionElement]("expr")(processor) map StringPlaceholder: Checked[StringPlaceholder] case None => "String placeholders are not allowed in static strings".invalidNelCheck } - - case otherTerminal: GenericTerminal => s"Unexpected parse tree. Expected string piece but found Terminal '${otherTerminal.getTerminalStr}' (${otherTerminal.getSourceString})".invalidNelCheck - case otherAst: GenericAst => s"Unexpected parse tree. Expected string piece but found AST ${otherAst.getName}".invalidNelCheck + case otherTerminal: GenericTerminal => + s"Unexpected parse tree. Expected string piece but found Terminal '${otherTerminal.getTerminalStr}' (${otherTerminal.getSourceString})".invalidNelCheck + case otherAst: GenericAst => + s"Unexpected parse tree. 
Expected string piece but found AST ${otherAst.getName}".invalidNelCheck } } object AstNodeToStaticString { - def astNodeToStaticStringElement(): CheckedAtoB[GenericAstNode, StaticString] = CheckedAtoB.fromCheck("convert AstNode to StaticString") { - case a: GenericAst if a.getName == "StaticString" => - - implicit val astNodeToStringPiece = AstNodeToStringPiece.astNodeToStringPiece(None) - - if (a.getAttributes.contains("value")) { - a.getAttributeAsVector[StringPiece]("value") map { pieces => - val unescaped = pieces map { - case StringLiteral(s) => s - case e: StringEscapeSequence => e.unescape - case _: StringPlaceholder => "String placeholders are not allowed in static strings".invalidNelCheck + def astNodeToStaticStringElement(): CheckedAtoB[GenericAstNode, StaticString] = + CheckedAtoB.fromCheck("convert AstNode to StaticString") { + case a: GenericAst if a.getName == "StaticString" => + implicit val astNodeToStringPiece = AstNodeToStringPiece.astNodeToStringPiece(None) + + if (a.getAttributes.contains("value")) { + a.getAttributeAsVector[StringPiece]("value") map { pieces => + val unescaped = pieces map { + case StringLiteral(s) => s + case e: StringEscapeSequence => e.unescape + case _: StringPlaceholder => "String placeholders are not allowed in static strings".invalidNelCheck + } + StaticString(unescaped.mkString("")) } - StaticString(unescaped.mkString("")) - } - } else StaticString("").validNelCheck + } else StaticString("").validNelCheck - case other => s"Bad value $other".invalidNelCheck - } + case other => s"Bad value $other".invalidNelCheck + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToTypeElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToTypeElement.scala index 5f440cdb323..286f0dfa4fd 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToTypeElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstNodeToTypeElement.scala @@ -10,40 +10,55 @@ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl.typeElementWriter object AstNodeToTypeElement { - def astNodeToTypeElement(additionalPrimitiveTypes: Map[String, WomPrimitiveType]): CheckedAtoB[GenericAstNode, TypeElement] = + def astNodeToTypeElement( + additionalPrimitiveTypes: Map[String, WomPrimitiveType] + ): CheckedAtoB[GenericAstNode, TypeElement] = CheckedAtoB.fromCheck("convert AstNode to TypeElement") { astNode => - implicit lazy val astNodeToTypeElementInst = astNodeToTypeElement(additionalPrimitiveTypes) lazy val fullTypeMap = typeMap ++ additionalPrimitiveTypes astNode match { - case a: GenericAst if a.getName == "OptionalType" => a.getAttributeAs[TypeElement]("innerType") map OptionalTypeElement - case a: GenericAst if a.getName == "NonEmptyType" => a.getAttributeAs[TypeElement]("innerType") map NonEmptyTypeElement + case a: GenericAst if a.getName == "OptionalType" => + a.getAttributeAs[TypeElement]("innerType") map OptionalTypeElement + case a: GenericAst if a.getName == "NonEmptyType" => + a.getAttributeAs[TypeElement]("innerType") map NonEmptyTypeElement case a: GenericAst if a.getName == "Type" => compoundType(a) - case unknownAst: GenericAst => s"No rule available to create TypeElement from Ast: '${unknownAst.getName}'".invalidNelCheck - case t: GenericTerminal if t.getTerminalStr == "type" && fullTypeMap.contains(t.getSourceString) => PrimitiveTypeElement(fullTypeMap(t.getSourceString)).validNelCheck - case t: GenericTerminal if t.getTerminalStr == 
"type" && t.getSourceString == "Object" => ObjectTypeElement.validNelCheck + case unknownAst: GenericAst => + s"No rule available to create TypeElement from Ast: '${unknownAst.getName}'".invalidNelCheck + case t: GenericTerminal if t.getTerminalStr == "type" && fullTypeMap.contains(t.getSourceString) => + PrimitiveTypeElement(fullTypeMap(t.getSourceString)).validNelCheck + case t: GenericTerminal if t.getTerminalStr == "type" && t.getSourceString == "Object" => + ObjectTypeElement.validNelCheck case t: GenericTerminal if t.getTerminalStr == "identifier" => TypeAliasElement(t.getSourceString).validNelCheck - case t: GenericTerminal => s"No rule available to create TypeElement from '${t.getTerminalStr}' Terminal with value '${t.getSourceString}'".invalidNelCheck - case _ => s"No rule available to create TypeElement from AstNode: ${astNode.getClass.getSimpleName}".invalidNelCheck + case t: GenericTerminal => + s"No rule available to create TypeElement from '${t.getTerminalStr}' Terminal with value '${t.getSourceString}'".invalidNelCheck + case _ => + s"No rule available to create TypeElement from AstNode: ${astNode.getClass.getSimpleName}".invalidNelCheck } } - private def compoundType(typeAst: GenericAst) - (implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, TypeElement]): Checked[TypeElement] = typeAst.getAttributeAs[String]("name") flatMap { - case "Array" => typeAst.getAttributeAsVector[TypeElement]("subtype") flatMap { - case one if one.size == 1 => ArrayTypeElement(one.head).validNelCheck - case other => s"Arrays must have exactly one type parameter, but got ${other.map(_.toWdlV1).mkString("[", ",", "]")}".invalidNelCheck - } - case "Pair" => typeAst.getAttributeAsVector[TypeElement]("subtype") flatMap { - case two if two.size == 2 => PairTypeElement(two.head, two(1)).validNelCheck - case other => s"Pairs must have exactly two type parameters, but got ${other.map(_.toWdlV1).mkString("[", ",", "]")}".invalidNelCheck - } - case "Map" => typeAst.getAttributeAsVector[TypeElement]("subtype") flatMap { - case two if two.size == 2 => MapTypeElement(two.head, two(1)).validNelCheck - case other => s"Maps must have exactly two type parameters, but got ${other.map(_.toWdlV1).mkString("[", ",", "]")}".invalidNelCheck - } + private def compoundType(typeAst: GenericAst)(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, TypeElement] + ): Checked[TypeElement] = typeAst.getAttributeAs[String]("name") flatMap { + case "Array" => + typeAst.getAttributeAsVector[TypeElement]("subtype") flatMap { + case one if one.size == 1 => ArrayTypeElement(one.head).validNelCheck + case other => + s"Arrays must have exactly one type parameter, but got ${other.map(_.toWdlV1).mkString("[", ",", "]")}".invalidNelCheck + } + case "Pair" => + typeAst.getAttributeAsVector[TypeElement]("subtype") flatMap { + case two if two.size == 2 => PairTypeElement(two.head, two(1)).validNelCheck + case other => + s"Pairs must have exactly two type parameters, but got ${other.map(_.toWdlV1).mkString("[", ",", "]")}".invalidNelCheck + } + case "Map" => + typeAst.getAttributeAsVector[TypeElement]("subtype") flatMap { + case two if two.size == 2 => MapTypeElement(two.head, two(1)).validNelCheck + case other => + s"Maps must have exactly two type parameters, but got ${other.map(_.toWdlV1).mkString("[", ",", "]")}".invalidNelCheck + } case unknown => s"No rule available to create TypeElement from compound type: $unknown".invalidNelCheck } diff --git 
a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCallElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCallElement.scala index 99f5e19e63c..31c730c939a 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCallElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCallElement.scala @@ -14,10 +14,11 @@ import wom.SourceFileLocation object AstToCallElement { - def astToCallElement(implicit astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair]): CheckedAtoB[GenericAst, CallElement] = CheckedAtoB.fromErrorOr { ast => - def convertBodyElement(a: GenericAst): Checked[CallBodyElement] = { + def astToCallElement(implicit + astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] + ): CheckedAtoB[GenericAst, CallElement] = CheckedAtoB.fromErrorOr { ast => + def convertBodyElement(a: GenericAst): Checked[CallBodyElement] = a.getAttributeAsVector[KvPair]("inputs") map CallBodyElement - } val callableNameValidation: ErrorOr[String] = astNodeToString(ast.getAttribute("task")).toValidated @@ -26,13 +27,16 @@ object AstToCallElement { case None => None.validNel } - val afterValidation: ErrorOr[Vector[String]] = ast.getAttributeAsVector[String]("after", optional = true).toValidated + val afterValidation: ErrorOr[Vector[String]] = + ast.getAttributeAsVector[String]("after", optional = true).toValidated - implicit val astNodeToCallBodyElement: CheckedAtoB[GenericAstNode, CallBodyElement] = astNodeToAst andThen CheckedAtoB.fromCheck(convertBodyElement _) + implicit val astNodeToCallBodyElement: CheckedAtoB[GenericAstNode, CallBodyElement] = + astNodeToAst andThen CheckedAtoB.fromCheck(convertBodyElement _) - val callBodyValidation: ErrorOr[Option[CallBodyElement]] = ast.getAttributeAsOptional[CallBodyElement]("body").toValidated + val callBodyValidation: ErrorOr[Option[CallBodyElement]] = + ast.getAttributeAsOptional[CallBodyElement]("body").toValidated - val sourceLocation : Option[SourceFileLocation] = ast.getSourceLine.map(SourceFileLocation(_)) + val sourceLocation: Option[SourceFileLocation] = ast.getSourceLine.map(SourceFileLocation(_)) // This 'mapN' is split into two so that if we have a call name we can include it in the error message (callableNameValidation, aliasValidation) flatMapN { (name, alias) => diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCommandSectionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCommandSectionElement.scala index c371e050043..f1a682e1850 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCommandSectionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToCommandSectionElement.scala @@ -9,9 +9,9 @@ import wdl.model.draft3.elements.CommandPartElement.StringCommandPartElement import wdl.model.draft3.elements.{CommandPartElement, CommandSectionElement, CommandSectionLine} object AstToCommandSectionElement { - def astToCommandSectionElement(implicit astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] - ): CheckedAtoB[GenericAst, CommandSectionElement] = CheckedAtoB.fromCheck { ast: GenericAst => - + def astToCommandSectionElement(implicit + astNodeToCommandPartElement: CheckedAtoB[GenericAstNode, CommandPartElement] + ): CheckedAtoB[GenericAst, CommandSectionElement] = CheckedAtoB.fromCheck { ast: GenericAst => 
ast.getAttributeAsVector[CommandPartElement]("parts") flatMap { parts => val lines = makeLines(parts) val trimmed = trimStartAndEndBlankLines(lines) @@ -36,9 +36,7 @@ object AstToCommandSectionElement { for { prefix <- commonPrefix lines <- stripStarts(trimmed, prefix) - } yield { - CommandSectionElement(lines) - } + } yield CommandSectionElement(lines) } } @@ -63,9 +61,8 @@ object AstToCommandSectionElement { finalAccumulator map dropEmpties } - private def trimStartAndEndBlankLines(elements: Vector[CommandSectionLine]): Vector[CommandSectionLine] = { + private def trimStartAndEndBlankLines(elements: Vector[CommandSectionLine]): Vector[CommandSectionLine] = elements.dropWhile(allWhitespace).reverse.dropWhile(allWhitespace).reverse - } private def dropEmpties(line: CommandSectionLine): CommandSectionLine = { def empty(c: CommandPartElement): Boolean = c match { @@ -86,19 +83,20 @@ object AstToCommandSectionElement { lines.map(leadingWhitespaceForLine(_).getOrElse("")) } - private def stripStarts(lines: Vector[CommandSectionLine], prefix: String): Checked[List[CommandSectionLine]] = { + private def stripStarts(lines: Vector[CommandSectionLine], prefix: String): Checked[List[CommandSectionLine]] = if (prefix.isEmpty) lines.toList.validNelCheck else lines.toList traverse { line: CommandSectionLine => line.parts.headOption match { case Some(StringCommandPartElement(str)) if str.startsWith(prefix) => - CommandSectionLine(Vector(StringCommandPartElement(str.stripPrefix(prefix))) ++ line.parts.tail).validNelCheck + CommandSectionLine( + Vector(StringCommandPartElement(str.stripPrefix(prefix))) ++ line.parts.tail + ).validNelCheck case _ => "Failed to strip common whitespace prefix from line.".invalidNelCheck } } - } private def allWhitespace(s: String): Boolean = s.forall(_.isWhitespace) private def allWhitespace(c: CommandPartElement): Boolean = c match { diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToDeclarationContent.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToDeclarationContent.scala index ad91c5370eb..93d489d10c5 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToDeclarationContent.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToDeclarationContent.scala @@ -7,17 +7,17 @@ import cats.syntax.either._ import common.transforms.CheckedAtoB object AstToDeclarationContent { - def astToDeclarationContent(implicit astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement], - astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] - ): CheckedAtoB[GenericAst, DeclarationContent] = CheckedAtoB.fromErrorOr("read declaration") { a => - - + def astToDeclarationContent(implicit + astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement], + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] + ): CheckedAtoB[GenericAst, DeclarationContent] = CheckedAtoB.fromErrorOr("read declaration") { a => val nameValidation: ErrorOr[String] = astNodeToString(a.getAttribute("name")).toValidated val outputTypeValidation: ErrorOr[TypeElement] = a.getAttributeAs[TypeElement]("type").toValidated - val expressionElementValidation: ErrorOr[ExpressionElement] = a.getAttributeAs[ExpressionElement]("expression").toValidated + val expressionElementValidation: ErrorOr[ExpressionElement] = + a.getAttributeAs[ExpressionElement]("expression").toValidated - (nameValidation, outputTypeValidation, expressionElementValidation) mapN { - 
(name, outputType, expression) => DeclarationContent(outputType, name, expression) + (nameValidation, outputTypeValidation, expressionElementValidation) mapN { (name, outputType, expression) => + DeclarationContent(outputType, name, expression) } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileBodyElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileBodyElement.scala index 8a2b2480025..36338b5fabc 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileBodyElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileBodyElement.scala @@ -7,17 +7,17 @@ import wdl.model.draft3.elements.{FileBodyElement, StructElement, TaskDefinition object AstToFileBodyElement { - def astToFileBodyElement(implicit workflowConverter: CheckedAtoB[GenericAst, WorkflowDefinitionElement], - taskConverter: CheckedAtoB[GenericAst, TaskDefinitionElement], - structConverter: CheckedAtoB[GenericAst, StructElement]): CheckedAtoB[GenericAst, FileBodyElement] = { + def astToFileBodyElement(implicit + workflowConverter: CheckedAtoB[GenericAst, WorkflowDefinitionElement], + taskConverter: CheckedAtoB[GenericAst, TaskDefinitionElement], + structConverter: CheckedAtoB[GenericAst, StructElement] + ): CheckedAtoB[GenericAst, FileBodyElement] = CheckedAtoB.fromCheck(convert(workflowConverter, taskConverter, structConverter)) - } def convert(workflowConverter: CheckedAtoB[GenericAst, WorkflowDefinitionElement], taskConverter: CheckedAtoB[GenericAst, TaskDefinitionElement], - structConverter: CheckedAtoB[GenericAst, StructElement]) - (ast: GenericAst) - : Checked[FileBodyElement] = ast.getName match { + structConverter: CheckedAtoB[GenericAst, StructElement] + )(ast: GenericAst): Checked[FileBodyElement] = ast.getName match { case "Workflow" => workflowConverter.run(ast) case "Task" => taskConverter.run(ast) case "Struct" => structConverter.run(ast) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileElement.scala index 3fcd92c061c..02ed87d0e86 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToFileElement.scala @@ -9,12 +9,14 @@ import wdl.model.draft3.elements._ object AstToFileElement { - def astToFileElement(implicit astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement], - astNodeToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] - ): CheckedAtoB[GenericAst, FileElement] = CheckedAtoB.fromErrorOr { ast => - - val validatedImportElements: ErrorOr[Vector[ImportElement]] = ast.getAttributeAsVector[ImportElement]("imports").toValidated - val validatedFileBodyElements: ErrorOr[Vector[FileBodyElement]] = ast.getAttributeAsVector[FileBodyElement]("body").toValidated + def astToFileElement(implicit + astNodeToImportElement: CheckedAtoB[GenericAstNode, ImportElement], + astNodeToFileBodyElement: CheckedAtoB[GenericAstNode, FileBodyElement] + ): CheckedAtoB[GenericAst, FileElement] = CheckedAtoB.fromErrorOr { ast => + val validatedImportElements: ErrorOr[Vector[ImportElement]] = + ast.getAttributeAsVector[ImportElement]("imports").toValidated + val validatedFileBodyElements: ErrorOr[Vector[FileBodyElement]] = + ast.getAttributeAsVector[FileBodyElement]("body").toValidated 
(validatedImportElements, validatedFileBodyElements) mapN { (importElements, fileBodyElements) => val workflowElements: Vector[WorkflowDefinitionElement] = fileBodyElements.filterByType[WorkflowDefinitionElement] diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToIfElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToIfElement.scala index 45eaa471e33..37cd043e571 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToIfElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToIfElement.scala @@ -8,12 +8,10 @@ import common.validation.ErrorOr.ErrorOr import wdl.model.draft3.elements._ object AstToIfElement { - def astToIfElement(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement], - astNodeToWorkflowGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] - - ): CheckedAtoB[GenericAst, IfElement] = CheckedAtoB.fromErrorOr("parse if block") { ast => - - + def astToIfElement(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement], + astNodeToWorkflowGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] + ): CheckedAtoB[GenericAst, IfElement] = CheckedAtoB.fromErrorOr("parse if block") { ast => val conditionCollectionExpressionValidation: ErrorOr[ExpressionElement] = ast.getAttributeAs[ExpressionElement]("expression").toValidated.contextualizeErrors("parse if (...) condition") val bodyValidation: ErrorOr[Vector[WorkflowGraphElement]] = diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToImportElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToImportElement.scala index ce0042461ed..b3775ed67f9 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToImportElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToImportElement.scala @@ -9,15 +9,19 @@ import wdl.model.draft3.elements.{ImportElement, LanguageElement, StaticString} object AstToImportElement { - def astToImportElement(implicit astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString]): CheckedAtoB[GenericAst, ImportElement] = + def astToImportElement(implicit + astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] + ): CheckedAtoB[GenericAst, ImportElement] = CheckedAtoB.fromErrorOr("convert Ast to ImportElement") { a => - val importPath: ErrorOr[String] = a.getAttributeAs[StaticString]("uri").map(_.value).toValidated val alias: ErrorOr[Option[String]] = a.getAttributeAsOptional[String]("namespace").toValidated - val aliasElementMaker: CheckedAtoB[GenericAstNode, ImportStructRenameElement] = astNodeToAst andThen CheckedAtoB.fromErrorOr(convertAliasElement _) - val structRenames: ErrorOr[Vector[ImportStructRenameElement]] = a.getAttributeAsVector[ImportStructRenameElement]("aliases")(aliasElementMaker).toValidated - val structRenameMap: ErrorOr[Map[String, String]] = structRenames.map(_.map(rename => rename.oldName -> rename.newName).toMap) + val aliasElementMaker: CheckedAtoB[GenericAstNode, ImportStructRenameElement] = + astNodeToAst andThen CheckedAtoB.fromErrorOr(convertAliasElement _) + val structRenames: ErrorOr[Vector[ImportStructRenameElement]] = + a.getAttributeAsVector[ImportStructRenameElement]("aliases")(aliasElementMaker).toValidated + val structRenameMap: ErrorOr[Map[String, String]] = + structRenames.map(_.map(rename => rename.oldName -> 
rename.newName).toMap) (importPath, alias, structRenameMap) mapN ImportElement } @@ -29,5 +33,5 @@ object AstToImportElement { (oldName, newName) mapN ImportStructRenameElement } - private final case class ImportStructRenameElement(oldName: String, newName: String) extends LanguageElement + final private case class ImportStructRenameElement(oldName: String, newName: String) extends LanguageElement } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputDeclarationElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputDeclarationElement.scala index b9a54a4b250..193e4aa5a1e 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputDeclarationElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputDeclarationElement.scala @@ -9,8 +9,10 @@ import wdl.model.draft3.elements.{ExpressionElement, InputDeclarationElement, Ty object AstToInputDeclarationElement { - def astToInputDeclarationElement(implicit astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement], - astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement]): CheckedAtoB[GenericAst, InputDeclarationElement] = CheckedAtoB.fromErrorOr { a: GenericAst => + def astToInputDeclarationElement(implicit + astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement], + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement] + ): CheckedAtoB[GenericAst, InputDeclarationElement] = CheckedAtoB.fromErrorOr { a: GenericAst => val nameValidation: ErrorOr[String] = astNodeToString(a.getAttribute("name")).toValidated val inputTypeValidation: ErrorOr[TypeElement] = astNodeToTypeElement(a.getAttribute("type")).toValidated val expressionValidation: ErrorOr[Option[ExpressionElement]] = Option(a.getAttribute("expression")) match { @@ -18,9 +20,8 @@ object AstToInputDeclarationElement { case None => None.validNel } - (nameValidation, inputTypeValidation, expressionValidation) mapN { - (name, inputType, expression) => - InputDeclarationElement(inputType, name, expression) + (nameValidation, inputTypeValidation, expressionValidation) mapN { (name, inputType, expression) => + InputDeclarationElement(inputType, name, expression) } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputsSectionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputsSectionElement.scala index fd0774be89d..04f7347a032 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputsSectionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToInputsSectionElement.scala @@ -5,9 +5,9 @@ import wdl.model.draft3.elements.{InputDeclarationElement, InputsSectionElement} object AstToInputsSectionElement { - def astToInputsSectionElement(implicit astNodeToInputDeclaration: CheckedAtoB[GenericAstNode, InputDeclarationElement] - ): CheckedAtoB[GenericAst, InputsSectionElement] = CheckedAtoB.fromCheck("convert Ast to Inputs Section") { a => - + def astToInputsSectionElement(implicit + astNodeToInputDeclaration: CheckedAtoB[GenericAstNode, InputDeclarationElement] + ): CheckedAtoB[GenericAst, InputsSectionElement] = CheckedAtoB.fromCheck("convert Ast to Inputs Section") { a => a.getAttributeAsVector[InputDeclarationElement]("inputs") map { declarations => InputsSectionElement(declarations) } diff --git 
a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToMetaSectionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToMetaSectionElement.scala index 5cda9ade77a..d889ee57b13 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToMetaSectionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToMetaSectionElement.scala @@ -5,9 +5,9 @@ import wdl.model.draft3.elements.MetaSectionElement import wom.callable.MetaKvPair object AstToMetaSectionElement { - def astToMetaSectionElement(implicit astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] - ): CheckedAtoB[GenericAst, MetaSectionElement] = CheckedAtoB.fromCheck("convert AST to Meta Section") { ast => - + def astToMetaSectionElement(implicit + astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] + ): CheckedAtoB[GenericAst, MetaSectionElement] = CheckedAtoB.fromCheck("convert AST to Meta Section") { ast => ast.getAttributeAsVector[MetaKvPair]("map") map { attributes => val asMap = attributes.map(kv => kv.key -> kv.value).toMap MetaSectionElement(asMap) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToOutputsSectionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToOutputsSectionElement.scala index fcac78ec9b1..70f1eb7a605 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToOutputsSectionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToOutputsSectionElement.scala @@ -4,9 +4,11 @@ import common.transforms.CheckedAtoB import wdl.model.draft3.elements.{DeclarationContent, OutputDeclarationElement, OutputsSectionElement} object AstToOutputsSectionElement { - def astToOutputSectionElement(implicit astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, DeclarationContent] - ): CheckedAtoB[GenericAst, OutputsSectionElement] = CheckedAtoB.fromCheck("read outputs section") { a => - - a.getAttributeAsVector[DeclarationContent]("outputs").map(_.map(OutputDeclarationElement.fromContent)).map(OutputsSectionElement) + def astToOutputSectionElement(implicit + astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, DeclarationContent] + ): CheckedAtoB[GenericAst, OutputsSectionElement] = CheckedAtoB.fromCheck("read outputs section") { a => + a.getAttributeAsVector[DeclarationContent]("outputs") + .map(_.map(OutputDeclarationElement.fromContent)) + .map(OutputsSectionElement) } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToParameterMetaSectionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToParameterMetaSectionElement.scala index c87a4ef4579..6baa864bcb8 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToParameterMetaSectionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToParameterMetaSectionElement.scala @@ -5,12 +5,13 @@ import wdl.model.draft3.elements.ParameterMetaSectionElement import wom.callable.MetaKvPair object AstToParameterMetaSectionElement { - def astToParameterMetaSectionElement(implicit astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] - ): CheckedAtoB[GenericAst, ParameterMetaSectionElement] = CheckedAtoB.fromCheck("convert AST to parameter_meta section") { ast => - - ast.getAttributeAsVector[MetaKvPair]("map") map { attributes => - val asMap = attributes.map(kv => kv.key -> 
kv.value).toMap - ParameterMetaSectionElement(asMap) + def astToParameterMetaSectionElement(implicit + astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] + ): CheckedAtoB[GenericAst, ParameterMetaSectionElement] = + CheckedAtoB.fromCheck("convert AST to parameter_meta section") { ast => + ast.getAttributeAsVector[MetaKvPair]("map") map { attributes => + val asMap = attributes.map(kv => kv.key -> kv.value).toMap + ParameterMetaSectionElement(asMap) + } } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToRuntimeAttributesSectionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToRuntimeAttributesSectionElement.scala index 1e06f35e225..86dc9e019bd 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToRuntimeAttributesSectionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToRuntimeAttributesSectionElement.scala @@ -5,11 +5,12 @@ import wdl.model.draft3.elements.ExpressionElement.KvPair import wdl.model.draft3.elements.RuntimeAttributesSectionElement object AstToRuntimeAttributesSectionElement { - def astToRuntimeSectionElement(implicit astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] - ): CheckedAtoB[GenericAst, RuntimeAttributesSectionElement] = CheckedAtoB.fromCheck("convert AST to runtime section") { ast => - - ast.getAttributeAsVector[KvPair]("map") map { attributes => - RuntimeAttributesSectionElement(attributes) + def astToRuntimeSectionElement(implicit + astNodeToKvPair: CheckedAtoB[GenericAstNode, KvPair] + ): CheckedAtoB[GenericAst, RuntimeAttributesSectionElement] = + CheckedAtoB.fromCheck("convert AST to runtime section") { ast => + ast.getAttributeAsVector[KvPair]("map") map { attributes => + RuntimeAttributesSectionElement(attributes) + } } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToScatterElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToScatterElement.scala index cee211f86dc..7d265cd7e5e 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToScatterElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToScatterElement.scala @@ -8,19 +8,22 @@ import wdl.model.draft3.elements._ import wom.SourceFileLocation object AstToScatterElement { - def astToScatterElement(implicit astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement], - astNodeToWorkflowGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] - ): CheckedAtoB[GenericAst, ScatterElement] = CheckedAtoB.fromErrorOr("process scatter block") { ast => - + def astToScatterElement(implicit + astNodeToExpressionElement: CheckedAtoB[GenericAstNode, ExpressionElement], + astNodeToWorkflowGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] + ): CheckedAtoB[GenericAst, ScatterElement] = CheckedAtoB.fromErrorOr("process scatter block") { ast => val scatterVariableValidation: ErrorOr[GenericTerminal] = ast.getAttributeAs[GenericTerminal]("item").toValidated - val scatterCollectionExpressionValidation: ErrorOr[ExpressionElement] = ast.getAttributeAs[ExpressionElement]("collection").toValidated - val bodyValidation: ErrorOr[Vector[WorkflowGraphElement]] = ast.getAttributeAsVector[WorkflowGraphElement]("body").toValidated - val sourceLocation : Option[SourceFileLocation] = ast.getSourceLine.map(SourceFileLocation(_)) + val scatterCollectionExpressionValidation: 
ErrorOr[ExpressionElement] = + ast.getAttributeAs[ExpressionElement]("collection").toValidated + val bodyValidation: ErrorOr[Vector[WorkflowGraphElement]] = + ast.getAttributeAsVector[WorkflowGraphElement]("body").toValidated + val sourceLocation: Option[SourceFileLocation] = ast.getSourceLine.map(SourceFileLocation(_)) - (scatterVariableValidation, scatterCollectionExpressionValidation, bodyValidation) mapN { (variable, collection, body) => - val scatterName = s"ScatterAt${variable.getLine}_${variable.getColumn}" - ScatterElement(scatterName, collection, variable.getSourceString, body, sourceLocation) + (scatterVariableValidation, scatterCollectionExpressionValidation, bodyValidation) mapN { + (variable, collection, body) => + val scatterName = s"ScatterAt${variable.getLine}_${variable.getColumn}" + ScatterElement(scatterName, collection, variable.getSourceString, body, sourceLocation) } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToStructElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToStructElement.scala index 4854c2e7bcf..99c56307aae 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToStructElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToStructElement.scala @@ -8,9 +8,9 @@ import common.validation.ErrorOr.ErrorOr import wdl.model.draft3.elements.{StructElement, StructEntryElement, TypeElement} object AstToStructElement { - def astToStructElement(implicit astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] - ): CheckedAtoB[GenericAst, StructElement] = CheckedAtoB.fromErrorOr("convert AST to struct definition") { a => - + def astToStructElement(implicit + astNodeToTypeElement: CheckedAtoB[GenericAstNode, TypeElement] + ): CheckedAtoB[GenericAst, StructElement] = CheckedAtoB.fromErrorOr("convert AST to struct definition") { a => def convertAstToStructEntry(a: GenericAst): ErrorOr[StructEntryElement] = { val name: ErrorOr[String] = a.getAttributeAs[String]("name").toValidated val typeElement: ErrorOr[TypeElement] = a.getAttributeAs[TypeElement]("type").toValidated @@ -18,12 +18,13 @@ object AstToStructElement { (name, typeElement).mapN(StructEntryElement.apply) } - implicit val astNodeToStructEntry: CheckedAtoB[GenericAstNode, StructEntryElement] = astNodeToAst andThen CheckedAtoB.fromErrorOr("convert AST to struct entry")(convertAstToStructEntry) + implicit val astNodeToStructEntry: CheckedAtoB[GenericAstNode, StructEntryElement] = + astNodeToAst andThen CheckedAtoB.fromErrorOr("convert AST to struct entry")(convertAstToStructEntry) val nameValidation: ErrorOr[String] = a.getAttributeAs[String]("name").toValidated - val entriesValidation: ErrorOr[Vector[StructEntryElement]] = a.getAttributeAsVector[StructEntryElement]("entries").toValidated + val entriesValidation: ErrorOr[Vector[StructEntryElement]] = + a.getAttributeAsVector[StructEntryElement]("entries").toValidated (nameValidation, entriesValidation) mapN { (name, entries) => StructElement(name, entries) } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskDefinitionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskDefinitionElement.scala index 2916186c0ef..fdda85254b9 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskDefinitionElement.scala +++ 
b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskDefinitionElement.scala @@ -11,14 +11,15 @@ import wom.SourceFileLocation object AstToTaskDefinitionElement { - def astToTaskDefinitionElement(implicit astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] - ): CheckedAtoB[GenericAst, TaskDefinitionElement] = CheckedAtoB.fromErrorOr - { a: GenericAst => s"read task definition${a.lineAndColumnString}" } - { a => - + def astToTaskDefinitionElement(implicit + astNodeToTaskSectionElement: CheckedAtoB[GenericAstNode, TaskSectionElement] + ): CheckedAtoB[GenericAst, TaskDefinitionElement] = CheckedAtoB.fromErrorOr { a: GenericAst => + s"read task definition${a.lineAndColumnString}" + } { a => val nameElementValidation: ErrorOr[String] = astNodeToString(a.getAttribute("name")).toValidated - val sectionsValidation: ErrorOr[Vector[TaskSectionElement]] = a.getAttributeAsVector[TaskSectionElement]("sections").toValidated - val sourceLocation : Option[SourceFileLocation] = a.getSourceLine.map(SourceFileLocation(_)) + val sectionsValidation: ErrorOr[Vector[TaskSectionElement]] = + a.getAttributeAsVector[TaskSectionElement]("sections").toValidated + val sourceLocation: Option[SourceFileLocation] = a.getSourceLine.map(SourceFileLocation(_)) (nameElementValidation, sectionsValidation) flatMapN { (nameElement, sections) => combineElements(nameElement, sections, sourceLocation) @@ -27,19 +28,41 @@ object AstToTaskDefinitionElement { def combineElements(nameElement: String, bodyElements: Vector[TaskSectionElement], - sourceLocation: Option[SourceFileLocation]) = { - val inputsSectionElement: ErrorOr[Option[InputsSectionElement]] = validateOneMax(bodyElements.filterByType[InputsSectionElement], "inputs") - val declarations: Vector[IntermediateValueDeclarationElement] = bodyElements.filterByType[IntermediateValueDeclarationElement] - val outputsSectionElement: ErrorOr[Option[OutputsSectionElement]] = validateOneMax(bodyElements.filterByType[OutputsSectionElement], "outputs") - val commandSectionElement: ErrorOr[CommandSectionElement] = validateExists(bodyElements.filterByType[CommandSectionElement], "command") - val runtimeSectionElement: ErrorOr[Option[RuntimeAttributesSectionElement]] = validateOneMax(bodyElements.filterByType[RuntimeAttributesSectionElement], "runtime") - - val metaSectionElement: ErrorOr[Option[MetaSectionElement]] = validateOneMax(bodyElements.filterByType[MetaSectionElement], "meta") - val parameterMetaSectionElement: ErrorOr[Option[ParameterMetaSectionElement]] = validateOneMax(bodyElements.filterByType[ParameterMetaSectionElement], "parameterMeta") - - (inputsSectionElement, outputsSectionElement, commandSectionElement, runtimeSectionElement, metaSectionElement, parameterMetaSectionElement) mapN { - (inputs, outputs, command, runtime, meta, parameterMeta) => - TaskDefinitionElement(nameElement, inputs, declarations, outputs, command, runtime, meta, parameterMeta, sourceLocation) + sourceLocation: Option[SourceFileLocation] + ) = { + val inputsSectionElement: ErrorOr[Option[InputsSectionElement]] = + validateOneMax(bodyElements.filterByType[InputsSectionElement], "inputs") + val declarations: Vector[IntermediateValueDeclarationElement] = + bodyElements.filterByType[IntermediateValueDeclarationElement] + val outputsSectionElement: ErrorOr[Option[OutputsSectionElement]] = + validateOneMax(bodyElements.filterByType[OutputsSectionElement], "outputs") + val commandSectionElement: ErrorOr[CommandSectionElement] = + 
validateExists(bodyElements.filterByType[CommandSectionElement], "command") + val runtimeSectionElement: ErrorOr[Option[RuntimeAttributesSectionElement]] = + validateOneMax(bodyElements.filterByType[RuntimeAttributesSectionElement], "runtime") + + val metaSectionElement: ErrorOr[Option[MetaSectionElement]] = + validateOneMax(bodyElements.filterByType[MetaSectionElement], "meta") + val parameterMetaSectionElement: ErrorOr[Option[ParameterMetaSectionElement]] = + validateOneMax(bodyElements.filterByType[ParameterMetaSectionElement], "parameterMeta") + + (inputsSectionElement, + outputsSectionElement, + commandSectionElement, + runtimeSectionElement, + metaSectionElement, + parameterMetaSectionElement + ) mapN { (inputs, outputs, command, runtime, meta, parameterMeta) => + TaskDefinitionElement(nameElement, + inputs, + declarations, + outputs, + command, + runtime, + meta, + parameterMeta, + sourceLocation + ) } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskSectionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskSectionElement.scala index f5c724d242d..117a4984b69 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskSectionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToTaskSectionElement.scala @@ -5,14 +5,15 @@ import common.validation.Checked._ import wdl.model.draft3.elements._ object AstToTaskSectionElement { - def astToTaskSectionElement(implicit astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement], - astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement], - astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent], - astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement], - astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement], - astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement], - astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] - ): CheckedAtoB[GenericAst, TaskSectionElement] = CheckedAtoB.fromCheck { a: GenericAst => + def astToTaskSectionElement(implicit + astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement], + astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement], + astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent], + astNodeToRuntimeAttributesSectionElement: CheckedAtoB[GenericAstNode, RuntimeAttributesSectionElement], + astNodeToCommandSectionElement: CheckedAtoB[GenericAstNode, CommandSectionElement], + astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement], + astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement] + ): CheckedAtoB[GenericAst, TaskSectionElement] = CheckedAtoB.fromCheck { a: GenericAst => a.getName match { case "Inputs" => astNodeToInputsSectionElement.run(a) case "Outputs" => astNodeToOutputsSectionElement(a) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowBodyElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowBodyElement.scala index 71fd9f78c16..1c213153994 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowBodyElement.scala +++ 
b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowBodyElement.scala @@ -5,17 +5,20 @@ import common.validation.Checked._ import wdl.model.draft3.elements._ object AstToWorkflowBodyElement { - def astToWorkflowBodyElement(implicit astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement], - astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement], - astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement], - astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement], - astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] - ): CheckedAtoB[GenericAst, WorkflowBodyElement] = CheckedAtoB.fromCheck { ast: GenericAst => ast.getName match { - case "Inputs" => astNodeToInputsSectionElement(ast) - case "Outputs" => astNodeToOutputsSectionElement(ast) - case "Meta" => astNodeToMetaSectionElement(ast) - case "ParameterMeta" => astNodeToParameterMetaSectionElement(ast) - case "Declaration" | "Call" | "Scatter" | "If" => astNodeToGraphElement(ast) - case other => s"No conversion defined for Ast with name $other to WorkflowBodyElement".invalidNelCheck - }} + def astToWorkflowBodyElement(implicit + astNodeToInputsSectionElement: CheckedAtoB[GenericAstNode, InputsSectionElement], + astNodeToOutputsSectionElement: CheckedAtoB[GenericAstNode, OutputsSectionElement], + astNodeToMetaSectionElement: CheckedAtoB[GenericAstNode, MetaSectionElement], + astNodeToParameterMetaSectionElement: CheckedAtoB[GenericAstNode, ParameterMetaSectionElement], + astNodeToGraphElement: CheckedAtoB[GenericAstNode, WorkflowGraphElement] + ): CheckedAtoB[GenericAst, WorkflowBodyElement] = CheckedAtoB.fromCheck { ast: GenericAst => + ast.getName match { + case "Inputs" => astNodeToInputsSectionElement(ast) + case "Outputs" => astNodeToOutputsSectionElement(ast) + case "Meta" => astNodeToMetaSectionElement(ast) + case "ParameterMeta" => astNodeToParameterMetaSectionElement(ast) + case "Declaration" | "Call" | "Scatter" | "If" => astNodeToGraphElement(ast) + case other => s"No conversion defined for Ast with name $other to WorkflowBodyElement".invalidNelCheck + } + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala index 6b8e7d08dd0..b7128ed7c9e 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala @@ -12,12 +12,14 @@ import wdl.model.draft3.elements.ExpressionElement._ object AstToWorkflowDefinitionElement { - def astToWorkflowDefinitionElement(implicit astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] - ): CheckedAtoB[GenericAst, WorkflowDefinitionElement] = CheckedAtoB.fromErrorOr { a: GenericAst => + def astToWorkflowDefinitionElement(implicit + astNodeToWorkflowBodyElement: CheckedAtoB[GenericAstNode, WorkflowBodyElement] + ): CheckedAtoB[GenericAst, WorkflowDefinitionElement] = CheckedAtoB.fromErrorOr { a: GenericAst => val nameElementValidation: ErrorOr[String] = astNodeToString(a.getAttribute("name")).toValidated - val sourceLocation : Option[SourceFileLocation] = a.getSourceLine.map(SourceFileLocation(_)) - val bodyElementsValidation: ErrorOr[Vector[WorkflowBodyElement]] = 
a.getAttributeAsVector[WorkflowBodyElement]("body")(astNodeToWorkflowBodyElement).toValidated + val sourceLocation: Option[SourceFileLocation] = a.getSourceLine.map(SourceFileLocation(_)) + val bodyElementsValidation: ErrorOr[Vector[WorkflowBodyElement]] = + a.getAttributeAsVector[WorkflowBodyElement]("body")(astNodeToWorkflowBodyElement).toValidated (nameElementValidation, bodyElementsValidation) flatMapN { (name, bodyElements) => combineElements(name, sourceLocation, bodyElements) } @@ -25,35 +27,65 @@ object AstToWorkflowDefinitionElement { private def combineElements(name: String, sourceLocation: Option[SourceFileLocation], - bodyElements: Vector[WorkflowBodyElement]) = { + bodyElements: Vector[WorkflowBodyElement] + ) = { val inputsSectionValidation: ErrorOr[Option[InputsSectionElement]] = for { - inputValidateElement <- validateSize(bodyElements.filterByType[InputsSectionElement], "inputs", 1): ErrorOr[Option[InputsSectionElement]] + inputValidateElement <- validateSize(bodyElements.filterByType[InputsSectionElement], "inputs", 1): ErrorOr[ + Option[InputsSectionElement] + ] _ <- checkDisallowedInputElement(inputValidateElement, StdoutElement, "stdout") _ <- checkDisallowedInputElement(inputValidateElement, StderrElement, "stderr") } yield inputValidateElement - val intermediateValueDeclarationStdoutCheck = checkDisallowedIntermediates(bodyElements.filterByType[IntermediateValueDeclarationElement], StdoutElement, "stdout") - val intermediateValueDeclarationStderrCheck = checkDisallowedIntermediates(bodyElements.filterByType[IntermediateValueDeclarationElement], StderrElement, "stderr") + val intermediateValueDeclarationStdoutCheck = checkDisallowedIntermediates( + bodyElements.filterByType[IntermediateValueDeclarationElement], + StdoutElement, + "stdout" + ) + val intermediateValueDeclarationStderrCheck = checkDisallowedIntermediates( + bodyElements.filterByType[IntermediateValueDeclarationElement], + StderrElement, + "stderr" + ) val outputsSectionValidation: ErrorOr[Option[OutputsSectionElement]] = for { - outputValidateElement <- validateSize(bodyElements.filterByType[OutputsSectionElement], "outputs", 1): ErrorOr[Option[OutputsSectionElement]] + outputValidateElement <- validateSize(bodyElements.filterByType[OutputsSectionElement], "outputs", 1): ErrorOr[ + Option[OutputsSectionElement] + ] _ <- checkDisallowedOutputElement(outputValidateElement, StdoutElement, "stdout") _ <- checkDisallowedOutputElement(outputValidateElement, StderrElement, "stderr") } yield outputValidateElement val graphSections: Vector[WorkflowGraphElement] = bodyElements.filterByType[WorkflowGraphElement] - val metaSectionValidation: ErrorOr[Option[MetaSectionElement]] = validateSize(bodyElements.filterByType[MetaSectionElement], "meta", 1) - val parameterMetaSectionValidation: ErrorOr[Option[ParameterMetaSectionElement]] = validateSize(bodyElements.filterByType[ParameterMetaSectionElement], "parameterMeta", 1) - - (inputsSectionValidation, outputsSectionValidation, metaSectionValidation, parameterMetaSectionValidation, intermediateValueDeclarationStdoutCheck, intermediateValueDeclarationStderrCheck) mapN { - (validInputs, validOutputs, meta, parameterMeta, _, _) => - WorkflowDefinitionElement(name, validInputs, graphSections.toSet, validOutputs, meta, parameterMeta, sourceLocation) + val metaSectionValidation: ErrorOr[Option[MetaSectionElement]] = + validateSize(bodyElements.filterByType[MetaSectionElement], "meta", 1) + val parameterMetaSectionValidation: ErrorOr[Option[ParameterMetaSectionElement]] = + 
validateSize(bodyElements.filterByType[ParameterMetaSectionElement], "parameterMeta", 1) + + (inputsSectionValidation, + outputsSectionValidation, + metaSectionValidation, + parameterMetaSectionValidation, + intermediateValueDeclarationStdoutCheck, + intermediateValueDeclarationStderrCheck + ) mapN { (validInputs, validOutputs, meta, parameterMeta, _, _) => + WorkflowDefinitionElement(name, + validInputs, + graphSections.toSet, + validOutputs, + meta, + parameterMeta, + sourceLocation + ) } } - def checkDisallowedInputElement(inputSection: Option[InputsSectionElement], expressionType: FunctionCallElement, expressionName: String): ErrorOr[Unit] = { + def checkDisallowedInputElement(inputSection: Option[InputsSectionElement], + expressionType: FunctionCallElement, + expressionName: String + ): ErrorOr[Unit] = inputSection match { case Some(section) => if (section.inputDeclarations.flatMap(_.expression).exists(_.isInstanceOf[expressionType.type])) { @@ -61,9 +93,11 @@ object AstToWorkflowDefinitionElement { } else ().validNel case None => ().validNel } - } - def checkDisallowedOutputElement(outputSection: Option[OutputsSectionElement], expressionType: FunctionCallElement, expressionName: String): ErrorOr[Unit] = { + def checkDisallowedOutputElement(outputSection: Option[OutputsSectionElement], + expressionType: FunctionCallElement, + expressionName: String + ): ErrorOr[Unit] = outputSection match { case Some(section) => if (section.outputs.map(_.expression).exists(_.isInstanceOf[expressionType.type])) { @@ -71,13 +105,14 @@ object AstToWorkflowDefinitionElement { } else ().validNel case None => ().validNel } - } - def checkDisallowedIntermediates(intermediate: Vector[IntermediateValueDeclarationElement], expressionType: FunctionCallElement, expressionName: String): ErrorOr[Unit] = { + def checkDisallowedIntermediates(intermediate: Vector[IntermediateValueDeclarationElement], + expressionType: FunctionCallElement, + expressionName: String + ): ErrorOr[Unit] = if (intermediate.map(_.expression).exists(_.isInstanceOf[expressionType.type])) { s"Workflow cannot have $expressionName expression at intermediate declaration section at workflow-level.".invalidNel } else ().validNel - } private def validateSize[A](elements: Vector[A], sectionName: String, numExpected: Int): ErrorOr[Option[A]] = { val sectionValidation: ErrorOr[Option[A]] = if (elements.size > numExpected) { @@ -88,4 +123,4 @@ object AstToWorkflowDefinitionElement { sectionValidation } -} \ No newline at end of file +} diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowGraphNodeElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowGraphNodeElement.scala index 863a72d0e96..e3bf25d3956 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowGraphNodeElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowGraphNodeElement.scala @@ -7,17 +7,20 @@ import common.validation.Checked._ import wdl.model.draft3.elements._ object AstToWorkflowGraphNodeElementConverterMaker { - private def astToWorkflowGraphNodeElement(astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent], - astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement], - astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement], - astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] - ): CheckedAtoB[GenericAst, WorkflowGraphElement] = CheckedAtoB.fromCheck { a: GenericAst 
=> a.getName match { - case "Declaration" => astNodeToDeclarationContent(a).map(IntermediateValueDeclarationElement.fromContent) - case "Call" => astNodeToCallElement(a) - case "Scatter" => astNodeToScatterElement(a) - case "If" => astNodeToIfElement(a) - case other => s"No conversion defined for Ast with name $other to WorkflowGraphElement".invalidNelCheck - }} + private def astToWorkflowGraphNodeElement( + astNodeToDeclarationContent: CheckedAtoB[GenericAstNode, DeclarationContent], + astNodeToCallElement: CheckedAtoB[GenericAstNode, CallElement], + astNodeToScatterElement: CheckedAtoB[GenericAstNode, ScatterElement], + astNodeToIfElement: CheckedAtoB[GenericAstNode, IfElement] + ): CheckedAtoB[GenericAst, WorkflowGraphElement] = CheckedAtoB.fromCheck { a: GenericAst => + a.getName match { + case "Declaration" => astNodeToDeclarationContent(a).map(IntermediateValueDeclarationElement.fromContent) + case "Call" => astNodeToCallElement(a) + case "Scatter" => astNodeToScatterElement(a) + case "If" => astNodeToIfElement(a) + case other => s"No conversion defined for Ast with name $other to WorkflowGraphElement".invalidNelCheck + } + } } class AstToWorkflowGraphNodeElementConverterMaker() { @@ -28,9 +31,10 @@ class AstToWorkflowGraphNodeElementConverterMaker() { val uninitializedMessage = NonEmptyList.fromListUnsafe(List("")) - implicit val astNodeToDeclarationContentFixed: CheckedAtoB[GenericAstNode, DeclarationContent] = CheckedAtoB.fromCheck { a => - Either.fromOption(astNodeToDeclarationContent, uninitializedMessage) flatMap { c => c.run(a) } - } + implicit val astNodeToDeclarationContentFixed: CheckedAtoB[GenericAstNode, DeclarationContent] = + CheckedAtoB.fromCheck { a => + Either.fromOption(astNodeToDeclarationContent, uninitializedMessage) flatMap { c => c.run(a) } + } implicit val astNodeToCallElementFixed: CheckedAtoB[GenericAstNode, CallElement] = CheckedAtoB.fromCheck { a => Either.fromOption(astNodeToCallElement, uninitializedMessage) flatMap { c => c.run(a) } @@ -48,5 +52,6 @@ class AstToWorkflowGraphNodeElementConverterMaker() { astNodeToDeclarationContentFixed, astNodeToCallElementFixed, astNodeToScatterElementFixed, - astNodeToIfElementFixed) + astNodeToIfElementFixed + ) } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/GenericAst.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/GenericAst.scala index 903371b59cd..190c6118feb 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/GenericAst.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/GenericAst.scala @@ -23,7 +23,8 @@ trait GenericAstNode { def firstTerminal: Option[GenericTerminal] = { def foldFunction(acc: Option[GenericTerminal], next: GenericTerminal): Option[GenericTerminal] = acc match { - case Some(t) if t.getLine > next.getLine || (t.getLine == next.getLine && t.getColumn > next.getColumn) => Some(next) + case Some(t) if t.getLine > next.getLine || (t.getLine == next.getLine && t.getColumn > next.getColumn) => + Some(next) case None => Some(next) case _ => acc } @@ -36,11 +37,10 @@ trait GenericAstNode { // all the information needed for downstream analysis phases. However, it turns out that // getting accurate information out of Hermes is not that simple. For now, we just // get the initial source line, which is -more or less- accurate. 
- def getSourceLine: Option[Int] = { - firstTerminal map {t => t.getLine } - } + def getSourceLine: Option[Int] = + firstTerminal map { t => t.getLine } - def lineAndColumnString = firstTerminal map { t => s" at line ${t.getLine} column ${t.getColumn}"} getOrElse("") + def lineAndColumnString = firstTerminal map { t => s" at line ${t.getLine} column ${t.getColumn}" } getOrElse "" } trait GenericAst extends GenericAstNode { @@ -48,13 +48,12 @@ trait GenericAst extends GenericAstNode { def getAttributes: Map[String, GenericAstNode] def getName: String - private def getAttributeAsAstNodeVector(attr: String, optional: Boolean): Checked[Vector[GenericAstNode]] = { + private def getAttributeAsAstNodeVector(attr: String, optional: Boolean): Checked[Vector[GenericAstNode]] = Option(getAttribute(attr)) match { case Some(attributeNode) => attributeNode.astListAsVector case None if optional => Vector.empty.validNelCheck case None => s"No expected attribute '$attr' found".invalidNelCheck } - } /** * Will get an attribute on this Ast as an AstNode and then convert that into a single element of @@ -71,7 +70,9 @@ trait GenericAst extends GenericAstNode { * Will get an attribute on this Ast as an AstList and then convert that into a vector of Ast * @param attr The attribute to read from this Ast */ - def getAttributeAsVector[A](attr: String, optional: Boolean = false)(implicit toA: CheckedAtoB[GenericAstNode, A]): Checked[Vector[A]] = { + def getAttributeAsVector[A](attr: String, optional: Boolean = false)(implicit + toA: CheckedAtoB[GenericAstNode, A] + ): Checked[Vector[A]] = for { asVector <- getAttributeAsAstNodeVector(attr, optional) // This toValidated/toEither dance is necessary to @@ -79,13 +80,14 @@ trait GenericAst extends GenericAstNode { // (2) convert back into a Checked for the flatMap result <- asVector.traverse(item => toA.run(item).toValidated).toEither } yield result - } /** * Will get an attribute on this Ast as an AstList and then convert that into a vector of Ast * @param attr The attribute to read from this Ast */ - def getAttributeAsVectorF[A](attr: String, optional: Boolean = false)(toA: GenericAstNode => Checked[A]): Checked[Vector[A]] = { + def getAttributeAsVectorF[A](attr: String, optional: Boolean = false)( + toA: GenericAstNode => Checked[A] + ): Checked[Vector[A]] = for { asVector <- getAttributeAsAstNodeVector(attr, optional) // This toValidated/toEither dance is necessary to @@ -93,17 +95,15 @@ trait GenericAst extends GenericAstNode { // (2) convert back into a Checked for the flatMap result <- asVector.traverse(item => toA(item).toValidated).toEither } yield result - } /** * Gets an attribute on this Ast as an Optional Ast, returns an empty Option if the attribute is empty. 
*/ - def getAttributeAsOptional[A](attr: String)(implicit toA: CheckedAtoB[GenericAstNode, A]): Checked[Option[A]] = { + def getAttributeAsOptional[A](attr: String)(implicit toA: CheckedAtoB[GenericAstNode, A]): Checked[Option[A]] = Option(getAttribute(attr)) match { case None => None.validNelCheck case Some(attribute) => toA.run(attribute).map(Option.apply) } - } } trait GenericAstList extends GenericAstNode { diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/ast2wdlom.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/ast2wdlom.scala index 7c7615332be..f9a7085a6fe 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/ast2wdlom.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/ast2wdlom.scala @@ -20,10 +20,14 @@ package object ast2wdlom { case other => s"Cannot convert from AstNode type '${other.getClass.getSimpleName}' into Terminal".invalidNelCheck } - implicit val astNodeToString: CheckedAtoB[GenericAstNode, String] = CheckedAtoB.fromCheck { a: GenericAstNode => a match { - case t: GenericTerminal => t.getSourceString.validNelCheck - case a: GenericAst => s"Cannot convert Ast of type ${a.getName} into String. Did you want one of its attributes (${a.getAttributes.keys.mkString(", ")})?".invalidNelCheck - case other: GenericAstNode => s"Cannot convert ${other.getClass.getSimpleName} into String".invalidNelCheck - }} + implicit val astNodeToString: CheckedAtoB[GenericAstNode, String] = CheckedAtoB.fromCheck { a: GenericAstNode => + a match { + case t: GenericTerminal => t.getSourceString.validNelCheck + case a: GenericAst => + s"Cannot convert Ast of type ${a.getName} into String. Did you want one of its attributes (${a.getAttributes.keys + .mkString(", ")})?".invalidNelCheck + case other: GenericAstNode => s"Cannot convert ${other.getClass.getSimpleName} into String".invalidNelCheck + } + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/BinaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/BinaryOperatorEvaluators.scala index 46ef93f9374..780acd737d4 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/BinaryOperatorEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/BinaryOperatorEvaluators.scala @@ -22,9 +22,9 @@ object BinaryOperatorEvaluators { implicit val remainderEvaluator: ExpressionValueConsumer[Remainder] = forOperation private def forOperation[A <: BinaryOperation] = new ExpressionValueConsumer[A] { - override def expressionConsumedValueHooks(a: A) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: A)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = a.left.expressionConsumedValueHooks ++ a.right.expressionConsumedValueHooks - } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/EngineFunctionEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/EngineFunctionEvaluators.scala index 52c0024f0d4..772e158bc0b 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/EngineFunctionEvaluators.scala +++ 
b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/EngineFunctionEvaluators.scala @@ -7,15 +7,19 @@ import wdl.model.draft3.graph.ExpressionValueConsumer.ops._ object EngineFunctionEvaluators { - implicit val stdoutElementValueConsumer: ExpressionValueConsumer[StdoutElement.type] = new ExpressionValueConsumer[ExpressionElement.StdoutElement.type] { - override def expressionConsumedValueHooks(a: ExpressionElement.StdoutElement.type) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = Set.empty - } + implicit val stdoutElementValueConsumer: ExpressionValueConsumer[StdoutElement.type] = + new ExpressionValueConsumer[ExpressionElement.StdoutElement.type] { + override def expressionConsumedValueHooks(a: ExpressionElement.StdoutElement.type)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = Set.empty + } - implicit val stderrElementValueConsumer: ExpressionValueConsumer[StderrElement.type] = new ExpressionValueConsumer[ExpressionElement.StderrElement.type] { - override def expressionConsumedValueHooks(a: ExpressionElement.StderrElement.type) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = Set.empty - } + implicit val stderrElementValueConsumer: ExpressionValueConsumer[StderrElement.type] = + new ExpressionValueConsumer[ExpressionElement.StderrElement.type] { + override def expressionConsumedValueHooks(a: ExpressionElement.StderrElement.type)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = Set.empty + } implicit val readLinesValueConsumer: ExpressionValueConsumer[ReadLines] = forOneParamFunction implicit val readTsvValueConsumer: ExpressionValueConsumer[ReadTsv] = forOneParamFunction @@ -54,29 +58,33 @@ object EngineFunctionEvaluators { implicit val joinValueConsumer: ExpressionValueConsumer[Sep] = forTwoParamFunction implicit val subFunctionValueConsumer: ExpressionValueConsumer[Sub] = new ExpressionValueConsumer[Sub] { - override def expressionConsumedValueHooks(a: Sub) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks( + a: Sub + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = a.input.expressionConsumedValueHooks ++ a.pattern.expressionConsumedValueHooks ++ a.replace.expressionConsumedValueHooks - } } - private def forOneParamFunction[A <: OneParamFunctionCallElement]: ExpressionValueConsumer[A] = new ExpressionValueConsumer[A] { - override def expressionConsumedValueHooks(a: A) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = a.param.expressionConsumedValueHooks - } + private def forOneParamFunction[A <: OneParamFunctionCallElement]: ExpressionValueConsumer[A] = + new ExpressionValueConsumer[A] { + override def expressionConsumedValueHooks(a: A)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = a.param.expressionConsumedValueHooks + } - private def forOneOrTwoParamFunction[A <: OneOrTwoParamFunctionCallElement]: ExpressionValueConsumer[A] = new ExpressionValueConsumer[A] { - override def expressionConsumedValueHooks(a: A) - (implicit expressionValueConsumer: 
ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - a.firstParam.expressionConsumedValueHooks ++ - a.secondParam.toSet.flatMap { secondParam: ExpressionElement => secondParam.expressionConsumedValueHooks } + private def forOneOrTwoParamFunction[A <: OneOrTwoParamFunctionCallElement]: ExpressionValueConsumer[A] = + new ExpressionValueConsumer[A] { + override def expressionConsumedValueHooks(a: A)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + a.firstParam.expressionConsumedValueHooks ++ + a.secondParam.toSet.flatMap { secondParam: ExpressionElement => secondParam.expressionConsumedValueHooks } } - } - private def forTwoParamFunction[A <: TwoParamFunctionCallElement]: ExpressionValueConsumer[A] = new ExpressionValueConsumer[A] { - override def expressionConsumedValueHooks(a: A) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { - a.arg1.expressionConsumedValueHooks ++ a.arg2.expressionConsumedValueHooks + private def forTwoParamFunction[A <: TwoParamFunctionCallElement]: ExpressionValueConsumer[A] = + new ExpressionValueConsumer[A] { + override def expressionConsumedValueHooks(a: A)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + a.arg1.expressionConsumedValueHooks ++ a.arg2.expressionConsumedValueHooks } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LiteralEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LiteralEvaluators.scala index c85a33854d3..e58096c59a0 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LiteralEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LiteralEvaluators.scala @@ -7,47 +7,64 @@ import wdl.model.draft3.graph.{ExpressionValueConsumer, UnlinkedConsumedValueHoo object LiteralEvaluators { - implicit val expressionElementSetUnlinkedValueConsumer: ExpressionValueConsumer[Set[ExpressionElement]] = new ExpressionValueConsumer[Set[ExpressionElement]] { - override def expressionConsumedValueHooks(elements: Set[ExpressionElement])(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - elements.flatMap { e: ExpressionElement => expressionValueConsumer.expressionConsumedValueHooks(e)(expressionValueConsumer) } - } + implicit val expressionElementSetUnlinkedValueConsumer: ExpressionValueConsumer[Set[ExpressionElement]] = + new ExpressionValueConsumer[Set[ExpressionElement]] { + override def expressionConsumedValueHooks( + elements: Set[ExpressionElement] + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + elements.flatMap { e: ExpressionElement => + expressionValueConsumer.expressionConsumedValueHooks(e)(expressionValueConsumer) + } + } implicit val kvPairUnlinkedValueConsumer: ExpressionValueConsumer[KvPair] = new ExpressionValueConsumer[KvPair] { - override def expressionConsumedValueHooks(a: ExpressionElement.KvPair)(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + override def expressionConsumedValueHooks(a: ExpressionElement.KvPair)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): 
Set[UnlinkedConsumedValueHook] = expressionValueConsumer.expressionConsumedValueHooks(a.value)(expressionValueConsumer) } - implicit val objectLiteralUnlinkedValueConsumer: ExpressionValueConsumer[ObjectLiteral] = new ExpressionValueConsumer[ObjectLiteral] { - override def expressionConsumedValueHooks(o: ObjectLiteral) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - o.elements.values.toSet[ExpressionElement].expressionConsumedValueHooks - } + implicit val objectLiteralUnlinkedValueConsumer: ExpressionValueConsumer[ObjectLiteral] = + new ExpressionValueConsumer[ObjectLiteral] { + override def expressionConsumedValueHooks(o: ObjectLiteral)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + o.elements.values.toSet[ExpressionElement].expressionConsumedValueHooks + } - implicit val mapLiteralUnlinkedValueConsumer: ExpressionValueConsumer[MapLiteral] = new ExpressionValueConsumer[MapLiteral] { - override def expressionConsumedValueHooks(m: MapLiteral) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - m.elements.keys.toSet[ExpressionElement].expressionConsumedValueHooks ++ - m.elements.values.toSet[ExpressionElement].expressionConsumedValueHooks - } + implicit val mapLiteralUnlinkedValueConsumer: ExpressionValueConsumer[MapLiteral] = + new ExpressionValueConsumer[MapLiteral] { + override def expressionConsumedValueHooks(m: MapLiteral)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + m.elements.keys.toSet[ExpressionElement].expressionConsumedValueHooks ++ + m.elements.values.toSet[ExpressionElement].expressionConsumedValueHooks + } - implicit val pairLiteralUnlinkedValueConsumer: ExpressionValueConsumer[PairLiteral] = new ExpressionValueConsumer[PairLiteral] { - override def expressionConsumedValueHooks(p: PairLiteral) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - p.left.expressionConsumedValueHooks ++ p.right.expressionConsumedValueHooks - } + implicit val pairLiteralUnlinkedValueConsumer: ExpressionValueConsumer[PairLiteral] = + new ExpressionValueConsumer[PairLiteral] { + override def expressionConsumedValueHooks(p: PairLiteral)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + p.left.expressionConsumedValueHooks ++ p.right.expressionConsumedValueHooks + } - implicit val arrayLiteralUnlinkedValueConsumer: ExpressionValueConsumer[ArrayLiteral] = new ExpressionValueConsumer[ArrayLiteral] { - override def expressionConsumedValueHooks(a: ArrayLiteral) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - a.elements.toSet[ExpressionElement].expressionConsumedValueHooks - } + implicit val arrayLiteralUnlinkedValueConsumer: ExpressionValueConsumer[ArrayLiteral] = + new ExpressionValueConsumer[ArrayLiteral] { + override def expressionConsumedValueHooks(a: ArrayLiteral)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + a.elements.toSet[ExpressionElement].expressionConsumedValueHooks + } - implicit val stringExpressionUnlinkedValueConsumer: ExpressionValueConsumer[StringExpression] = new ExpressionValueConsumer[StringExpression] { - override def 
expressionConsumedValueHooks(a: StringExpression) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - a.pieces.flatMap { - case StringPlaceholder(expr) => expr.expressionConsumedValueHooks.toList - case _ => List.empty - }.toSet - } + implicit val stringExpressionUnlinkedValueConsumer: ExpressionValueConsumer[StringExpression] = + new ExpressionValueConsumer[StringExpression] { + override def expressionConsumedValueHooks( + a: StringExpression + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + a.pieces.flatMap { + case StringPlaceholder(expr) => expr.expressionConsumedValueHooks.toList + case _ => List.empty + }.toSet + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LookupEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LookupEvaluators.scala index 41b9536f694..24024751adc 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LookupEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/LookupEvaluators.scala @@ -3,34 +3,41 @@ package wdl.transforms.base.linking.expression.consumed import wdl.model.draft3.elements.ExpressionElement import wdl.model.draft3.elements.ExpressionElement._ import wdl.model.draft3.graph.ExpressionValueConsumer.ops._ -import wdl.model.draft3.graph.{ExpressionValueConsumer, UnlinkedCallOutputOrIdentifierAndMemberAccessHook, UnlinkedConsumedValueHook, UnlinkedIdentifierHook} +import wdl.model.draft3.graph.{ + ExpressionValueConsumer, + UnlinkedCallOutputOrIdentifierAndMemberAccessHook, + UnlinkedConsumedValueHook, + UnlinkedIdentifierHook +} object LookupEvaluators { - implicit val identifierLookupUnlinkedValueConsumer: ExpressionValueConsumer[IdentifierLookup] = new ExpressionValueConsumer[IdentifierLookup] { - override def expressionConsumedValueHooks(a: IdentifierLookup) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - Set[UnlinkedConsumedValueHook](UnlinkedIdentifierHook(a.identifier)) - } + implicit val identifierLookupUnlinkedValueConsumer: ExpressionValueConsumer[IdentifierLookup] = + new ExpressionValueConsumer[IdentifierLookup] { + override def expressionConsumedValueHooks(a: IdentifierLookup)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = + Set[UnlinkedConsumedValueHook](UnlinkedIdentifierHook(a.identifier)) + } implicit val identifierMemberAccessUnlinkedValueConsumer = new ExpressionValueConsumer[IdentifierMemberAccess] { - override def expressionConsumedValueHooks(a: IdentifierMemberAccess) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: IdentifierMemberAccess)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = Set[UnlinkedConsumedValueHook](UnlinkedCallOutputOrIdentifierAndMemberAccessHook(a.first, a.second)) - } } implicit val expressionMemberAccessUnlinkedValueConsumer = new ExpressionValueConsumer[ExpressionMemberAccess] { - override def expressionConsumedValueHooks(a: ExpressionMemberAccess) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): 
Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: ExpressionMemberAccess)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = a.expression.expressionConsumedValueHooks - } } implicit val indexAccessUnlinkedValueConsumer = new ExpressionValueConsumer[IndexAccess] { - override def expressionConsumedValueHooks(a: IndexAccess) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = { + override def expressionConsumedValueHooks(a: IndexAccess)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = a.expressionElement.expressionConsumedValueHooks ++ a.index.expressionConsumedValueHooks - } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/TernaryIfEvaluator.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/TernaryIfEvaluator.scala index b55d56c76ba..22d7df1a67a 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/TernaryIfEvaluator.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/TernaryIfEvaluator.scala @@ -7,9 +7,11 @@ import wdl.model.draft3.graph.{ExpressionValueConsumer, UnlinkedConsumedValueHoo object TernaryIfEvaluator { - implicit val ternaryIfUnlinkedValueConsumer: ExpressionValueConsumer[TernaryIf] = new ExpressionValueConsumer[TernaryIf] { - override def expressionConsumedValueHooks(a: TernaryIf) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = - a.condition.expressionConsumedValueHooks ++ a.ifTrue.expressionConsumedValueHooks ++ a.ifFalse.expressionConsumedValueHooks - } + implicit val ternaryIfUnlinkedValueConsumer: ExpressionValueConsumer[TernaryIf] = + new ExpressionValueConsumer[TernaryIf] { + override def expressionConsumedValueHooks( + a: TernaryIf + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = + a.condition.expressionConsumedValueHooks ++ a.ifTrue.expressionConsumedValueHooks ++ a.ifFalse.expressionConsumedValueHooks + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/UnaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/UnaryOperatorEvaluators.scala index d9ddcbb8495..1f642e43942 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/UnaryOperatorEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/consumed/UnaryOperatorEvaluators.scala @@ -11,7 +11,8 @@ object UnaryOperatorEvaluators { implicit val unaryPlusEvaluator: ExpressionValueConsumer[UnaryPlus] = forOperation private def forOperation[A <: UnaryOperation] = new ExpressionValueConsumer[A] { - override def expressionConsumedValueHooks(a: A) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): Set[UnlinkedConsumedValueHook] = a.argument.expressionConsumedValueHooks + override def expressionConsumedValueHooks(a: A)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): Set[UnlinkedConsumedValueHook] = a.argument.expressionConsumedValueHooks } } diff --git 
a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/BinaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/BinaryOperatorEvaluators.scala index b138ed3b0d0..e6ab692f04b 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/BinaryOperatorEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/BinaryOperatorEvaluators.scala @@ -28,11 +28,16 @@ object BinaryOperatorEvaluators { implicit val remainderEvaluator: FileEvaluator[Remainder] = forOperation(_.mod(_)) private def forOperation[A <: BinaryOperation](op: (WomValue, WomValue) => Try[WomValue]) = new FileEvaluator[A] { - override def predictFilesNeededToEvaluate(a: A, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + override def predictFilesNeededToEvaluate(a: A, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = (a.left.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.right.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) mapN { _ ++ _ } - } + a.right.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + ) mapN { _ ++ _ } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/EngineFunctionEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/EngineFunctionEvaluators.scala index 498b04dbe87..11c687588a7 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/EngineFunctionEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/EngineFunctionEvaluators.scala @@ -17,13 +17,18 @@ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl.expressionElementWriter object EngineFunctionEvaluators { - private def evaluateToFile(forFunction: String, outermostElement: ExpressionElement, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, nestedElement: Option[ExpressionElement] = None) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + private def evaluateToFile(forFunction: String, + outermostElement: ExpressionElement, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + nestedElement: Option[ExpressionElement] = None + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { // `IndexAccess` `ExpressionElement`s require recursion to look for contained `IdentifierLookup` or `IdentifierMemberAccess`. // After all `IndexAccess` elements have been traversed the original `outermostElement` should be evaluated for files. 
val elementToExamine = nestedElement getOrElse outermostElement (elementToExamine match { - case IndexAccess(expressionElement, _) => evaluateToFile(forFunction, outermostElement, inputs, ioFunctionSet, Option(expressionElement)) + case IndexAccess(expressionElement, _) => + evaluateToFile(forFunction, outermostElement, inputs, ioFunctionSet, Option(expressionElement)) // If the specified identifier is not among the inputs there are no files to delocalize from this expression. case IdentifierLookup(identifier) if !inputs.contains(identifier) => Set.empty[WomFile].validNel case IdentifierMemberAccess(first, _, _) if !inputs.contains(first) => Set.empty[WomFile].validNel @@ -35,59 +40,90 @@ object EngineFunctionEvaluators { }).contextualizeErrors(s"predict files needed to de-localize from '${outermostElement.toWdlV1}' for $forFunction") } - def singleParameterPassthroughFileEvaluator[A <: OneParamFunctionCallElement]: FileEvaluator[A] = new FileEvaluator[A] { - override def predictFilesNeededToEvaluate(a: A, - inputs: Map[String, WomValue], - ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { - a.param.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + def singleParameterPassthroughFileEvaluator[A <: OneParamFunctionCallElement]: FileEvaluator[A] = + new FileEvaluator[A] { + override def predictFilesNeededToEvaluate(a: A, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = + a.param.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) } - } - def singleParameterEvaluateToFileFileEvaluator[A <: OneParamFunctionCallElement](functionName: String): FileEvaluator[A] = new FileEvaluator[A] { + def singleParameterEvaluateToFileFileEvaluator[A <: OneParamFunctionCallElement]( + functionName: String + ): FileEvaluator[A] = new FileEvaluator[A] { override def predictFilesNeededToEvaluate(a: A, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = evaluateToFile(functionName, a.param, inputs, ioFunctionSet) - } } implicit val stdoutFunctionEvaluator: FileEvaluator[StdoutElement.type] = new FileEvaluator[StdoutElement.type] { - override def predictFilesNeededToEvaluate(a: ExpressionElement.StdoutElement.type, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + override def predictFilesNeededToEvaluate(a: ExpressionElement.StdoutElement.type, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = Set.empty[WomFile].validNel } implicit val stderrFunctionEvaluator: FileEvaluator[StderrElement.type] = new FileEvaluator[StderrElement.type] { - override def predictFilesNeededToEvaluate(a: 
ExpressionElement.StderrElement.type, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + override def predictFilesNeededToEvaluate(a: ExpressionElement.StderrElement.type, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = Set.empty[WomFile].validNel } - implicit val readLinesFunctionEvaluator: FileEvaluator[ReadLines] = singleParameterEvaluateToFileFileEvaluator("read_lines") + implicit val readLinesFunctionEvaluator: FileEvaluator[ReadLines] = singleParameterEvaluateToFileFileEvaluator( + "read_lines" + ) implicit val readTsvFunctionEvaluator: FileEvaluator[ReadTsv] = singleParameterEvaluateToFileFileEvaluator("read_tsv") implicit val readMapFunctionEvaluator: FileEvaluator[ReadMap] = singleParameterEvaluateToFileFileEvaluator("read_map") - implicit val readObjectFunctionEvaluator: FileEvaluator[ReadObject] = singleParameterEvaluateToFileFileEvaluator("read_object") + implicit val readObjectFunctionEvaluator: FileEvaluator[ReadObject] = singleParameterEvaluateToFileFileEvaluator( + "read_object" + ) - implicit val readObjectsFunctionEvaluator: FileEvaluator[ReadObjects] = singleParameterEvaluateToFileFileEvaluator("read_objects") + implicit val readObjectsFunctionEvaluator: FileEvaluator[ReadObjects] = singleParameterEvaluateToFileFileEvaluator( + "read_objects" + ) - implicit val readJsonFunctionEvaluator: FileEvaluator[ReadJson] = singleParameterEvaluateToFileFileEvaluator("read_json") + implicit val readJsonFunctionEvaluator: FileEvaluator[ReadJson] = singleParameterEvaluateToFileFileEvaluator( + "read_json" + ) implicit val readIntFunctionEvaluator: FileEvaluator[ReadInt] = singleParameterEvaluateToFileFileEvaluator("read_int") - implicit val readStringFunctionEvaluator: FileEvaluator[ReadString] = singleParameterEvaluateToFileFileEvaluator("read_string") + implicit val readStringFunctionEvaluator: FileEvaluator[ReadString] = singleParameterEvaluateToFileFileEvaluator( + "read_string" + ) - implicit val readFloatFunctionEvaluator: FileEvaluator[ReadFloat] = singleParameterEvaluateToFileFileEvaluator("read_float") + implicit val readFloatFunctionEvaluator: FileEvaluator[ReadFloat] = singleParameterEvaluateToFileFileEvaluator( + "read_float" + ) - implicit val readBooleanFunctionEvaluator: FileEvaluator[ReadBoolean] = singleParameterEvaluateToFileFileEvaluator("read_boolean") + implicit val readBooleanFunctionEvaluator: FileEvaluator[ReadBoolean] = singleParameterEvaluateToFileFileEvaluator( + "read_boolean" + ) implicit val writeLinesFunctionEvaluator: FileEvaluator[WriteLines] = singleParameterPassthroughFileEvaluator @@ -122,28 +158,46 @@ object EngineFunctionEvaluators { implicit val roundFunctionEvaluator: FileEvaluator[Round] = singleParameterPassthroughFileEvaluator implicit val globFunctionEvaluator: FileEvaluator[Glob] = new FileEvaluator[Glob] { - override def predictFilesNeededToEvaluate(a: Glob, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + override def predictFilesNeededToEvaluate(a: Glob, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + 
coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = a.param.evaluateValue(inputs, ioFunctionSet, None) flatMap { case EvaluatedValue(p: WomPrimitive, _) => Set[WomFile](WomGlobFile(p.valueString)).validNel - case other => s"Could not predict files to delocalize from '$a' for 'glob'. Expected a primitive but got ${other.getClass.getSimpleName}".invalidNel + case other => + s"Could not predict files to delocalize from '$a' for 'glob'. Expected a primitive but got ${other.getClass.getSimpleName}".invalidNel } - } } implicit val sizeFunctionEvaluator: FileEvaluator[Size] = new FileEvaluator[Size] { - override def predictFilesNeededToEvaluate(a: Size, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + override def predictFilesNeededToEvaluate(a: Size, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = (evaluateToFile("size", a.file, inputs, ioFunctionSet): ErrorOr[Set[WomFile]], - a.unit.fold(Set.empty[WomFile].validNel: ErrorOr[Set[WomFile]])(_.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo))) mapN { _ ++ _ } + a.unit.fold(Set.empty[WomFile].validNel: ErrorOr[Set[WomFile]])( + _.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + ) + ) mapN { _ ++ _ } } implicit val basenameFunctionEvaluator: FileEvaluator[Basename] = new FileEvaluator[Basename] { - override def predictFilesNeededToEvaluate(a: Basename, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + override def predictFilesNeededToEvaluate(a: Basename, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = Set.empty[WomFile].validNel } @@ -151,11 +205,14 @@ object EngineFunctionEvaluators { override def predictFilesNeededToEvaluate(a: A, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = (a.arg1.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.arg2.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) mapN { _ ++ _ } + a.arg2.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + ) mapN { _ ++ _ } } implicit val zipFunctionEvaluator: FileEvaluator[Zip] = twoParameterFunctionPassthroughFileEvaluator[Zip] @@ -163,11 +220,17 @@ object EngineFunctionEvaluators { implicit val prefixFunctionEvaluator: FileEvaluator[Prefix] = twoParameterFunctionPassthroughFileEvaluator[Prefix] implicit val subFunctionEvaluator: FileEvaluator[Sub] = new FileEvaluator[Sub] { - override def predictFilesNeededToEvaluate(a: Sub, inputs: Map[String, WomValue], ioFunctionSet: 
IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + override def predictFilesNeededToEvaluate(a: Sub, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = (a.pattern.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.input.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.replace.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) mapN { _ ++ _ ++ _ } + a.input.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), + a.replace.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + ) mapN { _ ++ _ ++ _ } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LiteralEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LiteralEvaluators.scala index 309ff83b618..db494c27bfa 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LiteralEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LiteralEvaluators.scala @@ -14,22 +14,27 @@ import wom.types.{WomCompositeType, WomSingleFileType, WomType} import wom.values.{WomFile, WomSingleFile, WomValue} object LiteralEvaluators { - implicit val primitiveValueEvaluator: FileEvaluator[PrimitiveLiteralExpressionElement] = new FileEvaluator[PrimitiveLiteralExpressionElement] { - override def predictFilesNeededToEvaluate(a: PrimitiveLiteralExpressionElement, - inputs: Map[String, WomValue], - ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = Set.empty[WomFile].validNel - } + implicit val primitiveValueEvaluator: FileEvaluator[PrimitiveLiteralExpressionElement] = + new FileEvaluator[PrimitiveLiteralExpressionElement] { + override def predictFilesNeededToEvaluate(a: PrimitiveLiteralExpressionElement, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = Set.empty[WomFile].validNel + } implicit val stringLiteralEvaluator: FileEvaluator[StringLiteral] = new FileEvaluator[StringLiteral] { override def predictFilesNeededToEvaluate(a: StringLiteral, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = coerceTo match { + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = coerceTo match { case WomSingleFileType => Set[WomFile](WomSingleFile(a.value)).validNel case _ => Set.empty[WomFile].validNel } @@ -39,9 +44,11 @@ object LiteralEvaluators { override def predictFilesNeededToEvaluate(a: ObjectLiteral, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + coerceTo: 
WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = { def filesInObjectField(fieldAndWomTypeTuple: (String, WomType)): ErrorOr[Set[WomFile]] = { val (field, womType) = fieldAndWomTypeTuple a.elements.get(field) match { @@ -52,7 +59,10 @@ object LiteralEvaluators { coerceTo match { case WomCompositeType(mapping, _) => mapping.toList.traverse(filesInObjectField).map(_.flatten.toSet) - case _ => a.elements.values.toList.traverse(_.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)).map(_.toSet.flatten) + case _ => + a.elements.values.toList + .traverse(_.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) + .map(_.toSet.flatten) } } } @@ -61,34 +71,40 @@ object LiteralEvaluators { override def predictFilesNeededToEvaluate(a: MapLiteral, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { - a.elements.toList.flatMap { case (x,y) => List(x, y) }.traverse(_.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)).map(_.toSet.flatten) - } + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = + a.elements.toList + .flatMap { case (x, y) => List(x, y) } + .traverse(_.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) + .map(_.toSet.flatten) } implicit val arrayLiteralEvaluator: FileEvaluator[ArrayLiteral] = new FileEvaluator[ArrayLiteral] { override def predictFilesNeededToEvaluate(a: ArrayLiteral, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = a.elements.toList.traverse(_.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)).map(_.toSet.flatten) - } } implicit val pairLiteralEvaluator: FileEvaluator[PairLiteral] = new FileEvaluator[PairLiteral] { override def predictFilesNeededToEvaluate(a: PairLiteral, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = (a.left.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.right.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) mapN { _ ++ _ } - } + a.right.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + ) mapN { _ ++ _ } } } - diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LookupEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LookupEvaluators.scala index 586fe7a0cf1..390edda3c9e 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LookupEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/LookupEvaluators.scala @@ 
-17,39 +17,51 @@ object LookupEvaluators { override def predictFilesNeededToEvaluate(a: IdentifierLookup, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = Set.empty[WomFile].validNel } - implicit val expressionMemberAccessEvaluator: FileEvaluator[ExpressionMemberAccess] = new FileEvaluator[ExpressionMemberAccess] { - override def predictFilesNeededToEvaluate(a: ExpressionMemberAccess, - inputs: Map[String, WomValue], - ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { - a.expression.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + implicit val expressionMemberAccessEvaluator: FileEvaluator[ExpressionMemberAccess] = + new FileEvaluator[ExpressionMemberAccess] { + override def predictFilesNeededToEvaluate(a: ExpressionMemberAccess, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = + a.expression.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) } - } - implicit val identifierMemberAccessEvaluator: FileEvaluator[IdentifierMemberAccess] = new FileEvaluator[IdentifierMemberAccess] { - override def predictFilesNeededToEvaluate(a: IdentifierMemberAccess, - inputs: Map[String, WomValue], - ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = - Set.empty[WomFile].validNel - } + implicit val identifierMemberAccessEvaluator: FileEvaluator[IdentifierMemberAccess] = + new FileEvaluator[IdentifierMemberAccess] { + override def predictFilesNeededToEvaluate(a: IdentifierMemberAccess, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = + Set.empty[WomFile].validNel + } implicit val indexAccessFileEvaluator: FileEvaluator[IndexAccess] = new FileEvaluator[IndexAccess] { - override def predictFilesNeededToEvaluate(a: IndexAccess, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + override def predictFilesNeededToEvaluate(a: IndexAccess, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = (a.expressionElement.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.index.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) mapN { _ ++ _ } - } + a.index.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + ) mapN { _ ++ _ } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/TernaryIfEvaluator.scala 
b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/TernaryIfEvaluator.scala index 6b394abe810..ff42a04522c 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/TernaryIfEvaluator.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/TernaryIfEvaluator.scala @@ -15,12 +15,14 @@ object TernaryIfEvaluator { override def predictFilesNeededToEvaluate(a: TernaryIf, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = { + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = (a.condition.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.ifTrue.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), - a.ifFalse.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo)) mapN { _ ++ _ ++ _ } - } + a.ifTrue.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo), + a.ifFalse.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) + ) mapN { _ ++ _ ++ _ } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/UnaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/UnaryOperatorEvaluators.scala index 3a05cb4f1b2..00fde3a1c54 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/UnaryOperatorEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/files/UnaryOperatorEvaluators.scala @@ -21,9 +21,11 @@ object UnaryOperatorEvaluators { override def predictFilesNeededToEvaluate(a: A, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - coerceTo: WomType) - (implicit fileEvaluator: FileEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[WomFile]] = + coerceTo: WomType + )(implicit + fileEvaluator: FileEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[WomFile]] = a.argument.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/package.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/package.scala index ebc16cec2af..59f98ee63b5 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/package.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/package.scala @@ -14,18 +14,22 @@ import wdl.model.draft3.graph.ExpressionValueConsumer.ops._ package object expression { - implicit def expressionElementToWomExpression(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): WomExpressionMaker[ExpressionElement] = new WomExpressionMaker[ExpressionElement] { + implicit def expressionElementToWomExpression(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: 
ValueEvaluator[ExpressionElement] + ): WomExpressionMaker[ExpressionElement] = new WomExpressionMaker[ExpressionElement] { override def makeWomExpression(a: ExpressionElement, typeAliases: Map[String, WomType], - consumedValueLookup: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]): ErrorOr[WomExpression] = { + consumedValueLookup: Map[UnlinkedConsumedValueHook, GeneratedValueHandle] + ): ErrorOr[WomExpression] = { val consumedValueHooks = a.expressionConsumedValueHooks val neededLinkedValues = consumedValueHooks.toList.traverse { case c if consumedValueLookup.contains(c) => (c -> consumedValueLookup(c)).validNel - case missing => s"Could not create WOM expression for '$a': Found no generated value for consumed value $missing".invalidNel + case missing => + s"Could not create WOM expression for '$a': Found no generated value for consumed value $missing".invalidNel } neededLinkedValues flatMap { lookup => WdlomWomExpression.make(a, lookup.toMap) } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/BinaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/BinaryOperatorEvaluators.scala index 03f5d6d77ba..c18a0aadc84 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/BinaryOperatorEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/BinaryOperatorEvaluators.scala @@ -28,10 +28,11 @@ object BinaryOperatorEvaluators { implicit val remainderEvaluator: TypeEvaluator[Remainder] = forOperation(_.mod(_)) private def forOperation[A <: BinaryOperation](op: (WomType, WomType) => Try[WomType]) = new TypeEvaluator[A] { - override def evaluateType(a: A, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { - (a.left.evaluateType(linkedValues), - a.right.evaluateType(linkedValues)) flatMapN { (left, right) => op(left, right).toErrorOr } - } + override def evaluateType(a: A, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = + (a.left.evaluateType(linkedValues), a.right.evaluateType(linkedValues)) flatMapN { (left, right) => + op(left, right).toErrorOr + } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/EngineFunctionEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/EngineFunctionEvaluators.scala index 6911708a322..8268cdaac72 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/EngineFunctionEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/EngineFunctionEvaluators.scala @@ -17,232 +17,241 @@ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl.expressionElementWriter object EngineFunctionEvaluators { - implicit val stdoutFunctionEvaluator: TypeEvaluator[StdoutElement.type] = new TypeEvaluator[ExpressionElement.StdoutElement.type] { - override def evaluateType(a: ExpressionElement.StdoutElement.type, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { - WomSingleFileType.validNel + implicit val stdoutFunctionEvaluator: TypeEvaluator[StdoutElement.type] = + new 
TypeEvaluator[ExpressionElement.StdoutElement.type] { + override def evaluateType(a: ExpressionElement.StdoutElement.type, + linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle] + )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = + WomSingleFileType.validNel } - } - implicit val stderrFunctionEvaluator: TypeEvaluator[StderrElement.type] = new TypeEvaluator[ExpressionElement.StderrElement.type] { - override def evaluateType(a: ExpressionElement.StderrElement.type, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { - WomSingleFileType.validNel + implicit val stderrFunctionEvaluator: TypeEvaluator[StderrElement.type] = + new TypeEvaluator[ExpressionElement.StderrElement.type] { + override def evaluateType(a: ExpressionElement.StderrElement.type, + linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle] + )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = + WomSingleFileType.validNel } - } implicit val readLinesFunctionEvaluator: TypeEvaluator[ReadLines] = new TypeEvaluator[ReadLines] { - override def evaluateType(a: ReadLines, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadLines, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomArrayType(WomStringType)) - } } implicit val readTsvFunctionEvaluator: TypeEvaluator[ReadTsv] = new TypeEvaluator[ReadTsv] { - override def evaluateType(a: ReadTsv, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadTsv, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomArrayType(WomArrayType(WomStringType))) - } } implicit val readMapFunctionEvaluator: TypeEvaluator[ReadMap] = new TypeEvaluator[ReadMap] { - override def evaluateType(a: ReadMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomMapType(WomStringType, WomStringType)) - } } implicit val readObjectFunctionEvaluator: TypeEvaluator[ReadObject] = new TypeEvaluator[ReadObject] { - override def evaluateType(a: ReadObject, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadObject, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomObjectType) - } } 
implicit val readObjectsFunctionEvaluator: TypeEvaluator[ReadObjects] = new TypeEvaluator[ReadObjects] { - override def evaluateType(a: ReadObjects, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadObjects, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomArrayType(WomObjectType)) - } } implicit val readJsonFunctionEvaluator: TypeEvaluator[ReadJson] = new TypeEvaluator[ReadJson] { - override def evaluateType(a: ReadJson, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadJson, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = // we can't figure out the WomType of data without reading the file hence evaluate it to `WomAnyType` validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomAnyType) - } } implicit val readIntFunctionEvaluator: TypeEvaluator[ReadInt] = new TypeEvaluator[ReadInt] { - override def evaluateType(a: ReadInt, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadInt, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomIntegerType) - } } implicit val readStringFunctionEvaluator: TypeEvaluator[ReadString] = new TypeEvaluator[ReadString] { - override def evaluateType(a: ReadString, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadString, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomStringType) - } } implicit val readFloatFunctionEvaluator: TypeEvaluator[ReadFloat] = new TypeEvaluator[ReadFloat] { - override def evaluateType(a: ReadFloat, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadFloat, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit + expressionTypeEvaluator: TypeEvaluator[ExpressionElement] + ): ErrorOr[WomType] = validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomFloatType) - } } implicit val readBooleanFunctionEvaluator: TypeEvaluator[ReadBoolean] = new TypeEvaluator[ReadBoolean] { - override def evaluateType(a: ReadBoolean, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = { + override def evaluateType(a: ReadBoolean, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit expressionTypeEvaluator: 
TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomSingleFileType).map(_ => WomBooleanType)
-    }
   }
 
   implicit val writeLinesFunctionEvaluator: TypeEvaluator[WriteLines] = new TypeEvaluator[WriteLines] {
-    override def evaluateType(a: WriteLines, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: WriteLines, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomArrayType(WomStringType)).map(_ => WomSingleFileType)
-    }
   }
 
   implicit val writeTsvFunctionEvaluator: TypeEvaluator[WriteTsv] = new TypeEvaluator[WriteTsv] {
-    override def evaluateType(a: WriteTsv, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: WriteTsv, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomArrayType(WomArrayType(WomStringType))).map(_ => WomSingleFileType)
-    }
   }
 
   implicit val writeMapFunctionEvaluator: TypeEvaluator[WriteMap] = new TypeEvaluator[WriteMap] {
-    override def evaluateType(a: WriteMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: WriteMap, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomMapType(WomAnyType, WomAnyType)).map(_ => WomSingleFileType)
-    }
   }
 
   implicit val writeObjectFunctionEvaluator: TypeEvaluator[WriteObject] = new TypeEvaluator[WriteObject] {
-    override def evaluateType(a: WriteObject, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: WriteObject, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomObjectType).map(_ => WomSingleFileType)
-    }
   }
 
   implicit val writeObjectsFunctionEvaluator: TypeEvaluator[WriteObjects] = new TypeEvaluator[WriteObjects] {
-    override def evaluateType(a: WriteObjects, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: WriteObjects, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomArrayType(WomObjectType)).map(_ => WomSingleFileType)
-    }
   }
 
   implicit val writeJsonFunctionEvaluator: TypeEvaluator[WriteJson] = new TypeEvaluator[WriteJson] {
-    override def evaluateType(a: WriteJson, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: WriteJson, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       a.param.evaluateType(linkedValues).flatMap {
-      case v if WomBooleanType.isCoerceableFrom(v) ||
-        WomIntegerType.isCoerceableFrom(v) ||
-        WomFloatType.isCoerceableFrom(v) ||
-        WomStringType.isCoerceableFrom(v) ||
-        WomObjectType.isCoerceableFrom(v) ||
-        WomPairType(WomAnyType, WomAnyType).isCoerceableFrom(v) ||
-        WomArrayType(WomAnyType).isCoerceableFrom(v) => WomSingleFileType.validNel
-      case v => (s"Invalid parameter '${a.param}'. Valid input types are 'Boolean', 'String', 'Integer', 'Float', 'Object'," +
-        s" 'Pair[_, _]', 'Map[_, _] or 'Array[_]' but got '${v.friendlyName}'").invalidNel
+        case v
+            if WomBooleanType.isCoerceableFrom(v) ||
+              WomIntegerType.isCoerceableFrom(v) ||
+              WomFloatType.isCoerceableFrom(v) ||
+              WomStringType.isCoerceableFrom(v) ||
+              WomObjectType.isCoerceableFrom(v) ||
+              WomPairType(WomAnyType, WomAnyType).isCoerceableFrom(v) ||
+              WomArrayType(WomAnyType).isCoerceableFrom(v) =>
+          WomSingleFileType.validNel
+        case v =>
+          (s"Invalid parameter '${a.param}'. Valid input types are 'Boolean', 'String', 'Integer', 'Float', 'Object'," +
+            s" 'Pair[_, _]', 'Map[_, _] or 'Array[_]' but got '${v.friendlyName}'").invalidNel
       }
-    }
   }
 
   implicit val rangeFunctionEvaluator: TypeEvaluator[Range] = new TypeEvaluator[Range] {
-    override def evaluateType(a: Range, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Range, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomIntegerType).map(_ => WomArrayType(WomIntegerType))
-    }
   }
 
   implicit val transposeFunctionEvaluator: TypeEvaluator[Transpose] = new TypeEvaluator[Transpose] {
-    override def evaluateType(a: Transpose, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Transpose, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       a.param.evaluateType(linkedValues).flatMap {
         case a @ WomArrayType(WomArrayType(_)) => a.validNel
-        case foundType => s"Invalid parameter '${a.param}'. Expected 'Array[Array[_]]' but got '${foundType.stableName}'".invalidNel
+        case foundType =>
+          s"Invalid parameter '${a.param}'. Expected 'Array[Array[_]]' but got '${foundType.stableName}'".invalidNel
       }
-    }
   }
 
   implicit val lengthFunctionEvaluator: TypeEvaluator[Length] = new TypeEvaluator[Length] {
-    override def evaluateType(a: Length, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Length, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomArrayType(WomAnyType)).map(_ => WomIntegerType)
-    }
   }
 
   implicit val flattenFunctionEvaluator: TypeEvaluator[Flatten] = new TypeEvaluator[Flatten] {
-    override def evaluateType(a: Flatten, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Flatten, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       a.param.evaluateType(linkedValues).flatMap {
         case WomArrayType(inner @ WomArrayType(_)) => inner.validNel
-        case foundType => s"Invalid parameter '${a.param}'. Expected 'Array[Array[_]]' but got '${foundType.stableName}'".invalidNel
+        case foundType =>
+          s"Invalid parameter '${a.param}'. Expected 'Array[Array[_]]' but got '${foundType.stableName}'".invalidNel
       }
-    }
   }
 
   implicit val selectFirstFunctionEvaluator: TypeEvaluator[SelectFirst] = new TypeEvaluator[SelectFirst] {
-    override def evaluateType(a: SelectFirst, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: SelectFirst, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       a.param.evaluateType(linkedValues).flatMap {
         case WomArrayType(WomOptionalType(inner)) => inner.validNel
         case WomArrayType(alreadyNonOptional) => alreadyNonOptional.validNel
-        case foundType => s"Invalid parameter '${a.param}'. Expected an array of optional values (eg 'Array[X?]') but got '${foundType.stableName}'".invalidNel
+        case foundType =>
+          s"Invalid parameter '${a.param}'. Expected an array of optional values (eg 'Array[X?]') but got '${foundType.stableName}'".invalidNel
      }
-    }
   }
 
   implicit val selectAllFunctionEvaluator: TypeEvaluator[SelectAll] = new TypeEvaluator[SelectAll] {
-    override def evaluateType(a: SelectAll, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: SelectAll, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
      a.param.evaluateType(linkedValues).flatMap {
        case WomArrayType(WomOptionalType(inner)) => WomArrayType(inner).validNel
        case alreadyNonOptional: WomArrayType => alreadyNonOptional.validNel
-        case foundType => s"Invalid parameter '${a.param}'. Expected an array of optional values (eg 'Array[X?]') but got '${foundType.stableName}'".invalidNel
+        case foundType =>
+          s"Invalid parameter '${a.param}'. Expected an array of optional values (eg 'Array[X?]') but got '${foundType.stableName}'".invalidNel
      }
-    }
   }
 
   implicit val definedFunctionEvaluator: TypeEvaluator[Defined] = new TypeEvaluator[Defined] {
-    override def evaluateType(a: Defined, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Defined, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomOptionalType(WomAnyType)).map(_ => WomBooleanType)
-    }
   }
 
   implicit val floorFunctionEvaluator: TypeEvaluator[Floor] = new TypeEvaluator[Floor] {
-    override def evaluateType(a: Floor, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Floor, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomFloatType).map(_ => WomIntegerType)
-    }
   }
 
   implicit val ceilFunctionEvaluator: TypeEvaluator[Ceil] = new TypeEvaluator[Ceil] {
-    override def evaluateType(a: Ceil, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Ceil, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomFloatType).map(_ => WomIntegerType)
-    }
   }
 
   implicit val roundFunctionEvaluator: TypeEvaluator[Round] = new TypeEvaluator[Round] {
-    override def evaluateType(a: Round, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Round, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomFloatType).map(_ => WomIntegerType)
-    }
   }
 
   implicit val globFunctionTypeEvaluator: TypeEvaluator[Glob] = new TypeEvaluator[Glob] {
-    override def evaluateType(a: Glob, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Glob, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       validateParamType(a.param, linkedValues, WomStringType).map(_ => WomArrayType(WomSingleFileType))
-    }
   }
 
   implicit val sizeFunctionEvaluator: TypeEvaluator[Size] = new TypeEvaluator[Size] {
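[Note on the pattern above: nearly every hunk in this file reformats an instance of the `TypeEvaluator` typeclass, which maps a WDL engine function's parameter type to its result type and accumulates errors in `ErrorOr`. A minimal, self-contained sketch of the pattern follows; `FloorCall`, `typeOf` and the string-based "types" are illustrative stand-ins (assuming cats on the classpath), not the real Cromwell definitions:

    import cats.data.ValidatedNel
    import cats.syntax.validated._

    object TypeEvaluatorSketch {
      type ErrorOr[A] = ValidatedNel[String, A]

      trait TypeEvaluator[A] {
        // Returns the inferred result type, or an accumulated error.
        def evaluateType(a: A): ErrorOr[String]
      }

      final case class FloorCall(paramType: String)

      // Mirrors the Floor instance in this diff: floor(Float) -> Int.
      implicit val floorEvaluator: TypeEvaluator[FloorCall] = new TypeEvaluator[FloorCall] {
        override def evaluateType(a: FloorCall): ErrorOr[String] =
          if (a.paramType == "Float") "Int".validNel
          else s"Invalid parameter. Expected 'Float' but got '${a.paramType}'".invalidNel
      }

      // Callers summon the instance implicitly, as the evaluators in this file do.
      def typeOf[A](a: A)(implicit ev: TypeEvaluator[A]): ErrorOr[String] = ev.evaluateType(a)
    }
]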
@@ -253,81 +262,94 @@ object EngineFunctionEvaluators {
       case _ => false
     }
 
-    override def evaluateType(a: Size, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Size, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = {
       val validatedSecondArg: ErrorOr[Unit] = a.secondParam match {
         case None => ().validNel
         case Some(arg) => validateParamType(arg, linkedValues, WomStringType).void
       }
       val validatedFirstArg: ErrorOr[Unit] = a.firstParam.evaluateType(linkedValues).flatMap {
         case t if suitableSizeType(t) => ().validNel
-        case other => s"Invalid first 'size' parameter. Expected File, File? Array[File] or Array[File?] but got ${other.stableName}".invalidNel
+        case other =>
+          s"Invalid first 'size' parameter. Expected File, File? Array[File] or Array[File?] but got ${other.stableName}".invalidNel
       }
 
       (validatedFirstArg, validatedSecondArg) mapN { (_, _) => WomFloatType }
     }
   }
 
   implicit val basenameFunctionEvaluator: TypeEvaluator[Basename] = new TypeEvaluator[Basename] {
-    override def evaluateType(a: Basename, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Basename, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = {
       val validatedSecondArg: ErrorOr[Unit] = a.secondParam match {
         case None => ().validNel
         case Some(arg) => validateParamType(arg, linkedValues, WomStringType).void
       }
 
-      (validateParamType(a.firstParam, linkedValues, WomSingleFileType),
-        validatedSecondArg) mapN { (_, _) => WomStringType }
+      (validateParamType(a.firstParam, linkedValues, WomSingleFileType), validatedSecondArg) mapN { (_, _) =>
+        WomStringType
+      }
     }
   }
 
-  private def crossOrZipType(arg1: ExpressionElement, arg2: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                            (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+  private def crossOrZipType(arg1: ExpressionElement,
+                             arg2: ExpressionElement,
+                             linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]
+  )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] =
     (arg1.evaluateType(linkedValues), arg2.evaluateType(linkedValues)) match {
       case (Valid(WomArrayType(left)), Valid(WomArrayType(right))) => WomArrayType(WomPairType(left, right)).validNel
-      case (Valid(otherLeft), Valid(WomArrayType(_))) => s"Invalid left parameter '${arg1.toWdlV1}'. Expected Array type but got '${otherLeft.stableName}'".invalidNel
-      case (Valid(WomArrayType(_)), Valid(otherRight)) => s"Invalid right parameter '${arg2.toWdlV1}'. Expected Array type but got '${otherRight.stableName}'".invalidNel
-      case (Valid(otherLeft), Valid(otherRight)) => s"Invalid left and right parameters '(${arg1.toWdlV1}, ${arg2.toWdlV1})'. Expected two Array types but got '(${otherLeft.stableName}, ${otherRight.stableName})'".invalidNel
+      case (Valid(otherLeft), Valid(WomArrayType(_))) =>
+        s"Invalid left parameter '${arg1.toWdlV1}'. Expected Array type but got '${otherLeft.stableName}'".invalidNel
+      case (Valid(WomArrayType(_)), Valid(otherRight)) =>
+        s"Invalid right parameter '${arg2.toWdlV1}'. Expected Array type but got '${otherRight.stableName}'".invalidNel
+      case (Valid(otherLeft), Valid(otherRight)) =>
+        s"Invalid left and right parameters '(${arg1.toWdlV1}, ${arg2.toWdlV1})'. Expected two Array types but got '(${otherLeft.stableName}, ${otherRight.stableName})'".invalidNel
      // One or more are invalid, so mapN function won't actually ever run:
      case (otherLeft, otherRight) => (otherLeft, otherRight) mapN { (_, _) => WomNothingType }
    }
-  }
 
   implicit val zipFunctionEvaluator: TypeEvaluator[Zip] = new TypeEvaluator[Zip] {
-    override def evaluateType(a: Zip, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Zip, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
      crossOrZipType(a.arg1, a.arg2, linkedValues)
-    }
   }
 
   implicit val crossFunctionEvaluator: TypeEvaluator[Cross] = new TypeEvaluator[Cross] {
-    override def evaluateType(a: Cross, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Cross, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
      crossOrZipType(a.arg1, a.arg2, linkedValues)
-    }
   }
 
   implicit val prefixFunctionEvaluator: TypeEvaluator[Prefix] = new TypeEvaluator[Prefix] {
-    override def evaluateType(a: Prefix, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: Prefix, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
      (validateParamType(a.prefix, linkedValues, WomStringType),
-        validateParamType(a.array, linkedValues, WomArrayType(WomStringType))
+       validateParamType(a.array, linkedValues, WomArrayType(WomStringType))
      ) mapN { (_, _) => WomArrayType(WomStringType) }
-    }
   }
 
   implicit val subFunctionEvaluator: TypeEvaluator[Sub] = new TypeEvaluator[Sub] {
-    override def evaluateType(a: Sub, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
-
+    override def evaluateType(a: Sub, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
      (validateParamType(a.input, linkedValues, WomSingleFileType),
-      validateParamType(a.pattern, linkedValues, WomSingleFileType),
-      validateParamType(a.replace, linkedValues, WomSingleFileType)) mapN { (_, _, _) => WomStringType }
-    }
+       validateParamType(a.pattern, linkedValues, WomSingleFileType),
+       validateParamType(a.replace, linkedValues, WomSingleFileType)
+      ) mapN { (_, _, _) => WomStringType }
   }
 
-  def validateParamType(param: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle], expectedType: WomType)
-                       (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+  def validateParamType(param: ExpressionElement,
+                        linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle],
+                        expectedType: WomType
+  )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] =
    param.evaluateType(linkedValues).flatMap { foundType =>
-      if (expectedType.isCoerceableFrom(foundType)) { foundType.validNel } else { s"Invalid parameter '$param'. Expected '${expectedType.stableName}' but got '${foundType.stableName}'".invalidNel }
+      if (expectedType.isCoerceableFrom(foundType)) { foundType.validNel }
+      else {
+        s"Invalid parameter '$param'. Expected '${expectedType.stableName}' but got '${foundType.stableName}'".invalidNel
+      }
    }
-  }
 }
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LiteralEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LiteralEvaluators.scala
index c51e106cc46..9e74672f5bc 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LiteralEvaluators.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LiteralEvaluators.scala
@@ -13,36 +13,44 @@ import wdl.model.draft3.graph.expression.TypeEvaluator.ops._
 import wom.types._
 
 object LiteralEvaluators {
-  implicit val primitiveTypeEvaluator: TypeEvaluator[PrimitiveLiteralExpressionElement] = new TypeEvaluator[PrimitiveLiteralExpressionElement] {
-    override def evaluateType(a: PrimitiveLiteralExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] =
-      a.value.womType.validNel
-  }
+  implicit val primitiveTypeEvaluator: TypeEvaluator[PrimitiveLiteralExpressionElement] =
+    new TypeEvaluator[PrimitiveLiteralExpressionElement] {
+      override def evaluateType(a: PrimitiveLiteralExpressionElement,
+                                linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]
+      )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] =
+        a.value.womType.validNel
+    }
 
-  implicit val noneLiteralTypeEvaluator: TypeEvaluator[NoneLiteralElement.type] = new TypeEvaluator[NoneLiteralElement.type] {
-    override def evaluateType(a: NoneLiteralElement.type, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] =
-      WomOptionalType(WomNothingType).validNel
-  }
+  implicit val noneLiteralTypeEvaluator: TypeEvaluator[NoneLiteralElement.type] =
+    new TypeEvaluator[NoneLiteralElement.type] {
+      override def evaluateType(a: NoneLiteralElement.type,
+                                linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]
+      )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] =
+        WomOptionalType(WomNothingType).validNel
+    }
 
   implicit val objectLiteralTypeEvaluator: TypeEvaluator[ObjectLiteral] = new TypeEvaluator[ObjectLiteral] {
-    override def evaluateType(a: ObjectLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = WomObjectType.validNel
+    override def evaluateType(a: ObjectLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = WomObjectType.validNel
   }
 
   implicit val stringLiteralTypeEvaluator: TypeEvaluator[StringLiteral] = new TypeEvaluator[StringLiteral] {
-    override def evaluateType(a: StringLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = WomStringType.validNel
+    override def evaluateType(a: StringLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = WomStringType.validNel
   }
 
   implicit val stringExpressionTypeEvaluator: TypeEvaluator[StringExpression] = new TypeEvaluator[StringExpression] {
-    override def evaluateType(a: StringExpression, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = WomStringType.validNel
+    override def evaluateType(a: StringExpression, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = WomStringType.validNel
   }
 
   implicit val mapLiteralTypeEvaluator: TypeEvaluator[MapLiteral] = new TypeEvaluator[MapLiteral] {
-    override def evaluateType(a: MapLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: MapLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = {
       val keyTypes = a.elements.keySet.toList.traverse { x: ExpressionElement => x.evaluateType(linkedValues) }
       val commonKeyType: ErrorOr[WomType] = keyTypes.map(WomType.homogeneousTypeFromTypes)
@@ -55,8 +63,9 @@ object LiteralEvaluators {
   }
 
   implicit val arrayLiteralTypeEvaluator: TypeEvaluator[ArrayLiteral] = new TypeEvaluator[ArrayLiteral] {
-    override def evaluateType(a: ArrayLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: ArrayLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = {
       val types = a.elements.toList.traverse { x: ExpressionElement => x.evaluateType(linkedValues) }
 
       val commonType: ErrorOr[WomType] = types.map(WomType.homogeneousTypeFromTypes)
@@ -66,8 +75,9 @@ object LiteralEvaluators {
   }
 
   implicit val pairLiteralTypeEvaluator: TypeEvaluator[PairLiteral] = new TypeEvaluator[PairLiteral] {
-    override def evaluateType(a: PairLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: PairLiteral, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] = {
       val leftType = a.left.evaluateType(linkedValues)
       val rightType = a.right.evaluateType(linkedValues)
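[Note: the map/array literal evaluators above infer a single element type for the whole literal via WomType.homogeneousTypeFromTypes. A simplified sketch of the general idea, an assumption about its shape rather than Cromwell's actual implementation:

    object CommonTypeSketch {
      sealed trait Ty
      case object IntTy extends Ty
      case object StringTy extends Ty
      case object AnyTy extends Ty

      // If all elements share one type, use it; otherwise fall back to a top type.
      def commonType(types: List[Ty]): Ty = types.distinct match {
        case Nil => AnyTy
        case single :: Nil => single
        case _ => AnyTy
      }

      // commonType(List(IntTy, IntTy)) == IntTy
      // commonType(List(IntTy, StringTy)) == AnyTy
    }
]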
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LookupEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LookupEvaluators.scala
index 43a563dd3ee..0221390e8ca 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LookupEvaluators.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/LookupEvaluators.scala
@@ -15,56 +15,72 @@ import wom.types._
 
 object LookupEvaluators {
 
   implicit val identifierLookupTypeEvaluator: TypeEvaluator[IdentifierLookup] = new TypeEvaluator[IdentifierLookup] {
-    override def evaluateType(a: IdentifierLookup, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: IdentifierLookup, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       linkedValues.collectFirst {
         case (UnlinkedIdentifierHook(id), gen) if a.identifier == id => gen.womType
       } match {
         case Some(womType) => womType.validNel
-        case None => s"Type evaluation failure. No suitable type found for identifier lookup '${a.identifier}' amongst {${linkedValues.map(_._2.linkableName).mkString(", ")}}".invalidNel
+        case None =>
+          s"Type evaluation failure. No suitable type found for identifier lookup '${a.identifier}' amongst {${linkedValues
+            .map(_._2.linkableName)
+            .mkString(", ")}}".invalidNel
       }
-    }
   }
 
-  implicit val expressionMemberAccessEvaluator: TypeEvaluator[ExpressionMemberAccess] = new TypeEvaluator[ExpressionMemberAccess] {
-    override def evaluateType(a: ExpressionMemberAccess, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
-      val baseType = a.expression.evaluateType(linkedValues)
-      baseType flatMap { doLookup(_, a.memberAccessTail) }
+  implicit val expressionMemberAccessEvaluator: TypeEvaluator[ExpressionMemberAccess] =
+    new TypeEvaluator[ExpressionMemberAccess] {
+      override def evaluateType(a: ExpressionMemberAccess,
+                                linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]
+      )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+        val baseType = a.expression.evaluateType(linkedValues)
+        baseType flatMap { doLookup(_, a.memberAccessTail) }
+      }
     }
-  }
 
-  implicit val identifierMemberAccessEvaluator: TypeEvaluator[IdentifierMemberAccess] = new TypeEvaluator[IdentifierMemberAccess] {
-    override def evaluateType(a: IdentifierMemberAccess, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
-      val generatedValueHandle = linkedValues.get(UnlinkedCallOutputOrIdentifierAndMemberAccessHook(a.first, a.second))
+  implicit val identifierMemberAccessEvaluator: TypeEvaluator[IdentifierMemberAccess] =
+    new TypeEvaluator[IdentifierMemberAccess] {
+      override def evaluateType(a: IdentifierMemberAccess,
+                                linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]
+      )(implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+        val generatedValueHandle =
+          linkedValues.get(UnlinkedCallOutputOrIdentifierAndMemberAccessHook(a.first, a.second))
 
-      generatedValueHandle match {
-        case Some(GeneratedIdentifierValueHandle(a.first, womType)) => doLookup(womType, NonEmptyList(a.second, a.memberAccessTail.toList))
-        case Some(GeneratedCallOutputValueHandle(a.first, a.second, womType)) => NonEmptyList.fromList(a.memberAccessTail.toList) match {
-          case Some(tailList) => doLookup(womType, tailList)
-          case None => womType.validNel
+        generatedValueHandle match {
+          case Some(GeneratedIdentifierValueHandle(a.first, womType)) =>
+            doLookup(womType, NonEmptyList(a.second, a.memberAccessTail.toList))
+          case Some(GeneratedCallOutputValueHandle(a.first, a.second, womType)) =>
+            NonEmptyList.fromList(a.memberAccessTail.toList) match {
+              case Some(tailList) => doLookup(womType, tailList)
+              case None => womType.validNel
+            }
+          case _ =>
+            s"Type evaluation failure. No suitable type found for identifier lookup '${a.first}' or '${a.first}.${a.second}' amongst {${linkedValues
+              .map(_._2.linkableName)
+              .mkString(", ")}}".invalidNel
         }
-        case _ => s"Type evaluation failure. No suitable type found for identifier lookup '${a.first}' or '${a.first}.${a.second}' amongst {${linkedValues.map(_._2.linkableName).mkString(", ")}}".invalidNel
       }
     }
-  }
 
   implicit val indexAccessTypeEvaluator: TypeEvaluator[IndexAccess] = new TypeEvaluator[IndexAccess] {
-    override def evaluateType(a: IndexAccess, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: IndexAccess, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(
+      implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       (a.expressionElement.evaluateType(linkedValues), a.index.evaluateType(linkedValues), a.index.validNel) flatMapN {
         case (a: WomArrayType, WomIntegerType, _) => a.memberType.validNel
-        case (WomMapType(keyType, valueType), lookupType, _) if keyType.isCoerceableFrom(lookupType) => valueType.validNel
-        case (WomCompositeType(typeMap, _), WomStringType, StringLiteral(str)) => typeMap.get(str) match {
-          case Some(innerType) => innerType.validNel
-          case None => s"Type evaluation failed. No such field '$str' for expression $a".invalidNel
-        }
+        case (WomMapType(keyType, valueType), lookupType, _) if keyType.isCoerceableFrom(lookupType) =>
+          valueType.validNel
+        case (WomCompositeType(typeMap, _), WomStringType, StringLiteral(str)) =>
+          typeMap.get(str) match {
+            case Some(innerType) => innerType.validNel
+            case None => s"Type evaluation failed. No such field '$str' for expression $a".invalidNel
+          }
         case (WomObjectType, WomStringType, _) => WomAnyType.validNel
         case (WomAnyType, _, _) => WomAnyType.validNel
-        case (otherObject, otherKey, _) => s"Type evaluation failed for $a. Cannot dereference a ${otherObject.stableName} value using a ${otherKey.stableName} key".invalidNel
+        case (otherObject, otherKey, _) =>
+          s"Type evaluation failed for $a. Cannot dereference a ${otherObject.stableName} value using a ${otherKey.stableName} key".invalidNel
       }
-    }
   }
 
   /**
@@ -81,7 +97,8 @@ object LookupEvaluators {
     val tail = NonEmptyList.fromList(lookupChain.tail)
 
     val thisValue: ErrorOr[WomType] = womType match {
-      case WomCompositeType(typeMap, _) => typeMap.get(key).toErrorOr(s"No such field '$key' on type ${womType.stableName}.")
+      case WomCompositeType(typeMap, _) =>
+        typeMap.get(key).toErrorOr(s"No such field '$key' on type ${womType.stableName}.")
       case WomObjectType => WomAnyType.validNel
       case WomPairType(left, _) if key == "left" => left.validNel
       case WomPairType(_, right) if key == "right" => right.validNel
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/TernaryIfEvaluator.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/TernaryIfEvaluator.scala
index ba90a5c0bf0..ca4b1d61a7f 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/TernaryIfEvaluator.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/TernaryIfEvaluator.scala
@@ -13,14 +13,15 @@ import wom.types.{WomBooleanType, WomType}
 object TernaryIfEvaluator {
 
   implicit val ternaryIfEvaluator: TypeEvaluator[TernaryIf] = new TypeEvaluator[TernaryIf] {
-    override def evaluateType(a: TernaryIf, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: TernaryIf, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       a.condition.evaluateType(linkedValues) flatMap {
         case WomBooleanType =>
           (a.ifTrue.evaluateType(linkedValues): ErrorOr[WomType],
-            a.ifFalse.evaluateType(linkedValues): ErrorOr[WomType]) mapN { (tType, fType) => WomType.homogeneousTypeFromTypes(Seq(tType, fType)) }
+           a.ifFalse.evaluateType(linkedValues): ErrorOr[WomType]
+          ) mapN { (tType, fType) => WomType.homogeneousTypeFromTypes(Seq(tType, fType)) }
         case other => s"Condition should have evaluated to a Boolean but instead got ${other.stableName}".invalidNel
       }
-    }
   }
 }
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/UnaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/UnaryOperatorEvaluators.scala
index 70368a692f5..3c2cb62721e 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/UnaryOperatorEvaluators.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/types/UnaryOperatorEvaluators.scala
@@ -19,9 +19,9 @@ object UnaryOperatorEvaluators {
   implicit val logicalNotEvaluator: TypeEvaluator[LogicalNot] = forOperation(_.not)
 
   private def forOperation[A <: UnaryOperation](op: WomType => Try[WomType]) = new TypeEvaluator[A] {
-    override def evaluateType(a: A, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])
-                             (implicit expressionTypeEvaluator: TypeEvaluator[ExpressionElement]): ErrorOr[WomType] = {
+    override def evaluateType(a: A, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])(implicit
+      expressionTypeEvaluator: TypeEvaluator[ExpressionElement]
+    ): ErrorOr[WomType] =
       a.argument.evaluateType(linkedValues) flatMap { op(_).toErrorOr }
-    }
   }
 }
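[Note: the next file reformats the value-level binary operators, whose interesting piece is the short-circuit hook: the left operand is evaluated first, and if a partial function matches its value the right operand is never evaluated (true || _ and false && _). A minimal sketch of that control flow; the Either-based types and `binaryOp`/`or` names are simplifications of the real ErrorOr/WomValue machinery:

    object ShortCircuitSketch {
      def binaryOp(left: => Either[String, Boolean],
                   right: => Either[String, Boolean],
                   op: (Boolean, Boolean) => Boolean,
                   shortCircuit: PartialFunction[Boolean, Boolean]
      ): Either[String, Boolean] =
        left.flatMap { l =>
          if (shortCircuit.isDefinedAt(l)) Right(shortCircuit(l)) // right never forced
          else right.map(r => op(l, r))
        }

      // or-with-short-circuit: the right side is skipped when the left is true
      def or(l: => Either[String, Boolean], r: => Either[String, Boolean]): Either[String, Boolean] =
        binaryOp(l, r, _ || _, { case true => true })
    }
]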
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/BinaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/BinaryOperatorEvaluators.scala
index 151ba26a4a6..6f34ed91230 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/BinaryOperatorEvaluators.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/BinaryOperatorEvaluators.scala
@@ -16,8 +16,10 @@ import wom.values.{WomBoolean, WomOptionalValue, WomValue}
 import scala.util.Try
 
 object BinaryOperatorEvaluators {
-  implicit val logicalOrEvaluator: ValueEvaluator[LogicalOr] = forOperationWithShortCircuit(_.or(_), shortCircuit = { case WomBoolean(true) => WomBoolean(true) })
-  implicit val logicalAndEvaluator: ValueEvaluator[LogicalAnd] = forOperationWithShortCircuit(_.and(_), shortCircuit = { case WomBoolean(false) => WomBoolean(false) })
+  implicit val logicalOrEvaluator: ValueEvaluator[LogicalOr] =
+    forOperationWithShortCircuit(_.or(_), shortCircuit = { case WomBoolean(true) => WomBoolean(true) })
+  implicit val logicalAndEvaluator: ValueEvaluator[LogicalAnd] =
+    forOperationWithShortCircuit(_.and(_), shortCircuit = { case WomBoolean(false) => WomBoolean(false) })
   implicit val equalsEvaluator: ValueEvaluator[Equals] = forOperation(_.equals(_))
   implicit val notEqualsEvaluator: ValueEvaluator[NotEquals] = forOperation(_.notEquals(_))
   implicit val lessThanEvaluator: ValueEvaluator[LessThan] = forOperation(_.lessThan(_))
@@ -33,14 +35,14 @@ object BinaryOperatorEvaluators {
   private def forOperation[A <: BinaryOperation](op: (WomValue, WomValue) => Try[WomValue]): ValueEvaluator[A] =
     forOperationWithShortCircuit(op, PartialFunction.empty[WomValue, WomValue])
 
-
   private def forOperationWithShortCircuit[A <: BinaryOperation](op: (WomValue, WomValue) => Try[WomValue],
-                                                                 shortCircuit: PartialFunction[WomValue, WomValue]) = new ValueEvaluator[A] {
+                                                                 shortCircuit: PartialFunction[WomValue, WomValue]
+  ) = new ValueEvaluator[A] {
     override def evaluateValue(a: A,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
       a.left.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) flatMap { left =>
         if (shortCircuit.isDefinedAt(left.value)) {
           EvaluatedValue(shortCircuit(left.value), left.sideEffectFiles).validNel
@@ -50,10 +52,13 @@ object BinaryOperatorEvaluators {
 
           // Allow unsupplied optionals, but only if we're instantiating a command:
           val handleOptionals = rawResult.recover {
-            case OptionalNotSuppliedException(_) if forCommandInstantiationOptions.isDefined => WomOptionalValue(WomStringType, None)
+            case OptionalNotSuppliedException(_) if forCommandInstantiationOptions.isDefined =>
+              WomOptionalValue(WomStringType, None)
           }
 
-          handleOptionals.toErrorOr map { newValue => EvaluatedValue(newValue, left.sideEffectFiles ++ right.sideEffectFiles) }
+          handleOptionals.toErrorOr map { newValue =>
+            EvaluatedValue(newValue, left.sideEffectFiles ++ right.sideEffectFiles)
+          }
         }
       }
   }
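[Note: the next file's read_* evaluators all funnel through a helper that awaits an asynchronous read with a size cap; the 60-second timeout and failOnOverflow = true appear verbatim in the diff. A minimal sketch, where the `read` parameter is a stand-in for IoFunctionSet.readFile rather than the real API:

    import scala.concurrent.{Await, Future}
    import scala.concurrent.duration._
    import scala.util.Try

    object BoundedReadSketch {
      def readFile(read: (String, Option[Int], Boolean) => Future[String],
                   path: String,
                   sizeLimit: Int
      ): Try[String] =
        // Try captures both the timeout and an oversized-file failure.
        Try(Await.result(read(path, Option(sizeLimit), true), 60.seconds)) // true = fail on overflow
    }
]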
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala
index f40c8a9431a..ffaf16ad0d2 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala
@@ -16,7 +16,19 @@ import wdl4s.parser.MemoryUnit
 import wom.expression.IoFunctionSet
 import wom.types._
 import wom.values.WomArray.WomArrayLike
-import wom.values.{WomArray, WomBoolean, WomFloat, WomInteger, WomMap, WomObject, WomOptionalValue, WomPair, WomSingleFile, WomString, WomValue}
+import wom.values.{
+  WomArray,
+  WomBoolean,
+  WomFloat,
+  WomInteger,
+  WomMap,
+  WomObject,
+  WomOptionalValue,
+  WomPair,
+  WomSingleFile,
+  WomString,
+  WomValue
+}
 import wom.types.coercion.ops._
 import wom.types.coercion.defaults._
 import wom.types.coercion.WomTypeCoercer
@@ -37,8 +49,8 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: StdoutElement.type,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] =
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] =
       EvaluatedValue(WomSingleFile(ioFunctionSet.pathFunctions.stdout), Seq.empty).validNel
   }
 
@@ -46,123 +58,134 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: StderrElement.type,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] =
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] =
       EvaluatedValue(WomSingleFile(ioFunctionSet.pathFunctions.stderr), Seq.empty).validNel
   }
 
   private val ReadWaitTimeout = 60.seconds
-  private def readFile(fileToRead: WomSingleFile, ioFunctionSet: IoFunctionSet, sizeLimit: Int) = {
-    Try(Await.result(ioFunctionSet.readFile(fileToRead.value, Option(sizeLimit), failOnOverflow = true), ReadWaitTimeout))
-  }
+  private def readFile(fileToRead: WomSingleFile, ioFunctionSet: IoFunctionSet, sizeLimit: Int) =
+    Try(
+      Await.result(ioFunctionSet.readFile(fileToRead.value, Option(sizeLimit), failOnOverflow = true), ReadWaitTimeout)
+    )
 
   implicit val readLinesFunctionEvaluator: ValueEvaluator[ReadLines] = new ValueEvaluator[ReadLines] {
     override def evaluateValue(a: ReadLines,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = {
-      processValidatedSingleValue[WomSingleFile, WomArray](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] =
+      processValidatedSingleValue[WomSingleFile, WomArray](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
-          //validate
+          // validate
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readLinesLimit)
          // Users expect an empty file to return zero lines [] not [""]
-          lines = if (read.nonEmpty) {
-            read.split(System.lineSeparator).toList
-          } else {
-            List.empty
-          }
+          lines =
+            if (read.nonEmpty) {
+              read.split(System.lineSeparator).toList
+            } else {
+              List.empty
+            }
        } yield EvaluatedValue(WomArray(lines map WomString.apply), Seq.empty)
 
        tryResult.toErrorOr.contextualizeErrors(s"""read_lines("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readTsvFunctionEvaluator: ValueEvaluator[ReadTsv] = new ValueEvaluator[ReadTsv] {
     override def evaluateValue(a: ReadTsv,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = {
-      processValidatedSingleValue[WomSingleFile, WomArray](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] =
+      processValidatedSingleValue[WomSingleFile, WomArray](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readTsvLimit)
          tsv <- Try(WomArray.fromTsv(read))
        } yield EvaluatedValue(tsv, Seq.empty)
 
        tryResult.toErrorOr.contextualizeErrors(s"""read_tsv("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readMapFunctionEvaluator: ValueEvaluator[ReadMap] = new ValueEvaluator[ReadMap] {
     override def evaluateValue(a: ReadMap,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomMap]] = {
-      processValidatedSingleValue[WomSingleFile, WomMap](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomMap]] =
+      processValidatedSingleValue[WomSingleFile, WomMap](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readMapLimit)
          map <- WomMap.fromTsv(read)
        } yield EvaluatedValue(map, Seq.empty)
 
        tryResult.toErrorOr.contextualizeErrors(s"""read_map("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readObjectFunctionEvaluator: ValueEvaluator[ReadObject] = new ValueEvaluator[ReadObject] {
     override def evaluateValue(a: ReadObject,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomObject]] = {
-      processValidatedSingleValue[WomSingleFile, WomObject](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomObject]] =
+      processValidatedSingleValue[WomSingleFile, WomObject](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readObjectLimit)
          obj <- WomObject.fromTsv(read)
        } yield obj
 
        val rightSize: ErrorOr[WomObject] = tryResult.toErrorOr flatMap {
          case oneItem: Array[WomObject] if oneItem.length == 1 => oneItem.head.validNel
-          case other: Array[WomObject] => s"Exactly 1 TSV object expected in input file (ie 2 lines: headers and data), but instead got an array of ${other.length} entries.".invalidNel
+          case other: Array[WomObject] =>
+            s"Exactly 1 TSV object expected in input file (ie 2 lines: headers and data), but instead got an array of ${other.length} entries.".invalidNel
        }
 
        rightSize.map(EvaluatedValue(_, Seq.empty)).contextualizeErrors(s"""read_object("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readObjectsFunctionEvaluator: ValueEvaluator[ReadObjects] = new ValueEvaluator[ReadObjects] {
     override def evaluateValue(a: ReadObjects,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = {
-      processValidatedSingleValue[WomSingleFile, WomArray](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] =
+      processValidatedSingleValue[WomSingleFile, WomArray](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readObjectLimit)
          objects <- WomObject.fromTsv(read)
        } yield WomArray(objects.toIndexedSeq)
 
-        tryResult.map(EvaluatedValue(_, Seq.empty)).toErrorOr.contextualizeErrors(s"""read_objects("${fileToRead.value}")""")
+        tryResult
+          .map(EvaluatedValue(_, Seq.empty))
+          .toErrorOr
+          .contextualizeErrors(s"""read_objects("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readJsonFunctionEvaluator: ValueEvaluator[ReadJson] = new ValueEvaluator[ReadJson] {
     override def evaluateValue(a: ReadJson,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = {
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = {
 
-      def convertJsonToWom(jsValue: JsValue): Try[WomValue] = {
+      def convertJsonToWom(jsValue: JsValue): Try[WomValue] =
        jsValue match {
-          case _: JsNumber => WomIntegerType.coerceRawValue(jsValue).recoverWith { case _ => WomFloatType.coerceRawValue(jsValue) }
+          case _: JsNumber =>
+            WomIntegerType.coerceRawValue(jsValue).recoverWith { case _ => WomFloatType.coerceRawValue(jsValue) }
          case _: JsString => WomStringType.coerceRawValue(jsValue)
          case _: JsBoolean => WomBooleanType.coerceRawValue(jsValue)
          case _: JsArray => WomArrayType(WomAnyType).coerceRawValue(jsValue)
          case _ => WomObjectType.coerceRawValue(jsValue)
        }
-      }
 
       def readJson(fileToRead: WomSingleFile): ErrorOr[EvaluatedValue[WomValue]] = {
         val tryResult: Try[WomValue] = for {
@@ -171,13 +194,15 @@ object EngineFunctionEvaluators {
           womValue <- convertJsonToWom(jsValue)
         } yield womValue
 
-        tryResult.map(EvaluatedValue(_, Seq.empty)).toErrorOr.contextualizeErrors(s"""read_json("${fileToRead.value}")""")
+        tryResult
+          .map(EvaluatedValue(_, Seq.empty))
+          .toErrorOr
+          .contextualizeErrors(s"""read_json("${fileToRead.value}")""")
       }
 
-      def convertToSingleFile(womValue: WomValue): ErrorOr[WomSingleFile] = {
+      def convertToSingleFile(womValue: WomValue): ErrorOr[WomSingleFile] =
        if (womValue.coercionDefined[WomSingleFile]) womValue.coerceToType[WomSingleFile]
        else s"Expected File argument but got ${womValue.womType.stableName}".invalidNel
-      }
 
       for {
         evaluatedValue <- a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
@@ -191,63 +216,79 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: ReadInt,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = {
-      processValidatedSingleValue[WomSingleFile, WomInteger](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] =
+      processValidatedSingleValue[WomSingleFile, WomInteger](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readIntLimit)
          asInt <- Try(read.trim.toInt)
        } yield WomInteger(asInt)
 
-        tryResult.map(EvaluatedValue(_, Seq.empty)).toErrorOr.contextualizeErrors(s"""read_int("${fileToRead.value}")""")
+        tryResult
+          .map(EvaluatedValue(_, Seq.empty))
+          .toErrorOr
+          .contextualizeErrors(s"""read_int("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readStringFunctionEvaluator: ValueEvaluator[ReadString] = new ValueEvaluator[ReadString] {
     override def evaluateValue(a: ReadString,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = {
-      processValidatedSingleValue[WomSingleFile, WomString](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] =
+      processValidatedSingleValue[WomSingleFile, WomString](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readStringLimit)
        } yield WomString(read.trim)
 
-        tryResult.map(EvaluatedValue(_, Seq.empty)).toErrorOr.contextualizeErrors(s"""read_string("${fileToRead.value}")""")
+        tryResult
+          .map(EvaluatedValue(_, Seq.empty))
+          .toErrorOr
+          .contextualizeErrors(s"""read_string("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readFloatFunctionEvaluator: ValueEvaluator[ReadFloat] = new ValueEvaluator[ReadFloat] {
     override def evaluateValue(a: ReadFloat,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomFloat]] = {
-      processValidatedSingleValue[WomSingleFile, WomFloat](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomFloat]] =
+      processValidatedSingleValue[WomSingleFile, WomFloat](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readFloatLimit)
          asFloat <- Try(read.trim.toDouble)
        } yield WomFloat(asFloat)
 
-        tryResult.map(EvaluatedValue(_, Seq.empty)).toErrorOr.contextualizeErrors(s"""read_float("${fileToRead.value}")""")
+        tryResult
+          .map(EvaluatedValue(_, Seq.empty))
+          .toErrorOr
+          .contextualizeErrors(s"""read_float("${fileToRead.value}")""")
      }
-    }
   }
 
   implicit val readBooleanFunctionEvaluator: ValueEvaluator[ReadBoolean] = new ValueEvaluator[ReadBoolean] {
     override def evaluateValue(a: ReadBoolean,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomBoolean]] = {
-      processValidatedSingleValue[WomSingleFile, WomBoolean](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { fileToRead =>
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomBoolean]] =
+      processValidatedSingleValue[WomSingleFile, WomBoolean](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { fileToRead =>
        val tryResult = for {
          read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readBoolLimit)
          asBool <- Try(read.trim.toBoolean)
        } yield WomBoolean(asBool)
 
-        tryResult.map(EvaluatedValue(_, Seq.empty)).toErrorOr.contextualizeErrors(s"""read_boolean("${fileToRead.value}")""")
+        tryResult
+          .map(EvaluatedValue(_, Seq.empty))
+          .toErrorOr
+          .contextualizeErrors(s"""read_boolean("${fileToRead.value}")""")
      }
-    }
   }
 
   private val WriteWaitTimeout = 10.minutes
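[Note: the write_* evaluators reformatted below all share one shape: serialize the value, write it to a fresh file, and record that file as a command-setup side effect so the backend can stage it. A simplified sketch of that flow; `Evaluated`, `serialize` and `write` are stand-ins for the real ValueEvaluation/writeContent helpers:

    import scala.util.Try

    object WriteFunctionSketch {
      final case class Evaluated(path: String, sideEffectFiles: Seq[String])

      def writeFunction(serialize: () => Try[String],
                        write: String => Try[String] // returns the new file's path
      ): Try[Evaluated] =
        for {
          serialized <- serialize()
          path <- write(serialized)
        } yield Evaluated(path, Seq(path)) // the written file is itself a side effect
    }
]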
@@ -260,17 +301,26 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: WriteLines,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
       val functionName = "write_lines"
-      processValidatedSingleValue[WomArray, WomSingleFile](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { stringsToWrite =>
+      processValidatedSingleValue[WomArray, WomSingleFile](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { stringsToWrite =>
         val tryResult = for {
-          serialized <- ValueEvaluation.serializeWomValue(functionName, stringsToWrite, defaultIfOptionalEmpty = WomArray(WomArrayType(WomStringType), Seq.empty))
+          serialized <- ValueEvaluation.serializeWomValue(functionName,
+                                                          stringsToWrite,
+                                                          defaultIfOptionalEmpty =
+                                                            WomArray(WomArrayType(WomStringType), Seq.empty)
+          )
           written <- writeContent(functionName, ioFunctionSet, serialized)
         } yield written
 
-        tryResult.map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v)))).toErrorOr.contextualizeErrors(s"""$functionName(...)""")
-      } (coercer = WomArrayType(WomStringType))
+        tryResult
+          .map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v))))
+          .toErrorOr
+          .contextualizeErrors(s"""$functionName(...)""")
+      }(coercer = WomArrayType(WomStringType))
     }
   }
 
@@ -278,18 +328,26 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: WriteTsv,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
       val functionName = "write_tsv"
-      processValidatedSingleValue[WomArray, WomSingleFile](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { objectToWrite =>
-
+      processValidatedSingleValue[WomArray, WomSingleFile](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { objectToWrite =>
         val tryResult = for {
-          serialized <- ValueEvaluation.serializeWomValue(functionName, objectToWrite, defaultIfOptionalEmpty = WomArray(WomArrayType(WomStringType), List.empty[WomValue]))
+          serialized <- ValueEvaluation.serializeWomValue(functionName,
+                                                          objectToWrite,
+                                                          defaultIfOptionalEmpty =
+                                                            WomArray(WomArrayType(WomStringType), List.empty[WomValue])
+          )
           written <- writeContent(functionName, ioFunctionSet, serialized)
         } yield written
 
-        tryResult.map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v)))).toErrorOr.contextualizeErrors(s"""$functionName(...)""")
-      } (coercer = WomArrayType(WomAnyType))
+        tryResult
+          .map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v))))
+          .toErrorOr
+          .contextualizeErrors(s"""$functionName(...)""")
+      }(coercer = WomArrayType(WomAnyType))
     }
   }
 
@@ -297,16 +355,24 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: WriteMap,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
       val functionName = "write_map"
-      processValidatedSingleValue[WomMap, WomSingleFile](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { mapToWrite: WomMap =>
+      processValidatedSingleValue[WomMap, WomSingleFile](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { mapToWrite: WomMap =>
         val tryResult = for {
-          serialized <- ValueEvaluation.serializeWomValue(functionName, mapToWrite, defaultIfOptionalEmpty = WomMap(Map.empty))
+          serialized <- ValueEvaluation.serializeWomValue(functionName,
+                                                          mapToWrite,
+                                                          defaultIfOptionalEmpty = WomMap(Map.empty)
+          )
           written <- writeContent(functionName, ioFunctionSet, serialized)
         } yield written
 
-        tryResult.map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v)))).toErrorOr.contextualizeErrors(s"""$functionName(...)""")
+        tryResult
+          .map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v))))
+          .toErrorOr
+          .contextualizeErrors(s"""$functionName(...)""")
       }
     }
   }
 
@@ -315,16 +381,24 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: WriteObject,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
       val functionName = "write_object"
-      processValidatedSingleValue[WomObject, WomSingleFile](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { objectToWrite: WomObject =>
+      processValidatedSingleValue[WomObject, WomSingleFile](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { objectToWrite: WomObject =>
         val tryResult = for {
-          serialized <- ValueEvaluation.serializeWomValue(functionName, objectToWrite, defaultIfOptionalEmpty = WomObject(Map.empty))
+          serialized <- ValueEvaluation.serializeWomValue(functionName,
+                                                          objectToWrite,
+                                                          defaultIfOptionalEmpty = WomObject(Map.empty)
+          )
           written <- writeContent(functionName, ioFunctionSet, serialized)
         } yield written
 
-        tryResult.map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v)))).toErrorOr.contextualizeErrors(s"""$functionName(...)""")
+        tryResult
+          .map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v))))
+          .toErrorOr
+          .contextualizeErrors(s"""$functionName(...)""")
       }
     }
   }
 
@@ -333,17 +407,25 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: WriteObjects,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
       val functionName = "write_objects"
-      processValidatedSingleValue[WomArray, WomSingleFile](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { objectToWrite =>
+      processValidatedSingleValue[WomArray, WomSingleFile](
+        a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+      ) { objectToWrite =>
         val tryResult = for {
-          serialized <- ValueEvaluation.serializeWomValue(functionName, objectToWrite, defaultIfOptionalEmpty = WomArray(List(WomObject(Map.empty))))
+          serialized <- ValueEvaluation.serializeWomValue(functionName,
+                                                          objectToWrite,
+                                                          defaultIfOptionalEmpty = WomArray(List(WomObject(Map.empty)))
+          )
           written <- writeContent(functionName, ioFunctionSet, serialized)
         } yield written
 
-        tryResult.map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v)))).toErrorOr.contextualizeErrors(s"""$functionName(...)""")
-      } (coercer = WomArrayType(WomObjectType))
+        tryResult
+          .map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v))))
+          .toErrorOr
+          .contextualizeErrors(s"""$functionName(...)""")
+      }(coercer = WomArrayType(WomObjectType))
     }
   }
 
@@ -351,8 +433,8 @@ object EngineFunctionEvaluators {
     override def evaluateValue(a: WriteJson,
                                inputs: Map[String, WomValue],
                                ioFunctionSet: IoFunctionSet,
-                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
-                              (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
+                               forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+    )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomSingleFile]] = {
       val functionName = "write_json"
 
       def convertToSingleFile(objectToWrite: WomValue): ErrorOr[EvaluatedValue[WomSingleFile]] = {
@@ -361,26 +443,30 @@ object EngineFunctionEvaluators {
         written <- writeContent(functionName, ioFunctionSet, serialized.compactPrint)
       } yield written
 
-        tryResult.map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v)))).toErrorOr.contextualizeErrors(s"""$functionName(...)""")
+        tryResult
+          .map(v => EvaluatedValue(v, Seq(CommandSetupSideEffectFile(v))))
+          .toErrorOr
+          .contextualizeErrors(s"""$functionName(...)""")
       }
 
-      def evaluateParam(womValue: WomValue): ErrorOr[EvaluatedValue[WomSingleFile]] = {
+      def evaluateParam(womValue: WomValue): ErrorOr[EvaluatedValue[WomSingleFile]] =
        womValue match {
-          case WomBoolean(_) | WomString(_) | WomInteger(_) | WomFloat(_) | WomPair(_, _) => convertToSingleFile(womValue)
+          case WomBoolean(_) | WomString(_) | WomInteger(_) | WomFloat(_) | WomPair(_, _) =>
+            convertToSingleFile(womValue)
          case v if v.coercionDefined[WomObject] => v.coerceToType[WomObject].flatMap(convertToSingleFile)
          case v if v.coercionDefined[WomArray] => v.coerceToType[WomArray].flatMap(convertToSingleFile)
-          case _ => (s"The '$functionName' method expects one of 'Boolean', 'String', 'Integer', 'Float', 'Object', 'Pair[_, _]', " +
-            s"'Map[_, _] or 'Array[_]' argument but instead got '${womValue.womType.friendlyName}'.").invalidNel
+          case _ =>
+            (s"The '$functionName' method expects one of 'Boolean', 'String', 'Integer', 'Float', 'Object', 'Pair[_, _]', " +
+              s"'Map[_, _] or 'Array[_]' argument but instead got '${womValue.womType.friendlyName}'.").invalidNel
        }
-      }
 
       val evaluatedSingleFile: ErrorOr[(EvaluatedValue[WomSingleFile], Seq[CommandSetupSideEffectFile])] = for {
         evaluatedValue <- a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
         evaluatedSingleFile <- evaluateParam(evaluatedValue.value)
       } yield (evaluatedSingleFile, evaluatedValue.sideEffectFiles)
 
-      evaluatedSingleFile map {
-        case (result, previousSideEffectFiles) => result.copy(sideEffectFiles = result.sideEffectFiles ++ previousSideEffectFiles)
+      evaluatedSingleFile map { case (result, previousSideEffectFiles) =>
+        result.copy(sideEffectFiles = result.sideEffectFiles ++ previousSideEffectFiles)
       }
     }
   }
ErrorOr[EvaluatedValue[WomArray]] = + processValidatedSingleValue[WomInteger, WomArray]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { integer => val array = WomArray( womType = WomArrayType(WomIntegerType, guaranteedNonEmpty = integer.value > 0), value = (0 until integer.value).map(WomInteger) ) EvaluatedValue(array, Seq.empty).validNel } - } } implicit val transposeFunctionEvaluator: ValueEvaluator[Transpose] = new ValueEvaluator[Transpose] { override def evaluateValue(a: Transpose, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { - processValidatedSingleValue[WomArray, WomArray](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { array => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = + processValidatedSingleValue[WomArray, WomArray]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { array => EngineFunctions.transpose(array).map(EvaluatedValue(_, Seq.empty)).toErrorOr } - } } implicit val lengthFunctionEvaluator: ValueEvaluator[Length] = new ValueEvaluator[Length] { override def evaluateValue(a: Length, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = { - processValidatedSingleValue[WomArray, WomInteger](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { a => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = + processValidatedSingleValue[WomArray, WomInteger]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { a => EvaluatedValue(WomInteger(a.value.size), Seq.empty).validNel } - } } implicit val flattenFunctionEvaluator: ValueEvaluator[Flatten] = new ValueEvaluator[Flatten] { override def evaluateValue(a: Flatten, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { def flatValues(v: WomValue): ErrorOr[Seq[WomValue]] = v match { case WomArrayLike(arrayLike) => arrayLike.value.validNel case other => s"inner item ${other.toWomString} was not an array-like".invalidNel } - processValidatedSingleValue[WomArray, WomArray](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { array => - val expandedValidation = array.value.toList.traverse{ flatValues } + processValidatedSingleValue[WomArray, WomArray]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { array => + val expandedValidation = array.value.toList.traverse(flatValues) expandedValidation map { expanded => EvaluatedValue(WomArray(expanded.flatten), Seq.empty) } - } (coercer = 
WomArrayType(WomArrayType(WomAnyType))) + }(coercer = WomArrayType(WomArrayType(WomAnyType))) } } @@ -447,85 +538,93 @@ object EngineFunctionEvaluators { override def evaluateValue(a: SelectFirst, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = { - processValidatedSingleValue[WomArray, WomValue](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { array => - val firstValue = array.value collectFirst { - case WomOptionalValue(_, Some(yay)) => yay + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomValue]] = + processValidatedSingleValue[WomArray, WomValue]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { array => + val firstValue = array.value collectFirst { case WomOptionalValue(_, Some(yay)) => + yay } firstValue match { case Some(first) => EvaluatedValue(first, Seq.empty).validNel - case None => s"select_first was called with ${array.size} empty values. We needed at least one to be filled.".invalidNel + case None => + s"select_first was called with ${array.size} empty values. We needed at least one to be filled.".invalidNel } - } (coercer = WomArrayType(WomOptionalType(WomAnyType))) - } + }(coercer = WomArrayType(WomOptionalType(WomAnyType))) } implicit val selectAllFunctionEvaluator: ValueEvaluator[SelectAll] = new ValueEvaluator[SelectAll] { override def evaluateValue(a: SelectAll, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { - processValidatedSingleValue[WomArray, WomArray](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { array => - val goodValues = array.value.collect { - case WomOptionalValue.Flattened(Some(value)) => value + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = + processValidatedSingleValue[WomArray, WomArray]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { array => + val goodValues = array.value.collect { case WomOptionalValue.Flattened(Some(value)) => + value } EvaluatedValue(WomArray(goodValues), Seq.empty).validNel - } (coercer = WomArrayType(WomOptionalType(WomAnyType))) - } + }(coercer = WomArrayType(WomOptionalType(WomAnyType))) } implicit val definedFunctionEvaluator: ValueEvaluator[Defined] = new ValueEvaluator[Defined] { override def evaluateValue(a: Defined, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomBoolean]] = { - processValidatedSingleValue[WomOptionalValue, WomBoolean](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { optionalValue => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomBoolean]] = + processValidatedSingleValue[WomOptionalValue, 
WomBoolean]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { optionalValue => EvaluatedValue(WomBoolean(optionalValue.value.isDefined), Seq.empty).validNel } - } } implicit val floorFunctionEvaluator: ValueEvaluator[Floor] = new ValueEvaluator[Floor] { override def evaluateValue(a: Floor, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = { - processValidatedSingleValue[WomFloat, WomInteger](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { float => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = + processValidatedSingleValue[WomFloat, WomInteger]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { float => EvaluatedValue(WomInteger(math.floor(float.value).toInt), Seq.empty).validNel } - } } implicit val ceilFunctionEvaluator: ValueEvaluator[Ceil] = new ValueEvaluator[Ceil] { override def evaluateValue(a: Ceil, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = { - processValidatedSingleValue[WomFloat, WomInteger](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { float => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = + processValidatedSingleValue[WomFloat, WomInteger]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { float => EvaluatedValue(WomInteger(math.ceil(float.value).toInt), Seq.empty).validNel } - } } implicit val roundFunctionEvaluator: ValueEvaluator[Round] = new ValueEvaluator[Round] { override def evaluateValue(a: Round, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = { - processValidatedSingleValue[WomFloat, WomInteger](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { float => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomInteger]] = + processValidatedSingleValue[WomFloat, WomInteger]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { float => EvaluatedValue(WomInteger(math.round(float.value).toInt), Seq.empty).validNel } - } } implicit val globFunctionValueEvaluator: ValueEvaluator[Glob] = new ValueEvaluator[Glob] { + /** * Evaluate a value from an A * @@ -535,24 +634,28 @@ object EngineFunctionEvaluators { * @param forCommandInstantiationOptions Supplied only if we're evaluating this A as part of command instantiation. * @return An evaluated value set - the value itself and any files which were produced as part of the evaluation. 
*/ - override def evaluateValue(a: Glob, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - processValidatedSingleValue[WomString, WomArray](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { globString => + override def evaluateValue(a: Glob, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + processValidatedSingleValue[WomString, WomArray]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { globString => for { globbed <- Try(Await.result(ioFunctionSet.glob(globString.valueString), ReadWaitTimeout)).toErrorOr files = globbed map WomSingleFile array = WomArray(files) } yield EvaluatedValue(array, Seq.empty) } - } } implicit val sizeFunctionEvaluator: ValueEvaluator[Size] = new ValueEvaluator[Size] { override def evaluateValue(a: Size, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomFloat]] = { + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomFloat]] = { // Inner function: get the memory unit from the second (optional) parameter def toUnit(womValue: WomValue): ErrorOr[MemoryUnit] = Try(MemoryUnit.fromSuffix(womValue.valueString)).toErrorOr @@ -566,26 +669,38 @@ object EngineFunctionEvaluators { // Inner function: Get the file size, allowing for unpacking of optionals and arrays def optionalSafeFileSize(value: WomValue): ErrorOr[Long] = value match { case f if f.isInstanceOf[WomSingleFile] || WomSingleFileType.isCoerceableFrom(f.womType) => - f.coerceToType[WomSingleFile] flatMap { file => Try(Await.result(ioFunctionSet.size(file.valueString), Duration.Inf)).toErrorOr } + f.coerceToType[WomSingleFile] flatMap { file => + Try(Await.result(ioFunctionSet.size(file.valueString), Duration.Inf)).toErrorOr + } case WomOptionalValue(f, Some(o)) if isOptionalOfFileType(f) => optionalSafeFileSize(o) case WomOptionalValue(f, None) if isOptionalOfFileType(f) => 0L.validNel - case WomArray(WomArrayType(womType), values) if isOptionalOfFileType(womType) => values.toList.traverse(optionalSafeFileSize).map(_.sum) - case _ => s"The 'size' method expects a 'File', 'File?', 'Array[File]' or Array[File?] argument but instead got ${value.womType.stableName}.".invalidNel + case WomArray(WomArrayType(womType), values) if isOptionalOfFileType(womType) => + values.toList.traverse(optionalSafeFileSize).map(_.sum) + case _ => + s"The 'size' method expects a 'File', 'File?', 'Array[File]' or Array[File?] 
argument but instead got ${value.womType.stableName}.".invalidNel } // Inner function: get the file size and convert into the requested memory unit - def fileSize(womValue: ErrorOr[EvaluatedValue[_ <: WomValue]], convertToOption: Option[ErrorOr[EvaluatedValue[_ <: WomValue]]]): ErrorOr[EvaluatedValue[WomFloat]] = { - val convertTo: ErrorOr[EvaluatedValue[_ <: WomValue]] = convertToOption.getOrElse(EvaluatedValue(WomString("B"), Seq.empty).validNel) + def fileSize(womValue: ErrorOr[EvaluatedValue[_ <: WomValue]], + convertToOption: Option[ErrorOr[EvaluatedValue[_ <: WomValue]]] + ): ErrorOr[EvaluatedValue[WomFloat]] = { + val convertTo: ErrorOr[EvaluatedValue[_ <: WomValue]] = + convertToOption.getOrElse(EvaluatedValue(WomString("B"), Seq.empty).validNel) for { value <- womValue evaluatedUnitValue <- convertTo convertToUnit <- toUnit(evaluatedUnitValue.value) fileSize <- optionalSafeFileSize(value.value) - } yield EvaluatedValue(WomFloat(MemorySize(fileSize.toDouble, MemoryUnit.Bytes).to(convertToUnit).amount), value.sideEffectFiles ++ evaluatedUnitValue.sideEffectFiles) + } yield EvaluatedValue(WomFloat(MemorySize(fileSize.toDouble, MemoryUnit.Bytes).to(convertToUnit).amount), + value.sideEffectFiles ++ evaluatedUnitValue.sideEffectFiles + ) } - val evaluatedFileValidation: ErrorOr[EvaluatedValue[_ <: WomValue]] = a.file.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) - fileSize(evaluatedFileValidation, a.unit map (_.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions))) + val evaluatedFileValidation: ErrorOr[EvaluatedValue[_ <: WomValue]] = + a.file.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + fileSize(evaluatedFileValidation, + a.unit map (_.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) + ) } } @@ -593,17 +708,22 @@ object EngineFunctionEvaluators { override def evaluateValue(a: Basename, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = { + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = { def simpleBasename(fileNameAsString: WomString) = fileNameAsString.valueString.split('/').last a.suffixToRemove match { - case None => processValidatedSingleValue[WomString, WomString](a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { str => - EvaluatedValue(WomString(simpleBasename(str)), Seq.empty).validNel - } - case Some(suffixToRemove) => processTwoValidatedValues[WomString, WomString, WomString]( - a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), - suffixToRemove.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { (name, suffix) => + case None => + processValidatedSingleValue[WomString, WomString]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { str => + EvaluatedValue(WomString(simpleBasename(str)), Seq.empty).validNel + } + case Some(suffixToRemove) => + processTwoValidatedValues[WomString, WomString, WomString]( + a.param.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), + suffixToRemove.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { (name, suffix) => 
EvaluatedValue(WomString(simpleBasename(name).stripSuffix(suffix.valueString)), Seq.empty).validNel } } @@ -614,91 +734,127 @@ object EngineFunctionEvaluators { override def evaluateValue(a: Zip, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { - processTwoValidatedValues[WomArray, WomArray, WomArray](a.arg1.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), a.arg2.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { (arr1, arr2) => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = + processTwoValidatedValues[WomArray, WomArray, WomArray]( + a.arg1.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), + a.arg2.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { (arr1, arr2) => if (arr1.size == arr2.size) { val pairs = arr1.value.zip(arr2.value) map { case (a, b) => WomPair(a, b) } - EvaluatedValue(WomArray(WomArrayType(WomPairType(arr1.arrayType.memberType, arr2.arrayType.memberType)), pairs), Seq.empty).validNel + EvaluatedValue( + WomArray(WomArrayType(WomPairType(arr1.arrayType.memberType, arr2.arrayType.memberType)), pairs), + Seq.empty + ).validNel } else { s"Mismatching array sizes for zip function: ${arr1.size} vs ${arr2.size}".invalidNel } } - } } implicit val crossFunctionEvaluator: ValueEvaluator[Cross] = new ValueEvaluator[Cross] { override def evaluateValue(a: Cross, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { - processTwoValidatedValues[WomArray, WomArray, WomArray](a.arg1.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), a.arg2.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { (arr1, arr2) => + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = + processTwoValidatedValues[WomArray, WomArray, WomArray]( + a.arg1.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), + a.arg2.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { (arr1, arr2) => val pairs = for { a <- arr1.value b <- arr2.value } yield WomPair(a, b) - EvaluatedValue(WomArray(WomArrayType(WomPairType(arr1.arrayType.memberType, arr2.arrayType.memberType)), pairs), Seq.empty).validNel + EvaluatedValue(WomArray(WomArrayType(WomPairType(arr1.arrayType.memberType, arr2.arrayType.memberType)), pairs), + Seq.empty + ).validNel } - } } implicit val prefixFunctionEvaluator: ValueEvaluator[Prefix] = new ValueEvaluator[Prefix] { override def evaluateValue(a: Prefix, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = { - processTwoValidatedValues[WomString, WomArray, WomArray](a.prefix.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), a.array.evaluateValue(inputs, ioFunctionSet, 
forCommandInstantiationOptions)) { (prefix, array) => - EvaluatedValue(WomArray(array.value.map(value => WomString(prefix.value + value.valueString))), Seq.empty).validNel + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomArray]] = + processTwoValidatedValues[WomString, WomArray, WomArray]( + a.prefix.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), + a.array.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { (prefix, array) => + EvaluatedValue(WomArray(array.value.map(value => WomString(prefix.value + value.valueString))), + Seq.empty + ).validNel } - } } implicit val subFunctionEvaluator: ValueEvaluator[Sub] = new ValueEvaluator[Sub] { override def evaluateValue(a: Sub, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = { + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = processThreeValidatedValues[WomString, WomString, WomString, WomString]( a.input.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), a.pattern.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), - a.replace.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) { (input, pattern, replace) => - ErrorOr(EvaluatedValue(WomString(pattern.valueString.r.replaceAllIn(input.valueString, replace.valueString)), Seq.empty)) + a.replace.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) { (input, pattern, replace) => + ErrorOr( + EvaluatedValue(WomString(pattern.valueString.r.replaceAllIn(input.valueString, replace.valueString)), + Seq.empty + ) + ) } - } } - def processValidatedSingleValue[A <: WomValue, B <: WomValue](arg: ErrorOr[EvaluatedValue[_]]) - (f: A => ErrorOr[EvaluatedValue[B]]) - (implicit coercer: WomTypeCoercer[A]): ErrorOr[EvaluatedValue[B]] = { + def processValidatedSingleValue[A <: WomValue, B <: WomValue]( + arg: ErrorOr[EvaluatedValue[_]] + )(f: A => ErrorOr[EvaluatedValue[B]])(implicit coercer: WomTypeCoercer[A]): ErrorOr[EvaluatedValue[B]] = arg flatMap { - case EvaluatedValue(a: WomValue, previousSideEffectFiles) if a.coercionDefined[A] => a.coerceToType[A] flatMap { f.apply } map { result => result.copy(sideEffectFiles = result.sideEffectFiles ++ previousSideEffectFiles) } + case EvaluatedValue(a: WomValue, previousSideEffectFiles) if a.coercionDefined[A] => + a.coerceToType[A] flatMap f.apply map { result => + result.copy(sideEffectFiles = result.sideEffectFiles ++ previousSideEffectFiles) + } case other => s"Expected ${coercer.toDisplayString} argument but got ${other.value.womType.stableName}".invalidNel } - } - def processTwoValidatedValues[A <: WomValue, B <: WomValue, R <: WomValue](arg1: ErrorOr[EvaluatedValue[_ <: WomValue]], arg2: ErrorOr[EvaluatedValue[_ <: WomValue]]) - (f: (A, B) => ErrorOr[EvaluatedValue[R]]) - (implicit coercerA: WomTypeCoercer[A], - coercerB: WomTypeCoercer[B]): ErrorOr[EvaluatedValue[R]] = { + def processTwoValidatedValues[A <: WomValue, B <: WomValue, R <: WomValue]( + arg1: ErrorOr[EvaluatedValue[_ <: WomValue]], + arg2: ErrorOr[EvaluatedValue[_ <: WomValue]] + )( + f: (A, B) => ErrorOr[EvaluatedValue[R]] + 
)(implicit coercerA: WomTypeCoercer[A], coercerB: WomTypeCoercer[B]): ErrorOr[EvaluatedValue[R]] = (arg1, arg2) flatMapN { - case (EvaluatedValue(a: WomValue, previousSideEffectFilesA), EvaluatedValue(b: WomValue, previousSideEffectFilesB)) if a.coercionDefined[A] && b.coercionDefined[B] => - (a.coerceToType[A], b.coerceToType[B]) flatMapN { f.apply } map { result => result.copy(sideEffectFiles = result.sideEffectFiles ++ previousSideEffectFilesA ++ previousSideEffectFilesB) } - case (otherA, otherB) => s"Expected (${coercerA.toDisplayString}, ${coercerB.toDisplayString}) argument but got (${otherA.value.womType.stableName}, ${otherB.value.womType.stableName})".invalidNel - } - } - - private def processThreeValidatedValues[A <: WomValue, B <: WomValue, C <: WomValue, R <: WomValue](arg1: ErrorOr[EvaluatedValue[_ <: WomValue]], arg2: ErrorOr[EvaluatedValue[_ <: WomValue]], arg3: ErrorOr[EvaluatedValue[_ <: WomValue]]) - (f: (A, B, C) => ErrorOr[EvaluatedValue[R]]) - (implicit coercerA: WomTypeCoercer[A], - coercerB: WomTypeCoercer[B], - coercerC: WomTypeCoercer[C]): ErrorOr[EvaluatedValue[R]] = { + case (EvaluatedValue(a: WomValue, previousSideEffectFilesA), + EvaluatedValue(b: WomValue, previousSideEffectFilesB) + ) if a.coercionDefined[A] && b.coercionDefined[B] => + (a.coerceToType[A], b.coerceToType[B]) flatMapN f.apply map { result => + result.copy(sideEffectFiles = result.sideEffectFiles ++ previousSideEffectFilesA ++ previousSideEffectFilesB) + } + case (otherA, otherB) => + s"Expected (${coercerA.toDisplayString}, ${coercerB.toDisplayString}) argument but got (${otherA.value.womType.stableName}, ${otherB.value.womType.stableName})".invalidNel + } + + private def processThreeValidatedValues[A <: WomValue, B <: WomValue, C <: WomValue, R <: WomValue]( + arg1: ErrorOr[EvaluatedValue[_ <: WomValue]], + arg2: ErrorOr[EvaluatedValue[_ <: WomValue]], + arg3: ErrorOr[EvaluatedValue[_ <: WomValue]] + )(f: (A, B, C) => ErrorOr[EvaluatedValue[R]])(implicit + coercerA: WomTypeCoercer[A], + coercerB: WomTypeCoercer[B], + coercerC: WomTypeCoercer[C] + ): ErrorOr[EvaluatedValue[R]] = (arg1, arg2, arg3) flatMapN { - case (EvaluatedValue(a, previousSideEffectFilesA), EvaluatedValue(b, previousSideEffectFilesB), EvaluatedValue(c, previousSideEffectFilesC)) if a.coercionDefined[A] && b.coercionDefined[B] && c.coercionDefined[C] => - (a.coerceToType[A], b.coerceToType[B], c.coerceToType[C]) flatMapN { f.apply } map { result => result.copy(sideEffectFiles = result.sideEffectFiles ++ previousSideEffectFilesA ++ previousSideEffectFilesB ++ previousSideEffectFilesC) } - case (otherA, otherB, otherC) => s"Expected (${coercerA.toDisplayString}, ${coercerB.toDisplayString}, ${coercerB.toDisplayString}) argument but got (${otherA.value.womType.stableName}, ${otherB.value.womType.stableName}, ${otherC.value.womType.stableName})".invalidNel + case (EvaluatedValue(a, previousSideEffectFilesA), + EvaluatedValue(b, previousSideEffectFilesB), + EvaluatedValue(c, previousSideEffectFilesC) + ) if a.coercionDefined[A] && b.coercionDefined[B] && c.coercionDefined[C] => + (a.coerceToType[A], b.coerceToType[B], c.coerceToType[C]) flatMapN f.apply map { result => + result.copy(sideEffectFiles = + result.sideEffectFiles ++ previousSideEffectFilesA ++ previousSideEffectFilesB ++ previousSideEffectFilesC + ) + } + case (otherA, otherB, otherC) => + s"Expected (${coercerA.toDisplayString}, ${coercerB.toDisplayString}, ${coercerB.toDisplayString}) argument but got (${otherA.value.womType.stableName}, 
${otherB.value.womType.stableName}, ${otherC.value.womType.stableName})".invalidNel } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LiteralEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LiteralEvaluators.scala index ab2b5c0b448..4831c34e76b 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LiteralEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LiteralEvaluators.scala @@ -14,32 +14,31 @@ import wom.values.{WomArray, WomMap, WomObject, WomPair, WomString, WomValue} object LiteralEvaluators { - implicit val primitiveValueEvaluator: ValueEvaluator[PrimitiveLiteralExpressionElement] = new ValueEvaluator[PrimitiveLiteralExpressionElement] { - override def evaluateValue(a: PrimitiveLiteralExpressionElement, - inputs: Map[String, WomValue], - ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - EvaluatedValue(a.value, Seq.empty).validNel + implicit val primitiveValueEvaluator: ValueEvaluator[PrimitiveLiteralExpressionElement] = + new ValueEvaluator[PrimitiveLiteralExpressionElement] { + override def evaluateValue(a: PrimitiveLiteralExpressionElement, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + EvaluatedValue(a.value, Seq.empty).validNel } - } implicit val stringLiteralEvaluator: ValueEvaluator[StringLiteral] = new ValueEvaluator[StringLiteral] { override def evaluateValue(a: StringLiteral, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = { + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[WomString]] = EvaluatedValue(WomString(a.value), Seq.empty).validNel - } } implicit val stringExpressionEvaluator: ValueEvaluator[StringExpression] = new ValueEvaluator[StringExpression] { override def evaluateValue(a: StringExpression, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { val evaluatedPieces = a.pieces.toList.traverse { case e: StringPlaceholder => e.expr.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) case s: StringLiteral => s.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) @@ -56,8 +55,8 @@ object LiteralEvaluators { override def evaluateValue(a: ObjectLiteral, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, - forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: 
ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
 val evaluated: ErrorOr[List[(String, EvaluatedValue[_])]] = a.elements.toList traverse {
 case (key, value) =>
 value.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions).map(key -> _)
@@ -75,12 +74,14 @@ object LiteralEvaluators {
 override def evaluateValue(a: MapLiteral,
 inputs: Map[String, WomValue],
 ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
-
- val evaluated: ErrorOr[List[(EvaluatedValue[_], EvaluatedValue[_])]] = a.elements.toList traverse { case (key, value) =>
- (key.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions),
- value.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) mapN { (key, value) => key -> value}
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
+
+ val evaluated: ErrorOr[List[(EvaluatedValue[_], EvaluatedValue[_])]] = a.elements.toList traverse {
+ case (key, value) =>
+ (key.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions),
+ value.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+ ) mapN { (key, value) => key -> value }
 }
 evaluated map { kvps =>
@@ -95,8 +96,8 @@ object LiteralEvaluators {
 override def evaluateValue(a: ArrayLiteral,
 inputs: Map[String, WomValue],
 ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
 val evaluated: ErrorOr[Seq[EvaluatedValue[_]]] = a.elements.toList traverse { entry =>
 entry.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
@@ -114,14 +115,12 @@ object LiteralEvaluators {
 override def evaluateValue(a: PairLiteral,
 inputs: Map[String, WomValue],
 ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
-
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
 (a.left.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions),
- a.right.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) mapN { (left, right) =>
-
+ a.right.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+ ) mapN { (left, right) =>
 EvaluatedValue(WomPair(left.value, right.value), left.sideEffectFiles ++ right.sideEffectFiles)
 }
- }
 }
}
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LookupEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LookupEvaluators.scala
index 114c4051b93..216673417cf 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LookupEvaluators.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/LookupEvaluators.scala
@@ -14,15 +14,14 @@ import wom.values._
 import wdl.transforms.base.wdlom2wdl.WdlWriter.ops._
 import wdl.transforms.base.wdlom2wdl.WdlWriterImpl._
-
 object LookupEvaluators {
 implicit val identifierLookupEvaluator: ValueEvaluator[IdentifierLookup] = new ValueEvaluator[IdentifierLookup] {
 override def evaluateValue(a: IdentifierLookup,
 inputs: Map[String, WomValue],
 ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
 inputs.get(a.identifier) match {
 case Some(value) =>
 val mapped = forCommandInstantiationOptions.fold(value)(_.valueMapper(value))
@@ -30,49 +29,59 @@ object LookupEvaluators {
 case None =>
 s"ValueEvaluator[IdentifierLookup]: No suitable input for '${a.identifier}' amongst {${inputs.keys.mkString(", ")}}".invalidNel
 }
- }
 }
- implicit val expressionMemberAccessEvaluator: ValueEvaluator[ExpressionMemberAccess] = new ValueEvaluator[ExpressionMemberAccess] {
- override def evaluateValue(a: ExpressionMemberAccess,
- inputs: Map[String, WomValue],
- ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
- a.expression.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) flatMap { evaluated =>
- doLookup(evaluated.value, a.memberAccessTail) map { EvaluatedValue(_, evaluated.sideEffectFiles) }
- }
+ implicit val expressionMemberAccessEvaluator: ValueEvaluator[ExpressionMemberAccess] =
+ new ValueEvaluator[ExpressionMemberAccess] {
+ override def evaluateValue(a: ExpressionMemberAccess,
+ inputs: Map[String, WomValue],
+ ioFunctionSet: IoFunctionSet,
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
+ a.expression.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) flatMap { evaluated =>
+ doLookup(evaluated.value, a.memberAccessTail) map { EvaluatedValue(_, evaluated.sideEffectFiles) }
+ }
 }
- }
-
- implicit val identifierMemberAccessEvaluator: ValueEvaluator[IdentifierMemberAccess] = new ValueEvaluator[IdentifierMemberAccess] {
- override def evaluateValue(a: IdentifierMemberAccess,
- inputs: Map[String, WomValue],
- ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
-
- // Do the first lookup and decide whether any more lookups are needed:
- val generatedValueAndLookups: ErrorOr[(WomValue, Seq[String])] = {
- val callOutputKey = s"${a.first}.${a.second}"
+ implicit val identifierMemberAccessEvaluator: ValueEvaluator[IdentifierMemberAccess] =
+ new ValueEvaluator[IdentifierMemberAccess] {
+ override def evaluateValue(a: IdentifierMemberAccess,
+ inputs: Map[String, WomValue],
+ ioFunctionSet: IoFunctionSet,
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit + expressionValueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[EvaluatedValue[_ <: WomValue]] = { + + // Do the first lookup and decide whether any more lookups are needed: + val generatedValueAndLookups: ErrorOr[(WomValue, Seq[String])] = { + val callOutputKey = s"${a.first}.${a.second}" + + if (inputs.keySet.contains(a.first)) { (inputs(a.first), List(a.second) ++ a.memberAccessTail).validNel } + else if (inputs.keySet.contains(callOutputKey)) { (inputs(callOutputKey), a.memberAccessTail).validNel } + else + s"No value found for member access lookup. Report this bug: Insufficient input values supplied by engine. Needed '${a.first}' or '$callOutputKey' but only received: '${inputs.keys + .mkString(", ")}'".invalidNel + } - if (inputs.keySet.contains(a.first)) { (inputs(a.first), List(a.second) ++ a.memberAccessTail).validNel } - else if (inputs.keySet.contains(callOutputKey)) { (inputs(callOutputKey), a.memberAccessTail).validNel } - else - s"No value found for member access lookup. Report this bug: Insufficient input values supplied by engine. Needed '${a.first}' or '$callOutputKey' but only received: '${inputs.keys.mkString(", ")}'".invalidNel + generatedValueAndLookups flatMap { case (foundValue, lookups) => + NonEmptyList.fromList(lookups.toList) match { + case Some(lookupNel) => doLookup(foundValue, lookupNel) map { EvaluatedValue(_, Seq.empty) } + case None => EvaluatedValue(foundValue, Seq.empty).validNel + } + } } - - generatedValueAndLookups flatMap { case (foundValue, lookups) => NonEmptyList.fromList(lookups.toList) match { - case Some(lookupNel) => doLookup(foundValue, lookupNel) map { EvaluatedValue(_, Seq.empty) } - case None => EvaluatedValue(foundValue, Seq.empty).validNel - }} } - } implicit val indexAccessValueEvaluator: ValueEvaluator[IndexAccess] = new ValueEvaluator[IndexAccess] { - override def evaluateValue(a: IndexAccess, inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]) - (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = { - (a.expressionElement.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), a.index.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)) flatMapN { (lhs, rhs) => + override def evaluateValue(a: IndexAccess, + inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: Option[ForCommandInstantiationOptions] + )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = + (a.expressionElement.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions), + a.index.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) + ) flatMapN { (lhs, rhs) => val value: ErrorOr[WomValue] = (lhs.value, rhs.value) match { case (array: WomArray, WomInteger(index)) => if (array.value.length > index) @@ -80,12 +89,12 @@ object LookupEvaluators { else s"Bad array access ${a.toWdlV1}: Array size ${array.value.length} does not have an index value '$index'".invalidNel case (WomObject(values, _), WomString(index)) => - if(values.contains(index)) + if (values.contains(index)) values(index).validNel else s"Bad Object access ${a.toWdlV1}: Object with keys [${values.keySet.mkString(", ")}] does not have an index value [$index]".invalidNel case (WomMap(mapType, values), index) => - 
if(values.contains(index)) + if (values.contains(index)) values(index).validNel else s"Bad Map access ${a.toWdlV1}: This ${mapType.stableName} does not have a ${index.womType.stableName} index value [${index.toWomString}]".invalidNel @@ -95,7 +104,6 @@ object LookupEvaluators { value map { EvaluatedValue(_, lhs.sideEffectFiles ++ rhs.sideEffectFiles) } } - } } /** @@ -113,20 +121,29 @@ object LookupEvaluators { val thisValue: ErrorOr[WomValue] = womValue match { case WomObject(values, _) if values.contains(key) => values(key).validNel - case WomObject(_, WomCompositeType(typeMap, _)) if typeMap.contains(key) => typeMap(key) match { - case WomOptionalType(innerType) => WomOptionalValue(innerType, None).validNel - case other => s"Composite value was unexpectedly missing a field: '$key' (expected type ${other.stableName}). Report this bug! Static validation failed.".invalidNel - } - case WomObject(_, _: WomCompositeType) => s"No such field '$key' on type ${womValue.womType.stableName}. Report this bug! Static validation failed.".invalidNel + case WomObject(_, WomCompositeType(typeMap, _)) if typeMap.contains(key) => + typeMap(key) match { + case WomOptionalType(innerType) => WomOptionalValue(innerType, None).validNel + case other => + s"Composite value was unexpectedly missing a field: '$key' (expected type ${other.stableName}). Report this bug! Static validation failed.".invalidNel + } + case WomObject(_, _: WomCompositeType) => + s"No such field '$key' on type ${womValue.womType.stableName}. Report this bug! Static validation failed.".invalidNel case WomObject(_, _) => s"'Object'-type value did not contain the field '$key' at runtime".invalidNel case p: WomPair if key == "left" => p.left.validNel case p: WomPair if key == "right" => p.right.validNel - case WomMap(_, value) => Validated.fromOption( - o = value.collectFirst { - case (k, v) if k.valueString == key => v - }, - ifNone = NonEmptyList(s"Requested key '$key' not found in the Map. Available keys were: ${value.keySet.mkString("[ ", ",", "]")}", Nil)) - case _ => s"No such field '$key' on type ${womValue.womType.stableName}. Report this bug! Static validation failed.".invalidNel + case WomMap(_, value) => + Validated.fromOption( + o = value.collectFirst { + case (k, v) if k.valueString == key => v + }, + ifNone = NonEmptyList( + s"Requested key '$key' not found in the Map. Available keys were: ${value.keySet.mkString("[ ", ",", "]")}", + Nil + ) + ) + case _ => + s"No such field '$key' on type ${womValue.womType.stableName}. Report this bug! 
Static validation failed.".invalidNel
 }
 tail match {
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/TernaryIfEvaluator.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/TernaryIfEvaluator.scala
index da51c3b7120..f07402a1efa 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/TernaryIfEvaluator.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/TernaryIfEvaluator.scala
@@ -14,16 +14,19 @@ object TernaryIfEvaluator {
 override def evaluateValue(a: TernaryIf,
 inputs: Map[String, WomValue],
 ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
-
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
 a.condition.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) flatMap {
 case EvaluatedValue(WomBoolean(true), conditionSideEffectFiles) =>
- a.ifTrue.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions).map(result => result.copy(sideEffectFiles = result.sideEffectFiles ++ conditionSideEffectFiles))
+ a.ifTrue
+ .evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+ .map(result => result.copy(sideEffectFiles = result.sideEffectFiles ++ conditionSideEffectFiles))
 case EvaluatedValue(WomBoolean(false), conditionSideEffectFiles) =>
- a.ifFalse.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions).map(result => result.copy(sideEffectFiles = result.sideEffectFiles ++ conditionSideEffectFiles))
- case other => s"Condition should have evaluated to a Boolean but instead got ${other.value.womType.stableName}".invalidNel
+ a.ifFalse
+ .evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions)
+ .map(result => result.copy(sideEffectFiles = result.sideEffectFiles ++ conditionSideEffectFiles))
+ case other =>
+ s"Condition should have evaluated to a Boolean but instead got ${other.value.womType.stableName}".invalidNel
 }
- }
 }
}
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/UnaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/UnaryOperatorEvaluators.scala
index 25395ae9239..b548183a800 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/UnaryOperatorEvaluators.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/UnaryOperatorEvaluators.scala
@@ -22,13 +22,12 @@ object UnaryOperatorEvaluators {
 override def evaluateValue(a: A,
 inputs: Map[String, WomValue],
 ioFunctionSet: IoFunctionSet,
- forCommandInstantiationOptions: Option[ForCommandInstantiationOptions])
- (implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] = {
+ forCommandInstantiationOptions: Option[ForCommandInstantiationOptions]
+ )(implicit expressionValueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[EvaluatedValue[_ <: WomValue]] =
 a.argument.evaluateValue(inputs, ioFunctionSet, forCommandInstantiationOptions) flatMap { arg =>
 op(arg.value).toErrorOr map { EvaluatedValue(_, arg.sideEffectFiles) }
 }
- }
 }
}
diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/LinkedGraphMaker.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/LinkedGraphMaker.scala
index 22862031a16..01573c30737 100644
--- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/LinkedGraphMaker.scala
+++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/LinkedGraphMaker.scala
@@ -20,14 +20,16 @@ object LinkedGraphMaker {
 def make(nodes: Set[WorkflowGraphElement],
 externalHandles: Set[GeneratedValueHandle],
 typeAliases: Map[String, WomType],
- callables: Map[String, Callable])
- (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[LinkedGraph] = {
+ callables: Map[String, Callable]
+ )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[LinkedGraph] = {
- val generatedValuesByGraphNodeValidation: ErrorOr[Map[WorkflowGraphElement, Set[GeneratedValueHandle]]] = nodes.toList.traverse{ node =>
- node.generatedValueHandles(typeAliases, callables).map(node -> _)
- } map (_.toMap)
+ val generatedValuesByGraphNodeValidation: ErrorOr[Map[WorkflowGraphElement, Set[GeneratedValueHandle]]] =
+ nodes.toList.traverse { node =>
+ node.generatedValueHandles(typeAliases, callables).map(node -> _)
+ } map (_.toMap)
- val consumedValuesByGraphNodeValidation: ErrorOr[Map[WorkflowGraphElement, Set[UnlinkedConsumedValueHook]]] = nodes.toList.traverse(n => n.graphElementConsumedValueHooks(typeAliases, callables).map(n -> _)).map(_.toMap)
+ val consumedValuesByGraphNodeValidation: ErrorOr[Map[WorkflowGraphElement, Set[UnlinkedConsumedValueHook]]] =
+ nodes.toList.traverse(n => n.graphElementConsumedValueHooks(typeAliases, callables).map(n -> _)).map(_.toMap)
 for {
 generatedValuesByGraphNode <- generatedValuesByGraphNodeValidation
@@ -41,7 +43,8 @@ object LinkedGraphMaker {
 def getOrdering(linkedGraph: LinkedGraph): ErrorOr[List[WorkflowGraphElement]] = {
- def nodeName(workflowGraphElement: WorkflowGraphElement): String = workflowGraphElement.toWdlV1.linesIterator.toList.headOption.getOrElse("Unnamed Element").replace("\"", "")
+ def nodeName(workflowGraphElement: WorkflowGraphElement): String =
+ workflowGraphElement.toWdlV1.linesIterator.toList.headOption.getOrElse("Unnamed Element").replace("\"", "")
 // Find the topological order in which we must create the graph nodes:
 val edges = linkedGraph.edges map { case LinkedGraphEdge(from, to) => DiEdge(from, to) }
@@ -58,9 +61,9 @@ object LinkedGraphMaker {
 // c -> a
 // a -> b
 // we want to start the cycle with the edge "a -> b"
- val edgeDict : Map[String, String] =
- cycle.value.edges.map{
- case graph.EdgeT(from, to) => nodeName(from) -> nodeName(to)
+ val edgeDict: Map[String, String] =
+ cycle.value.edges.map { case graph.EdgeT(from, to) =>
+ nodeName(from) -> nodeName(to)
 }.toMap
 val startPoint = edgeDict.keys.toVector.sorted.head
 var cursor = startPoint
@@ -79,7 +82,9 @@ object LinkedGraphMaker {
 |${cycleReport.mkString(System.lineSeparator)}""".stripMargin.invalidNel
 case _ =>
- val edgeStrings = linkedGraph.edges map { case LinkedGraphEdge(from, to) => s""""${nodeName(from)}" -> "${nodeName(to)}"""" }
+ val edgeStrings = linkedGraph.edges map { case LinkedGraphEdge(from, to) =>
+ s""""${nodeName(from)}" -> "${nodeName(to)}""""
+ }
 // sort the edges for determinism
 val edges = edgeStrings.toVector.sorted
 s"""This workflow contains an elusive cyclic dependency amongst these edges:
@@ -87,14 +92,16 @@
 }
 //
This asInstanceOf is not required, but it suppresses an incorrect intelliJ error highlight: - case Right(topologicalOrder) => topologicalOrder.toList.map(_.value).asInstanceOf[List[WorkflowGraphElement]].validNel + case Right(topologicalOrder) => + topologicalOrder.toList.map(_.value).asInstanceOf[List[WorkflowGraphElement]].validNel } } private def makeEdges(elements: Set[WorkflowGraphElement], - consumedValuesByGraphNode: Map[WorkflowGraphElement, Set[UnlinkedConsumedValueHook]], - consumedValueLookup: Map[UnlinkedConsumedValueHook, GeneratedValueHandle], - graphElementByGeneratedValueHandle: Map[GeneratedValueHandle, WorkflowGraphElement]): Set[LinkedGraphEdge] = for { + consumedValuesByGraphNode: Map[WorkflowGraphElement, Set[UnlinkedConsumedValueHook]], + consumedValueLookup: Map[UnlinkedConsumedValueHook, GeneratedValueHandle], + graphElementByGeneratedValueHandle: Map[GeneratedValueHandle, WorkflowGraphElement] + ): Set[LinkedGraphEdge] = for { downstreamElement <- elements hook <- consumedValuesByGraphNode(downstreamElement) upstreamHandle = consumedValueLookup(hook) @@ -105,23 +112,36 @@ object LinkedGraphMaker { typeAliases: Map[String, WomType], availableHandles: Set[GeneratedValueHandle], callables: Map[String, Callable] - ) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[Map[UnlinkedConsumedValueHook, GeneratedValueHandle]] = { - val consumedValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = nodes.toList.traverse(n => n.graphElementConsumedValueHooks(typeAliases, callables)).map(_.toSet.flatten) + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): ErrorOr[Map[UnlinkedConsumedValueHook, GeneratedValueHandle]] = { + val consumedValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = + nodes.toList.traverse(n => n.graphElementConsumedValueHooks(typeAliases, callables)).map(_.toSet.flatten) - consumedValidation.flatMap { consumed => makeConsumedValueLookup(consumed, availableHandles) } + consumedValidation.flatMap(consumed => makeConsumedValueLookup(consumed, availableHandles)) } - def makeConsumedValueLookup(consumedValues: Set[UnlinkedConsumedValueHook], availableHandles: Set[GeneratedValueHandle]): ErrorOr[Map[UnlinkedConsumedValueHook, GeneratedValueHandle]] = { + def makeConsumedValueLookup(consumedValues: Set[UnlinkedConsumedValueHook], + availableHandles: Set[GeneratedValueHandle] + ): ErrorOr[Map[UnlinkedConsumedValueHook, GeneratedValueHandle]] = { def isMatch(hook: UnlinkedConsumedValueHook, handle: GeneratedValueHandle): Boolean = (hook, handle) match { case (UnlinkedIdentifierHook(id1), GeneratedIdentifierValueHandle(id2, _)) => id1 == id2 - case (UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, _), GeneratedIdentifierValueHandle(id2, _)) if first == id2 => true - case (UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first1, second1), GeneratedCallOutputValueHandle(first2, second2, _)) if first1 == first2 && second1 == second2 => true - case (UnlinkedAfterCallHook(upstreamCallName), GeneratedCallFinishedHandle(finishedCallName)) if finishedCallName == upstreamCallName => true + case (UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, _), GeneratedIdentifierValueHandle(id2, _)) + if first == id2 => + true + case (UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first1, second1), + GeneratedCallOutputValueHandle(first2, second2, _) + ) if first1 == first2 && second1 == second2 => + true + case (UnlinkedAfterCallHook(upstreamCallName), 
GeneratedCallFinishedHandle(finishedCallName)) + if finishedCallName == upstreamCallName => + true case _ => false } - def findHandle(consumedValueHook: UnlinkedConsumedValueHook): ErrorOr[(UnlinkedConsumedValueHook, GeneratedValueHandle)] = { + def findHandle( + consumedValueHook: UnlinkedConsumedValueHook + ): ErrorOr[(UnlinkedConsumedValueHook, GeneratedValueHandle)] = { val maybeFoundHandle = availableHandles collectFirst { case handle if isMatch(consumedValueHook, handle) => handle } @@ -129,22 +149,25 @@ object LinkedGraphMaker { (maybeFoundHandle, consumedValueHook) match { case (Some(handle), hook) => (hook -> handle).validNel case (None, UnlinkedAfterCallHook(upstreamCallName)) => - val didYouMean = availableHandles.collect { - case after: GeneratedCallFinishedHandle => s"'${after.finishedCallName}'" - }.mkString("[", ", ", "]") - s"Cannot specify 'after $upstreamCallName': no such call exists. Available calls are: $didYouMean".invalidNel + val didYouMean = availableHandles + .collect { case after: GeneratedCallFinishedHandle => + s"'${after.finishedCallName}'" + } + .mkString("[", ", ", "]") + s"Cannot specify 'after $upstreamCallName': no such call exists. Available calls are: $didYouMean".invalidNel case (None, _) => - val didYouMean = availableHandles.map(h => s"'${h.linkableName}'").mkString("[", ", ", "]") - s"Cannot lookup value '${consumedValueHook.linkString}', it is never declared. Available values are: $didYouMean".invalidNel + val didYouMean = availableHandles.map(h => s"'${h.linkableName}'").mkString("[", ", ", "]") + s"Cannot lookup value '${consumedValueHook.linkString}', it is never declared. Available values are: $didYouMean".invalidNel } } - - consumedValues.toList.traverse { findHandle } map {_.toMap} + consumedValues.toList.traverse(findHandle) map { _.toMap } } - private def reverseMap(mapping: Map[WorkflowGraphElement, Set[GeneratedValueHandle]]): ErrorOr[Map[GeneratedValueHandle, WorkflowGraphElement]] = { + private def reverseMap( + mapping: Map[WorkflowGraphElement, Set[GeneratedValueHandle]] + ): ErrorOr[Map[GeneratedValueHandle, WorkflowGraphElement]] = { val reversed = for { nodeAndHandles <- mapping.toList node = nodeAndHandles._1 diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/package.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/package.scala index 3a9ccb96900..4a83b19e6cb 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/package.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/graph/package.scala @@ -18,130 +18,175 @@ import wom.callable.Callable.OutputDefinition import wom.types.{WomArrayType, WomOptionalType, WomType} package object graph { - implicit val graphElementUnlinkedValueGenerator: UnlinkedValueGenerator[WorkflowGraphElement] = new UnlinkedValueGenerator[WorkflowGraphElement] { - - override def generatedValueHandles(a: WorkflowGraphElement, - typeAliases: Map[String, WomType], - callables: Map[String, Callable]): ErrorOr[Set[GeneratedValueHandle]] = a match { - case DeclarationElement(typeElement, name, _) => - typeElement.determineWomType(typeAliases) map { t => Set(GeneratedIdentifierValueHandle(name, t)) } - case a: ScatterElement => a.generatedValueHandles(typeAliases, callables) - case a: IfElement => a.generatedValueHandles(typeAliases, callables) - case a: CallElement => a.generatedValueHandles(typeAliases, callables) - case other => s"Cannot generate generated values for 
WorkflowGraphNodeElement $other".invalidNel + implicit val graphElementUnlinkedValueGenerator: UnlinkedValueGenerator[WorkflowGraphElement] = + new UnlinkedValueGenerator[WorkflowGraphElement] { + + override def generatedValueHandles(a: WorkflowGraphElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + ): ErrorOr[Set[GeneratedValueHandle]] = a match { + case DeclarationElement(typeElement, name, _) => + typeElement.determineWomType(typeAliases) map { t => Set(GeneratedIdentifierValueHandle(name, t)) } + case a: ScatterElement => a.generatedValueHandles(typeAliases, callables) + case a: IfElement => a.generatedValueHandles(typeAliases, callables) + case a: CallElement => a.generatedValueHandles(typeAliases, callables) + case other => s"Cannot generate generated values for WorkflowGraphNodeElement $other".invalidNel + } } - } - - implicit val scatterElementUnlinkedValueGenerator: UnlinkedValueGenerator[ScatterElement] = new UnlinkedValueGenerator[ScatterElement] { - override def generatedValueHandles(a: ScatterElement, typeAliases: Map[String, WomType], callables: Map[String, Callable]): ErrorOr[Set[GeneratedValueHandle]] = { - a.graphElements.toList.traverse(_.generatedValueHandles(typeAliases, callables)).map(_.toSet.flatten) map { _.map { - case GeneratedIdentifierValueHandle(id, womType) => GeneratedIdentifierValueHandle(id, WomArrayType(womType)) - case GeneratedCallOutputValueHandle(first, second, womType) => GeneratedCallOutputValueHandle(first, second, WomArrayType(womType)) - case a: GeneratedCallFinishedHandle => a - } } + + implicit val scatterElementUnlinkedValueGenerator: UnlinkedValueGenerator[ScatterElement] = + new UnlinkedValueGenerator[ScatterElement] { + override def generatedValueHandles(a: ScatterElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + ): ErrorOr[Set[GeneratedValueHandle]] = + a.graphElements.toList.traverse(_.generatedValueHandles(typeAliases, callables)).map(_.toSet.flatten) map { + _.map { + case GeneratedIdentifierValueHandle(id, womType) => + GeneratedIdentifierValueHandle(id, WomArrayType(womType)) + case GeneratedCallOutputValueHandle(first, second, womType) => + GeneratedCallOutputValueHandle(first, second, WomArrayType(womType)) + case a: GeneratedCallFinishedHandle => a + } + } } - } - - implicit val IfElementUnlinkedValueGenerator: UnlinkedValueGenerator[IfElement] = new UnlinkedValueGenerator[IfElement] { - override def generatedValueHandles(a: IfElement, typeAliases: Map[String, WomType], callables: Map[String, Callable]): ErrorOr[Set[GeneratedValueHandle]] = { - a.graphElements.toList.traverse(_.generatedValueHandles(typeAliases, callables)).map(_.toSet.flatten) map { _.map { - case GeneratedIdentifierValueHandle(id, womType) => GeneratedIdentifierValueHandle(id, WomOptionalType(womType).flatOptionalType) - case GeneratedCallOutputValueHandle(first, second, womType) => GeneratedCallOutputValueHandle(first, second, WomOptionalType(womType).flatOptionalType) - case a: GeneratedCallFinishedHandle => a - } } + + implicit val IfElementUnlinkedValueGenerator: UnlinkedValueGenerator[IfElement] = + new UnlinkedValueGenerator[IfElement] { + override def generatedValueHandles(a: IfElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + ): ErrorOr[Set[GeneratedValueHandle]] = + a.graphElements.toList.traverse(_.generatedValueHandles(typeAliases, callables)).map(_.toSet.flatten) map { + _.map { + case GeneratedIdentifierValueHandle(id, womType) => + 
GeneratedIdentifierValueHandle(id, WomOptionalType(womType).flatOptionalType) + case GeneratedCallOutputValueHandle(first, second, womType) => + GeneratedCallOutputValueHandle(first, second, WomOptionalType(womType).flatOptionalType) + case a: GeneratedCallFinishedHandle => a + } + } } - } - implicit val callElementUnlinkedValueGenerator: UnlinkedValueGenerator[CallElement] = new UnlinkedValueGenerator[CallElement] { - override def generatedValueHandles(a: CallElement, typeAliases: Map[String, WomType], callables: Map[String, Callable]): ErrorOr[Set[GeneratedValueHandle]] = { - def callableOutputToHandle(callAlias: String)(callableOutput: OutputDefinition): GeneratedValueHandle = { - GeneratedCallOutputValueHandle(callAlias, callableOutput.name, callableOutput.womType) + implicit val callElementUnlinkedValueGenerator: UnlinkedValueGenerator[CallElement] = + new UnlinkedValueGenerator[CallElement] { + override def generatedValueHandles(a: CallElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + ): ErrorOr[Set[GeneratedValueHandle]] = { + def callableOutputToHandle(callAlias: String)(callableOutput: OutputDefinition): GeneratedValueHandle = + GeneratedCallOutputValueHandle(callAlias, callableOutput.name, callableOutput.womType) + + callables.get(a.callableReference) match { + case Some(callable) => + val callAlias = a.alias.getOrElse(callable.name) + val outputs = callable.outputs.map(callableOutputToHandle(callAlias)).toSet + (outputs + GeneratedCallFinishedHandle(callAlias)).validNel + case None => + s"Cannot generate outputs for 'call ${a.callableReference}'. No such callable exists in [${callables.keySet + .mkString(", ")}]".invalidNel + } } + } - callables.get(a.callableReference) match { - case Some(callable) => - val callAlias = a.alias.getOrElse(callable.name) - val outputs = callable.outputs.map(callableOutputToHandle(callAlias)).toSet - (outputs + GeneratedCallFinishedHandle(callAlias)).validNel - case None => s"Cannot generate outputs for 'call ${a.callableReference}'. 
No such callable exists in [${callables.keySet.mkString(", ")}]".invalidNel + implicit val graphElementUnlinkedValueConsumer: GraphElementValueConsumer[WorkflowGraphElement] = + new GraphElementValueConsumer[WorkflowGraphElement] { + override def graphElementConsumedValueHooks(a: WorkflowGraphElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): ErrorOr[Set[UnlinkedConsumedValueHook]] = a match { + case InputDeclarationElement(_, _, None) => Set.empty[UnlinkedConsumedValueHook].validNel + case DeclarationElement(_, _, Some(expr)) => expr.expressionConsumedValueHooks.validNel + case a: ScatterElement => a.graphElementConsumedValueHooks(typeAliases, callables) + case a: IfElement => a.graphElementConsumedValueHooks(typeAliases, callables) + case a: CallElement => a.graphElementConsumedValueHooks(typeAliases, callables) + // TODO fill in other expression types + case other => throw new Exception(s"Cannot generate consumed values for WorkflowGraphNodeElement $other") } } - } - - implicit val graphElementUnlinkedValueConsumer: GraphElementValueConsumer[WorkflowGraphElement] = new GraphElementValueConsumer[WorkflowGraphElement] { - override def graphElementConsumedValueHooks(a: WorkflowGraphElement, - typeAliases: Map[String, WomType], - callables: Map[String, Callable]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[Set[UnlinkedConsumedValueHook]] = a match { - case InputDeclarationElement(_, _, None) => Set.empty[UnlinkedConsumedValueHook].validNel - case DeclarationElement(_, _, Some(expr)) => expr.expressionConsumedValueHooks.validNel - case a: ScatterElement => a.graphElementConsumedValueHooks(typeAliases, callables) - case a: IfElement => a.graphElementConsumedValueHooks(typeAliases, callables) - case a: CallElement => a.graphElementConsumedValueHooks(typeAliases, callables) - // TODO fill in other expression types - case other => throw new Exception(s"Cannot generate consumed values for WorkflowGraphNodeElement $other") - } - } - - implicit val callElementUnlinkedValueConsumer: GraphElementValueConsumer[CallElement] = new GraphElementValueConsumer[CallElement] { - override def graphElementConsumedValueHooks(a: CallElement, - typeAliases: Map[String, WomType], - callables: Map[String, Callable]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[Set[UnlinkedConsumedValueHook]] = { - import wdl.transforms.base.linking.expression.consumed.LiteralEvaluators.kvPairUnlinkedValueConsumer - val consumedByInputSection: Set[UnlinkedConsumedValueHook] = a.body match { - case Some(callBodyElement: CallBodyElement) => callBodyElement.inputs.flatMap(_.expressionConsumedValueHooks).toSet - case None => Set.empty[UnlinkedConsumedValueHook] - } - - val consumedByAfterRequirement: Vector[UnlinkedConsumedValueHook] = a.afters map UnlinkedAfterCallHook.apply - (consumedByInputSection ++ consumedByAfterRequirement).validNel - } - } - - implicit val scatterElementUnlinkedValueConsumer: GraphElementValueConsumer[ScatterElement] = new GraphElementValueConsumer[ScatterElement] { - override def graphElementConsumedValueHooks(a: ScatterElement, - typeAliases: Map[String, WomType], - callables: Map[String, Callable]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[Set[UnlinkedConsumedValueHook]] = { - val bodyConsumedValuesValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = 
a.graphElements.toList.traverse(_.graphElementConsumedValueHooks(typeAliases, callables)).map(_.toSet.flatten) - val scatterExpressionHooks: Set[UnlinkedConsumedValueHook] = a.scatterExpression.expressionConsumedValueHooks - - val bodyGeneratedValuesValidation: ErrorOr[Set[String]] = a.graphElements.toList.traverse(_.generatedValueHandles(typeAliases, callables)).map(_.toSet.flatten.map(_.linkableName)) - - (bodyConsumedValuesValidation, bodyGeneratedValuesValidation) mapN { (bodyConsumedValues, bodyGeneratedValues) => - val unsatisfiedBodyElementHooks = bodyConsumedValues.filterNot { - case UnlinkedIdentifierHook(id) => bodyGeneratedValues.contains(id) || id == a.scatterVariableName - case UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) => - bodyGeneratedValues.contains(first) || bodyGeneratedValues.contains(s"$first.$second") || a.scatterVariableName == first - case after: UnlinkedAfterCallHook => bodyGeneratedValues.contains(after.linkString) + implicit val callElementUnlinkedValueConsumer: GraphElementValueConsumer[CallElement] = + new GraphElementValueConsumer[CallElement] { + override def graphElementConsumedValueHooks(a: CallElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): ErrorOr[Set[UnlinkedConsumedValueHook]] = { + import wdl.transforms.base.linking.expression.consumed.LiteralEvaluators.kvPairUnlinkedValueConsumer + val consumedByInputSection: Set[UnlinkedConsumedValueHook] = a.body match { + case Some(callBodyElement: CallBodyElement) => + callBodyElement.inputs.flatMap(_.expressionConsumedValueHooks).toSet + case None => Set.empty[UnlinkedConsumedValueHook] } - unsatisfiedBodyElementHooks ++ scatterExpressionHooks + val consumedByAfterRequirement: Vector[UnlinkedConsumedValueHook] = a.afters map UnlinkedAfterCallHook.apply + + (consumedByInputSection ++ consumedByAfterRequirement).validNel } } - } - - implicit val ifElementUnlinkedValueConsumer: GraphElementValueConsumer[IfElement] = new GraphElementValueConsumer[IfElement] { - override def graphElementConsumedValueHooks(a: IfElement, - typeAliases: Map[String, WomType], - callables: Map[String, Callable]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): ErrorOr[Set[UnlinkedConsumedValueHook]] = { - val bodyConsumedValuesValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = a.graphElements.toList.traverse(_.graphElementConsumedValueHooks(typeAliases, callables)).map(_.toSet.flatten) - val ifExpressionHooks: Set[UnlinkedConsumedValueHook] = a.conditionExpression.expressionConsumedValueHooks - - val bodyGeneratedValuesValidation: ErrorOr[Set[String]] = a.graphElements.toList.traverse(_.generatedValueHandles(typeAliases, callables)).map(_.toSet.flatten.map(_.linkableName)) - - (bodyConsumedValuesValidation, bodyGeneratedValuesValidation) mapN { (bodyConsumedValues, bodyGeneratedValues) => - val unsatisfiedBodyElementHooks = bodyConsumedValues.filterNot { - case UnlinkedIdentifierHook(id) => bodyGeneratedValues.contains(id) - case UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) => bodyGeneratedValues.contains(first) || bodyGeneratedValues.contains(s"$first.$second") - case after: UnlinkedAfterCallHook => bodyGeneratedValues.contains(after.linkString) + + implicit val scatterElementUnlinkedValueConsumer: GraphElementValueConsumer[ScatterElement] = + new GraphElementValueConsumer[ScatterElement] { + override def graphElementConsumedValueHooks(a: 
ScatterElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): ErrorOr[Set[UnlinkedConsumedValueHook]] = { + val bodyConsumedValuesValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = + a.graphElements.toList.traverse(_.graphElementConsumedValueHooks(typeAliases, callables)).map(_.toSet.flatten) + val scatterExpressionHooks: Set[UnlinkedConsumedValueHook] = a.scatterExpression.expressionConsumedValueHooks + + val bodyGeneratedValuesValidation: ErrorOr[Set[String]] = a.graphElements.toList + .traverse(_.generatedValueHandles(typeAliases, callables)) + .map(_.toSet.flatten.map(_.linkableName)) + + (bodyConsumedValuesValidation, bodyGeneratedValuesValidation) mapN { + (bodyConsumedValues, bodyGeneratedValues) => + val unsatisfiedBodyElementHooks = bodyConsumedValues.filterNot { + case UnlinkedIdentifierHook(id) => bodyGeneratedValues.contains(id) || id == a.scatterVariableName + case UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) => + bodyGeneratedValues.contains(first) || bodyGeneratedValues.contains( + s"$first.$second" + ) || a.scatterVariableName == first + case after: UnlinkedAfterCallHook => bodyGeneratedValues.contains(after.linkString) + } + + unsatisfiedBodyElementHooks ++ scatterExpressionHooks } + } + } - unsatisfiedBodyElementHooks ++ ifExpressionHooks + implicit val ifElementUnlinkedValueConsumer: GraphElementValueConsumer[IfElement] = + new GraphElementValueConsumer[IfElement] { + override def graphElementConsumedValueHooks(a: IfElement, + typeAliases: Map[String, WomType], + callables: Map[String, Callable] + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement] + ): ErrorOr[Set[UnlinkedConsumedValueHook]] = { + val bodyConsumedValuesValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = + a.graphElements.toList.traverse(_.graphElementConsumedValueHooks(typeAliases, callables)).map(_.toSet.flatten) + val ifExpressionHooks: Set[UnlinkedConsumedValueHook] = a.conditionExpression.expressionConsumedValueHooks + + val bodyGeneratedValuesValidation: ErrorOr[Set[String]] = a.graphElements.toList + .traverse(_.generatedValueHandles(typeAliases, callables)) + .map(_.toSet.flatten.map(_.linkableName)) + + (bodyConsumedValuesValidation, bodyGeneratedValuesValidation) mapN { + (bodyConsumedValues, bodyGeneratedValues) => + val unsatisfiedBodyElementHooks = bodyConsumedValues.filterNot { + case UnlinkedIdentifierHook(id) => bodyGeneratedValues.contains(id) + case UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) => + bodyGeneratedValues.contains(first) || bodyGeneratedValues.contains(s"$first.$second") + case after: UnlinkedAfterCallHook => bodyGeneratedValues.contains(after.linkString) + } + + unsatisfiedBodyElementHooks ++ ifExpressionHooks + } } } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/typemakers/package.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/typemakers/package.scala index 6213f438089..00f85727f97 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/typemakers/package.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/typemakers/package.scala @@ -11,51 +11,51 @@ import wdl.model.draft3.graph.expression.WomTypeMaker.ops._ import wom.types._ package object typemakers { - implicit val primitiveTypeElementConverter: WomTypeMaker[PrimitiveTypeElement] = new 
WomTypeMaker[PrimitiveTypeElement] { - override def determineWomType(a: PrimitiveTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = { - a.primitiveType.validNel + implicit val primitiveTypeElementConverter: WomTypeMaker[PrimitiveTypeElement] = + new WomTypeMaker[PrimitiveTypeElement] { + override def determineWomType(a: PrimitiveTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = + a.primitiveType.validNel } - } implicit val arrayTypeElementConverter: WomTypeMaker[ArrayTypeElement] = new WomTypeMaker[ArrayTypeElement] { - override def determineWomType(a: ArrayTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = { + override def determineWomType(a: ArrayTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = a.inner.determineWomType(availableAliases) map { inner => WomArrayType(inner) } - } } implicit val mapTypeElementConverter: WomTypeMaker[MapTypeElement] = new WomTypeMaker[MapTypeElement] { - override def determineWomType(a: MapTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = { - (a.keyType.determineWomType(availableAliases), - a.valueType.determineWomType(availableAliases)) mapN { (keyType, valueType) => WomMapType(keyType, valueType) } - } + override def determineWomType(a: MapTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = + (a.keyType.determineWomType(availableAliases), a.valueType.determineWomType(availableAliases)) mapN { + (keyType, valueType) => WomMapType(keyType, valueType) + } } implicit val optionalTypeElementConverter: WomTypeMaker[OptionalTypeElement] = new WomTypeMaker[OptionalTypeElement] { - override def determineWomType(a: OptionalTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = { + override def determineWomType(a: OptionalTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = a.maybeType.determineWomType(availableAliases) map { inner => WomOptionalType(inner) } - } } implicit val nonEmptyTypeElementConverter: WomTypeMaker[NonEmptyTypeElement] = new WomTypeMaker[NonEmptyTypeElement] { - override def determineWomType(a: NonEmptyTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = { + override def determineWomType(a: NonEmptyTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = a.arrayType.determineWomType(availableAliases) flatMap { case WomArrayType(memberType) => WomNonEmptyArrayType(memberType).validNel case other: WomType => s"Cannot declare a non-empty $other (+ is only applicable to Array[_] types)".invalidNel } - } } implicit val pairTypeElementConverter: WomTypeMaker[PairTypeElement] = new WomTypeMaker[PairTypeElement] { - override def determineWomType(a: PairTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = { - (a.leftType.determineWomType(availableAliases), - a.rightType.determineWomType(availableAliases)) mapN { (keyType, valueType) => WomPairType(keyType, valueType) } - } + override def determineWomType(a: PairTypeElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = + (a.leftType.determineWomType(availableAliases), a.rightType.determineWomType(availableAliases)) mapN { + (keyType, valueType) => WomPairType(keyType, valueType) + } } implicit val structTypeElementConverter: WomTypeMaker[TypeAliasElement] = new WomTypeMaker[TypeAliasElement] { - override def determineWomType(a: TypeAliasElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = { - availableAliases.get(a.alias).toErrorOr(s"No struct 
definition for '${a.alias}' found in available structs: [${availableAliases.values.mkString(", ")}]") - } + override def determineWomType(a: TypeAliasElement, availableAliases: Map[String, WomType]): ErrorOr[WomType] = + availableAliases + .get(a.alias) + .toErrorOr( + s"No struct definition for '${a.alias}' found in available structs: [${availableAliases.values.mkString(", ")}]" + ) } implicit val typeElementToWomType: WomTypeMaker[TypeElement] = new WomTypeMaker[TypeElement] { diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriter.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriter.scala index 7df0cf44810..01090b1e7cc 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriter.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriter.scala @@ -10,6 +10,6 @@ trait WdlWriter[A] { object WdlWriter { // Stolen from WomGraph.scala def indent(s: String) = s.linesIterator.map(x => s" $x").mkString(System.lineSeparator) - def combine(ss: Iterable[String]) = ss.mkString(start="", sep=System.lineSeparator, end=System.lineSeparator) + def combine(ss: Iterable[String]) = ss.mkString(start = "", sep = System.lineSeparator, end = System.lineSeparator) def indentAndCombine(ss: Iterable[String]) = combine(ss.map(indent)) } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala index 0597b9c830e..70cc56b5a65 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala @@ -16,15 +16,14 @@ object WdlWriterImpl { import WdlWriter.ops._ implicit val ifWriter: WdlWriter[IfElement] = new WdlWriter[IfElement] { - override def toWdlV1(a: IfElement) = { + override def toWdlV1(a: IfElement) = s"""if (${a.conditionExpression.toWdlV1}) { |${indentAndCombine(a.graphElements.map(_.toWdlV1))}}""".stripMargin - } } implicit val stringPieceWriter: WdlWriter[StringPiece] = new WdlWriter[StringPiece] { override def toWdlV1(a: StringPiece): String = a match { - case a: StringLiteral => a.value + case a: StringLiteral => a.value case a: StringPlaceholder => "~{" + a.expr.toWdlV1 + "}" case NewlineEscape => "\\n" case TabEscape => "\\t" @@ -35,7 +34,8 @@ object WdlWriterImpl { } } - implicit val indexAccessWriter: WdlWriter[IndexAccess] = a => s"${expressionElementWriter.toWdlV1(a.expressionElement)}[${expressionElementWriter.toWdlV1(a.index)}]" + implicit val indexAccessWriter: WdlWriter[IndexAccess] = a => + s"${expressionElementWriter.toWdlV1(a.expressionElement)}[${expressionElementWriter.toWdlV1(a.index)}]" // Recursive references must be explicit implicit val expressionElementWriter: WdlWriter[ExpressionElement] = new WdlWriter[ExpressionElement] { @@ -45,25 +45,31 @@ object WdlWriterImpl { case a: StringExpression => "\"" + a.pieces.map(_.toWdlV1).mkString + "\"" case a: StringLiteral => "\"" + StringEscapeUtils.escapeJava(a.value) + "\"" case a: ObjectLiteral => - "object { " + a.elements.map { pair => - pair._1 + ": " + expressionElementWriter.toWdlV1(pair._2) - }.mkString(", ") + " }" + "object { " + a.elements + .map { pair => + pair._1 + ": " + expressionElementWriter.toWdlV1(pair._2) + } + .mkString(", ") + " }" case a: ArrayLiteral => "[" + a.elements.map(expressionElementWriter.toWdlV1).mkString(", ") + "]" 
case a: MapLiteral => - "{ " + a.elements.map { pair => - expressionElementWriter.toWdlV1(pair._1) + ": " + expressionElementWriter.toWdlV1(pair._2) - }.mkString(", ") + " }" + "{ " + a.elements + .map { pair => + expressionElementWriter.toWdlV1(pair._1) + ": " + expressionElementWriter.toWdlV1(pair._2) + } + .mkString(", ") + " }" case a: PairLiteral => s"(${expressionElementWriter.toWdlV1(a.left)}, ${expressionElementWriter.toWdlV1(a.right)})" case a: UnaryOperation => a.toWdlV1 case a: BinaryOperation => a.toWdlV1 case a: TernaryIf => - s"if ${expressionElementWriter.toWdlV1(a.condition)} then ${expressionElementWriter.toWdlV1(a.ifTrue)} else ${expressionElementWriter.toWdlV1(a.ifFalse)}" + s"if ${expressionElementWriter.toWdlV1(a.condition)} then ${expressionElementWriter.toWdlV1(a.ifTrue)} else ${expressionElementWriter + .toWdlV1(a.ifFalse)}" case a: FunctionCallElement => a.toWdlV1 case a: IdentifierLookup => a.identifier case a: IdentifierMemberAccess => a.toWdlV1 - case a: ExpressionMemberAccess => s"${expressionElementWriter.toWdlV1(a.expression)}.${a.memberAccessTail.toList.mkString(".")}" + case a: ExpressionMemberAccess => + s"${expressionElementWriter.toWdlV1(a.expression)}.${a.memberAccessTail.toList.mkString(".")}" case a: IndexAccess => a.toWdlV1 case a: ExpressionLiteralElement => a.expression } @@ -71,38 +77,37 @@ object WdlWriterImpl { implicit val unaryOperationWriter: WdlWriter[UnaryOperation] = new WdlWriter[UnaryOperation] { override def toWdlV1(a: UnaryOperation): String = a match { - case a: LogicalNot => s"!(${a.argument.toWdlV1})" + case a: LogicalNot => s"!(${a.argument.toWdlV1})" case a: UnaryNegation => s"-(${a.argument.toWdlV1})" - case a: UnaryPlus => s"+(${a.argument.toWdlV1})" + case a: UnaryPlus => s"+(${a.argument.toWdlV1})" } } implicit val identifierMemberAccessWriter: WdlWriter[IdentifierMemberAccess] = new WdlWriter[IdentifierMemberAccess] { - override def toWdlV1(a: IdentifierMemberAccess): String = { + override def toWdlV1(a: IdentifierMemberAccess): String = s"${a.first}.${a.second}" + (if (a.memberAccessTail.nonEmpty) { - "." + a.memberAccessTail.mkString(".") - } else { - "" - }) - } + "." 
+ a.memberAccessTail.mkString(".") + } else { + "" + }) } implicit val binaryOperationWriter: WdlWriter[BinaryOperation] = new WdlWriter[BinaryOperation] { override def toWdlV1(a: BinaryOperation) = { val op = a match { - case _: LogicalOr => "||" - case _: LogicalAnd => "&&" - case _: Equals => "==" - case _: NotEquals => "!=" - case _: LessThan => "<" - case _: LessThanOrEquals => "<=" - case _: GreaterThan => ">" + case _: LogicalOr => "||" + case _: LogicalAnd => "&&" + case _: Equals => "==" + case _: NotEquals => "!=" + case _: LessThan => "<" + case _: LessThanOrEquals => "<=" + case _: GreaterThan => ">" case _: GreaterThanOrEquals => ">=" - case _: Add => "+" - case _: Subtract => "-" - case _: Multiply => "*" - case _: Divide => "/" - case _: Remainder => "%" + case _: Add => "+" + case _: Subtract => "-" + case _: Multiply => "*" + case _: Divide => "/" + case _: Remainder => "%" } s"(${a.left.toWdlV1} $op ${a.right.toWdlV1})" @@ -127,14 +132,13 @@ object WdlWriterImpl { } implicit val callBodyElement: WdlWriter[CallBodyElement] = new WdlWriter[CallBodyElement] { - override def toWdlV1(a: CallBodyElement): String = { + override def toWdlV1(a: CallBodyElement): String = if (a.inputs.nonEmpty) { s"""input: |${indent(indent(a.inputs.map(_.toWdlV1).mkString(",\n")))}""".stripMargin } else { "" } - } } object CallElementWriter { @@ -166,19 +170,22 @@ object WdlWriterImpl { } } - implicit val intermediateValueDeclarationElementWriter: WdlWriter[IntermediateValueDeclarationElement] = new WdlWriter[IntermediateValueDeclarationElement] { - override def toWdlV1(a: IntermediateValueDeclarationElement) = - s"${a.typeElement.toWdlV1} ${a.name} = ${a.expression.toWdlV1}" - } + implicit val intermediateValueDeclarationElementWriter: WdlWriter[IntermediateValueDeclarationElement] = + new WdlWriter[IntermediateValueDeclarationElement] { + override def toWdlV1(a: IntermediateValueDeclarationElement) = + s"${a.typeElement.toWdlV1} ${a.name} = ${a.expression.toWdlV1}" + } implicit val typeElementWriter: WdlWriter[TypeElement] = new WdlWriter[TypeElement] { override def toWdlV1(a: TypeElement) = a match { case a: PrimitiveTypeElement => a.primitiveType.toWdlV1 case a: ArrayTypeElement => s"Array[${typeElementWriter.toWdlV1(a.inner)}]" - case a: MapTypeElement => s"Map[${typeElementWriter.toWdlV1(a.keyType)}, ${typeElementWriter.toWdlV1(a.valueType)}]" + case a: MapTypeElement => + s"Map[${typeElementWriter.toWdlV1(a.keyType)}, ${typeElementWriter.toWdlV1(a.valueType)}]" case a: OptionalTypeElement => s"${typeElementWriter.toWdlV1(a.maybeType)}?" 
case a: NonEmptyTypeElement => s"${typeElementWriter.toWdlV1(a.arrayType)}+" - case a: PairTypeElement => s"Pair[${typeElementWriter.toWdlV1(a.leftType)}, ${typeElementWriter.toWdlV1(a.rightType)}]" + case a: PairTypeElement => + s"Pair[${typeElementWriter.toWdlV1(a.leftType)}, ${typeElementWriter.toWdlV1(a.rightType)}]" case _: ObjectTypeElement.type => "Object" case a: TypeAliasElement => a.alias } @@ -188,58 +195,60 @@ object WdlWriterImpl { override def toWdlV1(a: WomPrimitiveType) = a.stableName } - implicit val workflowDefinitionElementWriter: WdlWriter[WorkflowDefinitionElement] = new WdlWriter[WorkflowDefinitionElement] { - override def toWdlV1(a: WorkflowDefinitionElement) = { - val inputs = a.inputsSection match { - case Some(i) => i.toWdlV1 - case None => "" - } - val outputs = a.outputsSection match { - case Some(o) => o.toWdlV1 - case None => "" + implicit val workflowDefinitionElementWriter: WdlWriter[WorkflowDefinitionElement] = + new WdlWriter[WorkflowDefinitionElement] { + override def toWdlV1(a: WorkflowDefinitionElement) = { + val inputs = a.inputsSection match { + case Some(i) => i.toWdlV1 + case None => "" + } + val outputs = a.outputsSection match { + case Some(o) => o.toWdlV1 + case None => "" + } + + // Readability / cosmetic reordering + // TODO: use graph ordering + // https://github.com/broadinstitute/cromwell/issues/3796 + val inputDeclarationElements: List[InputDeclarationElement] = + a.graphElements.toList.filterByType[InputDeclarationElement] + val intermediateValueDeclarationElements: List[IntermediateValueDeclarationElement] = + a.graphElements.toList.filterByType[IntermediateValueDeclarationElement] + val ifElements: List[IfElement] = + a.graphElements.toList.filterByType[IfElement] + val scatterElements: List[ScatterElement] = + a.graphElements.toList.filterByType[ScatterElement] + val callElements: List[CallElement] = + a.graphElements.toList.filterByType[CallElement] + val outputDeclarationElements: List[OutputDeclarationElement] = + a.graphElements.toList.filterByType[OutputDeclarationElement] + + val combined: List[WorkflowGraphElement] = inputDeclarationElements ++ + intermediateValueDeclarationElements ++ + ifElements ++ + scatterElements ++ + callElements ++ + outputDeclarationElements + + s"""workflow ${a.name} { + |${indent(inputs)} + |${indentAndCombine(combined.map(_.toWdlV1))} + |${indent(outputs)} + |}""".stripMargin } - - // Readability / cosmetic reordering - // TODO: use graph ordering - // https://github.com/broadinstitute/cromwell/issues/3796 - val inputDeclarationElements: List[InputDeclarationElement] = - a.graphElements.toList.filterByType[InputDeclarationElement] - val intermediateValueDeclarationElements: List[IntermediateValueDeclarationElement] = - a.graphElements.toList.filterByType[IntermediateValueDeclarationElement] - val ifElements: List[IfElement] = - a.graphElements.toList.filterByType[IfElement] - val scatterElements: List[ScatterElement] = - a.graphElements.toList.filterByType[ScatterElement] - val callElements: List[CallElement] = - a.graphElements.toList.filterByType[CallElement] - val outputDeclarationElements: List[OutputDeclarationElement] = - a.graphElements.toList.filterByType[OutputDeclarationElement] - - val combined: List[WorkflowGraphElement] = inputDeclarationElements ++ - intermediateValueDeclarationElements ++ - ifElements ++ - scatterElements ++ - callElements ++ - outputDeclarationElements - - s"""workflow ${a.name} { - |${indent(inputs)} - |${indentAndCombine(combined.map(_.toWdlV1))} - 
|${indent(outputs)} - |}""".stripMargin } - } - implicit val runtimeAttributesSectionElementWriter: WdlWriter[RuntimeAttributesSectionElement] = new WdlWriter[RuntimeAttributesSectionElement] { - override def toWdlV1(a: RuntimeAttributesSectionElement): String = { - val runtimeMap = a.runtimeAttributes map { pair => - s"${pair.key}: ${pair.value.toWdlV1}" - } + implicit val runtimeAttributesSectionElementWriter: WdlWriter[RuntimeAttributesSectionElement] = + new WdlWriter[RuntimeAttributesSectionElement] { + override def toWdlV1(a: RuntimeAttributesSectionElement): String = { + val runtimeMap = a.runtimeAttributes map { pair => + s"${pair.key}: ${pair.value.toWdlV1}" + } - s"""runtime { - |${indentAndCombine(runtimeMap)}}""".stripMargin + s"""runtime { + |${indentAndCombine(runtimeMap)}}""".stripMargin + } } - } implicit val metaValueElementWriter: WdlWriter[MetaValueElement] = new WdlWriter[MetaValueElement] { override def toWdlV1(a: MetaValueElement): String = a match { @@ -249,14 +258,15 @@ object WdlWriterImpl { case a: MetaValueElementInteger => a.value.toString case a: MetaValueElementString => "\"" + a.value + "\"" case a: MetaValueElementObject => - "{" + a.value.map { pair => - s"${pair._1}: ${metaValueElementWriter.toWdlV1(pair._2)}" - }.mkString(", ") + "}" + "{" + a.value + .map { pair => + s"${pair._1}: ${metaValueElementWriter.toWdlV1(pair._2)}" + } + .mkString(", ") + "}" case a: MetaValueElementArray => "[" + a.value.map(metaValueElementWriter.toWdlV1).mkString(", ") + "]" } } - implicit val metaSectionElementWriter: WdlWriter[MetaSectionElement] = new WdlWriter[MetaSectionElement] { override def toWdlV1(a: MetaSectionElement): String = { val map = a.meta.map { pair => @@ -268,62 +278,62 @@ object WdlWriterImpl { } } - implicit val parameterMetaSectionElementWriter: WdlWriter[ParameterMetaSectionElement] = new WdlWriter[ParameterMetaSectionElement] { - override def toWdlV1(a: ParameterMetaSectionElement): String = { - val map = a.metaAttributes.map { pair => - s"${pair._1}: ${pair._2.toWdlV1}" + implicit val parameterMetaSectionElementWriter: WdlWriter[ParameterMetaSectionElement] = + new WdlWriter[ParameterMetaSectionElement] { + override def toWdlV1(a: ParameterMetaSectionElement): String = { + val map = a.metaAttributes.map { pair => + s"${pair._1}: ${pair._2.toWdlV1}" + } + s"""parameter_meta { + |${indentAndCombine(map)} + |}""".stripMargin } - s"""parameter_meta { - |${indentAndCombine(map)} - |}""".stripMargin } - } - implicit val taskDefinitionTypeElementWriter: WdlWriter[TaskDefinitionElement] = new WdlWriter[TaskDefinitionElement] { - override def toWdlV1(a: TaskDefinitionElement) = { - val inputs = a.inputsSection match { - case Some(i) => i.toWdlV1 - case None => "" - } - val outputs = a.outputsSection match { - case Some(o) => o.toWdlV1 - case None => "" - } - val runtime = a.runtimeSection match { - case Some(r) => r.toWdlV1 - case None => "" - } - val meta = a.metaSection match { - case Some(m) => m.toWdlV1 - case None => "" - } - val parameterMeta = a.parameterMetaSection match { - case Some(p) => p.toWdlV1 - case None => "" + implicit val taskDefinitionTypeElementWriter: WdlWriter[TaskDefinitionElement] = + new WdlWriter[TaskDefinitionElement] { + override def toWdlV1(a: TaskDefinitionElement) = { + val inputs = a.inputsSection match { + case Some(i) => i.toWdlV1 + case None => "" + } + val outputs = a.outputsSection match { + case Some(o) => o.toWdlV1 + case None => "" + } + val runtime = a.runtimeSection match { + case Some(r) => r.toWdlV1 + case None => 
"" + } + val meta = a.metaSection match { + case Some(m) => m.toWdlV1 + case None => "" + } + val parameterMeta = a.parameterMetaSection match { + case Some(p) => p.toWdlV1 + case None => "" + } + + s"""task ${a.name} { + |${indent(inputs)} + |${indentAndCombine(a.declarations.map(_.toWdlV1))} + |${indent(outputs)} + |${a.commandSection.toWdlV1} + |${indent(runtime)} + |${indent(meta)} + |${indent(parameterMeta)}}""".stripMargin } - - s"""task ${a.name} { - |${indent(inputs)} - |${indentAndCombine(a.declarations.map(_.toWdlV1))} - |${indent(outputs)} - |${a.commandSection.toWdlV1} - |${indent(runtime)} - |${indent(meta)} - |${indent(parameterMeta)}}""".stripMargin } - } implicit val commandSectionElementWriter: WdlWriter[CommandSectionElement] = new WdlWriter[CommandSectionElement] { - override def toWdlV1(a: CommandSectionElement): String = { + override def toWdlV1(a: CommandSectionElement): String = s""" command <<< |${combine(a.parts.map(_.toWdlV1))} >>>""".stripMargin - } } implicit val commandSectionLineWriter: WdlWriter[CommandSectionLine] = new WdlWriter[CommandSectionLine] { - override def toWdlV1(a: CommandSectionLine): String = { + override def toWdlV1(a: CommandSectionLine): String = a.parts.map(_.toWdlV1).mkString - } } implicit val commandPartElementWriter: WdlWriter[CommandPartElement] = new WdlWriter[CommandPartElement] { @@ -339,49 +349,49 @@ object WdlWriterImpl { } } - implicit val placeholderAttributeSetWriter: WdlWriter[PlaceholderAttributeSet] = new WdlWriter[PlaceholderAttributeSet] { - override def toWdlV1(a: PlaceholderAttributeSet): String = { - val attrStrings = Map( - "sep" -> a.sepAttribute, - "true" -> a.trueAttribute, - "false" -> a.falseAttribute, - "default" -> a.defaultAttribute - ).collect({ case (attrKey: String, Some(value)) => attrKey + "=\"" + value + "\"" }) + implicit val placeholderAttributeSetWriter: WdlWriter[PlaceholderAttributeSet] = + new WdlWriter[PlaceholderAttributeSet] { + override def toWdlV1(a: PlaceholderAttributeSet): String = { + val attrStrings = Map( + "sep" -> a.sepAttribute, + "true" -> a.trueAttribute, + "false" -> a.falseAttribute, + "default" -> a.defaultAttribute + ).collect { case (attrKey: String, Some(value)) => attrKey + "=\"" + value + "\"" } - if (attrStrings.isEmpty) "" else attrStrings.mkString(start = "", sep = " ", end = " ") + if (attrStrings.isEmpty) "" else attrStrings.mkString(start = "", sep = " ", end = " ") + } } - } implicit val inputsSectionElementWriter: WdlWriter[InputsSectionElement] = new WdlWriter[InputsSectionElement] { - override def toWdlV1(a: InputsSectionElement): String = { + override def toWdlV1(a: InputsSectionElement): String = s"""input { |${indentAndCombine(a.inputDeclarations.map(_.toWdlV1))}}""".stripMargin - } } - implicit val inputDeclarationElementWriter: WdlWriter[InputDeclarationElement] = new WdlWriter[InputDeclarationElement] { - override def toWdlV1(a: InputDeclarationElement): String = { - val expression = a.expression match { - case Some(expr) => s" = ${expr.toWdlV1}" - case None => "" - } + implicit val inputDeclarationElementWriter: WdlWriter[InputDeclarationElement] = + new WdlWriter[InputDeclarationElement] { + override def toWdlV1(a: InputDeclarationElement): String = { + val expression = a.expression match { + case Some(expr) => s" = ${expr.toWdlV1}" + case None => "" + } - s"${a.typeElement.toWdlV1} ${a.name}$expression" + s"${a.typeElement.toWdlV1} ${a.name}$expression" + } } - } implicit val outputsSectionElementWriter: WdlWriter[OutputsSectionElement] = new 
WdlWriter[OutputsSectionElement] { - override def toWdlV1(a: OutputsSectionElement): String = { + override def toWdlV1(a: OutputsSectionElement): String = s"""output { |${indentAndCombine(a.outputs.map(_.toWdlV1))}}""".stripMargin - } } - implicit val outputDeclarationElementWriter: WdlWriter[OutputDeclarationElement] = new WdlWriter[OutputDeclarationElement] { - override def toWdlV1(a: OutputDeclarationElement): String = { - s"${a.typeElement.toWdlV1} ${a.name} = ${a.expression.toWdlV1}" + implicit val outputDeclarationElementWriter: WdlWriter[OutputDeclarationElement] = + new WdlWriter[OutputDeclarationElement] { + override def toWdlV1(a: OutputDeclarationElement): String = + s"${a.typeElement.toWdlV1} ${a.name} = ${a.expression.toWdlV1}" } - } implicit val functionCallElementWriter: WdlWriter[FunctionCallElement] = new WdlWriter[FunctionCallElement] { override def toWdlV1(a: FunctionCallElement): String = a match { @@ -394,71 +404,73 @@ object WdlWriterImpl { } } - implicit val oneParamFunctionCallElementWriter: WdlWriter[OneParamFunctionCallElement] = new WdlWriter[OneParamFunctionCallElement] { - override def toWdlV1(a: OneParamFunctionCallElement): String = { - val fn = a match { - case _: ReadLines => "read_lines" - case _: ReadTsv => "read_tsv" - case _: ReadMap => "read_map" - case _: ReadObject => "read_object" - case _: ReadObjects => "read_objects" - case _: ReadJson => "read_json" - case _: ReadInt => "read_int" - case _: ReadString => "read_string" - case _: ReadFloat => "read_float" - case _: ReadBoolean => "read_boolean" - case _: WriteLines => "write_lines" - case _: WriteTsv => "write_tsv" - case _: WriteMap => "write_map" - case _: WriteObject => "write_object" - case _: WriteObjects => "write_objects" - case _: WriteJson => "write_json" - case _: Range => "range" - case _: Transpose => "transpose" - case _: Length => "length" - case _: Flatten => "flatten" - case _: SelectFirst => "select_first" - case _: SelectAll => "select_all" - case _: Defined => "defined" - case _: Floor => "floor" - case _: Ceil => "ceil" - case _: Round => "round" - case _: Glob => "glob" - case _: Keys => "keys" - case _: AsMap => "as_map" - case _: AsPairs => "as_pairs" - case _: CollectByKey => "collect_by_key" + implicit val oneParamFunctionCallElementWriter: WdlWriter[OneParamFunctionCallElement] = + new WdlWriter[OneParamFunctionCallElement] { + override def toWdlV1(a: OneParamFunctionCallElement): String = { + val fn = a match { + case _: ReadLines => "read_lines" + case _: ReadTsv => "read_tsv" + case _: ReadMap => "read_map" + case _: ReadObject => "read_object" + case _: ReadObjects => "read_objects" + case _: ReadJson => "read_json" + case _: ReadInt => "read_int" + case _: ReadString => "read_string" + case _: ReadFloat => "read_float" + case _: ReadBoolean => "read_boolean" + case _: WriteLines => "write_lines" + case _: WriteTsv => "write_tsv" + case _: WriteMap => "write_map" + case _: WriteObject => "write_object" + case _: WriteObjects => "write_objects" + case _: WriteJson => "write_json" + case _: Range => "range" + case _: Transpose => "transpose" + case _: Length => "length" + case _: Flatten => "flatten" + case _: SelectFirst => "select_first" + case _: SelectAll => "select_all" + case _: Defined => "defined" + case _: Floor => "floor" + case _: Ceil => "ceil" + case _: Round => "round" + case _: Glob => "glob" + case _: Keys => "keys" + case _: AsMap => "as_map" + case _: AsPairs => "as_pairs" + case _: CollectByKey => "collect_by_key" + } + + s"$fn(${a.param.toWdlV1})" } 
- - s"$fn(${a.param.toWdlV1})" } - } - implicit val oneOrTwoParamFunctionCallElementWriter: WdlWriter[OneOrTwoParamFunctionCallElement] = new WdlWriter[OneOrTwoParamFunctionCallElement] { - override def toWdlV1(a: OneOrTwoParamFunctionCallElement): String = { - (a, a.secondParam) match { - case (_: Size, Some(unit)) => s"size(${a.firstParam.toWdlV1}, ${unit.toWdlV1})" - case (_: Size, None) => s"size(${a.firstParam.toWdlV1})" - case (_: Basename, Some(suffix)) => s"basename(${a.firstParam.toWdlV1}, ${suffix.toWdlV1})" - case (_: Basename, None) => s"basename(${a.firstParam.toWdlV1})" - } + implicit val oneOrTwoParamFunctionCallElementWriter: WdlWriter[OneOrTwoParamFunctionCallElement] = + new WdlWriter[OneOrTwoParamFunctionCallElement] { + override def toWdlV1(a: OneOrTwoParamFunctionCallElement): String = + (a, a.secondParam) match { + case (_: Size, Some(unit)) => s"size(${a.firstParam.toWdlV1}, ${unit.toWdlV1})" + case (_: Size, None) => s"size(${a.firstParam.toWdlV1})" + case (_: Basename, Some(suffix)) => s"basename(${a.firstParam.toWdlV1}, ${suffix.toWdlV1})" + case (_: Basename, None) => s"basename(${a.firstParam.toWdlV1})" + } } - } - implicit val twoParamFunctionCallElementWriter: WdlWriter[TwoParamFunctionCallElement] = new WdlWriter[TwoParamFunctionCallElement] { - override def toWdlV1(a: TwoParamFunctionCallElement): String = { - def functionCall(name: String) = s"$name(${a.arg1.toWdlV1}, ${a.arg2.toWdlV1})" - - a match { - case _: Zip => functionCall("zip") - case _: Cross => functionCall("cross") - case _: Prefix => functionCall("prefix") - case _: Min => functionCall("min") - case _: Max => functionCall("max") - case _: Sep => functionCall("sep") + implicit val twoParamFunctionCallElementWriter: WdlWriter[TwoParamFunctionCallElement] = + new WdlWriter[TwoParamFunctionCallElement] { + override def toWdlV1(a: TwoParamFunctionCallElement): String = { + def functionCall(name: String) = s"$name(${a.arg1.toWdlV1}, ${a.arg2.toWdlV1})" + + a match { + case _: Zip => functionCall("zip") + case _: Cross => functionCall("cross") + case _: Prefix => functionCall("prefix") + case _: Min => functionCall("min") + case _: Max => functionCall("max") + case _: Sep => functionCall("sep") + } } } - } implicit val structElementWriter: WdlWriter[StructElement] = new WdlWriter[StructElement] { override def toWdlV1(a: StructElement): String = @@ -471,33 +483,33 @@ object WdlWriterImpl { } implicit val importElementWriter: WdlWriter[ImportElement] = new WdlWriter[ImportElement] { - override def toWdlV1(a: ImportElement): String = { + override def toWdlV1(a: ImportElement): String = a.namespace match { case Some(namespace) => s"""import "${a.importUrl}" as $namespace""" - case None => s"""import "${a.importUrl}"""" + case None => s"""import "${a.importUrl}"""" } - } } implicit val fileElementWriter: WdlWriter[FileElement] = new WdlWriter[FileElement] { - override def toWdlV1(a: FileElement) = { + override def toWdlV1(a: FileElement) = "version 1.0" + System.lineSeparator + combine(a.imports.map(_.toWdlV1)) + combine(a.structs.map(_.toWdlV1)) + combine(a.workflows.map(_.toWdlV1)) + combine(a.tasks.map(_.toWdlV1)) - } } implicit val kvPairWriter: WdlWriter[KvPair] = new WdlWriter[KvPair] { override def toWdlV1(a: KvPair): String = s"${a.key} = ${a.value.toWdlV1}" } - implicit val primitiveLiteralExpressionElementWriter: WdlWriter[PrimitiveLiteralExpressionElement] = new WdlWriter[PrimitiveLiteralExpressionElement] { - override def toWdlV1(a: PrimitiveLiteralExpressionElement) = a.value.toWomString - } 
+ implicit val primitiveLiteralExpressionElementWriter: WdlWriter[PrimitiveLiteralExpressionElement] = + new WdlWriter[PrimitiveLiteralExpressionElement] { + override def toWdlV1(a: PrimitiveLiteralExpressionElement) = a.value.toWomString + } - implicit val noneLiteralExpressionElementWriter: WdlWriter[NoneLiteralElement.type] = new WdlWriter[NoneLiteralElement.type] { - override def toWdlV1(a: NoneLiteralElement.type): String = "None" - } + implicit val noneLiteralExpressionElementWriter: WdlWriter[NoneLiteralElement.type] = + new WdlWriter[NoneLiteralElement.type] { + override def toWdlV1(a: NoneLiteralElement.type): String = "None" + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/CommandPartElementToWomCommandPart.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/CommandPartElementToWomCommandPart.scala index 3a34bbefd96..ed12e23c853 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/CommandPartElementToWomCommandPart.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/CommandPartElementToWomCommandPart.scala @@ -24,13 +24,15 @@ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl._ object CommandPartElementToWomCommandPart { def convert(commandPart: CommandPartElement, typeAliases: Map[String, WomType], - availableHandles: Set[GeneratedValueHandle]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[CommandPart] = commandPart match { + availableHandles: Set[GeneratedValueHandle] + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[CommandPart] = commandPart match { case s: StringCommandPartElement => WdlomWomStringCommandPart(s).validNel - case p: PlaceholderCommandPartElement => { + case p: PlaceholderCommandPartElement => val attributes = p.attributes val consumedValues = p.expressionElement.expressionConsumedValueHooks @@ -39,10 +41,11 @@ object CommandPartElementToWomCommandPart { womExpression <- p.expressionElement.makeWomExpression(typeAliases, consumedValueLookup) _ <- validatePlaceholderType(womExpression, attributes) } yield WdlomWomPlaceholderCommandPart(attributes, womExpression.asInstanceOf[WdlomWomExpression]) - } } - private def validatePlaceholderType(womExpression: WomExpression, attributes: PlaceholderAttributeSet): ErrorOr[Unit] = ().validNel + private def validatePlaceholderType(womExpression: WomExpression, + attributes: PlaceholderAttributeSet + ): ErrorOr[Unit] = ().validNel } case class WdlomWomStringCommandPart(stringCommandPartElement: StringCommandPartElement) extends CommandPart { @@ -50,10 +53,12 @@ case class WdlomWomStringCommandPart(stringCommandPartElement: StringCommandPart override def instantiate(inputsMap: Map[LocalName, WomValue], functions: IoFunctionSet, valueMapper: WomValue => WomValue, - runtimeEnvironment: RuntimeEnvironment): ErrorOr[List[InstantiatedCommand]] = List(InstantiatedCommand(stringCommandPartElement.value)).validNel + runtimeEnvironment: RuntimeEnvironment + ): ErrorOr[List[InstantiatedCommand]] = List(InstantiatedCommand(stringCommandPartElement.value)).validNel } -case class WdlomWomPlaceholderCommandPart(attributes: 
PlaceholderAttributeSet, expression: WdlomWomExpression) extends CommandPart { +case class WdlomWomPlaceholderCommandPart(attributes: PlaceholderAttributeSet, expression: WdlomWomExpression) + extends CommandPart { def attributesToString: String = attributes.toWdlV1 // Yes, it's sad that we need to put ${} here, but otherwise we won't cache hit from draft-2 command sections override def toString: String = "${" + s"$attributesToString${expression.expressionElement.toWdlV1}" + "}" @@ -61,29 +66,44 @@ case class WdlomWomPlaceholderCommandPart(attributes: PlaceholderAttributeSet, e override def instantiate(inputsMap: Map[LocalName, WomValue], functions: IoFunctionSet, valueMapper: WomValue => WomValue, - runtimeEnvironment: RuntimeEnvironment): ErrorOr[List[InstantiatedCommand]] = { + runtimeEnvironment: RuntimeEnvironment + ): ErrorOr[List[InstantiatedCommand]] = { val inputsValues = inputsMap map { case (localName, value) => localName.value -> value } - expression.evaluateValueForPlaceholder(inputsValues, functions, ForCommandInstantiationOptions(valueMapper)) flatMap { evaluatedExpression => + expression.evaluateValueForPlaceholder(inputsValues, + functions, + ForCommandInstantiationOptions(valueMapper) + ) flatMap { evaluatedExpression => instantiateFromValue(evaluatedExpression, valueMapper).map(List(_)) } } - private def instantiateFromValue(value: EvaluatedValue[_], valueMapper: WomValue => WomValue): ErrorOr[InstantiatedCommand] = value.value match { + private def instantiateFromValue(value: EvaluatedValue[_], + valueMapper: WomValue => WomValue + ): ErrorOr[InstantiatedCommand] = value.value match { case WomBoolean(b) if attributes.trueAttribute.isDefined && attributes.falseAttribute.isDefined => InstantiatedCommand( - commandString = if (b) { attributes.trueAttribute.get } else { attributes.falseAttribute.get }, + commandString = if (b) { attributes.trueAttribute.get } + else { attributes.falseAttribute.get }, createdFiles = value.sideEffectFiles.toList ).validNel - case p: WomPrimitive => InstantiatedCommand(valueMapper(p).valueString, createdFiles = value.sideEffectFiles.toList).validNel + case p: WomPrimitive => + InstantiatedCommand(valueMapper(p).valueString, createdFiles = value.sideEffectFiles.toList).validNel case WomOptionalValue(_, Some(v)) => instantiateFromValue(value.copy(value = v), valueMapper) - case WomOptionalValue(_, None) => attributes.defaultAttribute match { - case Some(default) => InstantiatedCommand(commandString = default, createdFiles = value.sideEffectFiles.toList).validNel - case None => InstantiatedCommand(commandString = "", createdFiles = value.sideEffectFiles.toList).validNel - } - case WomArray(WomArrayType(_ : WomPrimitiveType), arrayValue) => attributes.sepAttribute match { - case Some(separator) => InstantiatedCommand(commandString = arrayValue.map(valueMapper(_).valueString).mkString(separator), createdFiles = value.sideEffectFiles.toList).validNel - case None => "Array value was given but no 'sep' attribute was provided".invalidNel - } - case other => s"Cannot interpolate ${other.womType.stableName} into a command string with attribute set [$attributes]".invalidNel + case WomOptionalValue(_, None) => + attributes.defaultAttribute match { + case Some(default) => + InstantiatedCommand(commandString = default, createdFiles = value.sideEffectFiles.toList).validNel + case None => InstantiatedCommand(commandString = "", createdFiles = value.sideEffectFiles.toList).validNel + } + case WomArray(WomArrayType(_: WomPrimitiveType), arrayValue) => + 
attributes.sepAttribute match { + case Some(separator) => + InstantiatedCommand(commandString = arrayValue.map(valueMapper(_).valueString).mkString(separator), + createdFiles = value.sideEffectFiles.toList + ).validNel + case None => "Array value was given but no 'sep' attribute was provided".invalidNel + } + case other => + s"Cannot interpolate ${other.womType.stableName} into a command string with attribute set [$attributes]".invalidNel } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/FileElementToWomBundle.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/FileElementToWomBundle.scala index e92aa2d2784..04f45d1fbab 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/FileElementToWomBundle.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/FileElementToWomBundle.scala @@ -26,93 +26,110 @@ import wom.types.WomType object FileElementToWomBundle { - implicit val fileElementToWomBundle: WomBundleMaker[FileElementToWomBundleInputs] = new WomBundleMaker[FileElementToWomBundleInputs] { + implicit val fileElementToWomBundle: WomBundleMaker[FileElementToWomBundleInputs] = + new WomBundleMaker[FileElementToWomBundleInputs] { - override def toWomBundle(a: FileElementToWomBundleInputs): Checked[WomBundle] = { + override def toWomBundle(a: FileElementToWomBundleInputs): Checked[WomBundle] = { - def toWorkflowInner(imports: Vector[WomBundle], tasks: Vector[TaskDefinitionElement], structs: Map[String, WomType]): ErrorOr[WomBundle] = { + def toWorkflowInner(imports: Vector[WomBundle], + tasks: Vector[TaskDefinitionElement], + structs: Map[String, WomType] + ): ErrorOr[WomBundle] = { - val allStructs = structs ++ imports.flatMap(_.typeAliases) + val allStructs = structs ++ imports.flatMap(_.typeAliases) - val localTasksValidation: ErrorOr[Map[String, Callable]] = { - val validatedTasksVector: ErrorOr[Vector[(String, Callable)]] = tasks.traverse { taskDefinition => - a.taskConverter - .run(TaskDefinitionElementToWomInputs(taskDefinition, structs)) - .map(t => t.name -> t).toValidated - } + val localTasksValidation: ErrorOr[Map[String, Callable]] = { + val validatedTasksVector: ErrorOr[Vector[(String, Callable)]] = tasks.traverse { taskDefinition => + a.taskConverter + .run(TaskDefinitionElementToWomInputs(taskDefinition, structs)) + .map(t => t.name -> t) + .toValidated + } - validatedTasksVector flatMap { tasksVector => + validatedTasksVector flatMap { tasksVector => + val duplicateTaskNames = tasksVector.groupBy(_._1).collect { case (x, list) if list.size > 1 => x } - val duplicateTaskNames = tasksVector.groupBy(_._1).collect { case (x, list) if list.size > 1 => x } + NonEmptyList.fromList(duplicateTaskNames.toList) match { + case None => tasksVector.toMap.validNel + case Some(duplicates) => + duplicates.map(x => s"Cannot reuse the same task name ('$x') more than once").invalid - NonEmptyList.fromList(duplicateTaskNames.toList) match { - case None => tasksVector.toMap.validNel - case Some(duplicates) => duplicates.map { x => s"Cannot reuse the same task name ('$x') more than once" }.invalid + } } - } - } - localTasksValidation flatMap { localTaskMapping => - - val workflowsValidation: ErrorOr[Vector[WorkflowDefinition]] = { - a.fileElement.workflows.toVector.traverse { workflowDefinition => - - val convertInputs = WorkflowDefinitionConvertInputs(workflowDefinition, - allStructs, - localTaskMapping ++ imports.flatMap(_.allCallables), - a.convertNestedScatterToSubworkflow) - 
a.workflowConverter.run(convertInputs).toValidated + localTasksValidation flatMap { localTaskMapping => + val workflowsValidation: ErrorOr[Vector[WorkflowDefinition]] = + a.fileElement.workflows.toVector.traverse { workflowDefinition => + val convertInputs = WorkflowDefinitionConvertInputs(workflowDefinition, + allStructs, + localTaskMapping ++ imports.flatMap(_.allCallables), + a.convertNestedScatterToSubworkflow + ) + a.workflowConverter.run(convertInputs).toValidated + } + + workflowsValidation map { workflows => + val primary: Option[Callable] = + if (workflows.size == 1) { + workflows.headOption + } else if (workflows.isEmpty && tasks.size == 1) { + localTaskMapping.headOption map { case (_, callable) => callable } + } else None + + val bundledCallableMap = (localTaskMapping.values.toSet ++ workflows).map(c => c.name -> c).toMap + + WomBundle(primary, bundledCallableMap, allStructs, imports.flatMap(_.resolvedImportRecords).toSet) } } + } - workflowsValidation map { workflows => - val primary: Option[Callable] = - if (workflows.size == 1) { - workflows.headOption - } else if (workflows.isEmpty && tasks.size == 1) { - localTaskMapping.headOption map { case (_, callable) => callable } - } else None - - val bundledCallableMap = (localTaskMapping.values.toSet ++ workflows).map(c => c.name -> c).toMap + val taskDefValidation: ErrorOr[Vector[TaskDefinitionElement]] = a.fileElement.tasks.toVector.validNel + val importsValidation: ErrorOr[Vector[WomBundle]] = a.fileElement.imports.toVector.traverse { + importWomBundle(_, a.workflowOptionsJson, a.importResolvers, a.languageFactories) + } - WomBundle(primary, bundledCallableMap, allStructs, imports.flatMap(_.resolvedImportRecords).toSet) + (importsValidation flatMap { imports => + val structsValidation: ErrorOr[Map[String, WomType]] = StructEvaluation.convert( + StructEvaluationInputs(a.fileElement.structs, imports.flatMap(_.typeAliases).toMap) + ) + (taskDefValidation, structsValidation) flatMapN { (tasks, structs) => + toWorkflowInner(imports, tasks, structs) } - } + }).toEither } - - val taskDefValidation: ErrorOr[Vector[TaskDefinitionElement]] = a.fileElement.tasks.toVector.validNel - val importsValidation: ErrorOr[Vector[WomBundle]] = a.fileElement.imports.toVector.traverse { importWomBundle(_, a.workflowOptionsJson, a.importResolvers, a.languageFactories) } - - (importsValidation flatMap { imports => - val structsValidation: ErrorOr[Map[String, WomType]] = StructEvaluation.convert(StructEvaluationInputs(a.fileElement.structs, imports.flatMap(_.typeAliases).toMap)) - (taskDefValidation, structsValidation) flatMapN { (tasks, structs) => toWorkflowInner(imports, tasks, structs) } - }).toEither } - } def convert(a: FileElementToWomBundleInputs): Checked[WomBundle] = a.toWomBundle private def importWomBundle(importElement: ImportElement, optionsJson: WorkflowOptionsJson, importResolvers: List[ImportResolver], - languageFactories: List[LanguageFactory]): ErrorOr[WomBundle] = { - val compoundImportResolver: CheckedAtoB[ImportResolutionRequest, ResolvedImportBundle] = CheckedAtoB.firstSuccess(importResolvers.map(_.resolver), s"resolve import '${importElement.importUrl}'") + languageFactories: List[LanguageFactory] + ): ErrorOr[WomBundle] = { + val compoundImportResolver: CheckedAtoB[ImportResolutionRequest, ResolvedImportBundle] = + CheckedAtoB.firstSuccess(importResolvers.map(_.resolver), s"resolve import '${importElement.importUrl}'") val languageFactoryKleislis: List[CheckedAtoB[ResolvedImportBundle, WomBundle]] = languageFactories map { 
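// Both resolution steps here lean on CheckedAtoB.firstSuccess: each resolver
// (and, below, each language factory) is tried in order, the first Right wins,
// and the supplied context string (e.g. "resolve import '...'") only surfaces
// if every candidate fails. A minimal sketch, with hypothetical resolvers:
//   val tryAll = CheckedAtoB.firstSuccess(List(httpResolver, fileResolver), "resolve import 'x.wdl'")
//   tryAll.run(request)  // Right(bundle) from the first resolver that succeeds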
factory => CheckedAtoB.fromCheck { resolutionBundle: ResolvedImportBundle => - factory.getWomBundle(resolutionBundle.source, Option(resolutionBundle.resolvedImportRecord), optionsJson, resolutionBundle.newResolvers, languageFactories) + factory.getWomBundle(resolutionBundle.source, + Option(resolutionBundle.resolvedImportRecord), + optionsJson, + resolutionBundle.newResolvers, + languageFactories + ) } } - val compoundLanguageFactory: CheckedAtoB[ResolvedImportBundle, WomBundle] = CheckedAtoB.firstSuccess(languageFactoryKleislis, s"convert imported '${importElement.importUrl}' to WOM") + val compoundLanguageFactory: CheckedAtoB[ResolvedImportBundle, WomBundle] = + CheckedAtoB.firstSuccess(languageFactoryKleislis, s"convert imported '${importElement.importUrl}' to WOM") val overallConversion = compoundImportResolver andThen compoundLanguageFactory overallConversion .run(ImportResolutionRequest(importElement.importUrl, importResolvers)) - .map { applyNamespace(_, importElement) } - .flatMap { respectImportRenames(_, importElement.structRenames) } + .map(applyNamespace(_, importElement)) + .flatMap(respectImportRenames(_, importElement.structRenames)) .contextualizeErrors(s"import '${importElement.importUrl}'") .toValidated } @@ -138,16 +155,18 @@ object FileElementToWomBundle { } womBundle.copy(typeAliases = newStructs).validNelCheck } else { - s"Cannot import and rename: [${unexpectedAliases.mkString(", ")}] because the set of imported structs was: [${importedStructs.keySet.mkString(", ")}]".invalidNelCheck + s"Cannot import and rename: [${unexpectedAliases.mkString(", ")}] because the set of imported structs was: [${importedStructs.keySet + .mkString(", ")}]".invalidNelCheck } } } -final case class FileElementToWomBundleInputs(fileElement: FileElement, - workflowOptionsJson: WorkflowOptionsJson, - convertNestedScatterToSubworkflow : Boolean, - importResolvers: List[ImportResolver], - languageFactories: List[LanguageFactory], - workflowConverter: CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition], - taskConverter: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] - ) +final case class FileElementToWomBundleInputs( + fileElement: FileElement, + workflowOptionsJson: WorkflowOptionsJson, + convertNestedScatterToSubworkflow: Boolean, + importResolvers: List[ImportResolver], + languageFactories: List[LanguageFactory], + workflowConverter: CheckedAtoB[WorkflowDefinitionConvertInputs, WorkflowDefinition], + taskConverter: CheckedAtoB[TaskDefinitionElementToWomInputs, CallableTaskDefinition] +) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/StructEvaluation.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/StructEvaluation.scala index da1ccdefb7f..f144a31aad9 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/StructEvaluation.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/StructEvaluation.scala @@ -13,7 +13,9 @@ import wdl.transforms.base.linking.typemakers._ object StructEvaluation { def convert(a: StructEvaluationInputs): ErrorOr[Map[String, WomType]] = { - val validKnownAliases: Map[String, ErrorOr[WomType]] = a.knownTypeAliases map { case (key, value) => key -> value.validNel } + val validKnownAliases: Map[String, ErrorOr[WomType]] = a.knownTypeAliases map { case (key, value) => + key -> value.validNel + } val withNewStructs = a.structSections.foldLeft(validKnownAliases)(structFoldFunction) def unpackMapEntry(entry: (String, 
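// For a hypothetical WDL struct:  struct Sample { String id  Int depth }
// the fold below adds an alias-map entry of the form
//   "Sample" -> WomCompositeType(Map("id" -> WomStringType, "depth" -> WomIntegerType), Some("Sample"))
// so later struct sections and importing files can refer to the type by name.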
ErrorOr[WomType])): ErrorOr[(String, WomType)] = entry._2 map { entry._1 -> _ } @@ -21,10 +23,12 @@ object StructEvaluation { withNewStructs.toList.traverse(unpackMapEntry).map(_.toMap) } - private def structFoldFunction(current: Map[String, ErrorOr[WomType]], next: StructElement): Map[String, ErrorOr[WomType]] = { + private def structFoldFunction(current: Map[String, ErrorOr[WomType]], + next: StructElement + ): Map[String, ErrorOr[WomType]] = { - val currentValid = current collect { - case (key, Valid(value)) => key -> value + val currentValid = current collect { case (key, Valid(value)) => + key -> value } def convertStructEntryElement(structEntryElement: StructEntryElement): ErrorOr[(String, WomType)] = @@ -32,9 +36,10 @@ object StructEvaluation { val elementsValidation: ErrorOr[List[(String, WomType)]] = next.entries.toList.traverse(convertStructEntryElement) - current + (next.name -> (elementsValidation map { elements => WomCompositeType(elements.toMap, Option(next.name)) } )) + current + (next.name -> (elementsValidation map { elements => + WomCompositeType(elements.toMap, Option(next.name)) + })) } case class StructEvaluationInputs(structSections: Seq[StructElement], knownTypeAliases: Map[String, WomType]) } - diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/TaskDefinitionElementToWomTaskDefinition.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/TaskDefinitionElementToWomTaskDefinition.scala index 91a51497b5e..576ce210da5 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/TaskDefinitionElementToWomTaskDefinition.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/TaskDefinitionElementToWomTaskDefinition.scala @@ -28,13 +28,16 @@ import wdl.transforms.base.linking.typemakers._ object TaskDefinitionElementToWomTaskDefinition extends Util { - final case class TaskDefinitionElementToWomInputs(taskDefinitionElement: TaskDefinitionElement, typeAliases: Map[String, WomType]) - - def convert(b: TaskDefinitionElementToWomInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[CallableTaskDefinition] = { + final case class TaskDefinitionElementToWomInputs(taskDefinitionElement: TaskDefinitionElement, + typeAliases: Map[String, WomType] + ) + + def convert(b: TaskDefinitionElementToWomInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[CallableTaskDefinition] = { val a = eliminateInputDependencies(b) val inputElements = a.taskDefinitionElement.inputsSection.map(_.inputDeclarations).getOrElse(Seq.empty) @@ -42,39 +45,68 @@ object TaskDefinitionElementToWomTaskDefinition extends Util { val outputElements = a.taskDefinitionElement.outputsSection.map(_.outputs).getOrElse(Seq.empty) val conversion = ( - createTaskGraph(inputElements, declarations, outputElements, a.taskDefinitionElement.parameterMetaSection, a.typeAliases), - validateParameterMetaEntries(a.taskDefinitionElement.parameterMetaSection, a.taskDefinitionElement.inputsSection, a.taskDefinitionElement.outputsSection) + createTaskGraph(inputElements, + declarations, + outputElements, + 
a.taskDefinitionElement.parameterMetaSection, + a.typeAliases + ), + validateParameterMetaEntries(a.taskDefinitionElement.parameterMetaSection, + a.taskDefinitionElement.inputsSection, + a.taskDefinitionElement.outputsSection + ) ) flatMapN { (taskGraph, _) => val validRuntimeAttributes: ErrorOr[RuntimeAttributes] = a.taskDefinitionElement.runtimeSection match { case Some(attributeSection) => createRuntimeAttributes(attributeSection, taskGraph.linkedGraph) case None => RuntimeAttributes(Map.empty).validNel } - val validCommand: ErrorOr[Seq[CommandPart]] = { - expandLines(a.taskDefinitionElement.commandSection.parts).toList.traverse { parts => - CommandPartElementToWomCommandPart.convert(parts, taskGraph.linkedGraph.typeAliases, taskGraph.linkedGraph.generatedHandles) - }.map(_.toSeq) - } + val validCommand: ErrorOr[Seq[CommandPart]] = + expandLines(a.taskDefinitionElement.commandSection.parts).toList + .traverse { parts => + CommandPartElementToWomCommandPart.convert(parts, + taskGraph.linkedGraph.typeAliases, + taskGraph.linkedGraph.generatedHandles + ) + } + .map(_.toSeq) - val (meta, parameterMeta) = processMetaSections(a.taskDefinitionElement.metaSection, a.taskDefinitionElement.parameterMetaSection) + val (meta, parameterMeta) = + processMetaSections(a.taskDefinitionElement.metaSection, a.taskDefinitionElement.parameterMetaSection) (validRuntimeAttributes, validCommand) mapN { (runtime, command) => - CallableTaskDefinition(a.taskDefinitionElement.name, Function.const(command.validNel), runtime, meta, parameterMeta, taskGraph.outputs, taskGraph.inputs, Set.empty, Map.empty, sourceLocation = a.taskDefinitionElement.sourceLocation) + CallableTaskDefinition( + a.taskDefinitionElement.name, + Function.const(command.validNel), + runtime, + meta, + parameterMeta, + taskGraph.outputs, + taskGraph.inputs, + Set.empty, + Map.empty, + sourceLocation = a.taskDefinitionElement.sourceLocation + ) } } conversion.contextualizeErrors(s"process task definition '${b.taskDefinitionElement.name}'") } - private def validateParameterMetaEntries(parameterMetaSectionElement: Option[ParameterMetaSectionElement], inputs: Option[InputsSectionElement], outputs: Option[OutputsSectionElement]): ErrorOr[Unit] = { - val validKeys: List[String] = inputs.toList.flatMap(_.inputDeclarations.map(_.name)) ++ outputs.toList.flatMap(_.outputs.map(_.name)) + private def validateParameterMetaEntries(parameterMetaSectionElement: Option[ParameterMetaSectionElement], + inputs: Option[InputsSectionElement], + outputs: Option[OutputsSectionElement] + ): ErrorOr[Unit] = { + val validKeys: List[String] = + inputs.toList.flatMap(_.inputDeclarations.map(_.name)) ++ outputs.toList.flatMap(_.outputs.map(_.name)) val errors = parameterMetaSectionElement.toList.flatMap { pmse => val keys = pmse.metaAttributes.keySet.toList val duplicationErrors = keys.groupBy(identity).collect { case (name, list) if list.size > 1 => s"Found ${list.size} parameter meta entries for '$name' (expected 0 or 1)" } val notValidKeyErrors = keys.collect { - case name if !validKeys.contains(name) => s"Invalid parameter_meta entry for '$name': not an input or output parameter" + case name if !validKeys.contains(name) => + s"Invalid parameter_meta entry for '$name': not an input or output parameter" } duplicationErrors.toList ++ notValidKeyErrors } @@ -84,28 +116,40 @@ object TaskDefinitionElementToWomTaskDefinition extends Util { } } - private def eliminateInputDependencies(a: TaskDefinitionElementToWomInputs) - (implicit expressionValueConsumer: 
ExpressionValueConsumer[ExpressionElement]): TaskDefinitionElementToWomInputs = { - case class NewInputElementsSet(original: InputDeclarationElement, newInput: InputDeclarationElement, newDeclaration: IntermediateValueDeclarationElement) + private def eliminateInputDependencies( + a: TaskDefinitionElementToWomInputs + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): TaskDefinitionElementToWomInputs = { + case class NewInputElementsSet(original: InputDeclarationElement, + newInput: InputDeclarationElement, + newDeclaration: IntermediateValueDeclarationElement + ) - val inputElementsWithUpstreams: Seq[NewInputElementsSet] = a.taskDefinitionElement.inputsSection.map(_.inputDeclarations).getOrElse(Seq.empty) collect { - case ide @ InputDeclarationElement(typeElement,name, Some(expression)) if expression.expressionConsumedValueHooks.nonEmpty => - val input = InputDeclarationElement(OptionalTypeElement(typeElement), name, None) + val inputElementsWithUpstreams: Seq[NewInputElementsSet] = + a.taskDefinitionElement.inputsSection.map(_.inputDeclarations).getOrElse(Seq.empty) collect { + case ide @ InputDeclarationElement(typeElement, name, Some(expression)) + if expression.expressionConsumedValueHooks.nonEmpty => + val input = InputDeclarationElement(OptionalTypeElement(typeElement), name, None) - val selecterExpression = SelectFirst(ArrayLiteral(Seq(IdentifierLookup(name), expression))) - val intermediate = IntermediateValueDeclarationElement(typeElement, s"__$name", selecterExpression) + val selecterExpression = SelectFirst(ArrayLiteral(Seq(IdentifierLookup(name), expression))) + val intermediate = IntermediateValueDeclarationElement(typeElement, s"__$name", selecterExpression) - NewInputElementsSet(ide, input, intermediate) - } + NewInputElementsSet(ide, input, intermediate) + } if (inputElementsWithUpstreams.nonEmpty) { val newInputsSection: Option[InputsSectionElement] = a.taskDefinitionElement.inputsSection.map { inputsSection => - InputsSectionElement(inputsSection.inputDeclarations.filterNot(inputElementsWithUpstreams.map(_.original).contains) ++ inputElementsWithUpstreams.map(_.newInput)) + InputsSectionElement( + inputsSection.inputDeclarations.filterNot( + inputElementsWithUpstreams.map(_.original).contains + ) ++ inputElementsWithUpstreams.map(_.newInput) + ) } - val newDeclarationsSection: Seq[IntermediateValueDeclarationElement] = a.taskDefinitionElement.declarations ++ inputElementsWithUpstreams.map(_.newDeclaration) + val newDeclarationsSection: Seq[IntermediateValueDeclarationElement] = + a.taskDefinitionElement.declarations ++ inputElementsWithUpstreams.map(_.newDeclaration) val newTaskDefinitionElement = { - val withNewInputElements = a.taskDefinitionElement.copy(inputsSection = newInputsSection, declarations = newDeclarationsSection) + val withNewInputElements = + a.taskDefinitionElement.copy(inputsSection = newInputsSection, declarations = newDeclarationsSection) val identifierRenames = inputElementsWithUpstreams.map(ie => ie.original.name -> ie.newDeclaration.name).toMap renameIdentifierAccesses(withNewInputElements, identifierRenames) } @@ -117,29 +161,37 @@ object TaskDefinitionElementToWomTaskDefinition extends Util { private def renameIdentifierAccesses(taskDefinitionElement: TaskDefinitionElement, renames: Map[String, String]) = { - val updatedInputs = taskDefinitionElement.inputsSection.map { inputsSection => InputsSectionElement( - inputsSection.inputDeclarations map { id => - id.copy(expression = 
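// The rewrite above turns a task input whose default depends on another value
// into a WOM-friendly pair. For a hypothetical input section:
//   input { Int m; Int n = m * 2 }
// 'n' becomes an optional input plus a fixed intermediate declaration:
//   input { Int m; Int? n }
//   Int __n = select_first([n, m * 2])
// and later references to 'n' are redirected to '__n' by the identifier
// renaming pass below.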
id.expression.map(_.renameIdentifiers(renames))) - } - )} + val updatedInputs = taskDefinitionElement.inputsSection.map { inputsSection => + InputsSectionElement( + inputsSection.inputDeclarations map { id => + id.copy(expression = id.expression.map(_.renameIdentifiers(renames))) + } + ) + } - val updatedOutputs = taskDefinitionElement.outputsSection.map { outputsSection => OutputsSectionElement( - outputsSection.outputs map { od => - od.copy(expression = od.expression.renameIdentifiers(renames)) - } - )} + val updatedOutputs = taskDefinitionElement.outputsSection.map { outputsSection => + OutputsSectionElement( + outputsSection.outputs map { od => + od.copy(expression = od.expression.renameIdentifiers(renames)) + } + ) + } - val updatedCommand = CommandSectionElement( - taskDefinitionElement.commandSection.parts.map { line => CommandSectionLine(line.parts.map { + val updatedCommand = CommandSectionElement(taskDefinitionElement.commandSection.parts.map { line => + CommandSectionLine(line.parts.map { case s: StringCommandPartElement => s - case p: PlaceholderCommandPartElement => p.copy(expressionElement = p.expressionElement.renameIdentifiers(renames)) - })}) - - val updatedRuntime = taskDefinitionElement.runtimeSection.map { runtimeSection => RuntimeAttributesSectionElement( - runtimeSection.runtimeAttributes.map { - case KvPair(key, value) => KvPair(key, value.renameIdentifiers(renames)) - } - )} + case p: PlaceholderCommandPartElement => + p.copy(expressionElement = p.expressionElement.renameIdentifiers(renames)) + }) + }) + + val updatedRuntime = taskDefinitionElement.runtimeSection.map { runtimeSection => + RuntimeAttributesSectionElement( + runtimeSection.runtimeAttributes.map { case KvPair(key, value) => + KvPair(key, value.renameIdentifiers(renames)) + } + ) + } taskDefinitionElement.copy( inputsSection = updatedInputs, @@ -169,35 +221,42 @@ object TaskDefinitionElementToWomTaskDefinition extends Util { } } - private final case class TaskGraph(inputs: List[Callable.InputDefinition], outputs: List[Callable.OutputDefinition], linkedGraph: LinkedGraph) + final private case class TaskGraph(inputs: List[Callable.InputDefinition], + outputs: List[Callable.OutputDefinition], + linkedGraph: LinkedGraph + ) private def createTaskGraph(inputs: Seq[InputDeclarationElement], declarations: Seq[IntermediateValueDeclarationElement], outputs: Seq[OutputDeclarationElement], parameterMeta: Option[ParameterMetaSectionElement], - typeAliases: Map[String, WomType]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[TaskGraph] = { + typeAliases: Map[String, WomType] + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[TaskGraph] = { val combined: Set[WorkflowGraphElement] = (inputs ++ declarations ++ outputs).toSet LinkedGraphMaker.make(combined, Set.empty, typeAliases, Map.empty) flatMap { linked => val ordered = LinkedGraphMaker.getOrdering(linked) - def foldFunction(currentGraphValidation: ErrorOr[TaskGraph], next: WorkflowGraphElement): ErrorOr[TaskGraph] = { + def foldFunction(currentGraphValidation: ErrorOr[TaskGraph], next: WorkflowGraphElement): ErrorOr[TaskGraph] = currentGraphValidation flatMap { accumulator 
=> addToTaskGraph(next, accumulator) } - } def findParameterMeta(declName: String): Option[MetaValueElement] = - parameterMeta.flatMap { _.metaAttributes.get(declName) } + parameterMeta.flatMap(_.metaAttributes.get(declName)) def addToTaskGraph(element: WorkflowGraphElement, accumulator: TaskGraph): ErrorOr[TaskGraph] = element match { case IntermediateValueDeclarationElement(womTypeElement, name, expression) => val typeValidation = womTypeElement.determineWomType(linked.typeAliases) - val expressionValidation: ErrorOr[WomExpression] = expression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) + val expressionValidation: ErrorOr[WomExpression] = + expression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) (typeValidation, expressionValidation) mapN { (womType, womExpression) => - accumulator.copy(inputs = accumulator.inputs :+ FixedInputDefinitionWithDefault(name, womType, womExpression)) + accumulator.copy(inputs = + accumulator.inputs :+ FixedInputDefinitionWithDefault(name, womType, womExpression) + ) } case InputDeclarationElement(womTypeElement, name, None) => womTypeElement.determineWomType(linked.typeAliases) map { womType => @@ -207,12 +266,20 @@ object TaskDefinitionElementToWomTaskDefinition extends Util { } accumulator.copy(inputs = accumulator.inputs :+ newInput) } - case InputDeclarationElement(womTypeElement, name, Some(expression)) if expression.expressionConsumedValueHooks.isEmpty => + case InputDeclarationElement(womTypeElement, name, Some(expression)) + if expression.expressionConsumedValueHooks.isEmpty => val typeValidation = womTypeElement.determineWomType(linked.typeAliases) - val expressionValidation: ErrorOr[WomExpression] = expression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) + val expressionValidation: ErrorOr[WomExpression] = + expression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) (typeValidation, expressionValidation) mapN { (womType, womExpression) => - accumulator.copy(inputs = accumulator.inputs :+ OverridableInputDefinitionWithDefault(name, womType, womExpression, findParameterMeta(name))) + accumulator.copy(inputs = + accumulator.inputs :+ OverridableInputDefinitionWithDefault(name, + womType, + womExpression, + findParameterMeta(name) + ) + ) } // In this case, the expression has upstream dependencies. 
Since WOM won't allow that, make this an optional input and fixed declaration pair: @@ -222,20 +289,23 @@ object TaskDefinitionElementToWomTaskDefinition extends Util { val newInputType = WomOptionalType(womType).flatOptionalType val newInputDefinition = OptionalInputDefinition(newInputName, newInputType, findParameterMeta(name)) - val intermediateExpression: ExpressionElement = SelectFirst(ArrayLiteral(Seq(IdentifierLookup(newInputName), expression))) + val intermediateExpression: ExpressionElement = + SelectFirst(ArrayLiteral(Seq(IdentifierLookup(newInputName), expression))) - val intermediateWomExpression: ErrorOr[WomExpression] = intermediateExpression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) + val intermediateWomExpression: ErrorOr[WomExpression] = + intermediateExpression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) intermediateWomExpression map { womExpression => - val intermediateDefinition = FixedInputDefinitionWithDefault(name, womType, womExpression, findParameterMeta(name)) + val intermediateDefinition = + FixedInputDefinitionWithDefault(name, womType, womExpression, findParameterMeta(name)) accumulator.copy(inputs = accumulator.inputs :+ newInputDefinition :+ intermediateDefinition) } } - case OutputDeclarationElement(womTypeElement, name, expression) => val typeValidation = womTypeElement.determineWomType(linked.typeAliases) - val expressionValidation: ErrorOr[WomExpression] = expression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) + val expressionValidation: ErrorOr[WomExpression] = + expression.makeWomExpression(linked.typeAliases, linked.consumedValueLookup) (typeValidation, expressionValidation) mapN { (womType, womExpression) => accumulator.copy(outputs = accumulator.outputs :+ OutputDefinition(name, womType, womExpression)) @@ -249,19 +319,23 @@ object TaskDefinitionElementToWomTaskDefinition extends Util { } } - private def createRuntimeAttributes(attributes: RuntimeAttributesSectionElement, linkedGraph: LinkedGraph) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[RuntimeAttributes] = { + private def createRuntimeAttributes(attributes: RuntimeAttributesSectionElement, linkedGraph: LinkedGraph)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[RuntimeAttributes] = { def processSingleRuntimeAttribute(kvPair: KvPair): ErrorOr[(String, WomExpression)] = for { - consumedValueLookup <- LinkedGraphMaker.makeConsumedValueLookup(kvPair.value.expressionConsumedValueHooks, linkedGraph.generatedHandles) + consumedValueLookup <- LinkedGraphMaker.makeConsumedValueLookup(kvPair.value.expressionConsumedValueHooks, + linkedGraph.generatedHandles + ) womExpression <- kvPair.value.makeWomExpression(linkedGraph.typeAliases, consumedValueLookup) } yield kvPair.key -> womExpression - - attributes.runtimeAttributes.toList.traverse(processSingleRuntimeAttribute).map(atts => RuntimeAttributes(atts.toMap)) + attributes.runtimeAttributes.toList + .traverse(processSingleRuntimeAttribute) + .map(atts => RuntimeAttributes(atts.toMap)) } } diff --git 
a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WomBundleToWomExecutable.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WomBundleToWomExecutable.scala index 26b40110b8e..11a4a9dadc4 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WomBundleToWomExecutable.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WomBundleToWomExecutable.scala @@ -10,6 +10,10 @@ import wom.transforms.WomExecutableMaker object WomBundleToWomExecutable { implicit val draft3WomBundleToWomExecutable: WomExecutableMaker[WomBundle] = new WomExecutableMaker[WomBundle] { - override def toWomExecutable(a: WomBundle, inputs: Option[WorkflowJson], ioFunctions: IoFunctionSet, strictValidation: Boolean): Checked[Executable] = WdlSharedInputParsing.buildWomExecutable(a, inputs, ioFunctions, strictValidation) + override def toWomExecutable(a: WomBundle, + inputs: Option[WorkflowJson], + ioFunctions: IoFunctionSet, + strictValidation: Boolean + ): Checked[Executable] = WdlSharedInputParsing.buildWomExecutable(a, inputs, ioFunctions, strictValidation) } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WorkflowDefinitionElementToWomWorkflowDefinition.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WorkflowDefinitionElementToWomWorkflowDefinition.scala index b1f04d902d8..a7571f9670a 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WorkflowDefinitionElementToWomWorkflowDefinition.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/WorkflowDefinitionElementToWomWorkflowDefinition.scala @@ -16,7 +16,7 @@ import wom.callable.MetaValueElement.MetaValueElementBoolean import wom.callable.{Callable, WorkflowDefinition} import wom.graph.GraphNodePort.OutputPort import wom.graph.expression.AnonymousExpressionNode -import wom.graph.{CallNode, GraphNode, WomIdentifier, Graph => WomGraph} +import wom.graph.{CallNode, Graph => WomGraph, GraphNode, WomIdentifier} import wom.types.WomType object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { @@ -24,23 +24,29 @@ object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { final case class WorkflowDefinitionConvertInputs(definitionElement: WorkflowDefinitionElement, typeAliases: Map[String, WomType], callables: Map[String, Callable], - convertNestedScatterToSubworkflow : Boolean) + convertNestedScatterToSubworkflow: Boolean + ) - def convert(b: WorkflowDefinitionConvertInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[WorkflowDefinition] = { + def convert(b: WorkflowDefinitionConvertInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[WorkflowDefinition] = { val a: WorkflowDefinitionConvertInputs = eliminateInputDependencies(b) - val (meta, parameterMeta) = processMetaSections(a.definitionElement.metaSection, a.definitionElement.parameterMetaSection) + val (meta, parameterMeta) = + processMetaSections(a.definitionElement.metaSection, a.definitionElement.parameterMetaSection) - val allowNestedInputs: Boolean = { - 
meta.get("allowNestedInputs").flatMap { - case x: MetaValueElementBoolean => Option(x.value) - case _ => None - }}.getOrElse(false) + val allowNestedInputs: Boolean = + meta + .get("allowNestedInputs") + .flatMap { + case x: MetaValueElementBoolean => Option(x.value) + case _ => None + } + .getOrElse(false) // Make the set of workflow graph elements, including: // - Top-level graph elements @@ -51,11 +57,19 @@ object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { a.definitionElement.inputsSection.toSeq.flatMap(_.inputDeclarations) ++ a.definitionElement.outputsSection.toSeq.flatMap(_.outputs) - val innerGraph: ErrorOr[WomGraph] = convertGraphElements(GraphLikeConvertInputs(graphNodeElements, Set.empty, Map.empty, a.typeAliases, a.definitionElement.name, - insideAScatter = false, - convertNestedScatterToSubworkflow = b.convertNestedScatterToSubworkflow, - allowNestedInputs = allowNestedInputs, - a.callables)) + val innerGraph: ErrorOr[WomGraph] = convertGraphElements( + GraphLikeConvertInputs( + graphNodeElements, + Set.empty, + Map.empty, + a.typeAliases, + a.definitionElement.name, + insideAScatter = false, + convertNestedScatterToSubworkflow = b.convertNestedScatterToSubworkflow, + allowNestedInputs = allowNestedInputs, + a.callables + ) + ) // NB: isEmpty means "not isDefined". We specifically do NOT add defaults if the output section is defined but empty. val withDefaultOutputs: ErrorOr[WomGraph] = if (a.definitionElement.outputsSection.isEmpty) { innerGraph map { WomGraphMakerTools.addDefaultOutputs(_, Some(WomIdentifier(a.definitionElement.name))) } @@ -63,8 +77,8 @@ object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { innerGraph } - (withDefaultOutputs map { - ig => WorkflowDefinition(a.definitionElement.name, ig, meta, parameterMeta, b.definitionElement.sourceLocation) + (withDefaultOutputs map { ig => + WorkflowDefinition(a.definitionElement.name, ig, meta, parameterMeta, b.definitionElement.sourceLocation) }).contextualizeErrors(s"process workflow definition '${a.definitionElement.name}'") } @@ -76,13 +90,15 @@ object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { insideAScatter: Boolean, convertNestedScatterToSubworkflow: Boolean, allowNestedInputs: Boolean, - callables: Map[String, Callable]) + callables: Map[String, Callable] + ) - def convertGraphElements(a: GraphLikeConvertInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[WomGraph] = { + def convertGraphElements(a: GraphLikeConvertInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[WomGraph] = { val seedGeneratedValueHandles = for { seedNode <- a.seedNodes @@ -92,8 +108,20 @@ object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { val finished = a.externalUpstreamCalls map { c => GeneratedCallFinishedHandle(c._2.localName) } for { - linkedGraph <- LinkedGraphMaker.make(nodes = a.graphElements, seedGeneratedValueHandles ++ finished, typeAliases = a.typeAliases, callables = a.callables) - womGraph <- makeWomGraph(linkedGraph, a.seedNodes, a.externalUpstreamCalls, a.workflowName, a.insideAScatter, a.convertNestedScatterToSubworkflow, a.allowNestedInputs, 
a.callables) + linkedGraph <- LinkedGraphMaker.make(nodes = a.graphElements, + seedGeneratedValueHandles ++ finished, + typeAliases = a.typeAliases, + callables = a.callables + ) + womGraph <- makeWomGraph(linkedGraph, + a.seedNodes, + a.externalUpstreamCalls, + a.workflowName, + a.insideAScatter, + a.convertNestedScatterToSubworkflow, + a.allowNestedInputs, + a.callables + ) } yield womGraph } @@ -102,15 +130,19 @@ object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { externalUpstreamCalls: Map[String, CallNode], workflowName: String, insideAScatter: Boolean, - convertNestedScatterToSubworkflow : Boolean, + convertNestedScatterToSubworkflow: Boolean, allowNestedInputs: Boolean, - callables: Map[String, Callable]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[WomGraph] = { - - def graphNodeCreationFold(currentValidation: ErrorOr[List[GraphNode]], next: WorkflowGraphElement): ErrorOr[List[GraphNode]] = { + callables: Map[String, Callable] + )(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[WomGraph] = { + + def graphNodeCreationFold(currentValidation: ErrorOr[List[GraphNode]], + next: WorkflowGraphElement + ): ErrorOr[List[GraphNode]] = { def outputName(node: GraphNode, port: OutputPort): String = port.identifier.localName.value currentValidation flatMap { currentList => @@ -129,40 +161,65 @@ object WorkflowDefinitionElementToWomWorkflowDefinition extends Util { val generatedGraphNodesValidation: ErrorOr[Set[GraphNode]] = WorkflowGraphElementToGraphNode.convert( - GraphNodeMakerInputs(next, upstreamCallNodes, linkedGraph.consumedValueLookup, availableValues, linkedGraph.typeAliases, workflowName, insideAScatter, convertNestedScatterToSubworkflow, allowNestedInputs, callables)) + GraphNodeMakerInputs( + next, + upstreamCallNodes, + linkedGraph.consumedValueLookup, + availableValues, + linkedGraph.typeAliases, + workflowName, + insideAScatter, + convertNestedScatterToSubworkflow, + allowNestedInputs, + callables + ) + ) generatedGraphNodesValidation map { nextGraphNodes: Set[GraphNode] => currentList ++ nextGraphNodes } } } - val graphNodesValidation = LinkedGraphMaker.getOrdering(linkedGraph) flatMap { ordering: List[WorkflowGraphElement] => - ordering.foldLeft[ErrorOr[List[GraphNode]]](seedNodes.toList.validNel)(graphNodeCreationFold) + val graphNodesValidation = LinkedGraphMaker.getOrdering(linkedGraph) flatMap { + ordering: List[WorkflowGraphElement] => + ordering.foldLeft[ErrorOr[List[GraphNode]]](seedNodes.toList.validNel)(graphNodeCreationFold) } graphNodesValidation flatMap { graphNodes => WomGraph.validateAndConstruct(graphNodes.toSet) } } - private def eliminateInputDependencies(a: WorkflowDefinitionConvertInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): WorkflowDefinitionConvertInputs = { - case class NewInputElementsSet(original: InputDeclarationElement, newInput: InputDeclarationElement, newDeclaration: IntermediateValueDeclarationElement) + private def eliminateInputDependencies( + a: WorkflowDefinitionConvertInputs + )(implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement]): WorkflowDefinitionConvertInputs = 
{ + case class NewInputElementsSet(original: InputDeclarationElement, + newInput: InputDeclarationElement, + newDeclaration: IntermediateValueDeclarationElement + ) - val inputElementsWithUpstreams: Seq[NewInputElementsSet] = a.definitionElement.inputsSection.map(_.inputDeclarations).getOrElse(Seq.empty) collect { - case ide @ InputDeclarationElement(typeElement,name, Some(expression)) if expression.expressionConsumedValueHooks.nonEmpty => - val input = InputDeclarationElement(OptionalTypeElement(typeElement), name, None) + val inputElementsWithUpstreams: Seq[NewInputElementsSet] = + a.definitionElement.inputsSection.map(_.inputDeclarations).getOrElse(Seq.empty) collect { + case ide @ InputDeclarationElement(typeElement, name, Some(expression)) + if expression.expressionConsumedValueHooks.nonEmpty => + val input = InputDeclarationElement(OptionalTypeElement(typeElement), name, None) - val selecterExpression = SelectFirst(ArrayLiteral(Seq(IdentifierLookup(name), expression))) - val intermediate = IntermediateValueDeclarationElement(typeElement, s"__$name", selecterExpression) + val selecterExpression = SelectFirst(ArrayLiteral(Seq(IdentifierLookup(name), expression))) + val intermediate = IntermediateValueDeclarationElement(typeElement, s"__$name", selecterExpression) - NewInputElementsSet(ide, input, intermediate) - } + NewInputElementsSet(ide, input, intermediate) + } if (inputElementsWithUpstreams.nonEmpty) { val newInputsSection: Option[InputsSectionElement] = a.definitionElement.inputsSection.map { inputsSection => - InputsSectionElement(inputsSection.inputDeclarations.filterNot(inputElementsWithUpstreams.map(_.original).contains) ++ inputElementsWithUpstreams.map(_.newInput)) + InputsSectionElement( + inputsSection.inputDeclarations.filterNot( + inputElementsWithUpstreams.map(_.original).contains + ) ++ inputElementsWithUpstreams.map(_.newInput) + ) } - val newGraphNodeSet: Set[WorkflowGraphElement] = a.definitionElement.graphElements ++ inputElementsWithUpstreams.map(_.newDeclaration) + val newGraphNodeSet: Set[WorkflowGraphElement] = + a.definitionElement.graphElements ++ inputElementsWithUpstreams.map(_.newDeclaration) val newWorkflowDefinitionElement: WorkflowDefinitionElement = { - val withNewInputElements = a.definitionElement.copy(inputsSection = newInputsSection, graphElements = newGraphNodeSet) + val withNewInputElements = + a.definitionElement.copy(inputsSection = newInputsSection, graphElements = newGraphNodeSet) val identifierRenames = inputElementsWithUpstreams.map(ie => ie.original.name -> ie.newDeclaration.name).toMap withNewInputElements.renameIdentifiers(identifierRenames) } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/WdlomWomExpression.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/WdlomWomExpression.scala index 13eff3236ae..f94c15aa7b7 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/WdlomWomExpression.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/WdlomWomExpression.scala @@ -14,18 +14,23 @@ import wom.expression.{FileEvaluation, IoFunctionSet, WomExpression} import wom.types._ import wom.values.WomValue -final case class WdlomWomExpression private (expressionElement: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - 
typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]) extends WomExpression { +final case class WdlomWomExpression private (expressionElement: ExpressionElement, + linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle] +)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] +) extends WomExpression { override def sourceString: String = expressionElement.toWdlV1 - override def inputs: Set[String] = { + override def inputs: Set[String] = expressionElement.expressionConsumedValueHooks map { hook => linkedValues(hook).linkableName } - } - def evaluateValueForPlaceholder(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet, forCommandInstantiationOptions: ForCommandInstantiationOptions): ErrorOr[EvaluatedValue[_]] = + def evaluateValueForPlaceholder(inputValues: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + forCommandInstantiationOptions: ForCommandInstantiationOptions + ): ErrorOr[EvaluatedValue[_]] = expressionElement.evaluateValue(inputValues, ioFunctionSet, Option(forCommandInstantiationOptions)) override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = @@ -35,20 +40,28 @@ final case class WdlomWomExpression private (expressionElement: ExpressionElemen // NB types can be determined using the linked values, so we don't need the inputMap: override def evaluateType(inputMap: Map[String, WomType]): ErrorOr[WomType] = evaluatedType - override def evaluateFiles(inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = { - expressionElement.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) map { _ map { - FileEvaluation(_, optional = areAllFileTypesInWomTypeOptional(coerceTo), secondary = false) - }} - } + override def evaluateFiles(inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = + expressionElement.evaluateFilesNeededToEvaluate(inputs, ioFunctionSet, coerceTo) map { + _ map { + FileEvaluation(_, optional = areAllFileTypesInWomTypeOptional(coerceTo), secondary = false) + } + } } object WdlomWomExpression { - def make(expressionElement: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle]) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[WdlomWomExpression] = { + def make(expressionElement: ExpressionElement, linkedValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle])( + implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[WdlomWomExpression] = { val candidate = WdlomWomExpression(expressionElement, linkedValues) - candidate.evaluatedType.contextualizeErrors(s"process expression '${candidate.sourceString}'") map { _ => candidate } + candidate.evaluatedType.contextualizeErrors(s"process expression '${candidate.sourceString}'") map { _ => + candidate + } } } diff --git 
a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/BinaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/BinaryOperatorEvaluators.scala index c862e2abc0e..15fcdac8115 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/BinaryOperatorEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/BinaryOperatorEvaluators.scala @@ -12,20 +12,23 @@ object BinaryOperatorEvaluators { implicit val lessThanEvaluator: IdentifierLookupRenamer[LessThan] = forOperation(LessThan) implicit val lessThanOrEqualEvaluator: IdentifierLookupRenamer[LessThanOrEquals] = forOperation(LessThanOrEquals) implicit val greaterThanEvaluator: IdentifierLookupRenamer[GreaterThan] = forOperation(GreaterThan) - implicit val greaterThanOrEqualEvaluator: IdentifierLookupRenamer[GreaterThanOrEquals] = forOperation(GreaterThanOrEquals) + implicit val greaterThanOrEqualEvaluator: IdentifierLookupRenamer[GreaterThanOrEquals] = forOperation( + GreaterThanOrEquals + ) implicit val addEvaluator: IdentifierLookupRenamer[Add] = forOperation(Add) implicit val subtractEvaluator: IdentifierLookupRenamer[Subtract] = forOperation(Subtract) implicit val multiplyEvaluator: IdentifierLookupRenamer[Multiply] = forOperation(Multiply) implicit val divideEvaluator: IdentifierLookupRenamer[Divide] = forOperation(Divide) implicit val remainderEvaluator: IdentifierLookupRenamer[Remainder] = forOperation(Remainder) - private def forOperation[A <: BinaryOperation](constructor: (ExpressionElement, ExpressionElement) => A) = new IdentifierLookupRenamer[A] { - override def renameIdentifiers(a: A, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A = - constructor.apply( - expressionElementRenamer.renameIdentifiers(a.left, renamingMap)(expressionElementRenamer), - expressionElementRenamer.renameIdentifiers(a.right, renamingMap)(expressionElementRenamer) - ) - } + private def forOperation[A <: BinaryOperation](constructor: (ExpressionElement, ExpressionElement) => A) = + new IdentifierLookupRenamer[A] { + override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A = + constructor.apply( + expressionElementRenamer.renameIdentifiers(a.left, renamingMap)(expressionElementRenamer), + expressionElementRenamer.renameIdentifiers(a.right, renamingMap)(expressionElementRenamer) + ) + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/EngineFunctionEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/EngineFunctionEvaluators.scala index 186843b134b..6efcf60cc4f 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/EngineFunctionEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/EngineFunctionEvaluators.scala @@ -3,7 +3,6 @@ package wdl.transforms.base.wdlom2wom.expression.renaming import wdl.model.draft3.elements.ExpressionElement._ import wdl.model.draft3.elements.ExpressionElement - object EngineFunctionEvaluators { implicit val stdoutRenamer: IdentifierLookupRenamer[StdoutElement.type] = forZeroParamFunction[StdoutElement.type] @@ -46,51 +45,58 @@ object EngineFunctionEvaluators 
{ implicit val subRenamer: IdentifierLookupRenamer[Sub] = forThreeParamFunction(Sub) - private def forZeroParamFunction[A <: ExpressionElement]: IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { - override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A = a - } + private def forZeroParamFunction[A <: ExpressionElement]: IdentifierLookupRenamer[A] = + new IdentifierLookupRenamer[A] { + override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A = a + } - private def forOneParamFunction[A <: OneParamFunctionCallElement](constructor: ExpressionElement => A): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { - override def renameIdentifiers(a: A, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A = { + private def forOneParamFunction[A <: OneParamFunctionCallElement]( + constructor: ExpressionElement => A + ): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { + override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A = constructor.apply( expressionElementRenamer.renameIdentifiers(a.param, renamingMap)(expressionElementRenamer) ) - } } - private def forOneOrTwoParamFunction[A <: OneOrTwoParamFunctionCallElement](constructor: (ExpressionElement, Option[ExpressionElement]) => A): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { - override def renameIdentifiers(a: A, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A = { + private def forOneOrTwoParamFunction[A <: OneOrTwoParamFunctionCallElement]( + constructor: (ExpressionElement, Option[ExpressionElement]) => A + ): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { + override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A = constructor.apply( expressionElementRenamer.renameIdentifiers(a.firstParam, renamingMap)(expressionElementRenamer), a.secondParam.map(expressionElementRenamer.renameIdentifiers(_, renamingMap)(expressionElementRenamer)) ) - } } - private def forTwoParamFunction[A <: TwoParamFunctionCallElement](constructor: (ExpressionElement, ExpressionElement) => A): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { - override def renameIdentifiers(a: A, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A = { + private def forTwoParamFunction[A <: TwoParamFunctionCallElement]( + constructor: (ExpressionElement, ExpressionElement) => A + ): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { + override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A = constructor.apply( expressionElementRenamer.renameIdentifiers(a.arg1, renamingMap)(expressionElementRenamer), expressionElementRenamer.renameIdentifiers(a.arg2, renamingMap)(expressionElementRenamer) ) - } } - private def forThreeParamFunction[A <: ThreeParamFunctionCallElement](constructor: (ExpressionElement, ExpressionElement, ExpressionElement) => A): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { - 
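// Each engine-function instance rebuilds its node by delegating to the
// ExpressionElement renamer for every argument. For example (hypothetical
// expression), applying Map("m" -> "__m") to the three-argument call
// sub(x, m, "y") rewrites its IdentifierLookup("m") argument to
// IdentifierLookup("__m") and leaves the other arguments untouched.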
override def renameIdentifiers(a: A, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A = { + private def forThreeParamFunction[A <: ThreeParamFunctionCallElement]( + constructor: (ExpressionElement, ExpressionElement, ExpressionElement) => A + ): IdentifierLookupRenamer[A] = new IdentifierLookupRenamer[A] { + override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A = constructor.apply( expressionElementRenamer.renameIdentifiers(a.arg1, renamingMap)(expressionElementRenamer), expressionElementRenamer.renameIdentifiers(a.arg2, renamingMap)(expressionElementRenamer), expressionElementRenamer.renameIdentifiers(a.arg3, renamingMap)(expressionElementRenamer) ) - } } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/IdentifierLookupRenamer.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/IdentifierLookupRenamer.scala index 0fd301f92f0..c75da3ab685 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/IdentifierLookupRenamer.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/IdentifierLookupRenamer.scala @@ -5,5 +5,7 @@ import wdl.model.draft3.elements.ExpressionElement @typeclass trait IdentifierLookupRenamer[A <: ExpressionElement] { - def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A + def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LiteralEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LiteralEvaluators.scala index b3158f92f3d..8611caba92e 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LiteralEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LiteralEvaluators.scala @@ -3,56 +3,72 @@ package wdl.transforms.base.wdlom2wom.expression.renaming import wdl.model.draft3.elements.ExpressionElement import wdl.model.draft3.elements.ExpressionElement._ - object LiteralEvaluators { - implicit val primitiveIdentifierLookupRenamer: IdentifierLookupRenamer[PrimitiveLiteralExpressionElement] = new IdentifierLookupRenamer[PrimitiveLiteralExpressionElement] { - override def renameIdentifiers(a: PrimitiveLiteralExpressionElement, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): PrimitiveLiteralExpressionElement = a - } + implicit val primitiveIdentifierLookupRenamer: IdentifierLookupRenamer[PrimitiveLiteralExpressionElement] = + new IdentifierLookupRenamer[PrimitiveLiteralExpressionElement] { + override def renameIdentifiers(a: PrimitiveLiteralExpressionElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): PrimitiveLiteralExpressionElement = a + } - implicit val stringLiteralEvaluator: IdentifierLookupRenamer[StringLiteral] = new IdentifierLookupRenamer[StringLiteral] { - override def renameIdentifiers(a: StringLiteral, renamingMap: 
Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): StringLiteral = a - } + implicit val stringLiteralEvaluator: IdentifierLookupRenamer[StringLiteral] = + new IdentifierLookupRenamer[StringLiteral] { + override def renameIdentifiers(a: StringLiteral, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): StringLiteral = a + } - implicit val stringExpressionEvaluator: IdentifierLookupRenamer[StringExpression] = new IdentifierLookupRenamer[StringExpression] { - override def renameIdentifiers(a: StringExpression, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): StringExpression = StringExpression( - a.pieces map { - case sp: StringPlaceholder => StringPlaceholder(expressionElementRenamer.renameIdentifiers(sp.expr, renamingMap)(expressionElementRenamer)) - case other => other - } - ) - } + implicit val stringExpressionEvaluator: IdentifierLookupRenamer[StringExpression] = + new IdentifierLookupRenamer[StringExpression] { + override def renameIdentifiers(a: StringExpression, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): StringExpression = StringExpression( + a.pieces map { + case sp: StringPlaceholder => + StringPlaceholder( + expressionElementRenamer.renameIdentifiers(sp.expr, renamingMap)(expressionElementRenamer) + ) + case other => other + } + ) + } - implicit val objectLiteralEvaluator: IdentifierLookupRenamer[ObjectLiteral] = new IdentifierLookupRenamer[ObjectLiteral] { - override def renameIdentifiers(a: ObjectLiteral, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): ObjectLiteral = ObjectLiteral( - a.elements map { - case (key, value) => key -> expressionElementRenamer.renameIdentifiers(value, renamingMap)(expressionElementRenamer) - } - ) - } + implicit val objectLiteralEvaluator: IdentifierLookupRenamer[ObjectLiteral] = + new IdentifierLookupRenamer[ObjectLiteral] { + override def renameIdentifiers(a: ObjectLiteral, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): ObjectLiteral = ObjectLiteral( + a.elements map { case (key, value) => + key -> expressionElementRenamer.renameIdentifiers(value, renamingMap)(expressionElementRenamer) + } + ) + } implicit val mapLiteralEvaluator: IdentifierLookupRenamer[MapLiteral] = new IdentifierLookupRenamer[MapLiteral] { - override def renameIdentifiers(a: MapLiteral, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): MapLiteral = MapLiteral( - a.elements map { - case (key, value) => expressionElementRenamer.renameIdentifiers(key, renamingMap)(expressionElementRenamer) -> expressionElementRenamer.renameIdentifiers(value, renamingMap)(expressionElementRenamer) + override def renameIdentifiers(a: MapLiteral, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): MapLiteral = MapLiteral( + a.elements map { case (key, value) => + expressionElementRenamer.renameIdentifiers(key, renamingMap)( + expressionElementRenamer + ) -> expressionElementRenamer.renameIdentifiers(value, renamingMap)(expressionElementRenamer) } ) } - implicit val arrayLiteralEvaluator: IdentifierLookupRenamer[ArrayLiteral] = new IdentifierLookupRenamer[ArrayLiteral] { - 
override def renameIdentifiers(a: ArrayLiteral, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): ArrayLiteral = ArrayLiteral( - a.elements map { e => expressionElementRenamer.renameIdentifiers(e, renamingMap)(expressionElementRenamer) } - ) - } + implicit val arrayLiteralEvaluator: IdentifierLookupRenamer[ArrayLiteral] = + new IdentifierLookupRenamer[ArrayLiteral] { + override def renameIdentifiers(a: ArrayLiteral, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): ArrayLiteral = ArrayLiteral( + a.elements map { e => expressionElementRenamer.renameIdentifiers(e, renamingMap)(expressionElementRenamer) } + ) + } implicit val pairLiteralEvaluator: IdentifierLookupRenamer[PairLiteral] = new IdentifierLookupRenamer[PairLiteral] { - override def renameIdentifiers(a: PairLiteral, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): PairLiteral = PairLiteral( + override def renameIdentifiers(a: PairLiteral, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): PairLiteral = PairLiteral( expressionElementRenamer.renameIdentifiers(a.left, renamingMap)(expressionElementRenamer), expressionElementRenamer.renameIdentifiers(a.right, renamingMap)(expressionElementRenamer) ) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LookupEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LookupEvaluators.scala index f1b2151a8c7..924bde53f80 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LookupEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/LookupEvaluators.scala @@ -3,38 +3,48 @@ package wdl.transforms.base.wdlom2wom.expression.renaming import wdl.model.draft3.elements.ExpressionElement import wdl.model.draft3.elements.ExpressionElement._ - - object LookupEvaluators { - implicit val identifierLookupEvaluator: IdentifierLookupRenamer[IdentifierLookup] = new IdentifierLookupRenamer[IdentifierLookup] { - override def renameIdentifiers(a: IdentifierLookup, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): IdentifierLookup = IdentifierLookup( - if (renamingMap.contains(a.identifier)) - renamingMap(a.identifier) - else - a.identifier - ) - } - - implicit val expressionMemberAccessEvaluator: IdentifierLookupRenamer[ExpressionMemberAccess] = new IdentifierLookupRenamer[ExpressionMemberAccess] { - override def renameIdentifiers(a: ExpressionMemberAccess, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): ExpressionMemberAccess = ExpressionMemberAccess( - expressionElementRenamer.renameIdentifiers(a.expression, renamingMap)(expressionElementRenamer), - a.memberAccessTail - ) - } - - implicit val identifierMemberAccessEvaluator: IdentifierLookupRenamer[IdentifierMemberAccess] = new IdentifierLookupRenamer[IdentifierMemberAccess] { - override def renameIdentifiers(a: IdentifierMemberAccess, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): IdentifierMemberAccess = IdentifierMemberAccess( - if (renamingMap.contains(a.first)) renamingMap(a.first) else 
a.first, - a.second, - a.memberAccessTail - ) - } - - implicit val indexAccessIdentifierLookupRenamer: IdentifierLookupRenamer[IndexAccess] = new IdentifierLookupRenamer[IndexAccess] { - override def renameIdentifiers(a: IndexAccess, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): IndexAccess = IndexAccess( - expressionElementRenamer.renameIdentifiers(a.expressionElement, renamingMap)(expressionElementRenamer), - expressionElementRenamer.renameIdentifiers(a.index, renamingMap)(expressionElementRenamer) - ) - } + implicit val identifierLookupEvaluator: IdentifierLookupRenamer[IdentifierLookup] = + new IdentifierLookupRenamer[IdentifierLookup] { + override def renameIdentifiers(a: IdentifierLookup, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): IdentifierLookup = IdentifierLookup( + if (renamingMap.contains(a.identifier)) + renamingMap(a.identifier) + else + a.identifier + ) + } + + implicit val expressionMemberAccessEvaluator: IdentifierLookupRenamer[ExpressionMemberAccess] = + new IdentifierLookupRenamer[ExpressionMemberAccess] { + override def renameIdentifiers(a: ExpressionMemberAccess, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): ExpressionMemberAccess = ExpressionMemberAccess( + expressionElementRenamer.renameIdentifiers(a.expression, renamingMap)(expressionElementRenamer), + a.memberAccessTail + ) + } + + implicit val identifierMemberAccessEvaluator: IdentifierLookupRenamer[IdentifierMemberAccess] = + new IdentifierLookupRenamer[IdentifierMemberAccess] { + override def renameIdentifiers(a: IdentifierMemberAccess, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): IdentifierMemberAccess = IdentifierMemberAccess( + if (renamingMap.contains(a.first)) renamingMap(a.first) else a.first, + a.second, + a.memberAccessTail + ) + } + + implicit val indexAccessIdentifierLookupRenamer: IdentifierLookupRenamer[IndexAccess] = + new IdentifierLookupRenamer[IndexAccess] { + override def renameIdentifiers(a: IndexAccess, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): IndexAccess = IndexAccess( + expressionElementRenamer.renameIdentifiers(a.expressionElement, renamingMap)(expressionElementRenamer), + expressionElementRenamer.renameIdentifiers(a.index, renamingMap)(expressionElementRenamer) + ) + } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/TernaryIfEvaluator.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/TernaryIfEvaluator.scala index a22971854a0..df3b77c3ace 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/TernaryIfEvaluator.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/TernaryIfEvaluator.scala @@ -5,7 +5,9 @@ import wdl.model.draft3.elements.ExpressionElement._ object TernaryIfEvaluator { implicit val ternaryIfEvaluator: IdentifierLookupRenamer[TernaryIf] = new IdentifierLookupRenamer[TernaryIf] { - override def renameIdentifiers(a: TernaryIf, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): TernaryIf = TernaryIf( + override def renameIdentifiers(a: TernaryIf, renamingMap: 
Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): TernaryIf = TernaryIf( expressionElementRenamer.renameIdentifiers(a.condition, renamingMap)(expressionElementRenamer), expressionElementRenamer.renameIdentifiers(a.ifTrue, renamingMap)(expressionElementRenamer), expressionElementRenamer.renameIdentifiers(a.ifFalse, renamingMap)(expressionElementRenamer) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/UnaryOperatorEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/UnaryOperatorEvaluators.scala index 9864d239751..71673dd67e2 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/UnaryOperatorEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/UnaryOperatorEvaluators.scala @@ -10,7 +10,9 @@ object UnaryOperatorEvaluators { implicit val logicalNotEvaluator: IdentifierLookupRenamer[LogicalNot] = forOperation(LogicalNot) private def forOperation[A <: UnaryOperation](constructor: ExpressionElement => A) = new IdentifierLookupRenamer[A] { - override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): A = + override def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): A = constructor.apply(expressionElementRenamer.renameIdentifiers(a.argument, renamingMap)(expressionElementRenamer)) } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/package.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/package.scala index 2bc9b6b7488..a4fce5d1292 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/package.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/expression/renaming/package.scala @@ -13,90 +13,89 @@ import wdl.transforms.base.wdlom2wom.expression.renaming.UnaryOperatorEvaluators package object renaming { - implicit val expressionEvaluator: IdentifierLookupRenamer[ExpressionElement] = new IdentifierLookupRenamer[ExpressionElement] { - override def renameIdentifiers(a: ExpressionElement, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement]): ExpressionElement = { + implicit val expressionEvaluator: IdentifierLookupRenamer[ExpressionElement] = + new IdentifierLookupRenamer[ExpressionElement] { + override def renameIdentifiers(a: ExpressionElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement] + ): ExpressionElement = + a match { + // Literals: + case a: PrimitiveLiteralExpressionElement => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: StringLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: StringExpression => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ObjectLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: MapLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ArrayLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: PairLiteral => 
a.renameIdentifiers(renamingMap)(expressionElementRenamer) - a match { - // Literals: - case a: PrimitiveLiteralExpressionElement => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: StringLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: StringExpression => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ObjectLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: MapLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ArrayLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: PairLiteral => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + // Lookups and member accesses: + case a: IdentifierLookup => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ExpressionMemberAccess => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: IdentifierMemberAccess => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: IndexAccess => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - // Lookups and member accesses: - case a: IdentifierLookup => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ExpressionMemberAccess => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: IdentifierMemberAccess => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: IndexAccess => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + // Unary operators: + case a: UnaryNegation => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: UnaryPlus => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: LogicalNot => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - // Unary operators: - case a: UnaryNegation => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: UnaryPlus => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: LogicalNot => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + // Binary operators (at some point we might want to split these into separate cases): + case a: LogicalOr => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: LogicalAnd => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Equals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: NotEquals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: LessThan => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: LessThanOrEquals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: GreaterThan => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: GreaterThanOrEquals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Add => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Subtract => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Multiply => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Divide => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Remainder => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - // Binary operators (at some point we might want to split these into separate cases): - case a: LogicalOr => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: LogicalAnd => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Equals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: 
NotEquals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: LessThan => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: LessThanOrEquals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: GreaterThan => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: GreaterThanOrEquals => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Add => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Subtract => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Multiply => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Divide => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Remainder => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: TernaryIf => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: TernaryIf => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + // Engine functions: + case a: StdoutElement.type => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: StderrElement.type => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - // Engine functions: - case a: StdoutElement.type => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: StderrElement.type => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadLines => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadTsv => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadMap => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadObject => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadObjects => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadJson => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadInt => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadString => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadFloat => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: ReadBoolean => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: WriteLines => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: WriteTsv => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: WriteMap => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: WriteObject => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: WriteObjects => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: WriteJson => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Range => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Transpose => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Length => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Flatten => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: SelectFirst => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: SelectAll => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Defined => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Floor => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Ceil => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Round => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Glob => 
a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadLines => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadTsv => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadMap => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadObject => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadObjects => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadJson => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadInt => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadString => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadFloat => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: ReadBoolean => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: WriteLines => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: WriteTsv => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: WriteMap => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: WriteObject => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: WriteObjects => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: WriteJson => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Range => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Transpose => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Length => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Flatten => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: SelectFirst => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: SelectAll => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Defined => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Floor => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Ceil => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Round => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Glob => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Size => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Basename => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Size => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Basename => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Zip => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Cross => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + case a: Prefix => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Zip => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Cross => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - case a: Prefix => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - - case a: Sub => a.renameIdentifiers(renamingMap)(expressionElementRenamer) - } + case a: Sub => a.renameIdentifiers(renamingMap)(expressionElementRenamer) + } } - } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala index 322fc5ceba6..6aa36184aef 100644 --- 
a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala @@ -16,7 +16,12 @@ import wom.callable.Callable._ import wom.callable.{Callable, CallableTaskDefinition, TaskDefinition, WorkflowDefinition} import wom.graph.CallNode.{CallNodeAndNewNodes, InputDefinitionFold, InputDefinitionPointer} import wom.graph.GraphNodePort.OutputPort -import wom.graph.expression.{AnonymousExpressionNode, ExpressionNode, PlainAnonymousExpressionNode, TaskCallInputExpressionNode} +import wom.graph.expression.{ + AnonymousExpressionNode, + ExpressionNode, + PlainAnonymousExpressionNode, + TaskCallInputExpressionNode +} import wom.graph._ import wom.types.{WomOptionalType, WomType} import wdl.transforms.base.wdlom2wdl.WdlWriter.ops._ @@ -24,11 +29,12 @@ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl.expressionElementWriter import wdl.transforms.base.wdlom2wdl.WdlWriterImpl.CallElementWriter object CallElementToGraphNode { - def convert(a: CallNodeMakerInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[GraphNode]] = { + def convert(a: CallNodeMakerInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[GraphNode]] = { val callNodeBuilder = new CallNode.CallNodeBuilder() val callName = a.node.alias.getOrElse(a.node.callableReference.split("\\.").last) @@ -41,35 +47,41 @@ object CallElementToGraphNode { val unsuppliedInputs = w.inputs.collect { case r: RequiredInputDefinition if r.localName.value.contains(".") => r.localName.value } - val unsuppliedInputsValidation: ErrorOr[Unit] = if (unsuppliedInputs.isEmpty) { ().validNel } else { s"To be called as a sub-workflow it must declare and pass-through the following values via workflow inputs: ${unsuppliedInputs.mkString(", ")}".invalidNel } + val unsuppliedInputsValidation: ErrorOr[Unit] = if (unsuppliedInputs.isEmpty) { ().validNel } + else { + s"To be called as a sub-workflow it must declare and pass-through the following values via workflow inputs: ${unsuppliedInputs + .mkString(", ")}".invalidNel + } val unspecifiedOutputs = w.graph.outputNodes.map(_.localName).filter(_.contains(".")) - val unspecifiedOutputsValidation: ErrorOr[Unit] = if (unspecifiedOutputs.isEmpty) { ().validNel } else { s"To be called as a sub-workflow it must specify all outputs using an output section. This workflow may wish to declare outputs for: ${unspecifiedOutputs.mkString(", ")}".invalidNel } + val unspecifiedOutputsValidation: ErrorOr[Unit] = if (unspecifiedOutputs.isEmpty) { ().validNel } + else { + s"To be called as a sub-workflow it must specify all outputs using an output section. 
This workflow may wish to declare outputs for: ${unspecifiedOutputs + .mkString(", ")}".invalidNel + } - (unsuppliedInputsValidation, unspecifiedOutputsValidation) mapN { (_,_) => w } + (unsuppliedInputsValidation, unspecifiedOutputsValidation) mapN { (_, _) => w } case Some(c: Callable) => c.validNel case None => s"Cannot resolve a callable with name ${a.node.callableReference}".invalidNel } - def supplyableInput(definition: Callable.InputDefinition): Boolean = { - !definition.isInstanceOf[FixedInputDefinitionWithDefault] && - (!definition.name.contains(".") || a.allowNestedInputs) - } + def supplyableInput(definition: Callable.InputDefinition): Boolean = + !definition.isInstanceOf[FixedInputDefinitionWithDefault] && + (!definition.name.contains(".") || a.allowNestedInputs) - def validInput(name: String, definition: Callable.InputDefinition): Boolean = { + def validInput(name: String, definition: Callable.InputDefinition): Boolean = definition.name == name && supplyableInput(definition) - } /* - * Each input definition KV pair becomes an entry in map. - * - * i.e. - * call foo { - * input: key = value - * - * @return ErrorOr of LocalName(key) mapped to ExpressionNode(value). - */ + * Each input definition KV pair becomes an entry in map. + * + * i.e. + * call foo { + * input: key = value + * + * @return ErrorOr of LocalName(key) mapped to ExpressionNode(value). + */ def expressionNodeMappings(callable: Callable): ErrorOr[Map[LocalName, AnonymousExpressionNode]] = { def hasDeclaration(callable: Callable, name: String): Boolean = callable match { @@ -95,8 +107,14 @@ object CallElementToGraphNode { case _ => i.womType } - (WorkflowGraphElementToGraphNode.validateAssignmentType(wdlomWomExpression, requiredInputType) flatMap { _ => - AnonymousExpressionNode.fromInputMapping[AnonymousExpressionNode](identifier, wdlomWomExpression, a.linkablePorts, constructor) map { + (WorkflowGraphElementToGraphNode.validateAssignmentType(wdlomWomExpression, + requiredInputType + ) flatMap { _ => + AnonymousExpressionNode.fromInputMapping[AnonymousExpressionNode](identifier, + wdlomWomExpression, + a.linkablePorts, + constructor + ) map { LocalName(name) -> _ } }).contextualizeErrors(s"supply input $name = ${expression.toWdlV1}") @@ -116,26 +134,29 @@ object CallElementToGraphNode { } /* - * Fold over the input definitions and - * 1) assign each input definition its InputDefinitionPointer - * 2) if necessary, create a graph input node and assign its output port to the input definition - * - * @return InputDefinitionFold accumulates the input definition mappings, the create graph input nodes, and the expression nodes. + * Fold over the input definitions and + * 1) assign each input definition its InputDefinitionPointer + * 2) if necessary, create a graph input node and assign its output port to the input definition + * + * @return InputDefinitionFold accumulates the input definition mappings, the create graph input nodes, and the expression nodes. */ - def foldInputDefinitions(expressionNodes: Map[LocalName, ExpressionNode], callable: Callable): InputDefinitionFold = { + def foldInputDefinitions(expressionNodes: Map[LocalName, ExpressionNode], + callable: Callable + ): InputDefinitionFold = { // Updates the fold with a new graph input node. 
Happens when an optional or required undefined input without an // expression node mapping is found - def withGraphInputNode(inputDefinition: InputDefinition, graphInputNode: ExternalGraphInputNode) = { + def withGraphInputNode(inputDefinition: InputDefinition, graphInputNode: ExternalGraphInputNode) = InputDefinitionFold( - mappings = List(inputDefinition -> Coproduct[InputDefinitionPointer](graphInputNode.singleOutputPort: OutputPort)), + mappings = + List(inputDefinition -> Coproduct[InputDefinitionPointer](graphInputNode.singleOutputPort: OutputPort)), callInputPorts = Set(callNodeBuilder.makeInputPort(inputDefinition, graphInputNode.singleOutputPort)), newGraphInputNodes = Set(graphInputNode) ) - } callable.inputs foldMap { // If there is an input mapping for this input definition, use that - case inputDefinition if expressionNodes.contains(inputDefinition.localName) && supplyableInput(inputDefinition) => + case inputDefinition + if expressionNodes.contains(inputDefinition.localName) && supplyableInput(inputDefinition) => val expressionNode = expressionNodes(inputDefinition.localName) InputDefinitionFold( mappings = List(inputDefinition -> expressionNode.inputDefinitionPointer), @@ -144,26 +165,30 @@ object CallElementToGraphNode { ) // No input mapping, add an optional input using the default expression - case withDefault@OverridableInputDefinitionWithDefault(n, womType, expression, _, _) => + case withDefault @ OverridableInputDefinitionWithDefault(n, womType, expression, _, _) => val identifier = WomIdentifier( localName = s"$callName.${n.value}", fullyQualifiedName = s"${a.workflowName}.$callName.${n.value}" ) if (supplyableInput(withDefault)) { - withGraphInputNode(withDefault, OptionalGraphInputNodeWithDefault(identifier, womType, expression, identifier.fullyQualifiedName.value)) + withGraphInputNode( + withDefault, + OptionalGraphInputNodeWithDefault(identifier, womType, expression, identifier.fullyQualifiedName.value) + ) } else { // We can't supply this from outside so hard code in the default: InputDefinitionFold(mappings = List(withDefault -> Coproduct[InputDefinitionPointer](expression))) } // Not an input, use the default expression: - case fixedExpression @ FixedInputDefinitionWithDefault(_,_,expression,_, _) => InputDefinitionFold( - mappings = List(fixedExpression -> Coproduct[InputDefinitionPointer](expression)) - ) + case fixedExpression @ FixedInputDefinitionWithDefault(_, _, expression, _, _) => + InputDefinitionFold( + mappings = List(fixedExpression -> Coproduct[InputDefinitionPointer](expression)) + ) // No input mapping, required and we don't have a default value, create a new RequiredGraphInputNode // so that it can be satisfied via workflow inputs - case required@RequiredInputDefinition(n, womType, _, _) if supplyableInput(required) => + case required @ RequiredInputDefinition(n, womType, _, _) if supplyableInput(required) => val identifier = WomIdentifier( localName = s"$callName.${n.value}", fullyQualifiedName = s"${a.workflowName}.$callName.${n.value}" @@ -173,13 +198,15 @@ object CallElementToGraphNode { // No input mapping, no default value but optional, create a OptionalGraphInputNode // so that it can be satisfied via workflow inputs - case optional@OptionalInputDefinition(n, womType, _, _) => + case optional @ OptionalInputDefinition(n, womType, _, _) => val identifier = WomIdentifier( localName = s"$callName.${n.value}", fullyQualifiedName = s"${a.workflowName}.$callName.${n.value}" ) if (supplyableInput(optional)) { - withGraphInputNode(optional, 
OptionalGraphInputNode(identifier, womType, identifier.fullyQualifiedName.value)) + withGraphInputNode(optional, + OptionalGraphInputNode(identifier, womType, identifier.fullyQualifiedName.value) + ) } else { // Leave it unsupplied: InputDefinitionFold() @@ -188,7 +215,9 @@ object CallElementToGraphNode { } } - def updateTaskCallNodeInputs(callNodeAndNewNodes: CallNodeAndNewNodes, mappings: Map[LocalName, AnonymousExpressionNode]): Unit = { + def updateTaskCallNodeInputs(callNodeAndNewNodes: CallNodeAndNewNodes, + mappings: Map[LocalName, AnonymousExpressionNode] + ): Unit = { for { taskCallNode <- List(callNodeAndNewNodes.node) collect { case c: CommandCallNode => c } taskCallInputExpression <- mappings.values.toList collect { case t: TaskCallInputExpressionNode => t } @@ -197,20 +226,27 @@ object CallElementToGraphNode { () } - def findUpstreamCall(callName: String): ErrorOr[GraphNode] = { - a.upstreamCalls.get(callName).toErrorOr(s"No such upstream call '$callName' found in available set: [${a.upstreamCalls.keySet.mkString(", ")}]") - } + def findUpstreamCall(callName: String): ErrorOr[GraphNode] = + a.upstreamCalls + .get(callName) + .toErrorOr( + s"No such upstream call '$callName' found in available set: [${a.upstreamCalls.keySet.mkString(", ")}]" + ) - def findUpstreamCalls(callNames: List[String]): ErrorOr[Set[GraphNode]] = { + def findUpstreamCalls(callNames: List[String]): ErrorOr[Set[GraphNode]] = callNames.traverse(findUpstreamCall _).map(_.toSet) - } val result = for { callable <- callableValidation mappings <- expressionNodeMappings(callable) identifier = WomIdentifier(localName = callName, fullyQualifiedName = a.workflowName + "." + callName) upstream <- findUpstreamCalls(a.node.afters.toList) - result = callNodeBuilder.build(identifier, callable, foldInputDefinitions(mappings, callable), upstream, a.node.sourceLocation) + result = callNodeBuilder.build(identifier, + callable, + foldInputDefinitions(mappings, callable), + upstream, + a.node.sourceLocation + ) _ = updateTaskCallNodeInputs(result, mappings) } yield result.nodes @@ -226,4 +262,5 @@ case class CallNodeMakerInputs(node: CallElement, workflowName: String, insideAnotherScatter: Boolean, allowNestedInputs: Boolean, - callables: Map[String, Callable]) + callables: Map[String, Callable] +) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/IfElementToGraphNode.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/IfElementToGraphNode.scala index 53742b6be80..7c53c5c0cd6 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/IfElementToGraphNode.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/IfElementToGraphNode.scala @@ -23,28 +23,42 @@ import wom.graph.expression.{AnonymousExpressionNode, PlainAnonymousExpressionNo import wom.types.{WomAnyType, WomBooleanType, WomType} object IfElementToGraphNode { - def convert(a: ConditionalNodeMakerInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[GraphNode]] = { + def convert(a: ConditionalNodeMakerInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): 
ErrorOr[Set[GraphNode]] = { val conditionExpression = a.node.conditionExpression val graphElements = a.node.graphElements - val conditionWomExpressionV: ErrorOr[WdlomWomExpression] = WdlomWomExpression.make(conditionExpression, a.linkableValues) - val conditionExpressionNodeValidation: ErrorOr[AnonymousExpressionNode] = conditionWomExpressionV flatMap { conditionWomExpression => - AnonymousExpressionNode.fromInputMapping(WomIdentifier("if_condition"), conditionWomExpression, a.linkablePorts, PlainAnonymousExpressionNode.apply) } + val conditionWomExpressionV: ErrorOr[WdlomWomExpression] = + WdlomWomExpression.make(conditionExpression, a.linkableValues) + val conditionExpressionNodeValidation: ErrorOr[AnonymousExpressionNode] = conditionWomExpressionV flatMap { + conditionWomExpression => + AnonymousExpressionNode.fromInputMapping(WomIdentifier("if_condition"), + conditionWomExpression, + a.linkablePorts, + PlainAnonymousExpressionNode.apply + ) + } val conditionVariableTypeValidation: ErrorOr[Unit] = conditionExpression.evaluateType(a.linkableValues) flatMap { case WomBooleanType | WomAnyType => ().validNel case other => s"Invalid type for condition variable: ${other.stableName}".invalidNel } - final case class RequiredOuterPorts(valueGeneratorPorts: Map[String, OutputPort], completionPorts: Map[String, CallNode]) + final case class RequiredOuterPorts(valueGeneratorPorts: Map[String, OutputPort], + completionPorts: Map[String, CallNode] + ) val foundOuterGeneratorsValidation: ErrorOr[RequiredOuterPorts] = { - val required: ErrorOr[Set[UnlinkedConsumedValueHook]] = graphElements.toList.traverse { element => element.graphElementConsumedValueHooks(a.availableTypeAliases, a.callables) }.map(_.toSet.flatten) - val generated: ErrorOr[Set[GeneratedValueHandle]] = graphElements.toList.traverse { element => element.generatedValueHandles(a.availableTypeAliases, a.callables) }.map(_.toSet.flatten) + val required: ErrorOr[Set[UnlinkedConsumedValueHook]] = graphElements.toList + .traverse(element => element.graphElementConsumedValueHooks(a.availableTypeAliases, a.callables)) + .map(_.toSet.flatten) + val generated: ErrorOr[Set[GeneratedValueHandle]] = graphElements.toList + .traverse(element => element.generatedValueHandles(a.availableTypeAliases, a.callables)) + .map(_.toSet.flatten) def makeLink(hook: UnlinkedConsumedValueHook): (String, OutputPort) = { val name = a.linkableValues(hook).linkableName @@ -54,37 +68,49 @@ object IfElementToGraphNode { (required, generated) mapN { (r, g) => val requiredOuterValues = r collect { - case hook@UnlinkedIdentifierHook(id) if !g.exists(_.linkableName == id) => makeLink(hook) - case hook@UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) if !g.exists(_.linkableName == first) && !g.exists(_.linkableName == s"$first.$second") => makeLink(hook) + case hook @ UnlinkedIdentifierHook(id) if !g.exists(_.linkableName == id) => makeLink(hook) + case hook @ UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) + if !g.exists(_.linkableName == first) && !g.exists(_.linkableName == s"$first.$second") => + makeLink(hook) } val requiredCompletionPorts = r collect { case UnlinkedAfterCallHook(upstreamCallName) if !g.exists { - case GeneratedCallFinishedHandle(`upstreamCallName`) => true - case _ => false - } => upstreamCallName -> a.upstreamCalls.values.find(_.localName == upstreamCallName).get + case GeneratedCallFinishedHandle(`upstreamCallName`) => true + case _ => false + } => + upstreamCallName -> a.upstreamCalls.values.find(_.localName == 
upstreamCallName).get } RequiredOuterPorts(requiredOuterValues.toMap, requiredCompletionPorts.toMap) } } - (conditionExpressionNodeValidation, conditionVariableTypeValidation, foundOuterGeneratorsValidation) flatMapN { (expressionNode, _, foundOuterGenerators) => - val ogins: Set[GraphNode] = (foundOuterGenerators.valueGeneratorPorts.toList map { case (name: String, port: OutputPort) => - OuterGraphInputNode(WomIdentifier(name), port, preserveScatterIndex = true) - }).toSet + (conditionExpressionNodeValidation, conditionVariableTypeValidation, foundOuterGeneratorsValidation) flatMapN { + (expressionNode, _, foundOuterGenerators) => + val ogins: Set[GraphNode] = + (foundOuterGenerators.valueGeneratorPorts.toList map { case (name: String, port: OutputPort) => + OuterGraphInputNode(WomIdentifier(name), port, preserveScatterIndex = true) + }).toSet - val graphLikeConvertInputs = GraphLikeConvertInputs(graphElements.toSet, ogins, foundOuterGenerators.completionPorts, a.availableTypeAliases, a.workflowName, - insideAScatter = a.insideAnotherScatter, - convertNestedScatterToSubworkflow = a.convertNestedScatterToSubworkflow, - allowNestedInputs = a.allowNestedInputs, - a.callables) - val innerGraph: ErrorOr[Graph] = WorkflowDefinitionElementToWomWorkflowDefinition.convertGraphElements(graphLikeConvertInputs) + val graphLikeConvertInputs = GraphLikeConvertInputs( + graphElements.toSet, + ogins, + foundOuterGenerators.completionPorts, + a.availableTypeAliases, + a.workflowName, + insideAScatter = a.insideAnotherScatter, + convertNestedScatterToSubworkflow = a.convertNestedScatterToSubworkflow, + allowNestedInputs = a.allowNestedInputs, + a.callables + ) + val innerGraph: ErrorOr[Graph] = + WorkflowDefinitionElementToWomWorkflowDefinition.convertGraphElements(graphLikeConvertInputs) - innerGraph map { ig => - val withOutputs = WomGraphMakerTools.addDefaultOutputs(ig) - val generatedAndNew = ConditionalNode.wireInConditional(withOutputs, expressionNode) - generatedAndNew.nodes - } + innerGraph map { ig => + val withOutputs = WomGraphMakerTools.addDefaultOutputs(ig) + val generatedAndNew = ConditionalNode.wireInConditional(withOutputs, expressionNode) + generatedAndNew.nodes + } } } } @@ -96,6 +122,7 @@ final case class ConditionalNodeMakerInputs(node: IfElement, availableTypeAliases: Map[String, WomType], workflowName: String, insideAnotherScatter: Boolean, - convertNestedScatterToSubworkflow : Boolean, + convertNestedScatterToSubworkflow: Boolean, allowNestedInputs: Boolean, - callables: Map[String, Callable]) + callables: Map[String, Callable] +) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/InputDeclarationElementToGraphNode.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/InputDeclarationElementToGraphNode.scala index 605131027ba..d70bc22126f 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/InputDeclarationElementToGraphNode.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/InputDeclarationElementToGraphNode.scala @@ -18,15 +18,17 @@ import wdl.transforms.base.wdlom2wdl.WdlWriter.ops._ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl._ object InputDeclarationElementToGraphNode { - def convert(a: GraphInputNodeMakerInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: 
ValueEvaluator[ExpressionElement]): ErrorOr[Set[GraphNode]] = a.node match { + def convert(a: GraphInputNodeMakerInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[GraphNode]] = a.node match { case InputDeclarationElement(typeElement, name, None) => val nameInInputSet = s"${a.workflowName}.$name" typeElement.determineWomType(a.availableTypeAliases) map { - case opt: WomOptionalType => Set(OptionalGraphInputNode(WomIdentifier(name), opt.flatOptionalType, nameInInputSet)) + case opt: WomOptionalType => + Set(OptionalGraphInputNode(WomIdentifier(name), opt.flatOptionalType, nameInInputSet)) case womType => Set(RequiredGraphInputNode(WomIdentifier(name), womType, nameInInputSet)) } case InputDeclarationElement(typeElement, name, Some(expr)) => @@ -45,7 +47,8 @@ object InputDeclarationElementToGraphNode { } final case class GraphInputNodeMakerInputs(node: InputDeclarationElement, - linkableValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle], - linkablePorts: Map[String, OutputPort], - availableTypeAliases: Map[String, WomType], - workflowName: String) + linkableValues: Map[UnlinkedConsumedValueHook, GeneratedValueHandle], + linkablePorts: Map[String, OutputPort], + availableTypeAliases: Map[String, WomType], + workflowName: String +) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/ScatterElementToGraphNode.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/ScatterElementToGraphNode.scala index 621461559e5..a1a3d5c4d76 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/ScatterElementToGraphNode.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/ScatterElementToGraphNode.scala @@ -17,7 +17,12 @@ import wdl.model.draft3.elements._ import wdl.model.draft3.graph._ import wdl.model.draft3.graph.expression.{FileEvaluator, TypeEvaluator, ValueEvaluator} import wdl.shared.transforms.wdlom2wom.WomGraphMakerTools -import wom.callable.Callable.{InputDefinition, OverridableInputDefinitionWithDefault, OptionalInputDefinition, RequiredInputDefinition} +import wom.callable.Callable.{ + InputDefinition, + OptionalInputDefinition, + OverridableInputDefinitionWithDefault, + RequiredInputDefinition +} import wom.callable.{Callable, WorkflowDefinition} import wom.graph.CallNode.{CallNodeBuilder, InputDefinitionFold, InputDefinitionPointer} import wom.graph.GraphNode.GraphNodeSetter @@ -27,11 +32,12 @@ import wom.graph.expression.{AnonymousExpressionNode, PlainAnonymousExpressionNo import wom.types.{WomAnyType, WomArrayType, WomType} object ScatterElementToGraphNode { - def convert(a: ScatterNodeMakerInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[GraphNode]] = + def convert(a: ScatterNodeMakerInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[GraphNode]] = if (a.convertNestedScatterToSubworkflow) { // Create a sub-workflow from the inner scatter. 
if (a.insideAnotherScatter) { @@ -44,18 +50,25 @@ object ScatterElementToGraphNode { convertOuterScatter(a) } - def convertOuterScatter(a: ScatterNodeMakerInputs) - (implicit expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[GraphNode]] = { + def convertOuterScatter(a: ScatterNodeMakerInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[GraphNode]] = { val scatterExpression = a.node.scatterExpression val scatterVariableName = a.node.scatterVariableName val graphElements = a.node.graphElements - val scatterWomExpressionV: ErrorOr[WdlomWomExpression] = WdlomWomExpression.make(scatterExpression, a.linkableValues) - val scatterExpressionNodeValidation: ErrorOr[AnonymousExpressionNode] = scatterWomExpressionV flatMap { scatterWomExpression => - AnonymousExpressionNode.fromInputMapping(WomIdentifier(scatterVariableName), scatterWomExpression, a.linkablePorts, PlainAnonymousExpressionNode.apply) + val scatterWomExpressionV: ErrorOr[WdlomWomExpression] = + WdlomWomExpression.make(scatterExpression, a.linkableValues) + val scatterExpressionNodeValidation: ErrorOr[AnonymousExpressionNode] = scatterWomExpressionV flatMap { + scatterWomExpression => + AnonymousExpressionNode.fromInputMapping(WomIdentifier(scatterVariableName), + scatterWomExpression, + a.linkablePorts, + PlainAnonymousExpressionNode.apply + ) } val scatterVariableTypeValidation: ErrorOr[WomType] = scatterExpression.evaluateType(a.linkableValues) flatMap { @@ -64,11 +77,17 @@ object ScatterElementToGraphNode { case other => s"Invalid type for scatter variable '$scatterVariableName': ${other.stableName}".invalidNel } - final case class RequiredOuterPorts(valueGeneratorPorts: Map[String, OutputPort], completionPorts: Map[String, CallNode]) + final case class RequiredOuterPorts(valueGeneratorPorts: Map[String, OutputPort], + completionPorts: Map[String, CallNode] + ) val foundOuterGeneratorsValidation: ErrorOr[RequiredOuterPorts] = { - val required: ErrorOr[Set[UnlinkedConsumedValueHook]] = graphElements.toList.traverse { element => element.graphElementConsumedValueHooks(a.availableTypeAliases, a.callables) }.map(_.toSet.flatten) - val generated: ErrorOr[Set[GeneratedValueHandle]] = graphElements.toList.traverse { element => element.generatedValueHandles(a.availableTypeAliases, a.callables) }.map(_.toSet.flatten) + val required: ErrorOr[Set[UnlinkedConsumedValueHook]] = graphElements.toList + .traverse(element => element.graphElementConsumedValueHooks(a.availableTypeAliases, a.callables)) + .map(_.toSet.flatten) + val generated: ErrorOr[Set[GeneratedValueHandle]] = graphElements.toList + .traverse(element => element.generatedValueHandles(a.availableTypeAliases, a.callables)) + .map(_.toSet.flatten) def makeLink(hook: UnlinkedConsumedValueHook): (String, OutputPort) = { val name = a.linkableValues(hook).linkableName @@ -78,63 +97,88 @@ object ScatterElementToGraphNode { (required, generated) mapN { (r, g) => val requiredOuterValues = r collect { - case hook@UnlinkedIdentifierHook(id) if id != scatterVariableName && !g.exists(_.linkableName == id) => + case hook @ UnlinkedIdentifierHook(id) if id != scatterVariableName && !g.exists(_.linkableName == id) 
=> makeLink(hook) - case hook@UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) if first != scatterVariableName && !g.exists(_.linkableName == first) && !g.exists(_.linkableName == s"$first.$second") => + case hook @ UnlinkedCallOutputOrIdentifierAndMemberAccessHook(first, second) + if first != scatterVariableName && !g + .exists(_.linkableName == first) && !g.exists(_.linkableName == s"$first.$second") => makeLink(hook) } val requiredCompletionPorts = r collect { case UnlinkedAfterCallHook(upstreamCallName) if !g.exists { - case GeneratedCallFinishedHandle(`upstreamCallName`) => true - case _ => false - } => upstreamCallName -> a.upstreamCalls.values.find(_.localName == upstreamCallName).get + case GeneratedCallFinishedHandle(`upstreamCallName`) => true + case _ => false + } => + upstreamCallName -> a.upstreamCalls.values.find(_.localName == upstreamCallName).get } RequiredOuterPorts(requiredOuterValues.toMap, requiredCompletionPorts.toMap) } } - (scatterExpressionNodeValidation, scatterVariableTypeValidation, foundOuterGeneratorsValidation) flatMapN { (expressionNode, scatterVariableType, foundOuterGenerators) => - val womInnerGraphScatterVariableInput = ScatterVariableNode(WomIdentifier(scatterVariableName), expressionNode, scatterVariableType) - val ogins: Set[GraphNode] = (foundOuterGenerators.valueGeneratorPorts.toList map { case (name: String, port: OutputPort) => - OuterGraphInputNode(WomIdentifier(name), port, preserveScatterIndex = false) - }).toSet - - val graphLikeConvertInputs = GraphLikeConvertInputs(graphElements.toSet, ogins ++ Set(womInnerGraphScatterVariableInput), foundOuterGenerators.completionPorts, a.availableTypeAliases, a.workflowName, - insideAScatter = true, - convertNestedScatterToSubworkflow = a.convertNestedScatterToSubworkflow, - allowNestedInputs = a.allowNestedInputs, - a.callables) - val innerGraph: ErrorOr[Graph] = WorkflowDefinitionElementToWomWorkflowDefinition.convertGraphElements(graphLikeConvertInputs) - - innerGraph map { ig => - val withOutputs = WomGraphMakerTools.addDefaultOutputs(ig) - val generatedAndNew = ScatterNode.scatterOverGraph(withOutputs, womInnerGraphScatterVariableInput) - generatedAndNew.nodes - } + (scatterExpressionNodeValidation, scatterVariableTypeValidation, foundOuterGeneratorsValidation) flatMapN { + (expressionNode, scatterVariableType, foundOuterGenerators) => + val womInnerGraphScatterVariableInput = + ScatterVariableNode(WomIdentifier(scatterVariableName), expressionNode, scatterVariableType) + val ogins: Set[GraphNode] = + (foundOuterGenerators.valueGeneratorPorts.toList map { case (name: String, port: OutputPort) => + OuterGraphInputNode(WomIdentifier(name), port, preserveScatterIndex = false) + }).toSet + + val graphLikeConvertInputs = GraphLikeConvertInputs( + graphElements.toSet, + ogins ++ Set(womInnerGraphScatterVariableInput), + foundOuterGenerators.completionPorts, + a.availableTypeAliases, + a.workflowName, + insideAScatter = true, + convertNestedScatterToSubworkflow = a.convertNestedScatterToSubworkflow, + allowNestedInputs = a.allowNestedInputs, + a.callables + ) + val innerGraph: ErrorOr[Graph] = + WorkflowDefinitionElementToWomWorkflowDefinition.convertGraphElements(graphLikeConvertInputs) + + innerGraph map { ig => + val withOutputs = WomGraphMakerTools.addDefaultOutputs(ig) + val generatedAndNew = ScatterNode.scatterOverGraph(withOutputs, womInnerGraphScatterVariableInput) + generatedAndNew.nodes + } } } - def convertInnerScatter(a: ScatterNodeMakerInputs) - (implicit expressionValueConsumer: 
ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[GraphNode]] = { + def convertInnerScatter(a: ScatterNodeMakerInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[GraphNode]] = { - val requiredOuterValuesValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = a.node.graphElementConsumedValueHooks(a.availableTypeAliases, a.callables) + val requiredOuterValuesValidation: ErrorOr[Set[UnlinkedConsumedValueHook]] = + a.node.graphElementConsumedValueHooks(a.availableTypeAliases, a.callables) - val subWorkflowInputsValidation: ErrorOr[Set[GraphNode]] = requiredOuterValuesValidation map { requiredOuterValues => - val requiredLinkableNames: Set[(String, WomType)] = requiredOuterValues map { hook => (a.linkableValues(hook).linkableName, a.linkableValues(hook).womType) } - requiredLinkableNames map { case (name, womType) => RequiredGraphInputNode(WomIdentifier(name), womType, name) } + val subWorkflowInputsValidation: ErrorOr[Set[GraphNode]] = requiredOuterValuesValidation map { + requiredOuterValues => + val requiredLinkableNames: Set[(String, WomType)] = requiredOuterValues map { hook => + (a.linkableValues(hook).linkableName, a.linkableValues(hook).womType) + } + requiredLinkableNames map { case (name, womType) => RequiredGraphInputNode(WomIdentifier(name), womType, name) } } val subWorkflowGraphValidation: ErrorOr[Graph] = subWorkflowInputsValidation flatMap { subWorkflowInputs => - val graphLikeConvertInputs = GraphLikeConvertInputs(Set(a.node), subWorkflowInputs, Map.empty, a.availableTypeAliases, a.workflowName, - insideAScatter = false, - convertNestedScatterToSubworkflow = a.convertNestedScatterToSubworkflow, - allowNestedInputs = a.allowNestedInputs, - a.callables) - val subWorkflowGraph = WorkflowDefinitionElementToWomWorkflowDefinition.convertGraphElements(graphLikeConvertInputs) + val graphLikeConvertInputs = GraphLikeConvertInputs( + Set(a.node), + subWorkflowInputs, + Map.empty, + a.availableTypeAliases, + a.workflowName, + insideAScatter = false, + convertNestedScatterToSubworkflow = a.convertNestedScatterToSubworkflow, + allowNestedInputs = a.allowNestedInputs, + a.callables + ) + val subWorkflowGraph = + WorkflowDefinitionElementToWomWorkflowDefinition.convertGraphElements(graphLikeConvertInputs) subWorkflowGraph map { WomGraphMakerTools.addDefaultOutputs(_) } } @@ -152,26 +196,46 @@ object ScatterElementToGraphNode { val graphNodeSetter = new GraphNodeSetter[CallNode] val unsatisfiedInputs = subWorkflowDefinition.inputs filter { i => !a.linkablePorts.contains(i.name) } - def inputNodeIdentifier(input: InputDefinition) = WomIdentifier(localName = input.name, fullyQualifiedName = a.workflowName + "." + input.name) + def inputNodeIdentifier(input: InputDefinition) = + WomIdentifier(localName = input.name, fullyQualifiedName = a.workflowName + "." + input.name) val newInputNodes: Map[String, ExternalGraphInputNode] = (unsatisfiedInputs collect { - case i: RequiredInputDefinition => i.name -> RequiredGraphInputNode(inputNodeIdentifier(i), i.womType, a.workflowName + "." 
+ i.name, Callable.InputDefinition.IdentityValueMapper) - case i: OptionalInputDefinition => i.name -> OptionalGraphInputNode(inputNodeIdentifier(i), i.womType, a.workflowName + "." + i.name, Callable.InputDefinition.IdentityValueMapper) - case i: OverridableInputDefinitionWithDefault => i.name -> OptionalGraphInputNodeWithDefault(inputNodeIdentifier(i), i.womType, i.default, a.workflowName + "." + i.name, Callable.InputDefinition.IdentityValueMapper) + case i: RequiredInputDefinition => + i.name -> RequiredGraphInputNode(inputNodeIdentifier(i), + i.womType, + a.workflowName + "." + i.name, + Callable.InputDefinition.IdentityValueMapper + ) + case i: OptionalInputDefinition => + i.name -> OptionalGraphInputNode(inputNodeIdentifier(i), + i.womType, + a.workflowName + "." + i.name, + Callable.InputDefinition.IdentityValueMapper + ) + case i: OverridableInputDefinitionWithDefault => + i.name -> OptionalGraphInputNodeWithDefault(inputNodeIdentifier(i), + i.womType, + i.default, + a.workflowName + "." + i.name, + Callable.InputDefinition.IdentityValueMapper + ) }).toMap - val mappingAndPorts: List[((InputDefinition, InputDefinitionPointer), InputPort)] = subWorkflowDefinition.inputs map { i => - val port: OutputPort = a.linkablePorts.getOrElse(i.name, newInputNodes(i.name).singleOutputPort) - val pointer = Coproduct[InputDefinitionPointer](port) - (i -> pointer, ConnectedInputPort(i.name, i.womType, port, graphNodeSetter.get)) - } + val mappingAndPorts: List[((InputDefinition, InputDefinitionPointer), InputPort)] = + subWorkflowDefinition.inputs map { i => + val port: OutputPort = a.linkablePorts.getOrElse(i.name, newInputNodes(i.name).singleOutputPort) + val pointer = Coproduct[InputDefinitionPointer](port) + (i -> pointer, ConnectedInputPort(i.name, i.womType, port, graphNodeSetter.get)) + } val mapping = mappingAndPorts.map(_._1) val inputPorts = mappingAndPorts.map(_._2).toSet - val result = callNodeBuilder.build(WomIdentifier(a.node.scatterName), - subWorkflowDefinition, - InputDefinitionFold(mappings = mapping, callInputPorts = inputPorts), - Set.empty, - a.node.sourceLocation, - (_, localName) => WomIdentifier(localName)) + val result = callNodeBuilder.build( + WomIdentifier(a.node.scatterName), + subWorkflowDefinition, + InputDefinitionFold(mappings = mapping, callInputPorts = inputPorts), + Set.empty, + a.node.sourceLocation, + (_, localName) => WomIdentifier(localName) + ) graphNodeSetter._graphNode = result.node result.copy(newInputs = result.newInputs ++ newInputNodes.values) } @@ -191,4 +255,5 @@ final case class ScatterNodeMakerInputs(node: ScatterElement, insideAnotherScatter: Boolean, convertNestedScatterToSubworkflow: Boolean, allowNestedInputs: Boolean, - callables: Map[String, Callable]) + callables: Map[String, Callable] +) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/WorkflowGraphElementToGraphNode.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/WorkflowGraphElementToGraphNode.scala index d72fd299dba..56ff7709605 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/WorkflowGraphElementToGraphNode.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/WorkflowGraphElementToGraphNode.scala @@ -20,13 +20,15 @@ import wdl.transforms.base.wdlom2wdl.WdlWriter.ops._ import wdl.transforms.base.wdlom2wdl.WdlWriterImpl._ object WorkflowGraphElementToGraphNode { - def convert(a: GraphNodeMakerInputs) - (implicit expressionValueConsumer: 
ExpressionValueConsumer[ExpressionElement], - fileEvaluator: FileEvaluator[ExpressionElement], - typeEvaluator: TypeEvaluator[ExpressionElement], - valueEvaluator: ValueEvaluator[ExpressionElement]): ErrorOr[Set[GraphNode]] = a.node match { + def convert(a: GraphNodeMakerInputs)(implicit + expressionValueConsumer: ExpressionValueConsumer[ExpressionElement], + fileEvaluator: FileEvaluator[ExpressionElement], + typeEvaluator: TypeEvaluator[ExpressionElement], + valueEvaluator: ValueEvaluator[ExpressionElement] + ): ErrorOr[Set[GraphNode]] = a.node match { case ie: InputDeclarationElement => - val inputNodeMakerInputs = GraphInputNodeMakerInputs(ie, a.linkableValues, a.linkablePorts, a.availableTypeAliases, a.workflowName) + val inputNodeMakerInputs = + GraphInputNodeMakerInputs(ie, a.linkableValues, a.linkablePorts, a.availableTypeAliases, a.workflowName) InputDeclarationElementToGraphNode.convert(inputNodeMakerInputs) case DeclarationElement(typeElement, name, Some(expr)) => @@ -38,11 +40,19 @@ object WorkflowGraphElementToGraphNode { val graphNode: ErrorOr[Set[GraphNode]] = a.node match { case _: InputDeclarationElement => - Set[GraphNode](OptionalGraphInputNodeWithDefault.apply(WomIdentifier(name), womType, womExpr, name)).validNel + Set[GraphNode]( + OptionalGraphInputNodeWithDefault.apply(WomIdentifier(name), womType, womExpr, name) + ).validNel case _: IntermediateValueDeclarationElement => - ExposedExpressionNode.fromInputMapping(WomIdentifier(name), womExpr, womType, a.linkablePorts) map { Set(_) } + ExposedExpressionNode.fromInputMapping(WomIdentifier(name), womExpr, womType, a.linkablePorts) map { + Set(_) + } case _: OutputDeclarationElement => - ExpressionBasedGraphOutputNode.fromInputMapping(WomIdentifier(name, s"${a.workflowName}.$name"), womExpr, womType, a.linkablePorts) map {Set(_)} + ExpressionBasedGraphOutputNode.fromInputMapping(WomIdentifier(name, s"${a.workflowName}.$name"), + womExpr, + womType, + a.linkablePorts + ) map { Set(_) } } (correctType, graphNode) mapN { (_, gn) => gn } @@ -50,15 +60,46 @@ object WorkflowGraphElementToGraphNode { result.contextualizeErrors(s"process declaration '${typeElement.toWdlV1} $name = ${expr.toWdlV1}'") case se: ScatterElement => - val scatterMakerInputs = ScatterNodeMakerInputs(se, a.upstreamCalls, a.linkableValues, a.linkablePorts, a.availableTypeAliases, a.workflowName, a.insideAScatter, a.convertNestedScatterToSubworkflow, a.allowNestedInputs, a.callables) + val scatterMakerInputs = ScatterNodeMakerInputs( + se, + a.upstreamCalls, + a.linkableValues, + a.linkablePorts, + a.availableTypeAliases, + a.workflowName, + a.insideAScatter, + a.convertNestedScatterToSubworkflow, + a.allowNestedInputs, + a.callables + ) ScatterElementToGraphNode.convert(scatterMakerInputs) case ie: IfElement => - val ifMakerInputs = ConditionalNodeMakerInputs(ie, a.upstreamCalls, a.linkableValues, a.linkablePorts, a.availableTypeAliases, a.workflowName, a.insideAScatter, a.convertNestedScatterToSubworkflow, a.allowNestedInputs, a.callables) + val ifMakerInputs = ConditionalNodeMakerInputs( + ie, + a.upstreamCalls, + a.linkableValues, + a.linkablePorts, + a.availableTypeAliases, + a.workflowName, + a.insideAScatter, + a.convertNestedScatterToSubworkflow, + a.allowNestedInputs, + a.callables + ) IfElementToGraphNode.convert(ifMakerInputs) case ce: CallElement => - val callNodeMakerInputs = CallNodeMakerInputs(ce, a.upstreamCalls, a.linkableValues, a.linkablePorts, a.availableTypeAliases, a.workflowName, a.insideAScatter, a.allowNestedInputs, a.callables) + 
val callNodeMakerInputs = CallNodeMakerInputs(ce, + a.upstreamCalls, + a.linkableValues, + a.linkablePorts, + a.availableTypeAliases, + a.workflowName, + a.insideAScatter, + a.allowNestedInputs, + a.callables + ) CallElementToGraphNode.convert(callNodeMakerInputs) } @@ -80,6 +121,7 @@ final case class GraphNodeMakerInputs(node: WorkflowGraphElement, availableTypeAliases: Map[String, WomType], workflowName: String, insideAScatter: Boolean, - convertNestedScatterToSubworkflow : Boolean, + convertNestedScatterToSubworkflow: Boolean, allowNestedInputs: Boolean, - callables: Map[String, Callable]) + callables: Map[String, Callable] +) diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/GraphIdentifierLookupRenamer.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/GraphIdentifierLookupRenamer.scala index 7324f320ec6..a1303c8aac4 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/GraphIdentifierLookupRenamer.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/GraphIdentifierLookupRenamer.scala @@ -6,7 +6,8 @@ import wdl.model.draft3.elements.{ExpressionElement, WorkflowGraphElement} @typeclass trait GraphIdentifierLookupRenamer[A] { - def renameIdentifiers(a: A, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): A + def renameIdentifiers(a: A, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): A } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/package.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/package.scala index 731a0996c52..9f2673763c1 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/package.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/renaming/package.scala @@ -7,39 +7,36 @@ import wdl.transforms.base.wdlom2wom.expression.renaming.IdentifierLookupRenamer package object renaming { - implicit val workflowDefinitionIdentifierRenamer: GraphIdentifierLookupRenamer[WorkflowDefinitionElement] = new GraphIdentifierLookupRenamer[WorkflowDefinitionElement] { - override def renameIdentifiers(a: WorkflowDefinitionElement, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): WorkflowDefinitionElement = { - val newInputsSection = a.inputsSection map { inputSection => - inputSection.copy(inputDeclarations = inputSection.inputDeclarations.map { - id => + implicit val workflowDefinitionIdentifierRenamer: GraphIdentifierLookupRenamer[WorkflowDefinitionElement] = + new GraphIdentifierLookupRenamer[WorkflowDefinitionElement] { + override def renameIdentifiers(a: WorkflowDefinitionElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): WorkflowDefinitionElement = { + val newInputsSection = a.inputsSection map { inputSection => + 
inputSection.copy(inputDeclarations = inputSection.inputDeclarations.map { id => val renamingMapWithoutThisValue = renamingMap.filterNot(_._1 == id.name) id.copy(expression = id.expression.map(_.renameIdentifiers(renamingMapWithoutThisValue))) - }) - } + }) + } - val newGraphElements = a.graphElements map { ge => ge.renameIdentifiers(renamingMap) } - - val newOutputsSection = a.outputsSection map { outputSection => - outputSection.copy(outputs = outputSection.outputs.map { - od => od.copy(expression = od.expression.renameIdentifiers(renamingMap)) - }) - } + val newGraphElements = a.graphElements map { ge => ge.renameIdentifiers(renamingMap) } + val newOutputsSection = a.outputsSection map { outputSection => + outputSection.copy(outputs = outputSection.outputs.map { od => + od.copy(expression = od.expression.renameIdentifiers(renamingMap)) + }) + } - a.copy(inputsSection = newInputsSection, - graphElements = newGraphElements, - outputsSection = newOutputsSection) + a.copy(inputsSection = newInputsSection, graphElements = newGraphElements, outputsSection = newOutputsSection) + } } - } implicit val graphElementIdentifierRenamer = new GraphIdentifierLookupRenamer[WorkflowGraphElement] { - override def renameIdentifiers(a: WorkflowGraphElement, - renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): WorkflowGraphElement = a match { + override def renameIdentifiers(a: WorkflowGraphElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): WorkflowGraphElement = a match { case e: ScatterElement => e.renameIdentifiers(renamingMap) case e: IfElement => e.renameIdentifiers(renamingMap) case e: CallElement => e.renameIdentifiers(renamingMap) @@ -49,63 +46,74 @@ package object renaming { } } - implicit val scatterElementIdentifierRenamer: GraphIdentifierLookupRenamer[ScatterElement] = new GraphIdentifierLookupRenamer[ScatterElement] { - override def renameIdentifiers(a: ScatterElement, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): ScatterElement = { - a.copy(scatterExpression = a.scatterExpression.renameIdentifiers(renamingMap), - graphElements = a.graphElements.map(graphIdentifierLookupRenamer.renameIdentifiers(_, renamingMap)) - ) + implicit val scatterElementIdentifierRenamer: GraphIdentifierLookupRenamer[ScatterElement] = + new GraphIdentifierLookupRenamer[ScatterElement] { + override def renameIdentifiers(a: ScatterElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): ScatterElement = + a.copy( + scatterExpression = a.scatterExpression.renameIdentifiers(renamingMap), + graphElements = a.graphElements.map(graphIdentifierLookupRenamer.renameIdentifiers(_, renamingMap)) + ) } - } - implicit val ifElementIdentifierRenamer: GraphIdentifierLookupRenamer[IfElement] = new GraphIdentifierLookupRenamer[IfElement] { - override def renameIdentifiers(a: IfElement, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - 
graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): IfElement = { - a.copy( - conditionExpression = a.conditionExpression.renameIdentifiers(renamingMap), - graphElements = a.graphElements.map(graphIdentifierLookupRenamer.renameIdentifiers(_, renamingMap)) - ) + implicit val ifElementIdentifierRenamer: GraphIdentifierLookupRenamer[IfElement] = + new GraphIdentifierLookupRenamer[IfElement] { + override def renameIdentifiers(a: IfElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): IfElement = + a.copy( + conditionExpression = a.conditionExpression.renameIdentifiers(renamingMap), + graphElements = a.graphElements.map(graphIdentifierLookupRenamer.renameIdentifiers(_, renamingMap)) + ) } - } - implicit val callElementIdentifierRenamer: GraphIdentifierLookupRenamer[CallElement] = new GraphIdentifierLookupRenamer[CallElement] { - override def renameIdentifiers(a: CallElement, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): CallElement = { - a.copy(body = a.body map { bodyElement => - bodyElement.copy(inputs = bodyElement.inputs.map { callInput => - callInput.copy(value = callInput.value.renameIdentifiers(renamingMap)) + implicit val callElementIdentifierRenamer: GraphIdentifierLookupRenamer[CallElement] = + new GraphIdentifierLookupRenamer[CallElement] { + override def renameIdentifiers(a: CallElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): CallElement = + a.copy(body = a.body map { bodyElement => + bodyElement.copy(inputs = bodyElement.inputs.map { callInput => + callInput.copy(value = callInput.value.renameIdentifiers(renamingMap)) + }) }) - }) } - } - implicit val inputDeclarationIdentifierRenamer: GraphIdentifierLookupRenamer[InputDeclarationElement] = new GraphIdentifierLookupRenamer[InputDeclarationElement] { - override def renameIdentifiers(a: InputDeclarationElement, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): InputDeclarationElement = { - val renamingMapWithoutThisValue = renamingMap.filterNot(_._2 == a.name) - a.copy(expression = a.expression.map(_.renameIdentifiers(renamingMapWithoutThisValue))) + implicit val inputDeclarationIdentifierRenamer: GraphIdentifierLookupRenamer[InputDeclarationElement] = + new GraphIdentifierLookupRenamer[InputDeclarationElement] { + override def renameIdentifiers(a: InputDeclarationElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): InputDeclarationElement = { + val renamingMapWithoutThisValue = renamingMap.filterNot(_._2 == a.name) + a.copy(expression = a.expression.map(_.renameIdentifiers(renamingMapWithoutThisValue))) + } } - } - implicit val intermediateDeclarationIdentifierRenamer: GraphIdentifierLookupRenamer[IntermediateValueDeclarationElement] = new GraphIdentifierLookupRenamer[IntermediateValueDeclarationElement] { - override def 
renameIdentifiers(a: IntermediateValueDeclarationElement, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): IntermediateValueDeclarationElement = { - val renamingMapWithoutThisValue = renamingMap.filterNot(_._2 == a.name) - a.copy(expression = a.expression.renameIdentifiers(renamingMapWithoutThisValue)) + implicit val intermediateDeclarationIdentifierRenamer + : GraphIdentifierLookupRenamer[IntermediateValueDeclarationElement] = + new GraphIdentifierLookupRenamer[IntermediateValueDeclarationElement] { + override def renameIdentifiers(a: IntermediateValueDeclarationElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): IntermediateValueDeclarationElement = { + val renamingMapWithoutThisValue = renamingMap.filterNot(_._2 == a.name) + a.copy(expression = a.expression.renameIdentifiers(renamingMapWithoutThisValue)) + } } - } - implicit val outputDeclarationIdentifierRenamer: GraphIdentifierLookupRenamer[OutputDeclarationElement] = new GraphIdentifierLookupRenamer[OutputDeclarationElement] { - override def renameIdentifiers(a: OutputDeclarationElement, renamingMap: Map[String, String]) - (implicit expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], - graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement]): OutputDeclarationElement = { - val renamingMapWithoutThisValue = renamingMap.filterNot(_._2 == a.name) - a.copy(expression = a.expression.renameIdentifiers(renamingMapWithoutThisValue)) + implicit val outputDeclarationIdentifierRenamer: GraphIdentifierLookupRenamer[OutputDeclarationElement] = + new GraphIdentifierLookupRenamer[OutputDeclarationElement] { + override def renameIdentifiers(a: OutputDeclarationElement, renamingMap: Map[String, String])(implicit + expressionElementRenamer: IdentifierLookupRenamer[ExpressionElement], + graphIdentifierLookupRenamer: GraphIdentifierLookupRenamer[WorkflowGraphElement] + ): OutputDeclarationElement = { + val renamingMapWithoutThisValue = renamingMap.filterNot(_._2 == a.name) + a.copy(expression = a.expression.renameIdentifiers(renamingMapWithoutThisValue)) + } } - } } diff --git a/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/evaluation/values/EngineFunctions.scala b/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/evaluation/values/EngineFunctions.scala index 301f394f45c..4d2b8fa346b 100644 --- a/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/evaluation/values/EngineFunctions.scala +++ b/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/evaluation/values/EngineFunctions.scala @@ -9,15 +9,29 @@ object EngineFunctions { def transpose(a: WomValue): Try[WomArray] = { case class ExpandedTwoDimensionalArray(innerType: WomType, value: Seq[Seq[WomValue]]) def validateAndExpand(value: WomValue): Try[ExpandedTwoDimensionalArray] = value match { - case WomArray(WomArrayType(WomArrayType(innerType)), array: Seq[WomValue]) => expandWdlArray(array) map { ExpandedTwoDimensionalArray(innerType, _) } - case WomArray(WomArrayType(nonArrayType), _) => Failure(new IllegalArgumentException(s"Array must be two-dimensional to be transposed but given array of $nonArrayType")) - case otherValue => Failure(new IllegalArgumentException(s"Function 'transpose' must be given a two-dimensional 
array but instead got ${otherValue.typeName}")) + case WomArray(WomArrayType(WomArrayType(innerType)), array: Seq[WomValue]) => + expandWdlArray(array) map { ExpandedTwoDimensionalArray(innerType, _) } + case WomArray(WomArrayType(nonArrayType), _) => + Failure( + new IllegalArgumentException( + s"Array must be two-dimensional to be transposed but given array of $nonArrayType" + ) + ) + case otherValue => + Failure( + new IllegalArgumentException( + s"Function 'transpose' must be given a two-dimensional array but instead got ${otherValue.typeName}" + ) + ) } def expandWdlArray(outerArray: Seq[WomValue]): Try[Seq[Seq[WomValue]]] = Try { outerArray map { case array: WomArray => array.value - case otherValue => throw new IllegalArgumentException(s"Function 'transpose' must be given a two-dimensional array but instead got WdlArray[${otherValue.typeName}]") + case otherValue => + throw new IllegalArgumentException( + s"Function 'transpose' must be given a two-dimensional array but instead got WdlArray[${otherValue.typeName}]" + ) } } diff --git a/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WdlSharedInputParsing.scala b/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WdlSharedInputParsing.scala index 396198b3c85..5bd7c31e44d 100644 --- a/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WdlSharedInputParsing.scala +++ b/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WdlSharedInputParsing.scala @@ -16,18 +16,23 @@ object WdlSharedInputParsing { import spray.json._ Try(inputString.parseJson).toErrorOr.toEither flatMap { - case JsObject(fields) => fields.map({ - case (key, jsValue) => key -> { womType: WomType => womType.coerceRawValue(jsValue).toErrorOr } - }).validNelCheck - case other => s"WDL input file must be a valid Json object. Found a ${other.getClass.getSimpleName}".invalidNelCheck[ParsedInputMap] + case JsObject(fields) => + fields.map { case (key, jsValue) => + key -> { womType: WomType => womType.coerceRawValue(jsValue).toErrorOr } + }.validNelCheck + case other => + s"WDL input file must be a valid Json object. 
Found a ${other.getClass.getSimpleName}" + .invalidNelCheck[ParsedInputMap] } } - def buildWomExecutable(bundle: WomBundle, inputs: Option[WorkflowJson], ioFunctions: IoFunctionSet, strictValidation: Boolean): Checked[Executable] = { - + def buildWomExecutable(bundle: WomBundle, + inputs: Option[WorkflowJson], + ioFunctions: IoFunctionSet, + strictValidation: Boolean + ): Checked[Executable] = for { ec <- bundle.toExecutableCallable executable <- Executable.withInputs(ec, inputCoercionFunction, inputs, ioFunctions, strictValidation) } yield executable - } } diff --git a/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WomGraphMakerTools.scala b/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WomGraphMakerTools.scala index c5f4c42e65e..6cb7b3642b5 100644 --- a/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WomGraphMakerTools.scala +++ b/wdl/transforms/shared/src/main/scala/wdl/shared/transforms/wdlom2wom/WomGraphMakerTools.scala @@ -17,19 +17,19 @@ object WomGraphMakerTools { } Graph(g.nodes.union((g.nodes collect { - case node: CallNode => node.outputPorts.map(op => { - val identifier = makeIdentifier(WomIdentifier(op.identifier.localName.value)) - PortBasedGraphOutputNode(identifier, op.womType, op) - }) - case node: ExposedExpressionNode if forWorkflowOutputs.isEmpty => node.outputPorts.map(op => { - PortBasedGraphOutputNode(makeIdentifier(WomIdentifier(op.name)), op.womType, op) - }) - case node: ScatterNode => node.outputMapping.map(op => { - PortBasedGraphOutputNode(makeIdentifier(op.identifier), op.womType, op) - }) - case node: ConditionalNode => node.conditionalOutputPorts.map(op => { - PortBasedGraphOutputNode(makeIdentifier(op.identifier), op.womType, op) - }) + case node: CallNode => + node.outputPorts.map { op => + val identifier = makeIdentifier(WomIdentifier(op.identifier.localName.value)) + PortBasedGraphOutputNode(identifier, op.womType, op) + } + case node: ExposedExpressionNode if forWorkflowOutputs.isEmpty => + node.outputPorts.map(op => PortBasedGraphOutputNode(makeIdentifier(WomIdentifier(op.name)), op.womType, op)) + case node: ScatterNode => + node.outputMapping.map(op => PortBasedGraphOutputNode(makeIdentifier(op.identifier), op.womType, op)) + case node: ConditionalNode => + node.conditionalOutputPorts.map { op => + PortBasedGraphOutputNode(makeIdentifier(op.identifier), op.womType, op) + } }).flatten)) } } diff --git a/wom/src/main/scala/wdl/util/StringUtil.scala b/wom/src/main/scala/wdl/util/StringUtil.scala index 871e1842656..f89c4fd73da 100644 --- a/wom/src/main/scala/wdl/util/StringUtil.scala +++ b/wom/src/main/scala/wdl/util/StringUtil.scala @@ -50,18 +50,16 @@ object StringUtil { def stripAll(s: String, startChars: String, endChars: String): String = { /* https://stackoverflow.com/questions/17995260/trimming-strings-in-scala */ @tailrec - def start(n: Int): String = { + def start(n: Int): String = if (n == s.length) "" else if (startChars.indexOf(s.charAt(n).toInt) < 0) end(n, s.length) else start(1 + n) - } @tailrec - def end(a: Int, n: Int): String = { + def end(a: Int, n: Int): String = if (n <= a) s.substring(a, n) else if (endChars.indexOf(s.charAt(n - 1).toInt) < 0) s.substring(a, n) else end(a, n - 1) - } start(0) } diff --git a/wom/src/main/scala/wom/CommandPart.scala b/wom/src/main/scala/wom/CommandPart.scala index 06279304531..97e661c5b81 100644 --- a/wom/src/main/scala/wom/CommandPart.scala +++ b/wom/src/main/scala/wom/CommandPart.scala @@ -10,5 +10,6 @@ trait CommandPart { def 
instantiate(inputsMap: Map[LocalName, WomValue], functions: IoFunctionSet, valueMapper: WomValue => WomValue, - runtimeEnvironment: RuntimeEnvironment): ErrorOr[List[InstantiatedCommand]] + runtimeEnvironment: RuntimeEnvironment + ): ErrorOr[List[InstantiatedCommand]] } diff --git a/wom/src/main/scala/wom/RuntimeAttributes.scala b/wom/src/main/scala/wom/RuntimeAttributes.scala index 853e3145c23..486d69d5e97 100644 --- a/wom/src/main/scala/wom/RuntimeAttributes.scala +++ b/wom/src/main/scala/wom/RuntimeAttributes.scala @@ -5,6 +5,7 @@ import wom.expression.WomExpression object RuntimeAttributesKeys { val DockerKey = "docker" val MaxRetriesKey = "maxRetries" + /** * Equivalent to CPUMinKey */ @@ -12,6 +13,7 @@ object RuntimeAttributesKeys { val CpuPlatformKey = "cpuPlatform" val CpuMinKey = "cpuMin" val CpuMaxKey = "cpuMax" + /** * Equivalent to GPUMinKey */ @@ -20,6 +22,7 @@ object RuntimeAttributesKeys { val GpuMaxKey = "gpuCountMax" val GpuTypeKey = "gpuType" val DnaNexusInputDirMinKey = "dnaNexusInputDirMin" + /** * Equivalent to MemoryMinKey */ diff --git a/wom/src/main/scala/wom/WomFileMapper.scala b/wom/src/main/scala/wom/WomFileMapper.scala index 9d04de3f031..0b04b103537 100644 --- a/wom/src/main/scala/wom/WomFileMapper.scala +++ b/wom/src/main/scala/wom/WomFileMapper.scala @@ -8,6 +8,7 @@ import wom.values._ import scala.util.{Success, Try} object WomFileMapper { + /** * Loops over a WomValue applying the supplied mapper function whenever a WomFile is encountered. * @@ -18,8 +19,7 @@ object WomFileMapper { * @see [[wom.values.WomValue.collectAsSeq]] * @see [[wom.values.WomFile.mapFile]] */ - def mapWomFiles(mapper: WomFile => WomFile) - (womValue: WomValue): Try[WomValue] = { + def mapWomFiles(mapper: WomFile => WomFile)(womValue: WomValue): Try[WomValue] = womValue match { case file: WomFile => Try(mapper(file)) case array: WomArray => @@ -28,25 +28,27 @@ object WomFileMapper { WomArray(array.womType, _) } case map: WomMap => - val mappedMap = map.value map { - case (key, value) => mapWomFiles(mapper)(key) -> mapWomFiles(mapper)(value) + val mappedMap = map.value map { case (key, value) => + mapWomFiles(mapper)(key) -> mapWomFiles(mapper)(value) } TryUtil.sequenceKeyValues(mappedMap) map { WomMap(map.womType, _) } case womObject: WomObjectLike => - val mappedMap = womObject.values map { - case (key, value) => key -> mapWomFiles(mapper)(value) + val mappedMap = womObject.values map { case (key, value) => + key -> mapWomFiles(mapper)(value) } TryUtil.sequenceMap(mappedMap).map(WomObject.withTypeUnsafe(_, womObject.womObjectTypeLike)) case pair: WomPair => - val mappedPair: (Try[WomValue], Try[WomValue]) = (mapWomFiles(mapper)(pair.left), mapWomFiles(mapper)(pair.right)) + val mappedPair: (Try[WomValue], Try[WomValue]) = + (mapWomFiles(mapper)(pair.left), mapWomFiles(mapper)(pair.right)) TryUtil.sequenceTuple(mappedPair) map { (WomPair.apply _).tupled } case optionalValue: WomOptionalValue => // Build a `WomOptionalValue` from an `Option[WomValue]`. 
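
// An illustrative aside, not part of the patch: mapWomFiles recurses through
// arrays, maps, pairs, objects and optional values, applying the mapper only
// at WomFile leaves. A minimal usage sketch, assuming WomFile.mapFile's
// String => String signature; the "gs://bucket/" prefix and the file names
// are made-up example values:

import wom.WomFileMapper
import wom.types.{WomArrayType, WomSingleFileType}
import wom.values.{WomArray, WomSingleFile, WomValue}

import scala.util.Try

val nested: WomValue = WomArray(
  WomArrayType(WomSingleFileType),
  List(WomSingleFile("inputs/a.txt"), WomSingleFile("inputs/b.txt"))
)

// Every WomSingleFile leaf is rewritten; non-file values pass through unchanged.
val relocated: Try[WomValue] =
  WomFileMapper.mapWomFiles(file => file.mapFile("gs://bucket/" + _))(nested)
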
- def buildWomOptionalValue(optionalWomValue: Option[WomValue]) = WomOptionalValue(optionalValue.innerType, optionalWomValue) + def buildWomOptionalValue(optionalWomValue: Option[WomValue]) = + WomOptionalValue(optionalValue.innerType, optionalWomValue) val mappedOptional: Option[Try[WomValue]] = optionalValue.value.map(mapWomFiles(mapper)) mappedOptional match { @@ -57,5 +59,4 @@ object WomFileMapper { case coproduct: WomCoproductValue => mapWomFiles(mapper)(coproduct.womValue) case other => Success(other) } - } } diff --git a/wom/src/main/scala/wom/callable/Callable.scala b/wom/src/main/scala/wom/callable/Callable.scala index 70140cc999f..bb325fa093b 100644 --- a/wom/src/main/scala/wom/callable/Callable.scala +++ b/wom/src/main/scala/wom/callable/Callable.scala @@ -16,13 +16,13 @@ trait Callable { def inputs: List[_ <: InputDefinition] def outputs: List[_ <: OutputDefinition] - val sourceLocation : Option[SourceFileLocation] = None + val sourceLocation: Option[SourceFileLocation] = None } trait ExecutableCallable extends Callable { def graph: Graph - lazy val taskCallNodes: Set[CommandCallNode] = graph.allNodes collect { - case taskNode: CommandCallNode => taskNode + lazy val taskCallNodes: Set[CommandCallNode] = graph.allNodes collect { case taskNode: CommandCallNode => + taskNode } } @@ -48,37 +48,57 @@ object Callable { } object RequiredInputDefinition { - def apply(name: String, womType: WomType, valueMapper: InputValueMapper): RequiredInputDefinition = { + def apply(name: String, womType: WomType, valueMapper: InputValueMapper): RequiredInputDefinition = RequiredInputDefinition(LocalName(name), womType, valueMapper, None) - } - def apply(name: String, womType: WomType): RequiredInputDefinition = { + def apply(name: String, womType: WomType): RequiredInputDefinition = RequiredInputDefinition(LocalName(name), womType, InputDefinition.IdentityValueMapper, None) - } - def apply(name: String, womType: WomType, parameterMeta: Option[MetaValueElement]): RequiredInputDefinition = { + def apply(name: String, womType: WomType, parameterMeta: Option[MetaValueElement]): RequiredInputDefinition = RequiredInputDefinition(LocalName(name), womType, InputDefinition.IdentityValueMapper, parameterMeta) - } - def apply(name: String, womType: WomType, valueMapper: InputValueMapper, parameterMeta: Option[MetaValueElement]): RequiredInputDefinition = { + def apply(name: String, + womType: WomType, + valueMapper: InputValueMapper, + parameterMeta: Option[MetaValueElement] + ): RequiredInputDefinition = RequiredInputDefinition(LocalName(name), womType, valueMapper, parameterMeta) - } } final case class RequiredInputDefinition(localName: LocalName, womType: WomType, valueMapper: InputValueMapper = InputDefinition.IdentityValueMapper, - parameterMeta: Option[MetaValueElement] = None) extends InputDefinition + parameterMeta: Option[MetaValueElement] = None + ) extends InputDefinition object OverridableInputDefinitionWithDefault { - def apply(name: String, womType: WomType, default: WomExpression): OverridableInputDefinitionWithDefault = { - OverridableInputDefinitionWithDefault(LocalName(name), womType, default, InputDefinition.IdentityValueMapper, None) - } - def apply(name: String, womType: WomType, default: WomExpression, parameterMeta: Option[MetaValueElement]): OverridableInputDefinitionWithDefault = { - OverridableInputDefinitionWithDefault(LocalName(name), womType, default, InputDefinition.IdentityValueMapper, parameterMeta) - } - def apply(name: String, womType: WomType, default: WomExpression, 
valueMapper: InputValueMapper): OverridableInputDefinitionWithDefault = { + def apply(name: String, womType: WomType, default: WomExpression): OverridableInputDefinitionWithDefault = + OverridableInputDefinitionWithDefault(LocalName(name), + womType, + default, + InputDefinition.IdentityValueMapper, + None + ) + def apply(name: String, + womType: WomType, + default: WomExpression, + parameterMeta: Option[MetaValueElement] + ): OverridableInputDefinitionWithDefault = + OverridableInputDefinitionWithDefault(LocalName(name), + womType, + default, + InputDefinition.IdentityValueMapper, + parameterMeta + ) + def apply(name: String, + womType: WomType, + default: WomExpression, + valueMapper: InputValueMapper + ): OverridableInputDefinitionWithDefault = OverridableInputDefinitionWithDefault(LocalName(name), womType, default, valueMapper, None) - } - def apply(name: String, womType: WomType, default: WomExpression, valueMapper: InputValueMapper, parameterMeta: Option[MetaValueElement]): OverridableInputDefinitionWithDefault = { + def apply(name: String, + womType: WomType, + default: WomExpression, + valueMapper: InputValueMapper, + parameterMeta: Option[MetaValueElement] + ): OverridableInputDefinitionWithDefault = OverridableInputDefinitionWithDefault(LocalName(name), womType, default, valueMapper, parameterMeta) - } } sealed trait InputDefinitionWithDefault extends InputDefinition { @@ -91,16 +111,25 @@ object Callable { final case class OverridableInputDefinitionWithDefault(localName: LocalName, womType: WomType, default: WomExpression, - valueMapper: InputValueMapper = InputDefinition.IdentityValueMapper, - parameterMeta: Option[MetaValueElement] = None) extends InputDefinitionWithDefault + valueMapper: InputValueMapper = + InputDefinition.IdentityValueMapper, + parameterMeta: Option[MetaValueElement] = None + ) extends InputDefinitionWithDefault object FixedInputDefinitionWithDefault { - def apply(name: String, womType: WomType, default: WomExpression): FixedInputDefinitionWithDefault = { + def apply(name: String, womType: WomType, default: WomExpression): FixedInputDefinitionWithDefault = FixedInputDefinitionWithDefault(LocalName(name), womType, default, InputDefinition.IdentityValueMapper, None) - } - def apply(name: String, womType: WomType, default: WomExpression, parameterMeta: Option[MetaValueElement]): FixedInputDefinitionWithDefault = { - FixedInputDefinitionWithDefault(LocalName(name), womType, default, InputDefinition.IdentityValueMapper, parameterMeta) - } + def apply(name: String, + womType: WomType, + default: WomExpression, + parameterMeta: Option[MetaValueElement] + ): FixedInputDefinitionWithDefault = + FixedInputDefinitionWithDefault(LocalName(name), + womType, + default, + InputDefinition.IdentityValueMapper, + parameterMeta + ) } /** @@ -110,25 +139,37 @@ object Callable { womType: WomType, default: WomExpression, valueMapper: InputValueMapper = InputDefinition.IdentityValueMapper, - parameterMeta: Option[MetaValueElement] = None) extends InputDefinitionWithDefault + parameterMeta: Option[MetaValueElement] = None + ) extends InputDefinitionWithDefault object OptionalInputDefinition { - def apply(name: String, womType: WomOptionalType): OptionalInputDefinition = OptionalInputDefinition(LocalName(name), womType, InputDefinition.IdentityValueMapper, None) - def apply(name: String, womType: WomOptionalType, parameterMeta: Option[MetaValueElement]): OptionalInputDefinition = OptionalInputDefinition(LocalName(name), womType, InputDefinition.IdentityValueMapper, parameterMeta) - 
def apply(name: String, womType: WomOptionalType, valueMapper: InputValueMapper): OptionalInputDefinition = OptionalInputDefinition(LocalName(name), womType, valueMapper, None) - def apply(name: String, womType: WomOptionalType, valueMapper: InputValueMapper, parameterMeta: Option[MetaValueElement]): OptionalInputDefinition = OptionalInputDefinition(LocalName(name), womType, valueMapper, parameterMeta) + def apply(name: String, womType: WomOptionalType): OptionalInputDefinition = + OptionalInputDefinition(LocalName(name), womType, InputDefinition.IdentityValueMapper, None) + def apply(name: String, + womType: WomOptionalType, + parameterMeta: Option[MetaValueElement] + ): OptionalInputDefinition = + OptionalInputDefinition(LocalName(name), womType, InputDefinition.IdentityValueMapper, parameterMeta) + def apply(name: String, womType: WomOptionalType, valueMapper: InputValueMapper): OptionalInputDefinition = + OptionalInputDefinition(LocalName(name), womType, valueMapper, None) + def apply(name: String, + womType: WomOptionalType, + valueMapper: InputValueMapper, + parameterMeta: Option[MetaValueElement] + ): OptionalInputDefinition = OptionalInputDefinition(LocalName(name), womType, valueMapper, parameterMeta) } final case class OptionalInputDefinition(localName: LocalName, womType: WomOptionalType, valueMapper: InputValueMapper = InputDefinition.IdentityValueMapper, - parameterMeta: Option[MetaValueElement] = None) extends InputDefinition + parameterMeta: Option[MetaValueElement] = None + ) extends InputDefinition object OutputDefinition { - def apply(name: String, womType: WomType, expression: WomExpression): OutputDefinition = { + def apply(name: String, womType: WomType, expression: WomExpression): OutputDefinition = OutputDefinition(LocalName(name), womType, expression) - } } final case class OutputDefinition(localName: LocalName, womType: WomType, expression: WomExpression) { + /** * Alias for localName.asString */ diff --git a/wom/src/main/scala/wom/callable/CommandTaskDefinition.scala b/wom/src/main/scala/wom/callable/CommandTaskDefinition.scala index 6007fbcf7c9..62531ac5802 100644 --- a/wom/src/main/scala/wom/callable/CommandTaskDefinition.scala +++ b/wom/src/main/scala/wom/callable/CommandTaskDefinition.scala @@ -23,21 +23,24 @@ object CommandTaskDefinition { type EvaluatedOutputs = Checked[Map[OutputPort, WomValue]] /* - * Result type of the OutputEvaluationFunction. Equivalent to Future[Option[EvaluatedOutputs]] - * - Future because it might involve I/O - * - Option because it might not return any value (dependent on the task) - * - Checked because the evaluation could fail (invalid types, missing values etc...) - */ + * Result type of the OutputEvaluationFunction. Equivalent to Future[Option[EvaluatedOutputs]] + * - Future because it might involve I/O + * - Option because it might not return any value (dependent on the task) + * - Checked because the evaluation could fail (invalid types, missing values etc...) 
+ */ type OutputFunctionResponse = OptionT[Future, EvaluatedOutputs] // Function definition to evaluate task outputs - type OutputEvaluationFunction = (Set[OutputPort], Map[String, WomValue], IoFunctionSet, ExecutionContext) => OutputFunctionResponse + type OutputEvaluationFunction = + (Set[OutputPort], Map[String, WomValue], IoFunctionSet, ExecutionContext) => OutputFunctionResponse object OutputEvaluationFunction { - val none: OutputEvaluationFunction = { case (_ ,_, _, _) => OptionT[Future, EvaluatedOutputs](Future.successful(None)) } + val none: OutputEvaluationFunction = { case (_, _, _, _) => + OptionT[Future, EvaluatedOutputs](Future.successful(None)) + } } - private implicit val instantiatedCommandMonoid = cats.derived.MkMonoid[InstantiatedCommand] + implicit private val instantiatedCommandMonoid = cats.derived.MkMonoid[InstantiatedCommand] object CommandTemplateBuilder { def fromValues(values: Seq[CommandPart]) = new CommandTemplateBuilder { override def build(inputs: WomEvaluatedCallInputs): ErrorOr[Seq[CommandPart]] = values.validNel @@ -63,11 +66,12 @@ sealed trait TaskDefinition extends Callable { * Transform the Callable TaskDefinition to an ExecutableCallable that can be executed on its own. */ def toExecutable: ErrorOr[ExecutableCallable] + /** * Provides a custom way to evaluate outputs of the task definition. * Return None to leave the evaluation method to the engine. */ - private [wom] def customizedOutputEvaluation: OutputEvaluationFunction + private[wom] def customizedOutputEvaluation: OutputEvaluationFunction } /** @@ -80,7 +84,8 @@ sealed trait CommandTaskDefinition extends TaskDefinition { def stderrOverride: Option[WomExpression] def commandTemplateBuilder: WomEvaluatedCallInputs => ErrorOr[Seq[CommandPart]] // TODO ErrorOrify this ? Throw for now - def commandTemplate(taskInputs: WomEvaluatedCallInputs): Seq[CommandPart] = commandTemplateBuilder(taskInputs).toTry("Failed to build command").get + def commandTemplate(taskInputs: WomEvaluatedCallInputs): Seq[CommandPart] = + commandTemplateBuilder(taskInputs).toTry("Failed to build command").get def prefixSeparator: String def commandPartSeparator: String def stdinRedirection: Option[WomExpression] @@ -88,18 +93,20 @@ sealed trait CommandTaskDefinition extends TaskDefinition { def environmentExpressions: Map[String, WomExpression] def additionalGlob: Option[WomGlobFile] def homeOverride: Option[RuntimeEnvironment => String] + /** * Provides a custom way to evaluate outputs of the task definition. * Return None to leave the evaluation method to the engine. */ - private [wom] def customizedOutputEvaluation: OutputEvaluationFunction + private[wom] def customizedOutputEvaluation: OutputEvaluationFunction lazy val unqualifiedName: LocallyQualifiedName = name def instantiateCommand(taskInputs: WomEvaluatedCallInputs, functions: IoFunctionSet, valueMapper: WomValue => WomValue, - runtimeEnvironment: RuntimeEnvironment): ErrorOr[InstantiatedCommand] = { + runtimeEnvironment: RuntimeEnvironment + ): ErrorOr[InstantiatedCommand] = { val inputsByLocalName = taskInputs map { case (k, v) => k.localName -> v } val valueMappedInputsByLocalName = inputsByLocalName map { case (k, v) => k -> valueMapper(v) } @@ -107,26 +114,30 @@ sealed trait CommandTaskDefinition extends TaskDefinition { // Just raw command parts, no separators. 
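
// An illustrative aside, not part of the patch: the parts collected below are
// joined with cats' intercalate, which folds a collection through a Monoid
// while inserting a separator between elements (the "monoid smash" mentioned
// in the comment further down). A minimal sketch of the same pattern on
// strings:

import cats.implicits._

val words = List("echo", "hello", "world")
val joined = words.intercalate(" ") // "echo hello world"
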
val rawCommandParts: List[ErrorOr[InstantiatedCommand]] = - commandTemplate(taskInputs).toList.flatMap({ commandPart => + commandTemplate(taskInputs).toList.flatMap { commandPart => commandPart.instantiate(inputsByLocalName, functions, valueMapper, runtimeEnvironment).sequence - }) + } // Add separator command parts and monoid smash down to one `ErrorOr[InstantiatedCommand]`. val instantiatedCommand: ErrorOr[InstantiatedCommand] = rawCommandParts.intercalate(InstantiatedCommand(commandPartSeparator).validNel) // `normalize` the instantiation (i.e. don't break Python code indentation) and add in the inputs. - instantiatedCommand map { c => c.copy( - commandString = StringUtil.normalize(c.commandString), - preprocessedInputs = inputsByLocalName.toList, - valueMappedPreprocessedInputs = valueMappedInputsByLocalName.toList - )} + instantiatedCommand map { c => + c.copy( + commandString = StringUtil.normalize(c.commandString), + preprocessedInputs = inputsByLocalName.toList, + valueMappedPreprocessedInputs = valueMappedInputsByLocalName.toList + ) + } } - def commandTemplateString(taskInputs: WomEvaluatedCallInputs): String = StringUtil.normalize(commandTemplate(taskInputs).map(_.toString).mkString) + def commandTemplateString(taskInputs: WomEvaluatedCallInputs): String = + StringUtil.normalize(commandTemplate(taskInputs).map(_.toString).mkString) override def toString: String = { - val template = Try(commandTemplate(Map.empty).toString()).getOrElse("Could not generate command template without inputs") + val template = + Try(commandTemplate(Map.empty).toString()).getOrElse("Could not generate command template without inputs") s"[Task name=$name commandTemplate=$template]" } } @@ -150,12 +161,15 @@ final case class CallableTaskDefinition(name: String, stdoutOverride: Option[WomExpression] = None, stderrOverride: Option[WomExpression] = None, additionalGlob: Option[WomGlobFile] = None, - private [wom] val customizedOutputEvaluation: OutputEvaluationFunction = OutputEvaluationFunction.none, + private[wom] val customizedOutputEvaluation: OutputEvaluationFunction = + OutputEvaluationFunction.none, homeOverride: Option[RuntimeEnvironment => String] = None, dockerOutputDirectory: Option[String] = None, - override val sourceLocation : Option[SourceFileLocation] - ) extends CommandTaskDefinition { - def toExecutable: ErrorOr[ExecutableTaskDefinition] = TaskCall.graphFromDefinition(this) map { ExecutableTaskDefinition(this, _) } + override val sourceLocation: Option[SourceFileLocation] +) extends CommandTaskDefinition { + def toExecutable: ErrorOr[ExecutableTaskDefinition] = TaskCall.graphFromDefinition(this) map { + ExecutableTaskDefinition(this, _) + } } /** @@ -164,7 +178,8 @@ final case class CallableTaskDefinition(name: String, */ final case class ExecutableTaskDefinition private (callableTaskDefinition: CallableTaskDefinition, override val graph: Graph - ) extends CommandTaskDefinition with ExecutableCallable { +) extends CommandTaskDefinition + with ExecutableCallable { override def name = callableTaskDefinition.name override def inputs = callableTaskDefinition.inputs override def outputs = callableTaskDefinition.outputs @@ -181,7 +196,7 @@ final case class ExecutableTaskDefinition private (callableTaskDefinition: Calla override def adHocFileCreation = callableTaskDefinition.adHocFileCreation override def environmentExpressions = callableTaskDefinition.environmentExpressions override def additionalGlob = callableTaskDefinition.additionalGlob - override private [wom] def customizedOutputEvaluation 
= callableTaskDefinition.customizedOutputEvaluation + override private[wom] def customizedOutputEvaluation = callableTaskDefinition.customizedOutputEvaluation override def toExecutable = this.validNel override def homeOverride = callableTaskDefinition.homeOverride override def dockerOutputDirectory = callableTaskDefinition.dockerOutputDirectory @@ -191,22 +206,24 @@ sealed trait ExpressionTaskDefinition extends TaskDefinition { def evaluate: (Map[String, WomValue], IoFunctionSet, List[OutputPort]) => Checked[Map[OutputPort, WomValue]] } - /** * An expression task definition only. * Can be called but cannot be used in an Executable as a standalone execution. */ -final case class CallableExpressionTaskDefinition(name: String, - evaluate: (Map[String, WomValue], IoFunctionSet, List[OutputPort]) => Checked[Map[OutputPort, WomValue]], - runtimeAttributes: RuntimeAttributes, - meta: Map[String, MetaValueElement], - parameterMeta: Map[String, MetaValueElement], - outputs: List[Callable.OutputDefinition], - inputs: List[_ <: Callable.InputDefinition], - prefixSeparator: String = ".", - private [wom] val customizedOutputEvaluation: OutputEvaluationFunction = OutputEvaluationFunction.none - ) extends ExpressionTaskDefinition { - def toExecutable: ErrorOr[ExecutableExpressionTaskDefinition] = TaskCall.graphFromDefinition(this) map { ExecutableExpressionTaskDefinition(this, _) } +final case class CallableExpressionTaskDefinition( + name: String, + evaluate: (Map[String, WomValue], IoFunctionSet, List[OutputPort]) => Checked[Map[OutputPort, WomValue]], + runtimeAttributes: RuntimeAttributes, + meta: Map[String, MetaValueElement], + parameterMeta: Map[String, MetaValueElement], + outputs: List[Callable.OutputDefinition], + inputs: List[_ <: Callable.InputDefinition], + prefixSeparator: String = ".", + private[wom] val customizedOutputEvaluation: OutputEvaluationFunction = OutputEvaluationFunction.none +) extends ExpressionTaskDefinition { + def toExecutable: ErrorOr[ExecutableExpressionTaskDefinition] = TaskCall.graphFromDefinition(this) map { + ExecutableExpressionTaskDefinition(this, _) + } } /** @@ -214,8 +231,9 @@ final case class CallableExpressionTaskDefinition(name: String, * Can be called from a workflow but can also be run as a standalone execution. 
*/ final case class ExecutableExpressionTaskDefinition private (callableTaskDefinition: CallableExpressionTaskDefinition, - override val graph: Graph - ) extends ExpressionTaskDefinition with ExecutableCallable { + override val graph: Graph +) extends ExpressionTaskDefinition + with ExecutableCallable { override def name = callableTaskDefinition.name override def inputs = callableTaskDefinition.inputs override def outputs = callableTaskDefinition.outputs @@ -225,5 +243,5 @@ final case class ExecutableExpressionTaskDefinition private (callableTaskDefinit override def meta = callableTaskDefinition.meta override def parameterMeta = callableTaskDefinition.parameterMeta override def toExecutable = this.validNel - override private [wom] def customizedOutputEvaluation = callableTaskDefinition.customizedOutputEvaluation + override private[wom] def customizedOutputEvaluation = callableTaskDefinition.customizedOutputEvaluation } diff --git a/wom/src/main/scala/wom/callable/ContainerizedInputExpression.scala b/wom/src/main/scala/wom/callable/ContainerizedInputExpression.scala index e77689fbb5b..5293dbcd648 100644 --- a/wom/src/main/scala/wom/callable/ContainerizedInputExpression.scala +++ b/wom/src/main/scala/wom/callable/ContainerizedInputExpression.scala @@ -10,7 +10,8 @@ import wom.values.{WomFile, WomValue} trait ContainerizedInputExpression { def evaluate(hostInputValues: Map[String, WomValue], containerizedInputValues: Map[String, WomValue], - ioFunctionSet: IoFunctionSet): IOChecked[List[AdHocValue]] + ioFunctionSet: IoFunctionSet + ): IOChecked[List[AdHocValue]] } final case class AdHocValue(womValue: WomFile, alternativeName: Option[String], inputName: Option[String]) diff --git a/wom/src/main/scala/wom/callable/RuntimeEnvironment.scala b/wom/src/main/scala/wom/callable/RuntimeEnvironment.scala index a2eb4983788..513516a1af2 100644 --- a/wom/src/main/scala/wom/callable/RuntimeEnvironment.scala +++ b/wom/src/main/scala/wom/callable/RuntimeEnvironment.scala @@ -18,5 +18,5 @@ case class RuntimeEnvironment(outputPath: String, cores: Int Refined Positive, ram: Double, outputPathSize: Long, - tempPathSize: Long) - + tempPathSize: Long +) diff --git a/wom/src/main/scala/wom/callable/WorkflowDefinition.scala b/wom/src/main/scala/wom/callable/WorkflowDefinition.scala index 07f70ebc7e7..102a47ae775 100644 --- a/wom/src/main/scala/wom/callable/WorkflowDefinition.scala +++ b/wom/src/main/scala/wom/callable/WorkflowDefinition.scala @@ -8,7 +8,8 @@ final case class WorkflowDefinition(name: String, innerGraph: Graph, meta: Map[String, MetaValueElement], parameterMeta: Map[String, MetaValueElement], - override val sourceLocation : Option[SourceFileLocation]) extends ExecutableCallable { + override val sourceLocation: Option[SourceFileLocation] +) extends ExecutableCallable { override lazy val toString = s"[Workflow $name]" override val graph: Graph = innerGraph diff --git a/wom/src/main/scala/wom/executable/Executable.scala b/wom/src/main/scala/wom/executable/Executable.scala index a68c7fc98c2..54da67d0d9c 100644 --- a/wom/src/main/scala/wom/executable/Executable.scala +++ b/wom/src/main/scala/wom/executable/Executable.scala @@ -24,66 +24,88 @@ import wom.values.WomValue object Executable { /* - * Function provided by each language, that takes the raw input file as a String and returns a Checked[ParsedInputMap] - * Each entry of the map is an input found in the file. - * The key is a string representation of the input. It must be be equal to the name of the matching GraphInputNode. 
-   * The value is a function which given a WomType, attempts to coerce the input value to that type.
-   * Thanks to this level of indirection, the logic that links graph input nodes to input values still resides in WOM,
-   * which 1) abstracts it away and 2) guarantees that the linking mechanism is the same regardless of the language.
-   * At the same time each language can parse the input file however it wants.
+   * Function provided by each language, that takes the raw input file as a String and returns a Checked[ParsedInputMap]
+   * Each entry of the map is an input found in the file.
+   * The key is a string representation of the input. It must be equal to the name of the matching GraphInputNode.
+   * The value is a function which, given a WomType, attempts to coerce the input value to that type.
+   * Thanks to this level of indirection, the logic that links graph input nodes to input values still resides in WOM,
+   * which 1) abstracts it away and 2) guarantees that the linking mechanism is the same regardless of the language.
+   * At the same time each language can parse the input file however it wants.
    */
   type InputParsingFunction = String => Checked[ParsedInputMap]
   type ParsedInputMap = Map[String, DelayedCoercionFunction]
   type DelayedCoercionFunction = WomType => ErrorOr[WomValue]
-
+
   /*
-   * Maps output ports from graph input nodes to ResolvedExecutableInput
+   * Maps output ports from graph input nodes to ResolvedExecutableInput
    */
   type ResolvedExecutableInputs = Map[OutputPort, ResolvedExecutableInput]

-  def withInputs(entryPoint: ExecutableCallable, inputParsingFunction: InputParsingFunction, inputFile: Option[String], ioFunctions: IoFunctionSet, strictValidation: Boolean): Checked[Executable] = {
-    validateExecutable(entryPoint, inputParsingFunction, parseGraphInputs(strictValidation), inputFile, ioFunctions).toChecked
-  }
+  def withInputs(entryPoint: ExecutableCallable,
+                 inputParsingFunction: InputParsingFunction,
+                 inputFile: Option[String],
+                 ioFunctions: IoFunctionSet,
+                 strictValidation: Boolean
+  ): Checked[Executable] =
+    validateExecutable(entryPoint,
+                       inputParsingFunction,
+                       parseGraphInputs(strictValidation),
+                       inputFile,
+                       ioFunctions
+    ).toChecked

   /**
    * Given the graph and the Map[String, DelayedCoercionFunction], attempts to find a value in the map for each ExternalGraphInputNode of the graph
    */
-  private def parseGraphInputs(strictValidation: Boolean)(graph: Graph, inputCoercionMap: Map[String, DelayedCoercionFunction], ioFunctions: IoFunctionSet): IOChecked[ResolvedExecutableInputs] = {
+  private def parseGraphInputs(strictValidation: Boolean)(graph: Graph,
+                                                          inputCoercionMap: Map[String, DelayedCoercionFunction],
+                                                          ioFunctions: IoFunctionSet
+  ): IOChecked[ResolvedExecutableInputs] = {
     import ioFunctions.cs

-    def fromInputMapping(gin: ExternalGraphInputNode): Option[IOChecked[ResolvedExecutableInput]] = {
+    def fromInputMapping(gin: ExternalGraphInputNode): Option[IOChecked[ResolvedExecutableInput]] =
       inputCoercionMap
         .get(gin.nameInInputSet)
-        .map { _.apply(gin.womType).toIOChecked
-          .flatMap(gin.valueMapper(ioFunctions)(_))
-          .contextualizeErrors(s"evaluate input '${gin.localName}'")
-          .map(Coproduct[ResolvedExecutableInput](_))
+        .map {
+          _.apply(gin.womType).toIOChecked
+            .flatMap(gin.valueMapper(ioFunctions)(_))
+            .contextualizeErrors(s"evaluate input '${gin.localName}'")
+            .map(Coproduct[ResolvedExecutableInput](_))
         }
-    }

     def fallBack(gin: ExternalGraphInputNode): IOChecked[ResolvedExecutableInput] = gin match {
-      case required: RequiredGraphInputNode =>
s"Required workflow input '${required.nameInInputSet}' not specified".invalidIOChecked + case required: RequiredGraphInputNode => + s"Required workflow input '${required.nameInInputSet}' not specified".invalidIOChecked case optionalWithDefault: OptionalGraphInputNodeWithDefault => for { evaluatedDefault <- optionalWithDefault.default.evaluateValue(Map.empty, ioFunctions).toIOChecked mapped <- optionalWithDefault.valueMapper(ioFunctions)(evaluatedDefault) } yield Coproduct[ResolvedExecutableInput](mapped) - case optional: OptionalGraphInputNode => IOChecked.pure(Coproduct[ResolvedExecutableInput](optional.womType.none: WomValue)) + case optional: OptionalGraphInputNode => + IOChecked.pure(Coproduct[ResolvedExecutableInput](optional.womType.none: WomValue)) } - val providedInputsValidation: IOChecked[ResolvedExecutableInputs] = graph.inputNodes.toList.parTraverse[IOChecked, Option[(OutputPort, ResolvedExecutableInput)]]({ - case gin: ExternalGraphInputNode => - // The compiler needs the type ascription for some reason - fromInputMapping(gin).getOrElse(fallBack(gin)).map((gin.singleOutputPort: OutputPort) -> _).map(Option(_)) - case _ => Option.empty[(OutputPort, ResolvedExecutableInput)].validIOChecked - }).map(_.flatten).map(_.toMap) + val providedInputsValidation: IOChecked[ResolvedExecutableInputs] = graph.inputNodes.toList + .parTraverse[IOChecked, Option[(OutputPort, ResolvedExecutableInput)]] { + case gin: ExternalGraphInputNode => + // The compiler needs the type ascription for some reason + fromInputMapping(gin).getOrElse(fallBack(gin)).map((gin.singleOutputPort: OutputPort) -> _).map(Option(_)) + case _ => Option.empty[(OutputPort, ResolvedExecutableInput)].validIOChecked + } + .map(_.flatten) + .map(_.toMap) val wantedInputs = graph.externalInputNodes.map(_.nameInInputSet) val unwantedInputs = if (strictValidation) inputCoercionMap.keySet.diff(wantedInputs) else Set.empty val wantedInputsValidation: ErrorOr[Unit] = NonEmptyList.fromList(unwantedInputs.toList) match { case None => ().validNel - case Some(unwanteds) => Invalid(unwanteds.map(unwanted => s"WARNING: Unexpected input provided: $unwanted (expected inputs: [${wantedInputs.mkString(", ")}])")) + case Some(unwanteds) => + Invalid( + unwanteds.map(unwanted => + s"WARNING: Unexpected input provided: $unwanted (expected inputs: [${wantedInputs.mkString(", ")}])" + ) + ) } (providedInputsValidation, wantedInputsValidation.toIOChecked) mapN { (providedInputs, _) => providedInputs } diff --git a/wom/src/main/scala/wom/executable/ExecutableValidation.scala b/wom/src/main/scala/wom/executable/ExecutableValidation.scala index e892b2f2029..4904e41ae69 100644 --- a/wom/src/main/scala/wom/executable/ExecutableValidation.scala +++ b/wom/src/main/scala/wom/executable/ExecutableValidation.scala @@ -7,14 +7,21 @@ import wom.executable.Executable.{DelayedCoercionFunction, InputParsingFunction, import wom.expression.IoFunctionSet import wom.graph.Graph -private [executable] object ExecutableValidation { +private[executable] object ExecutableValidation { - private [executable] def validateExecutable(entryPoint: ExecutableCallable, - inputParsingFunction: InputParsingFunction, - parseGraphInputs: (Graph, Map[String, DelayedCoercionFunction], IoFunctionSet) => IOChecked[ResolvedExecutableInputs], - inputFile: Option[String], - ioFunctions: IoFunctionSet): IOChecked[Executable] = for { - parsedInputs <- inputFile.map(inputParsingFunction).map(_.toIOChecked).getOrElse(IOChecked.pure(Map.empty[String, DelayedCoercionFunction])) + 
private[executable] def validateExecutable(entryPoint: ExecutableCallable, + inputParsingFunction: InputParsingFunction, + parseGraphInputs: (Graph, + Map[String, DelayedCoercionFunction], + IoFunctionSet + ) => IOChecked[ResolvedExecutableInputs], + inputFile: Option[String], + ioFunctions: IoFunctionSet + ): IOChecked[Executable] = for { + parsedInputs <- inputFile + .map(inputParsingFunction) + .map(_.toIOChecked) + .getOrElse(IOChecked.pure(Map.empty[String, DelayedCoercionFunction])) validatedInputs <- parseGraphInputs(entryPoint.graph, parsedInputs, ioFunctions) } yield Executable(entryPoint, validatedInputs) } diff --git a/wom/src/main/scala/wom/executable/WomBundle.scala b/wom/src/main/scala/wom/executable/WomBundle.scala index 795df07a258..b3cb11762e4 100644 --- a/wom/src/main/scala/wom/executable/WomBundle.scala +++ b/wom/src/main/scala/wom/executable/WomBundle.scala @@ -12,11 +12,13 @@ import wom.types.WomType final case class WomBundle(primaryCallable: Option[Callable], allCallables: Map[String, Callable], typeAliases: Map[String, WomType], - resolvedImportRecords: Set[ResolvedImportRecord]) { + resolvedImportRecords: Set[ResolvedImportRecord] +) { def toExecutableCallable: Checked[ExecutableCallable] = primaryCallable match { case Some(w: WorkflowDefinition) => w.validNelCheck case Some(c: CallableTaskDefinition) => c.toExecutable.toEither - case Some(other) => s"Cannot convert WOM bundle to executable. Primary callable was an unknown type ${other.getClass.getSimpleName}.".invalidNelCheck + case Some(other) => + s"Cannot convert WOM bundle to executable. Primary callable was an unknown type ${other.getClass.getSimpleName}.".invalidNelCheck case None => s"Cannot convert WOM bundle to executable. No primary callable was available.".invalidNelCheck } diff --git a/wom/src/main/scala/wom/expression/InputPointerToWomValue.scala b/wom/src/main/scala/wom/expression/InputPointerToWomValue.scala index 33fa0241127..a95ff619f04 100644 --- a/wom/src/main/scala/wom/expression/InputPointerToWomValue.scala +++ b/wom/src/main/scala/wom/expression/InputPointerToWomValue.scala @@ -5,7 +5,7 @@ import common.validation.ErrorOr.ErrorOr import common.validation.IOChecked.{IOChecked, _} import shapeless.Poly1 import wom.callable.Callable.InputDefinition.InputValueMapper -import wom.callable.Callable.{InputDefinition, OverridableInputDefinitionWithDefault, OptionalInputDefinition} +import wom.callable.Callable.{InputDefinition, OptionalInputDefinition, OverridableInputDefinitionWithDefault} import wom.graph.GraphNodePort.OutputPort import wom.values.{WomOptionalValue, WomValue} @@ -16,22 +16,28 @@ object InputPointerToWomValue extends Poly1 { // Function that can transform any of the coproduct types to an ErrorOr[WomValue] type ToWomValueFn = (Map[String, WomValue], IoFunctionSet, OutputPortLookup, InputDefinition) => IOChecked[WomValue] - def withInitializedValue(ioFunctionSet: IoFunctionSet, womValue: WomValue)(block: WomValue => IOChecked[WomValue]): IOChecked[WomValue] = { + def withInitializedValue(ioFunctionSet: IoFunctionSet, womValue: WomValue)( + block: WomValue => IOChecked[WomValue] + ): IOChecked[WomValue] = for { - initialized <- womValue.initialize(ioFunctionSet) + initialized <- womValue.initialize(ioFunctionSet) evaluated <- block(initialized) } yield evaluated - } implicit def fromWomValue: Case.Aux[WomValue, ToWomValueFn] = at[WomValue] { - womValue => (_: Map[String, WomValue], ioFunctions: IoFunctionSet, _: OutputPortLookup, inputDefinition: InputDefinition) => - 
withInitializedValue(ioFunctions, womValue) { - inputDefinition.valueMapper(ioFunctions) - } + womValue => + (_: Map[String, WomValue], ioFunctions: IoFunctionSet, _: OutputPortLookup, inputDefinition: InputDefinition) => + withInitializedValue(ioFunctions, womValue) { + inputDefinition.valueMapper(ioFunctions) + } } implicit def fromOutputPort: Case.Aux[OutputPort, ToWomValueFn] = at[OutputPort] { - port => (knownValues: Map[String, WomValue], ioFunctions: IoFunctionSet, outputPortLookup: OutputPortLookup, inputDefinition: InputDefinition) => + port => (knownValues: Map[String, WomValue], + ioFunctions: IoFunctionSet, + outputPortLookup: OutputPortLookup, + inputDefinition: InputDefinition + ) => (outputPortLookup(port), inputDefinition) match { case (Valid(womValue), _) if isDefined(womValue) => withInitializedValue(ioFunctions, womValue) { @@ -39,32 +45,40 @@ object InputPointerToWomValue extends Poly1 { } case (_, OverridableInputDefinitionWithDefault(_, _, defaultExpression, valueMapper, _)) => evaluateAndMap(defaultExpression, knownValues, valueMapper, ioFunctions) - case (_, OptionalInputDefinition(_, optionalType, valueMapper, _)) => valueMapper(ioFunctions)(optionalType.none) + case (_, OptionalInputDefinition(_, optionalType, valueMapper, _)) => + valueMapper(ioFunctions)(optionalType.none) case _ => s"Failed to lookup input value for required input ${port.internalName}".invalidIOChecked } } @tailrec - private def isDefined(womValue: WomValue): Boolean = { + private def isDefined(womValue: WomValue): Boolean = womValue match { case WomOptionalValue(_, Some(innerWomValue)) => isDefined(innerWomValue) case WomOptionalValue(_, None) => false case _ => true } - } implicit def fromWomExpression: Case.Aux[WomExpression, ToWomValueFn] = at[WomExpression] { - womExpression => (knownValues: Map[String, WomValue], ioFunctions: IoFunctionSet, _: OutputPortLookup, inputDefinition: InputDefinition) => + womExpression => (knownValues: Map[String, WomValue], + ioFunctions: IoFunctionSet, + _: OutputPortLookup, + inputDefinition: InputDefinition + ) => evaluateAndMap(womExpression, knownValues, inputDefinition.valueMapper, ioFunctions) } - def evaluate(womExpression: WomExpression, knownValues: Map[String, WomValue], ioFunctions: IoFunctionSet): ErrorOr[WomValue] = + def evaluate(womExpression: WomExpression, + knownValues: Map[String, WomValue], + ioFunctions: IoFunctionSet + ): ErrorOr[WomValue] = womExpression.evaluateValue(knownValues, ioFunctions) def evaluateAndMap(womExpression: WomExpression, knownValues: Map[String, WomValue], valueMapper: InputValueMapper, - ioFunctions: IoFunctionSet): IOChecked[WomValue] = for { + ioFunctions: IoFunctionSet + ): IOChecked[WomValue] = for { evaluated <- evaluate(womExpression, knownValues, ioFunctions).toIOChecked mapped <- valueMapper(ioFunctions)(evaluated) } yield mapped diff --git a/wom/src/main/scala/wom/expression/IoFunctionSetAdapter.scala b/wom/src/main/scala/wom/expression/IoFunctionSetAdapter.scala index 6695113c7de..475d6295045 100644 --- a/wom/src/main/scala/wom/expression/IoFunctionSetAdapter.scala +++ b/wom/src/main/scala/wom/expression/IoFunctionSetAdapter.scala @@ -2,7 +2,8 @@ package wom.expression class IoFunctionSetAdapter(delegate: IoFunctionSet) extends IoFunctionSet { override def pathFunctions = delegate.pathFunctions - override def readFile(path: String, maxBytes: Option[Int], failOnOverflow: Boolean) = delegate.readFile(path, maxBytes, failOnOverflow) + override def readFile(path: String, maxBytes: Option[Int], failOnOverflow: 
Boolean) = + delegate.readFile(path, maxBytes, failOnOverflow) override def writeFile(path: String, content: String) = delegate.writeFile(path, content) override def createTemporaryDirectory(name: Option[String]) = delegate.createTemporaryDirectory(name) override def copyFile(source: String, destination: String) = delegate.copyFile(source, destination) @@ -11,5 +12,5 @@ class IoFunctionSetAdapter(delegate: IoFunctionSet) extends IoFunctionSet { override def listDirectory(path: String)(visited: Vector[String]) = delegate.listDirectory(path)(visited) override def isDirectory(path: String) = delegate.isDirectory(path) override def size(path: String) = delegate.size(path) - override implicit def ec = delegate.ec + implicit override def ec = delegate.ec } diff --git a/wom/src/main/scala/wom/expression/NoIoFunctionSet.scala b/wom/src/main/scala/wom/expression/NoIoFunctionSet.scala index c10bb74b456..d99a3986b3b 100644 --- a/wom/src/main/scala/wom/expression/NoIoFunctionSet.scala +++ b/wom/src/main/scala/wom/expression/NoIoFunctionSet.scala @@ -11,32 +11,41 @@ object EmptyIoFunctionSet { } class EmptyIoFunctionSet extends IoFunctionSet { - override def readFile(path: String, maxBytes: Option[Int] = None, failOnOverflow: Boolean = false): Future[String] = Future.failed(new UnsupportedOperationException("readFile is not available here")) + override def readFile(path: String, maxBytes: Option[Int] = None, failOnOverflow: Boolean = false): Future[String] = + Future.failed(new UnsupportedOperationException("readFile is not available here")) - override def writeFile(path: String, content: String): Future[WomSingleFile] = { + override def writeFile(path: String, content: String): Future[WomSingleFile] = Future.failed(new UnsupportedOperationException("writeFile is not available here")) - } - override def copyFile(pathFrom: String, targetName: String): Future[WomSingleFile] = { + override def copyFile(pathFrom: String, targetName: String): Future[WomSingleFile] = throw new Exception("copyFile is not available here") - } - override def glob(pattern: String): Future[Seq[String]] = throw new UnsupportedOperationException("glob is not available here") + override def glob(pattern: String): Future[Seq[String]] = throw new UnsupportedOperationException( + "glob is not available here" + ) override def listAllFilesUnderDirectory(dirPath: String): Nothing = throw new UnsupportedOperationException("listAllFilesUnderDirectory is not available here") - override def size(path: String): Future[Long] = Future.failed(new UnsupportedOperationException("size is not available here")) + override def size(path: String): Future[Long] = + Future.failed(new UnsupportedOperationException("size is not available here")) - override implicit def ec: ExecutionContext = EmptyIoFunctionSet.singleThreadEc + implicit override def ec: ExecutionContext = EmptyIoFunctionSet.singleThreadEc override def pathFunctions = NoPathFunctionSet - override def listDirectory(path: String)(visited: Vector[String] = Vector.empty) = throw new UnsupportedOperationException("listDirectory is not available here") - override def isDirectory(path: String): Future[Boolean] = throw new UnsupportedOperationException("isDirectory is not available here") - override def createTemporaryDirectory(name: Option[String]) = throw new UnsupportedOperationException("createTemporaryDirectory is not available here") + override def listDirectory(path: String)(visited: Vector[String] = Vector.empty) = + throw new UnsupportedOperationException("listDirectory is not available 
here") + override def isDirectory(path: String): Future[Boolean] = throw new UnsupportedOperationException( + "isDirectory is not available here" + ) + override def createTemporaryDirectory(name: Option[String]) = throw new UnsupportedOperationException( + "createTemporaryDirectory is not available here" + ) } class EmptyPathFunctionSet extends PathFunctionSet { - override def sibling(of: String, path: String) = throw new UnsupportedOperationException("sibling is not available here") + override def sibling(of: String, path: String) = throw new UnsupportedOperationException( + "sibling is not available here" + ) override def isAbsolute(path: String) = false override def relativeToHostCallRoot(path: String) = path override def name(path: String) = throw new UnsupportedOperationException("name is not available here") diff --git a/wom/src/main/scala/wom/expression/WomExpression.scala b/wom/src/main/scala/wom/expression/WomExpression.scala index 50b66d57ebd..db950e654a4 100644 --- a/wom/src/main/scala/wom/expression/WomExpression.scala +++ b/wom/src/main/scala/wom/expression/WomExpression.scala @@ -30,7 +30,10 @@ trait WomExpression { def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] - def evaluateFiles(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] + def evaluateFiles(inputValues: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] /** Returns `true` if all file types within the specified `WomType` are optional. If not all the file types are * optional, return `false` since the current file evaluation structure doesn't allow for mapping individual @@ -39,10 +42,12 @@ trait WomExpression { def innerAreAllFileTypesInWomTypeOptional(womType: WomType): Boolean = womType match { case WomOptionalType(_: WomPrimitiveFileType) => true case _: WomPrimitiveFileType => false - case _: WomPrimitiveType => true // WomPairTypes and WomCompositeTypes may have non-File components here which is fine. + case _: WomPrimitiveType => + true // WomPairTypes and WomCompositeTypes may have non-File components here which is fine. 
case WomArrayType(inner) => innerAreAllFileTypesInWomTypeOptional(inner) case WomMapType(_, inner) => innerAreAllFileTypesInWomTypeOptional(inner) - case WomPairType(leftType, rightType) => innerAreAllFileTypesInWomTypeOptional(leftType) && innerAreAllFileTypesInWomTypeOptional(rightType) + case WomPairType(leftType, rightType) => + innerAreAllFileTypesInWomTypeOptional(leftType) && innerAreAllFileTypesInWomTypeOptional(rightType) case WomCompositeType(typeMap, _) => typeMap.values.forall(innerAreAllFileTypesInWomTypeOptional) case _ => false } @@ -60,9 +65,13 @@ trait WomExpression { */ final case class ValueAsAnExpression(value: WomValue) extends WomExpression { override def sourceString: String = value.valueString - override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = Valid(value) + override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = + Valid(value) override def evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = Valid(value.womType) - override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = Valid(Set.empty) + override def evaluateFiles(inputTypes: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = Valid(Set.empty) override val inputs: Set[String] = Set.empty } @@ -70,6 +79,7 @@ final case class ValueAsAnExpression(value: WomValue) extends WomExpression { * Functions only requiring path manipulation and NO I/O */ trait PathFunctionSet { + /** * Similar to java.nio.Path.resolveSibling with * of == a string representation of a java.nio.Path @@ -84,7 +94,8 @@ trait PathFunctionSet { /** * Similar to sibling only if "of" IS an absolute path and "other" IS NOT an absolute path, otherwise return other */ - def absoluteSibling(of: String, other: String): String = if (isAbsolute(of) && !isAbsolute(other)) sibling(of, other) else other + def absoluteSibling(of: String, other: String): String = + if (isAbsolute(of) && !isAbsolute(other)) sibling(of, other) else other /** * If path is relative, prefix it with the _host_ call root. @@ -108,6 +119,7 @@ trait PathFunctionSet { } object IoFunctionSet { + /** * Simple wrapper class providing information on whether a path is a File or a Directory * Avoids repeated calls to isDirectory. 
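
The `innerAreAllFileTypesInWomTypeOptional` match reformatted above encodes a recursive rule: a `WomType` passes only if every file type reachable inside it is wrapped in an optional, so a single required file anywhere forces `false`. The sketch below restates that rule as a standalone function so the expected results can be spot-checked in a REPL; it is illustrative only, assumes the standard `wom.types` constructors (`WomSingleFileType`, `WomIntegerType`), and is not part of this patch.

```scala
import wom.types._

// Sketch of the recursive optionality rule from WomExpression above;
// the method in this patch remains the authoritative version.
def allFileTypesOptional(womType: WomType): Boolean = womType match {
  case WomOptionalType(_: WomPrimitiveFileType) => true // an optional file is acceptable
  case _: WomPrimitiveFileType => false // a bare, required file type is not
  case _: WomPrimitiveType => true // non-file primitives are irrelevant here
  case WomArrayType(inner) => allFileTypesOptional(inner)
  case WomMapType(_, inner) => allFileTypesOptional(inner)
  case WomPairType(left, right) => allFileTypesOptional(left) && allFileTypesOptional(right)
  case WomCompositeType(typeMap, _) => typeMap.values.forall(allFileTypesOptional)
  case _ => false
}

allFileTypesOptional(WomOptionalType(WomSingleFileType)) // true
allFileTypesOptional(WomArrayType(WomSingleFileType)) // false: required file inside the array
allFileTypesOptional(WomPairType(WomIntegerType, WomOptionalType(WomSingleFileType))) // true
```
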
@@ -187,7 +199,7 @@ trait IoFunctionSet { * To map/flatMap over IO results */ implicit def ec: ExecutionContext - + implicit def cs = IO.contextShift(ec) /** diff --git a/wom/src/main/scala/wom/format/MemorySize.scala b/wom/src/main/scala/wom/format/MemorySize.scala index c9ac793f661..22934ae7cbf 100644 --- a/wom/src/main/scala/wom/format/MemorySize.scala +++ b/wom/src/main/scala/wom/format/MemorySize.scala @@ -11,11 +11,10 @@ import wdl4s.parser.MemoryUnit import scala.language.postfixOps import scala.util.{Failure, Success, Try} - object MemorySize { val memoryPattern = """(\d+(?:\.\d+)?)\s*(\w+)""".r - def parse(unparsed: String): Try[MemorySize] = { + def parse(unparsed: String): Try[MemorySize] = unparsed match { case memoryPattern(amountString, unitString) => val amount: ErrorOr[Double] = amountString.parseDouble leftMap { @@ -31,9 +30,11 @@ object MemorySize { case Valid(memorySize) => Success(memorySize) case Invalid(nel) => Failure(new UnsupportedOperationException(nel.toList.mkString("\n"))) } - case _ => Failure(new UnsupportedOperationException(s"$unparsed should be of the form 'X Unit' where X is a number, e.g. 8 GB")) + case _ => + Failure( + new UnsupportedOperationException(s"$unparsed should be of the form 'X Unit' where X is a number, e.g. 8 GB") + ) } - } } case class MemorySize(amount: Double, unit: MemoryUnit) { diff --git a/wom/src/main/scala/wom/graph/CallNode.scala b/wom/src/main/scala/wom/graph/CallNode.scala index 088b242af1c..2b67229a9fe 100644 --- a/wom/src/main/scala/wom/graph/CallNode.scala +++ b/wom/src/main/scala/wom/graph/CallNode.scala @@ -38,44 +38,57 @@ sealed abstract class CallNode extends GraphNode { val sourceLocation: Option[SourceFileLocation] } -final case class ExpressionCallNode private(override val identifier: WomIdentifier, - callable: ExpressionTaskDefinition, - override val inputPorts: Set[GraphNodePort.InputPort], - inputDefinitionMappings: InputDefinitionMappings, - outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier, - override val sourceLocation : Option[SourceFileLocation]) extends CallNode with ExpressionNodeLike { +final case class ExpressionCallNode private ( + override val identifier: WomIdentifier, + callable: ExpressionTaskDefinition, + override val inputPorts: Set[GraphNodePort.InputPort], + inputDefinitionMappings: InputDefinitionMappings, + outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier, + override val sourceLocation: Option[SourceFileLocation] +) extends CallNode + with ExpressionNodeLike { val callType: String = "expression task" - lazy val expressionBasedOutputPorts: List[ExpressionBasedOutputPort] = { - callable.outputs.map(o => ExpressionBasedOutputPort(outputIdentifierCompoundingFunction(identifier, o.localName.value), o.womType, this, o.expression)) - } + lazy val expressionBasedOutputPorts: List[ExpressionBasedOutputPort] = + callable.outputs.map(o => + ExpressionBasedOutputPort(outputIdentifierCompoundingFunction(identifier, o.localName.value), + o.womType, + this, + o.expression + ) + ) override lazy val outputPorts: Set[OutputPort] = expressionBasedOutputPorts.toSet[OutputPort] - override def evaluate(outputPortLookup: OutputPort => ErrorOr[WomValue], ioFunctionSet: IoFunctionSet) = { + override def evaluate(outputPortLookup: OutputPort => ErrorOr[WomValue], ioFunctionSet: IoFunctionSet) = for { // Evaluate the inputs to get a lookup to evaluate the actual expression womEvaluatedInputs <- CallNode.resolveAndEvaluateInputs(this, ioFunctionSet, 
outputPortLookup).toEither // Make a usable lookup - lookup = womEvaluatedInputs.map({ case (inputDefinition, value) => inputDefinition.name -> value }) + lookup = womEvaluatedInputs.map { case (inputDefinition, value) => inputDefinition.name -> value } // Evaluate the expression evaluated <- callable.evaluate(lookup, ioFunctionSet, expressionBasedOutputPorts) } yield evaluated - } override val nonInputBasedPrerequisites = Set.empty } -final case class CommandCallNode private(override val identifier: WomIdentifier, - callable: CommandTaskDefinition, - override val inputPorts: Set[GraphNodePort.InputPort], - inputDefinitionMappings: InputDefinitionMappings, - override val nonInputBasedPrerequisites: Set[GraphNode], - outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier, - override val sourceLocation : Option[SourceFileLocation]) extends CallNode { +final case class CommandCallNode private (override val identifier: WomIdentifier, + callable: CommandTaskDefinition, + override val inputPorts: Set[GraphNodePort.InputPort], + inputDefinitionMappings: InputDefinitionMappings, + override val nonInputBasedPrerequisites: Set[GraphNode], + outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier, + override val sourceLocation: Option[SourceFileLocation] +) extends CallNode { val callType: String = "task" - lazy val expressionBasedOutputPorts: List[ExpressionBasedOutputPort] = { - callable.outputs.map(o => ExpressionBasedOutputPort(outputIdentifierCompoundingFunction(identifier, o.localName.value), o.womType, this, o.expression)) - } + lazy val expressionBasedOutputPorts: List[ExpressionBasedOutputPort] = + callable.outputs.map(o => + ExpressionBasedOutputPort(outputIdentifierCompoundingFunction(identifier, o.localName.value), + o.womType, + this, + o.expression + ) + ) override lazy val outputPorts: Set[OutputPort] = expressionBasedOutputPorts.toSet[OutputPort] @@ -83,27 +96,30 @@ final case class CommandCallNode private(override val identifier: WomIdentifier, * Evaluate outputs using the custom evaluation function of the task definition. * An empty return value means the engine should fall back to its default evaluation method. 
*/ - def customOutputEvaluation(inputs: Map[String, WomValue], ioFunctionSet: IoFunctionSet, executionContext: ExecutionContext): OutputFunctionResponse = { + def customOutputEvaluation(inputs: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + executionContext: ExecutionContext + ): OutputFunctionResponse = callable.customizedOutputEvaluation(outputPorts, inputs, ioFunctionSet, executionContext) - } } -final case class WorkflowCallNode private (override val identifier: WomIdentifier, - callable: WorkflowDefinition, - override val inputPorts: Set[GraphNodePort.InputPort], - inputDefinitionMappings: InputDefinitionMappings, - override val nonInputBasedPrerequisites: Set[GraphNode], - outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier, - override val sourceLocation: Option[SourceFileLocation]) extends CallNode { +final case class WorkflowCallNode private ( + override val identifier: WomIdentifier, + callable: WorkflowDefinition, + override val inputPorts: Set[GraphNodePort.InputPort], + inputDefinitionMappings: InputDefinitionMappings, + override val nonInputBasedPrerequisites: Set[GraphNode], + outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier, + override val sourceLocation: Option[SourceFileLocation] +) extends CallNode { val callType: String = "workflow" - val subworkflowCallOutputPorts: Set[SubworkflowCallOutputPort] = { - callable.innerGraph.nodes.collect { case gon: GraphOutputNode => SubworkflowCallOutputPort(outputIdentifierCompoundingFunction(identifier, gon.localName), gon, this) } - } + val subworkflowCallOutputPorts: Set[SubworkflowCallOutputPort] = + callable.innerGraph.nodes.collect { case gon: GraphOutputNode => + SubworkflowCallOutputPort(outputIdentifierCompoundingFunction(identifier, gon.localName), gon, this) + } override val outputPorts: Set[OutputPort] = subworkflowCallOutputPorts.toSet[OutputPort] } - - object TaskCall { def graphFromDefinition(taskDefinition: TaskDefinition): ErrorOr[Graph] = { val taskDefinitionLocalName = LocalName(taskDefinition.name) @@ -119,12 +135,14 @@ object TaskCall { val callNodeBuilder = new CallNodeBuilder() - val inputDefinitionFold = taskDefinition.inputs.foldMap({ inputDef => - { + val inputDefinitionFold = taskDefinition.inputs.foldMap { inputDef => val newNode: Option[ExternalGraphInputNode] = inputDef match { - case RequiredInputDefinition(name, womType, valueMapper, _) => Some(RequiredGraphInputNode(identifier(name), womType, name.value, valueMapper)) - case OverridableInputDefinitionWithDefault(name, womType, default, valueMapper, _) => Some(OptionalGraphInputNodeWithDefault(identifier(name), womType, default, name.value, valueMapper)) - case OptionalInputDefinition(name, womType, valueMapper, _) => Some(OptionalGraphInputNode(identifier(name), womType, name.value, valueMapper)) + case RequiredInputDefinition(name, womType, valueMapper, _) => + Some(RequiredGraphInputNode(identifier(name), womType, name.value, valueMapper)) + case OverridableInputDefinitionWithDefault(name, womType, default, valueMapper, _) => + Some(OptionalGraphInputNodeWithDefault(identifier(name), womType, default, name.value, valueMapper)) + case OptionalInputDefinition(name, womType, valueMapper, _) => + Some(OptionalGraphInputNode(identifier(name), womType, name.value, valueMapper)) case _: FixedInputDefinitionWithDefault => None } @@ -138,15 +156,26 @@ object TaskCall { case None => InputDefinitionFold() // No-op } - } - })(inputDefinitionFoldMonoid) + }(inputDefinitionFoldMonoid) val 
uniqueIdentifier = WomIdentifier(taskDefinition.name) - val callWithInputs = callNodeBuilder.build(uniqueIdentifier, taskDefinition, inputDefinitionFold, Set.empty, taskDefinition.sourceLocation) - - val outputNodes: ErrorOr[Seq[GraphOutputNode]] = callWithInputs.node.outputPorts.map(output => PortBasedGraphOutputNode( - identifier(LocalName(output.internalName)), output.womType, output - )).toList.validNel + val callWithInputs = callNodeBuilder.build(uniqueIdentifier, + taskDefinition, + inputDefinitionFold, + Set.empty, + taskDefinition.sourceLocation + ) + + val outputNodes: ErrorOr[Seq[GraphOutputNode]] = callWithInputs.node.outputPorts + .map(output => + PortBasedGraphOutputNode( + identifier(LocalName(output.internalName)), + output.womType, + output + ) + ) + .toList + .validNel val result = for { outputs <- outputNodes callSet = Set[GraphNode](callWithInputs.node) @@ -162,30 +191,38 @@ object TaskCall { object CallNode { def resolveAndEvaluateInputs(callNode: CallNode, expressionLanguageFunctions: IoFunctionSet, - outputStoreLookup: OutputPort => ErrorOr[WomValue]): ErrorOr[WomEvaluatedCallInputs] = { - callNode.inputDefinitionMappings.foldLeft(Map.empty[InputDefinition, ErrorOr[WomValue]]) { - case (accumulatedInputsSoFar, (inputDefinition, pointer)) => - // We could have a commons method for this kind of "filtering valid values" - val validInputsAccumulated: Map[String, WomValue] = accumulatedInputsSoFar.collect({ - case (input, Valid(errorOrWdlValue)) => input.name -> errorOrWdlValue - }) - - val coercedValue = pointer.fold(InputPointerToWomValue).apply( - validInputsAccumulated, expressionLanguageFunctions, outputStoreLookup, inputDefinition - ) flatMap(inputDefinition.womType.coerceRawValue(_).toIOChecked) - - val contextualizedValue = coercedValue.contextualizeErrors(s"evaluate input '${inputDefinition.localName.value}'") - - accumulatedInputsSoFar + (inputDefinition -> contextualizedValue.toErrorOr) - }.sequence - } + outputStoreLookup: OutputPort => ErrorOr[WomValue] + ): ErrorOr[WomEvaluatedCallInputs] = + callNode.inputDefinitionMappings + .foldLeft(Map.empty[InputDefinition, ErrorOr[WomValue]]) { + case (accumulatedInputsSoFar, (inputDefinition, pointer)) => + // We could have a commons method for this kind of "filtering valid values" + val validInputsAccumulated: Map[String, WomValue] = accumulatedInputsSoFar.collect { + case (input, Valid(errorOrWdlValue)) => input.name -> errorOrWdlValue + } + + val coercedValue = pointer + .fold(InputPointerToWomValue) + .apply( + validInputsAccumulated, + expressionLanguageFunctions, + outputStoreLookup, + inputDefinition + ) flatMap (inputDefinition.womType.coerceRawValue(_).toIOChecked) + + val contextualizedValue = + coercedValue.contextualizeErrors(s"evaluate input '${inputDefinition.localName.value}'") + + accumulatedInputsSoFar + (inputDefinition -> contextualizedValue.toErrorOr) + } + .sequence /* A monoid can't be derived automatically for this class because it contains a Map[InputDefinition, InputDefinitionPointer], * and there's no monoid defined over InputDefinitionPointer */ implicit val inputDefinitionFoldMonoid = new Monoid[InputDefinitionFold] { override def empty: InputDefinitionFold = InputDefinitionFold() - override def combine(x: InputDefinitionFold, y: InputDefinitionFold): InputDefinitionFold = { + override def combine(x: InputDefinitionFold, y: InputDefinitionFold): InputDefinitionFold = InputDefinitionFold( mappings = x.mappings ++ y.mappings, callInputPorts = x.callInputPorts ++ y.callInputPorts, @@ -193,14 
+230,14 @@ object CallNode { newExpressionNodes = x.newExpressionNodes ++ y.newExpressionNodes, usedOuterGraphInputNodes = x.usedOuterGraphInputNodes ++ y.usedOuterGraphInputNodes ) - } } final case class InputDefinitionFold(mappings: InputDefinitionMappings = List.empty, callInputPorts: Set[InputPort] = Set.empty, newGraphInputNodes: Set[ExternalGraphInputNode] = Set.empty, newExpressionNodes: Set[ExpressionNode] = Set.empty, - usedOuterGraphInputNodes: Set[OuterGraphInputNode] = Set.empty) + usedOuterGraphInputNodes: Set[OuterGraphInputNode] = Set.empty + ) type InputDefinitionPointer = OutputPort :+: WomExpression :+: WomValue :+: CNil // This is a List rather than Map because the order of 'InputDefinition's is important: @@ -209,7 +246,8 @@ object CallNode { final case class CallNodeAndNewNodes(node: CallNode, newInputs: Set[ExternalGraphInputNode], newExpressions: Set[ExpressionNode], - override val usedOuterGraphInputNodes: Set[OuterGraphInputNode]) extends GeneratedNodeAndNewNodes + override val usedOuterGraphInputNodes: Set[OuterGraphInputNode] + ) extends GeneratedNodeAndNewNodes /** * Don't use this directly; go via callWithInputs to make sure everything's in order when constructing a CallNode. @@ -220,10 +258,34 @@ object CallNode { inputDefinitionMappings: InputDefinitionMappings, mustFollow: Set[GraphNode], outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier, - sourceLocation : Option[SourceFileLocation]): CallNode = callable match { - case t: CommandTaskDefinition => CommandCallNode(nodeIdentifier, t, inputPorts, inputDefinitionMappings, mustFollow, outputIdentifierCompoundingFunction, sourceLocation) - case w: WorkflowDefinition => WorkflowCallNode(nodeIdentifier, w, inputPorts, inputDefinitionMappings, mustFollow, outputIdentifierCompoundingFunction, sourceLocation) - case w: ExpressionTaskDefinition => ExpressionCallNode(nodeIdentifier, w, inputPorts, inputDefinitionMappings, outputIdentifierCompoundingFunction, sourceLocation) + sourceLocation: Option[SourceFileLocation] + ): CallNode = callable match { + case t: CommandTaskDefinition => + CommandCallNode(nodeIdentifier, + t, + inputPorts, + inputDefinitionMappings, + mustFollow, + outputIdentifierCompoundingFunction, + sourceLocation + ) + case w: WorkflowDefinition => + WorkflowCallNode(nodeIdentifier, + w, + inputPorts, + inputDefinitionMappings, + mustFollow, + outputIdentifierCompoundingFunction, + sourceLocation + ) + case w: ExpressionTaskDefinition => + ExpressionCallNode(nodeIdentifier, + w, + inputPorts, + inputDefinitionMappings, + outputIdentifierCompoundingFunction, + sourceLocation + ) } /** @@ -237,24 +299,34 @@ object CallNode { * Makes an input port for this call. * Ensures that the port will contain the reference to the node when it gets created. 
*/ - def makeInputPort(inputDefinition: InputDefinition, outputPort: OutputPort) = { + def makeInputPort(inputDefinition: InputDefinition, outputPort: OutputPort) = ConnectedInputPort(inputDefinition.name, inputDefinition.womType, outputPort, graphNodeSetter.get) - } def build(nodeIdentifier: WomIdentifier, callable: Callable, inputDefinitionFold: InputDefinitionFold, mustFollow: Set[GraphNode], sourceLocation: Option[SourceFileLocation], - outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier = defaultOutputIdentifierCompounder - ): CallNodeAndNewNodes = { - val callNode = CallNode(nodeIdentifier, callable, inputDefinitionFold.callInputPorts, inputDefinitionFold.mappings, mustFollow, outputIdentifierCompoundingFunction, sourceLocation) + outputIdentifierCompoundingFunction: (WomIdentifier, String) => WomIdentifier = + defaultOutputIdentifierCompounder + ): CallNodeAndNewNodes = { + val callNode = CallNode(nodeIdentifier, + callable, + inputDefinitionFold.callInputPorts, + inputDefinitionFold.mappings, + mustFollow, + outputIdentifierCompoundingFunction, + sourceLocation + ) graphNodeSetter._graphNode = callNode - CallNodeAndNewNodes(callNode, inputDefinitionFold.newGraphInputNodes, inputDefinitionFold.newExpressionNodes, inputDefinitionFold.usedOuterGraphInputNodes) + CallNodeAndNewNodes(callNode, + inputDefinitionFold.newGraphInputNodes, + inputDefinitionFold.newExpressionNodes, + inputDefinitionFold.usedOuterGraphInputNodes + ) } - def defaultOutputIdentifierCompounder(callIdentifier: WomIdentifier, outputName: String): WomIdentifier = { + def defaultOutputIdentifierCompounder(callIdentifier: WomIdentifier, outputName: String): WomIdentifier = callIdentifier.combine(outputName) - } } } diff --git a/wom/src/main/scala/wom/graph/ConditionalNode.scala b/wom/src/main/scala/wom/graph/ConditionalNode.scala index 7faa2c38189..cabf64ff45f 100644 --- a/wom/src/main/scala/wom/graph/ConditionalNode.scala +++ b/wom/src/main/scala/wom/graph/ConditionalNode.scala @@ -13,23 +13,29 @@ import common.collections.EnhancedCollections._ * @param conditionExpression The (boolean) expression on which the conditional is predicated. * @param conditionalOutputPorts Output ports for the conditional node which link back to GraphOutputNodes of the inner graph. 
*/ -final case class ConditionalNode private(override val innerGraph: Graph, - conditionExpression: ExpressionNode, - conditionalOutputPorts: Set[ConditionalOutputPort]) extends GraphNode with GraphNodeWithInnerGraph { +final case class ConditionalNode private (override val innerGraph: Graph, + conditionExpression: ExpressionNode, + conditionalOutputPorts: Set[ConditionalOutputPort] +) extends GraphNode + with GraphNodeWithInnerGraph { override val identifier: WomIdentifier = WomIdentifier("ConditionalNode") - override val inputPorts: Set[InputPort] = Set(ConnectedInputPort("condition", WomBooleanType, conditionExpression.singleOutputPort, _ => this)) + override val inputPorts: Set[InputPort] = Set( + ConnectedInputPort("condition", WomBooleanType, conditionExpression.singleOutputPort, _ => this) + ) override val outputPorts: Set[GraphNodePort.OutputPort] = conditionalOutputPorts.toSet[OutputPort] } -object ConditionalNode { +object ConditionalNode { final case class ConditionalNodeWithNewNodes(node: ConditionalNode) extends GeneratedNodeAndNewNodes { override val newInputs = node.innerGraph.externalInputNodes override val usedOuterGraphInputNodes = (node.conditionExpression.upstream.filterByType[OuterGraphInputNode]: Set[OuterGraphInputNode]) ++ - (node.innerGraph.outerGraphInputNodes.map(_.linkToOuterGraphNode).filterByType[OuterGraphInputNode]: Set[OuterGraphInputNode]) + (node.innerGraph.outerGraphInputNodes + .map(_.linkToOuterGraphNode) + .filterByType[OuterGraphInputNode]: Set[OuterGraphInputNode]) override val newExpressions = Set(node.conditionExpression) } diff --git a/wom/src/main/scala/wom/graph/Graph.scala b/wom/src/main/scala/wom/graph/Graph.scala index 8ebf4e9f20e..b5c08ac5e4e 100644 --- a/wom/src/main/scala/wom/graph/Graph.scala +++ b/wom/src/main/scala/wom/graph/Graph.scala @@ -53,19 +53,26 @@ object Graph { def upstreamNodeInGraph(port: InputPort): ErrorOr[Unit] = { val upstreamOutputPort = port.upstream - boolToErrorOr(nodes.exists(_ eq upstreamOutputPort.graphNode), s"The input link ${port.name} on ${port.graphNode.localName} is linked to a node outside the graph set (${upstreamOutputPort.name})") + boolToErrorOr( + nodes.exists(_ eq upstreamOutputPort.graphNode), + s"The input link ${port.name} on ${port.graphNode.localName} is linked to a node outside the graph set (${upstreamOutputPort.name})" + ) } - def portProperlyEmbedded(port: GraphNodePort, portFinder: GraphNode => Set[_ <: GraphNodePort]): ErrorOr[Unit] = { - boolToErrorOr(portFinder(port.graphNode).exists(_ eq port), s"The port ${port.name} thinks it belongs to a Node (${port.graphNode}), but that Node doesn't think it owns it.") - } + def portProperlyEmbedded(port: GraphNodePort, portFinder: GraphNode => Set[_ <: GraphNodePort]): ErrorOr[Unit] = + boolToErrorOr( + portFinder(port.graphNode).exists(_ eq port), + s"The port ${port.name} thinks it belongs to a Node (${port.graphNode}), but that Node doesn't think it owns it." 
+ ) - def outerGraphInputNodePointsHere(ogin: OuterGraphInputNode): ErrorOr[Unit] = { - boolToErrorOr(nodes.contains(ogin.linkToOuterGraph.graphNode), s"The OuterGraphInputNode ${ogin.identifier} does not link into the outer graph") - } + def outerGraphInputNodePointsHere(ogin: OuterGraphInputNode): ErrorOr[Unit] = + boolToErrorOr(nodes.contains(ogin.linkToOuterGraph.graphNode), + s"The OuterGraphInputNode ${ogin.identifier} does not link into the outer graph" + ) def validateInnerNodes(node: GraphNode): ErrorOr[Unit] = node match { - case g: GraphNodeWithInnerGraph => g.innerGraph.nodes.filterByType[OuterGraphInputNode].toList.traverse(outerGraphInputNodePointsHere).void + case g: GraphNodeWithInnerGraph => + g.innerGraph.nodes.filterByType[OuterGraphInputNode].toList.traverse(outerGraphInputNodePointsHere).void case _ => ().validNel } @@ -77,26 +84,29 @@ object Graph { (upstreamNodeValidation, inputPortEmbeddedValidation, upstreamPortEmbeddedValidation).tupled.void } - def validateNode(node: GraphNode): ErrorOr[Unit] = { + def validateNode(node: GraphNode): ErrorOr[Unit] = (node.inputPorts.toList.traverse(goodLink), validateInnerNodes(node)).tupled.void - } // from https://stackoverflow.com/a/24729587/1498572 def fqnUniqueness: ErrorOr[Unit] = nodes - .collect({ + .collect { case callNode: CallNode => callNode case gin: GraphInputNode => gin case gon: GraphOutputNode => gon - }) + } .toList // Important since nodes is a Set, we don't want duplicates to disappear automatically when mapping to FQN .map(_.identifier.fullyQualifiedName) .groupBy(identity) - .collect({ + .collect { case (fqn, list) if list.lengthCompare(1) > 0 => fqn - }).toList match { + } + .toList match { case Nil => ().validNel case head :: tail => - NonEmptyList.of(head, tail: _*).map(fqn => s"Two or more nodes have the same FullyQualifiedName: ${fqn.value}").invalid + NonEmptyList + .of(head, tail: _*) + .map(fqn => s"Two or more nodes have the same FullyQualifiedName: ${fqn.value}") + .invalid } (fqnUniqueness, nodes.toList.traverse(validateNode)) mapN { case (_, _) => diff --git a/wom/src/main/scala/wom/graph/GraphInputNode.scala b/wom/src/main/scala/wom/graph/GraphInputNode.scala index 211f7ae2202..d6faaa31bf8 100644 --- a/wom/src/main/scala/wom/graph/GraphInputNode.scala +++ b/wom/src/main/scala/wom/graph/GraphInputNode.scala @@ -16,6 +16,7 @@ sealed trait GraphInputNode extends GraphNodeWithSingleOutputPort { } sealed trait ExternalGraphInputNode extends GraphInputNode { + /** * The fully qualified name should be the same as the one we expect the key in the input file to have. 
* e.g in WDL: @@ -46,37 +47,49 @@ sealed trait ExternalGraphInputNode extends GraphInputNode { * Key that should be looked for in the input set to satisfy this EGIN */ def nameInInputSet: String - + def valueMapper: InputValueMapper } final case class RequiredGraphInputNode(override val identifier: WomIdentifier, womType: WomType, nameInInputSet: String, - valueMapper: InputValueMapper = Callable.InputDefinition.IdentityValueMapper) extends ExternalGraphInputNode + valueMapper: InputValueMapper = Callable.InputDefinition.IdentityValueMapper +) extends ExternalGraphInputNode final case class OptionalGraphInputNode(override val identifier: WomIdentifier, womType: WomOptionalType, nameInInputSet: String, - valueMapper: InputValueMapper = Callable.InputDefinition.IdentityValueMapper) extends ExternalGraphInputNode + valueMapper: InputValueMapper = Callable.InputDefinition.IdentityValueMapper +) extends ExternalGraphInputNode // If we want to allow defaults to be "complex" expressions with dependencies we may need to make it an InstantiatedExpression here instead final case class OptionalGraphInputNodeWithDefault(override val identifier: WomIdentifier, womType: WomType, default: WomExpression, nameInInputSet: String, - valueMapper: InputValueMapper = Callable.InputDefinition.IdentityValueMapper) extends ExternalGraphInputNode + valueMapper: InputValueMapper = + Callable.InputDefinition.IdentityValueMapper +) extends ExternalGraphInputNode object OuterGraphInputNode { - def apply(forIdentifier: WomIdentifier, linkToOuterGraph: GraphNodePort.OutputPort, preserveScatterIndex: Boolean): OuterGraphInputNode = { - new OuterGraphInputNode(forIdentifier.copy(fullyQualifiedName = forIdentifier.fullyQualifiedName.prefixWith("^")), linkToOuterGraph, preserveScatterIndex) - } + def apply(forIdentifier: WomIdentifier, + linkToOuterGraph: GraphNodePort.OutputPort, + preserveScatterIndex: Boolean + ): OuterGraphInputNode = + new OuterGraphInputNode(forIdentifier.copy(fullyQualifiedName = forIdentifier.fullyQualifiedName.prefixWith("^")), + linkToOuterGraph, + preserveScatterIndex + ) } /** * Used to represent an input to any GraphNode's inner graph which is a link to a value somewhere in the outer graph. */ -class OuterGraphInputNode protected(override val identifier: WomIdentifier, val linkToOuterGraph: GraphNodePort.OutputPort, val preserveScatterIndex: Boolean) extends GraphInputNode { +class OuterGraphInputNode protected (override val identifier: WomIdentifier, + val linkToOuterGraph: GraphNodePort.OutputPort, + val preserveScatterIndex: Boolean +) extends GraphInputNode { override def womType: WomType = linkToOuterGraph.womType override lazy val singleOutputPort: GraphNodeOutputPort = GraphNodeOutputPort(identifier, womType, this) lazy val linkToOuterGraphNode = linkToOuterGraph.graphNode @@ -92,58 +105,59 @@ class OuterGraphInputNode protected(override val identifier: WomIdentifier, val */ final case class ScatterVariableNode(override val identifier: WomIdentifier, scatterExpressionNode: ExpressionNode, - override val womType: WomType) extends OuterGraphInputNode(identifier, scatterExpressionNode.singleOutputPort, preserveScatterIndex = true) { + override val womType: WomType +) extends OuterGraphInputNode(identifier, scatterExpressionNode.singleOutputPort, preserveScatterIndex = true) { /* - * This is the key element of the indexForShard function. - * Here is an example: - * Let's say we're scattering over 3 arrays of size 2, 3, and 2 respectively, using a cross product. 
- * We will then have 2 * 3 * 2 = 12 shards - * - * The possible combinations of input indices are - * - * Shard #0: (0, 0, 0) - * Shard #1: (0, 0, 1) - * Shard #2: (0, 1, 0) - * Shard #3: (0, 1, 1) - * Shard #4: (0, 2, 0) - * Shard #5: (0, 2, 1) - * Shard #6: (1, 0, 0) - * Shard #7: (1, 0, 1) - * Shard #8: (1, 1, 0) - * Shard #9: (1, 1, 1) - * Shard #10: (1, 2, 0) - * Shard #11: (1, 2, 1) - * - * We will also have 3 SVNs, one for each array. - * Each SVN needs to output one column of the above "matrix", based on the shard number - * - * For SVN 1: For SVN 2: For SVN 3: - * 0 -> 0 0 -> 0 0 -> 0 - * 1 -> 0 1 -> 0 1 -> 1 - * 2 -> 0 2 -> 1 2 -> 0 - * 3 -> 0 3 -> 1 3 -> 1 - * 4 -> 0 4 -> 2 4 -> 0 - * 5 -> 0 5 -> 2 5 -> 1 - * 6 -> 1 6 -> 0 6 -> 0 - * 7 -> 1 7 -> 0 7 -> 1 - * 8 -> 1 8 -> 1 8 -> 0 - * 9 -> 1 9 -> 1 9 -> 1 - * 10 -> 1 10 -> 2 10 -> 0 - * 11 -> 1 11 -> 2 11 -> 1 - * - * shard / 6 (shard / 2) % 3 shard % 2 - * - * This can be generalized to (shard / relativeIndexLength) % arraySize - * where - * - shard is the shard index as an Int - * - relativeIndexLength is the number of possible combinations for the arrays on the "right" of this variable - * - arraySize is the size of the array for this variable - * - * Having a var here is not great as it makes the node mutable, the alternative would be to store this information outside of - * the node and make it an argument to indexForShard. + * This is the key element of the indexForShard function. + * Here is an example: + * Let's say we're scattering over 3 arrays of size 2, 3, and 2 respectively, using a cross product. + * We will then have 2 * 3 * 2 = 12 shards + * + * The possible combinations of input indices are + * + * Shard #0: (0, 0, 0) + * Shard #1: (0, 0, 1) + * Shard #2: (0, 1, 0) + * Shard #3: (0, 1, 1) + * Shard #4: (0, 2, 0) + * Shard #5: (0, 2, 1) + * Shard #6: (1, 0, 0) + * Shard #7: (1, 0, 1) + * Shard #8: (1, 1, 0) + * Shard #9: (1, 1, 1) + * Shard #10: (1, 2, 0) + * Shard #11: (1, 2, 1) + * + * We will also have 3 SVNs, one for each array. + * Each SVN needs to output one column of the above "matrix", based on the shard number + * + * For SVN 1: For SVN 2: For SVN 3: + * 0 -> 0 0 -> 0 0 -> 0 + * 1 -> 0 1 -> 0 1 -> 1 + * 2 -> 0 2 -> 1 2 -> 0 + * 3 -> 0 3 -> 1 3 -> 1 + * 4 -> 0 4 -> 2 4 -> 0 + * 5 -> 0 5 -> 2 5 -> 1 + * 6 -> 1 6 -> 0 6 -> 0 + * 7 -> 1 7 -> 0 7 -> 1 + * 8 -> 1 8 -> 1 8 -> 0 + * 9 -> 1 9 -> 1 9 -> 1 + * 10 -> 1 10 -> 2 10 -> 0 + * 11 -> 1 11 -> 2 11 -> 1 + * + * shard / 6 (shard / 2) % 3 shard % 2 + * + * This can be generalized to (shard / relativeIndexLength) % arraySize + * where + * - shard is the shard index as an Int + * - relativeIndexLength is the number of possible combinations for the arrays on the "right" of this variable + * - arraySize is the size of the array for this variable + * + * Having a var here is not great as it makes the node mutable, the alternative would be to store this information outside of + * the node and make it an argument to indexForShard. */ private var _relativeIndexLength: Int = 1 - + def relativeIndexLength = _relativeIndexLength def withRelativeIndexLength(relativeIndexLength: Int) = this._relativeIndexLength = relativeIndexLength @@ -152,7 +166,6 @@ final case class ScatterVariableNode(override val identifier: WomIdentifier, * Given a shard number, and the total size of the array, returns at which index in the array the value is located. */ // Maybe this whole function should not be hardcoded here but rather defined by the language.. 
So far it works for all the use cases we have - def indexForShard(shardIndex: Int, arraySize: Int) = { + def indexForShard(shardIndex: Int, arraySize: Int) = (shardIndex / _relativeIndexLength) % arraySize - } } diff --git a/wom/src/main/scala/wom/graph/GraphNode.scala b/wom/src/main/scala/wom/graph/GraphNode.scala index c6be2041097..bd058143bb5 100644 --- a/wom/src/main/scala/wom/graph/GraphNode.scala +++ b/wom/src/main/scala/wom/graph/GraphNode.scala @@ -38,12 +38,11 @@ trait GraphNode { val completionPort = NodeCompletionPort(_ => this) - def outputByName(name: String): ErrorOr[GraphNodePort.OutputPort] = { + def outputByName(name: String): ErrorOr[GraphNodePort.OutputPort] = outputPorts.find(_.name == name) match { case Some(port) => port.validNel case None => s"No such output: $name".invalidNel } - } /** * The set of all graph nodes which are (transitively) upstream from this one. @@ -76,7 +75,9 @@ object GraphNode { def inputPortNamesMatch(required: Set[InputPort], provided: Set[InputPort]): ErrorOr[Unit] = { def requiredInputFound(r: InputPort): ErrorOr[Unit] = provided.find(_.name == r.name) match { - case Some(p) => if (r.womType.isCoerceableFrom(p.womType)) ().validNel else s"Cannot link a ${p.womType.stableName} to the input ${r.name}: ${r.womType}".invalidNel + case Some(p) => + if (r.womType.isCoerceableFrom(p.womType)) ().validNel + else s"Cannot link a ${p.womType.stableName} to the input ${r.name}: ${r.womType}".invalidNel case None => s"The required input ${r.name}: ${r.womType.stableName} was not provided.".invalidNel } @@ -93,7 +94,8 @@ object GraphNode { def get: Unit => A = _ => getGraphNode } - private[wom] implicit class EnhancedGraphNodeSet(val nodes: Set[GraphNode]) extends AnyVal { + implicit private[wom] class EnhancedGraphNodeSet(val nodes: Set[GraphNode]) extends AnyVal { + /** * Interpret this graph's "GraphInputNode"s as "Callable.InputDefinition"s */ @@ -103,12 +105,16 @@ object GraphNode { case optional: OptionalGraphInputNode => OptionalInputDefinition(optional.identifier.localName, optional.womType) case withDefault: OptionalGraphInputNodeWithDefault => - OverridableInputDefinitionWithDefault(withDefault.identifier.localName, withDefault.womType, withDefault.default) + OverridableInputDefinitionWithDefault(withDefault.identifier.localName, + withDefault.womType, + withDefault.default + ) } def outputDefinitions: Set[_ <: Callable.OutputDefinition] = nodes collect { // TODO: FIXME: Do something for PortBasedGraphOutputNodes - case gin: ExpressionBasedGraphOutputNode => OutputDefinition(gin.identifier.localName, gin.womType, gin.womExpression) + case gin: ExpressionBasedGraphOutputNode => + OutputDefinition(gin.identifier.localName, gin.womType, gin.womExpression) } } diff --git a/wom/src/main/scala/wom/graph/GraphNodeInputExpression.scala b/wom/src/main/scala/wom/graph/GraphNodeInputExpression.scala index 4efbac93783..af5f3d15159 100644 --- a/wom/src/main/scala/wom/graph/GraphNodeInputExpression.scala +++ b/wom/src/main/scala/wom/graph/GraphNodeInputExpression.scala @@ -11,7 +11,12 @@ import wom.types.WomType * - This one remembers which input the expression is being assigned to. * - InstantiatedExpression has created InputPorts for the expression inputs. This one only has references to OutputPorts. 
*/ -case class GraphNodeInputExpression(inputName: String, expression: WomExpression, inputMapping: Map[String, OutputPort]) { +case class GraphNodeInputExpression(inputName: String, + expression: WomExpression, + inputMapping: Map[String, OutputPort] +) { - private[graph] lazy val evaluateType: ErrorOr[WomType] = expression.evaluateType(inputMapping.map { case (name, port) => (name, port.womType) }) + private[graph] lazy val evaluateType: ErrorOr[WomType] = expression.evaluateType(inputMapping.map { + case (name, port) => (name, port.womType) + }) } diff --git a/wom/src/main/scala/wom/graph/GraphNodePort.scala b/wom/src/main/scala/wom/graph/GraphNodePort.scala index d2b2bb3a48f..83cd85d301a 100644 --- a/wom/src/main/scala/wom/graph/GraphNodePort.scala +++ b/wom/src/main/scala/wom/graph/GraphNodePort.scala @@ -46,25 +46,38 @@ object GraphNodePort { override lazy val graphNode = g.apply(()) } - final case class ConnectedInputPort(name: String, womType: WomType, upstream: OutputPort, g: Unit => GraphNode) extends InputPort with DelayedGraphNodePort + final case class ConnectedInputPort(name: String, womType: WomType, upstream: OutputPort, g: Unit => GraphNode) + extends InputPort + with DelayedGraphNodePort /** * For any graph node that uses a declarations to produce outputs (e.g. call, declaration): */ object GraphNodeOutputPort { - def apply(name: String, womType: WomType, graphNode: GraphNode): GraphNodeOutputPort = { - GraphNodeOutputPort(WomIdentifier(LocalName(name), graphNode.identifier.fullyQualifiedName.combine(name)), womType, graphNode) - } + def apply(name: String, womType: WomType, graphNode: GraphNode): GraphNodeOutputPort = + GraphNodeOutputPort(WomIdentifier(LocalName(name), graphNode.identifier.fullyQualifiedName.combine(name)), + womType, + graphNode + ) } - case class GraphNodeOutputPort(override val identifier: WomIdentifier, womType: WomType, graphNode: GraphNode) extends OutputPort + case class GraphNodeOutputPort(override val identifier: WomIdentifier, womType: WomType, graphNode: GraphNode) + extends OutputPort object ExpressionBasedOutputPort - case class ExpressionBasedOutputPort(override val identifier: WomIdentifier, womType: WomType, graphNode: GraphNode, expression: WomExpression) extends OutputPort + case class ExpressionBasedOutputPort(override val identifier: WomIdentifier, + womType: WomType, + graphNode: GraphNode, + expression: WomExpression + ) extends OutputPort /** * Represents the gathered output from a call/declaration in a ScatterNode. 
*/ - final case class ScatterGathererPort(womType: WomArrayType, outputToGather: PortBasedGraphOutputNode, g: Unit => GraphNode) extends OutputPort with DelayedGraphNodePort { + final case class ScatterGathererPort(womType: WomArrayType, + outputToGather: PortBasedGraphOutputNode, + g: Unit => GraphNode + ) extends OutputPort + with DelayedGraphNodePort { // Since this port just wraps a PortBasedGraphOutputNode which itself wraps an output port, we can re-use the same identifier override def identifier: WomIdentifier = outputToGather.identifier } @@ -72,7 +85,9 @@ object GraphNodePort { /** * Represents the conditional output from a call or declaration in a ConditionalNode */ - final case class ConditionalOutputPort(outputToExpose: PortBasedGraphOutputNode, g: Unit => ConditionalNode) extends OutputPort with DelayedGraphNodePort { + final case class ConditionalOutputPort(outputToExpose: PortBasedGraphOutputNode, g: Unit => ConditionalNode) + extends OutputPort + with DelayedGraphNodePort { // Since this port just wraps a PortBasedGraphOutputNode which itself wraps an output port, we can re-use the same identifier override def identifier: WomIdentifier = outputToExpose.identifier override val womType: WomOptionalType = WomOptionalType(outputToExpose.womType).flatOptionalType @@ -90,7 +105,10 @@ object GraphNodePort { /** * Represents an output port from a workflow call, based on the output that it exposes. */ - final case class SubworkflowCallOutputPort(identifier: WomIdentifier, outputToExpose: GraphOutputNode, workflowCallNode: WorkflowCallNode) extends OutputPort { + final case class SubworkflowCallOutputPort(identifier: WomIdentifier, + outputToExpose: GraphOutputNode, + workflowCallNode: WorkflowCallNode + ) extends OutputPort { override val womType: WomType = outputToExpose.womType override val graphNode: GraphNode = workflowCallNode } diff --git a/wom/src/main/scala/wom/graph/GraphNodeWithSingleOutputPort.scala b/wom/src/main/scala/wom/graph/GraphNodeWithSingleOutputPort.scala index 8f6858e0515..87a985efba8 100644 --- a/wom/src/main/scala/wom/graph/GraphNodeWithSingleOutputPort.scala +++ b/wom/src/main/scala/wom/graph/GraphNodeWithSingleOutputPort.scala @@ -6,6 +6,7 @@ import wom.graph.GraphNodePort.OutputPort trait GraphNodeWithSingleOutputPort extends GraphNode { def singleOutputPort: OutputPort + /** * Can be used to use this node as an InputDefinitionPointer */ diff --git a/wom/src/main/scala/wom/graph/GraphOutputNode.scala b/wom/src/main/scala/wom/graph/GraphOutputNode.scala index d6c921cd208..dbcf4037bd1 100644 --- a/wom/src/main/scala/wom/graph/GraphOutputNode.scala +++ b/wom/src/main/scala/wom/graph/GraphOutputNode.scala @@ -15,7 +15,8 @@ sealed trait GraphOutputNode extends GraphNode { /** * Exposes an existing output port as a graph output. 
*/ -final case class PortBasedGraphOutputNode(override val identifier: WomIdentifier, womType: WomType, source: OutputPort) extends GraphOutputNode { +final case class PortBasedGraphOutputNode(override val identifier: WomIdentifier, womType: WomType, source: OutputPort) + extends GraphOutputNode { val singleInputPort: InputPort = ConnectedInputPort(localName, womType, source, _ => this) lazy val singleUpstreamNode: GraphNode = singleInputPort.upstream.graphNode lazy val singleUpstreamPort: OutputPort = singleInputPort.upstream @@ -28,14 +29,15 @@ object ExpressionBasedGraphOutputNode { def fromInputMapping(identifier: WomIdentifier, expression: WomExpression, explicitWomType: WomType, - inputMapping: Map[String, OutputPort]): ErrorOr[ExpressionBasedGraphOutputNode] = { + inputMapping: Map[String, OutputPort] + ): ErrorOr[ExpressionBasedGraphOutputNode] = { // This constructor ignores the evaluated type and uses the explicit type instead def constructor(identifier: WomIdentifier, expression: WomExpression, evaluatedType: WomType, - inputPorts: Map[String, InputPort]) = { + inputPorts: Map[String, InputPort] + ) = new ExpressionNode(identifier, expression, explicitWomType, inputPorts) with ExpressionBasedGraphOutputNode - } buildFromConstructor(constructor)(identifier, expression, inputMapping) } } diff --git a/wom/src/main/scala/wom/graph/ScatterNode.scala b/wom/src/main/scala/wom/graph/ScatterNode.scala index 0240bc11d28..1c534a1a1df 100644 --- a/wom/src/main/scala/wom/graph/ScatterNode.scala +++ b/wom/src/main/scala/wom/graph/ScatterNode.scala @@ -16,11 +16,13 @@ import wom.values.{WomArray, WomValue} * @param scatterVariableNodes Inner graph nodes for each scatter collection expression being scattered over. WDL uses exactly one, CWL >= 1. * @param outputMapping Output ports for the scatter node, which also link back to GraphOutputNodes of the inner graph. */ -final case class ScatterNode private(override val innerGraph: Graph, - scatterVariableNodes: List[ScatterVariableNode], - outputMapping: Set[ScatterGathererPort], - scatterProcessingFunction: ScatterProcessingFunction, - scatterCollectionFunctionBuilder: ScatterCollectionFunctionBuilder) extends GraphNode with GraphNodeWithInnerGraph { +final case class ScatterNode private (override val innerGraph: Graph, + scatterVariableNodes: List[ScatterVariableNode], + outputMapping: Set[ScatterGathererPort], + scatterProcessingFunction: ScatterProcessingFunction, + scatterCollectionFunctionBuilder: ScatterCollectionFunctionBuilder +) extends GraphNode + with GraphNodeWithInnerGraph { override val identifier: WomIdentifier = WomIdentifier("ScatterNode") @@ -29,13 +31,14 @@ final case class ScatterNode private(override val innerGraph: Graph, // NB if you find yourself calling .filter on this set of inputPorts, you probably just wanted to access either // the scatterVariableMapping or otherInputPorts fields directly. 
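
The shard arithmetic documented on `ScatterVariableNode` earlier in this patch, `(shard / relativeIndexLength) % arraySize`, can be sanity-checked with a few lines of plain Scala. The sketch below re-derives the 2 x 3 x 2 table from that comment; the array sizes come from the comment itself, everything else is illustrative and has no wom dependencies.

```scala
// Re-derive the 12-shard cross-product table from the ScatterVariableNode
// comment for three scattered arrays of sizes 2, 3 and 2.
val sizes = List(2, 3, 2)

// relativeIndexLength of each variable = product of the array sizes to its right
val relativeIndexLengths = sizes.scanRight(1)(_ * _).tail // List(6, 2, 1)

def indexForShard(shardIndex: Int, relativeIndexLength: Int, arraySize: Int): Int =
  (shardIndex / relativeIndexLength) % arraySize

(0 until sizes.product).foreach { shard =>
  val indices = sizes.zip(relativeIndexLengths).map { case (size, rel) => indexForShard(shard, rel, size) }
  println(s"Shard #$shard: (${indices.mkString(", ")})")
}
// Prints Shard #0: (0, 0, 0) through Shard #11: (1, 2, 1), matching the table in the comment
```
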
-  override val inputPorts: Set[InputPort] = scatterCollectionExpressionNodes.toSet[ExpressionNode] map { scatterCollectionExpressionNode =>
-    ConnectedInputPort(
-      scatterCollectionExpressionNode.identifier.localName.value,
-      scatterCollectionExpressionNode.womType,
-      scatterCollectionExpressionNode.singleOutputPort,
-      _ => this
-    )
+  override val inputPorts: Set[InputPort] = scatterCollectionExpressionNodes.toSet[ExpressionNode] map {
+    scatterCollectionExpressionNode =>
+      ConnectedInputPort(
+        scatterCollectionExpressionNode.identifier.localName.value,
+        scatterCollectionExpressionNode.womType,
+        scatterCollectionExpressionNode.singleOutputPort,
+        _ => this
+      )
   }
 
   override val outputPorts: Set[GraphNodePort.OutputPort] = outputMapping.toSet[OutputPort]
@@ -44,21 +47,26 @@ object ScatterNode {
 
   case class ScatterVariableAndValue(scatterVariableNode: ScatterVariableNode, arrayValue: WomArray)
-  
+
   type ScatterProcessingFunction = List[ScatterVariableAndValue] => Checked[Int]
   // Use dot product as the default processing function (works well for single variable scatters too)
   val DefaultScatterProcessingFunction: ScatterProcessingFunction = { nodesAndValues: List[ScatterVariableAndValue] =>
     nodesAndValues.map(_.arrayValue.size).distinct match {
       case head :: Nil => head.validNelCheck
-      case _ => "All arrays must have the same number of element when using the dot product scatter method".invalidNelCheck
+      case _ =>
+        "All arrays must have the same number of elements when using the dot product scatter method".invalidNelCheck
     }
   }
-  
+
   type ScatterCollectionFunction = (List[WomValue], WomArrayType) => WomArray
-  val DefaultScatterCollectionFunction: ScatterCollectionFunction = { (shards: List[WomValue], valueType: WomArrayType) => WomArray(valueType, shards) }
-  
+  val DefaultScatterCollectionFunction: ScatterCollectionFunction = {
+    (shards: List[WomValue], valueType: WomArrayType) => WomArray(valueType, shards)
+  }
+
   type ScatterCollectionFunctionBuilder = List[Int] => ScatterCollectionFunction
-  val DefaultScatterCollectionFunctionBuilder: ScatterCollectionFunctionBuilder = { _: List[Int] => DefaultScatterCollectionFunction }
+  val DefaultScatterCollectionFunctionBuilder: ScatterCollectionFunctionBuilder = { _: List[Int] =>
+    DefaultScatterCollectionFunction
+  }
 
   /**
    * Maps from an InstantiatedExpression on the ScatterNode to the GraphInputNode in the innerGraph.
@@ -70,8 +78,13 @@ object ScatterNode {
     override val newExpressions: Set[ExpressionNode] = node.scatterCollectionExpressionNodes.toSet
     override val newInputs: Set[ExternalGraphInputNode] = node.innerGraph.externalInputNodes
     override val usedOuterGraphInputNodes: Set[OuterGraphInputNode] =
-      (node.scatterCollectionExpressionNodes.flatMap(_.upstream).toSet.filterByType[OuterGraphInputNode]: Set[OuterGraphInputNode]) ++
-        (node.innerGraph.outerGraphInputNodes.map(_.linkToOuterGraphNode).filterByType[OuterGraphInputNode]: Set[OuterGraphInputNode])
+      (node.scatterCollectionExpressionNodes
+        .flatMap(_.upstream)
+        .toSet
+        .filterByType[OuterGraphInputNode]: Set[OuterGraphInputNode]) ++
+        (node.innerGraph.outerGraphInputNodes
+          .map(_.linkToOuterGraphNode)
+          .filterByType[OuterGraphInputNode]: Set[OuterGraphInputNode])
   }
 
   /**
@@ -79,17 +92,18 @@ object ScatterNode {
    * Helps making input ports and building the node while making sure node references are set properly. 
diff --git a/wom/src/main/scala/wom/graph/WomIdentifier.scala b/wom/src/main/scala/wom/graph/WomIdentifier.scala
index 47ff39dbbab..f303c560f8d 100644
--- a/wom/src/main/scala/wom/graph/WomIdentifier.scala
+++ b/wom/src/main/scala/wom/graph/WomIdentifier.scala
@@ -25,14 +25,14 @@ case class FullyQualifiedName(value: String) {
 object WomIdentifier {
   def apply(localName: String): WomIdentifier = WomIdentifier(LocalName(localName), FullyQualifiedName(localName))
-  def apply(localName: String, fullyQualifiedName: String): WomIdentifier = WomIdentifier(LocalName(localName), FullyQualifiedName(fullyQualifiedName))
+  def apply(localName: String, fullyQualifiedName: String): WomIdentifier =
+    WomIdentifier(LocalName(localName), FullyQualifiedName(fullyQualifiedName))
 }
 
 case class WomIdentifier(localName: LocalName, fullyQualifiedName: FullyQualifiedName) {
   def combine(other: LocalName): WomIdentifier = combine(other.value)
-  def combine(other: String): WomIdentifier = {
+  def combine(other: String): WomIdentifier =
     WomIdentifier(localName.combineToLocalName(other), fullyQualifiedName.combine(other))
-  }
   def workflowLocalName: String = fullyQualifiedName.value.split("\\.") match {
     case fqn if fqn.length > 1 => fqn.tail.mkString(".")
     case lqn => lqn.head
diff --git a/wom/src/main/scala/wom/graph/expression/AnonymousExpressionNode.scala b/wom/src/main/scala/wom/graph/expression/AnonymousExpressionNode.scala
index bf67bda07ee..20efe665646 100644
--- a/wom/src/main/scala/wom/graph/expression/AnonymousExpressionNode.scala
+++ b/wom/src/main/scala/wom/graph/expression/AnonymousExpressionNode.scala
@@ -13,9 +13,9 @@ object AnonymousExpressionNode {
   def fromInputMapping[T <: AnonymousExpressionNode](identifier: WomIdentifier,
                                                      expression: WomExpression,
                                                      inputMapping: Map[String, OutputPort],
-                                                     constructor: AnonymousExpressionConstructor[T]): ErrorOr[T] = {
+                                                     constructor: AnonymousExpressionConstructor[T]
+  ): ErrorOr[T] =
     ExpressionNode.buildFromConstructor(constructor)(identifier, expression, inputMapping)
-  }
 }
 
 /**
@@ -26,14 +26,17 @@ trait AnonymousExpressionNode extends ExpressionNode
 case class PlainAnonymousExpressionNode(override val identifier: WomIdentifier,
                                         override val womExpression: WomExpression,
                                         override val womType: WomType,
-                                        override val inputMapping: Map[String, InputPort])
-    extends ExpressionNode(identifier, womExpression, womType, inputMapping) with AnonymousExpressionNode
+                                        override val inputMapping: Map[String, InputPort]
+) extends ExpressionNode(identifier, womExpression, womType, inputMapping)
+    with AnonymousExpressionNode
 
 case class TaskCallInputExpressionNode(override val identifier: WomIdentifier,
                                        override val womExpression: WomExpression,
                                        override val womType: WomType,
-                                       override val inputMapping: Map[String, InputPort])
-    extends ExpressionNode(identifier, womExpression, womType, inputMapping) with AnonymousExpressionNode {
+                                       override val inputMapping: Map[String, InputPort]
+) extends ExpressionNode(identifier, womExpression, womType, inputMapping)
+    with AnonymousExpressionNode {
+
   /**
    * The `GraphNodeSetter` that will have a `TaskCallNode` set into it. This is needed in the `WorkflowExecutionActor`
    * to be able to look up backend mapping for the target call in order to have the correct `IoFunctionSet` to
diff --git a/wom/src/main/scala/wom/graph/expression/ExposedExpressionNode.scala b/wom/src/main/scala/wom/graph/expression/ExposedExpressionNode.scala
index 7c88d40490a..8d2778107d3 100644
--- a/wom/src/main/scala/wom/graph/expression/ExposedExpressionNode.scala
+++ b/wom/src/main/scala/wom/graph/expression/ExposedExpressionNode.scala
@@ -10,14 +10,15 @@ object ExposedExpressionNode {
   def fromInputMapping(identifier: WomIdentifier,
                        expression: WomExpression,
                        explicitWomType: WomType,
-                       inputMapping: Map[String, OutputPort]): ErrorOr[ExposedExpressionNode] = {
+                       inputMapping: Map[String, OutputPort]
+  ): ErrorOr[ExposedExpressionNode] = {
     // This constructor ignores the evaluated type and uses the explicit type instead
     def constructor(identifier: WomIdentifier,
                     expression: WomExpression,
                     evaluatedType: WomType,
-                    inputPorts: Map[String, InputPort]) = {
+                    inputPorts: Map[String, InputPort]
+    ) =
       new ExpressionNode(identifier, expression, explicitWomType, inputPorts) with ExposedExpressionNode
-    }
     ExpressionNode.buildFromConstructor(constructor)(identifier, expression, inputMapping)
   }
 }
diff --git a/wom/src/main/scala/wom/graph/expression/ExpressionNode.scala b/wom/src/main/scala/wom/graph/expression/ExpressionNode.scala
index f361315a544..a5e4ace0095 100644
--- a/wom/src/main/scala/wom/graph/expression/ExpressionNode.scala
+++ b/wom/src/main/scala/wom/graph/expression/ExpressionNode.scala
@@ -19,11 +19,13 @@ import scala.util.Try
 abstract class ExpressionNode(override val identifier: WomIdentifier,
                               val womExpression: WomExpression,
                               val womType: WomType,
-                              val inputMapping: Map[String, InputPort]) extends GraphNodeWithSingleOutputPort with ExpressionNodeLike {
+                              val inputMapping: Map[String, InputPort]
+) extends GraphNodeWithSingleOutputPort
+    with ExpressionNodeLike {
   override val singleOutputPort = GraphNodeOutputPort(identifier, womType, this)
   override val outputPorts: Set[GraphNodePort.OutputPort] = Set(singleOutputPort)
   override val inputPorts = inputMapping.values.toSet
-
+
   // Again an instance of not so pretty flatMapping with a mix of ErrorOrs, Eithers and Tries...
   // TODO: This should return an EitherT or whatever we decide we want to use to package Exceptions + Nel[String]
 
   /**
@@ -34,7 +36,9 @@ abstract class ExpressionNode(override val identifier: WomIdentifier,
       coerced <- womType.coerceRawValue(evaluated).toErrorOr
     } yield coerced).leftMap(_.map(e => s"Evaluating ${womExpression.sourceString} failed: $e")).toEither
 
-  override final def evaluate(outputPortLookup: OutputPort => ErrorOr[WomValue], ioFunctionSet: IoFunctionSet): Checked[Map[OutputPort, WomValue]] = {
+  final override def evaluate(outputPortLookup: OutputPort => ErrorOr[WomValue],
+                              ioFunctionSet: IoFunctionSet
+  ): Checked[Map[OutputPort, WomValue]] = {
     import common.validation.ErrorOr._
     for {
       inputs <- inputMapping.traverseValues(inputPort => outputPortLookup(inputPort.upstream)).toEither
@@ -44,18 +48,21 @@ abstract class ExpressionNode(override val identifier: WomIdentifier,
 }
 
 object ExpressionNode {
+
   /**
    * Constructs an ExpressionNode or a subclass of an expression node.
    * Note: the WomType is the evaluated type derived from the expression.
    */
-  type ExpressionNodeConstructor[E <: ExpressionNode] = (WomIdentifier, WomExpression, WomType, Map[String, InputPort]) => E
+  type ExpressionNodeConstructor[E <: ExpressionNode] =
+    (WomIdentifier, WomExpression, WomType, Map[String, InputPort]) => E
 
   /**
    * Using the passed constructor, attempts to build an expression node from input mappings by linking variable references to other
    * output ports.
    */
-  def buildFromConstructor[E <: ExpressionNode](constructor: ExpressionNodeConstructor[E])
-                                               (identifier: WomIdentifier, expression: WomExpression, inputMapping: Map[String, OutputPort]): ErrorOr[E] = {
+  def buildFromConstructor[E <: ExpressionNode](
+    constructor: ExpressionNodeConstructor[E]
+  )(identifier: WomIdentifier, expression: WomExpression, inputMapping: Map[String, OutputPort]): ErrorOr[E] = {
     val graphNodeSetter = new GraphNode.GraphNodeSetter[ExpressionNode]()
 
     val builtExpressionNode = for {
@@ -65,18 +72,27 @@ object ExpressionNode {
       _ = graphNodeSetter._graphNode = expressionNode
     } yield expressionNode
 
-    def safeSourceString(e: WomExpression) = Try(expression.sourceString).getOrElse("<>")
+    def safeSourceString(e: WomExpression) =
+      Try(expression.sourceString).getOrElse("<>")
 
-    builtExpressionNode.leftMap(_.map(e => s"Cannot build expression for '${identifier.fullyQualifiedName.value} = ${safeSourceString(expression)}': $e"))
+    builtExpressionNode.leftMap(
+      _.map(e =>
+        s"Cannot build expression for '${identifier.fullyQualifiedName.value} = ${safeSourceString(expression)}': $e"
+      )
+    )
   }
 
   /**
    * Attempts to find an output port for all referenced variables in the expression, and creates input ports to connect them together.
   */
-  private def linkWithInputs(graphNodeSetter: GraphNode.GraphNodeSetter[ExpressionNode], expression: WomExpression, inputMapping: Map[String, OutputPort]): ErrorOr[(WomType, Map[String, InputPort])] = {
+  private def linkWithInputs(graphNodeSetter: GraphNode.GraphNodeSetter[ExpressionNode],
+                             expression: WomExpression,
+                             inputMapping: Map[String, OutputPort]
+  ): ErrorOr[(WomType, Map[String, InputPort])] = {
     def linkInput(input: String): ErrorOr[(String, InputPort)] = inputMapping.get(input) match {
-      case Some(upstreamPort) => (input, ConnectedInputPort(input, upstreamPort.womType, upstreamPort, graphNodeSetter.get)).validNel
-      case None =>
+      case Some(upstreamPort) =>
+        (input, ConnectedInputPort(input, upstreamPort.womType, upstreamPort, graphNodeSetter.get)).validNel
+      case None =>
         s"Expression cannot be connected without the input $input (provided: ${inputMapping.toString})".invalidNel
     }
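The linking step above (buildFromConstructor delegating to linkWithInputs) amounts to resolving every variable an expression references against a map of available upstream ports, accumulating one error per missing name. A standalone sketch of that shape, with illustrative names and plain strings standing in for ports:

    object LinkSketch {
      def linkInputs(referenced: Set[String],
                     available: Map[String, String]
      ): Either[List[String], Map[String, String]] = {
        // Split referenced names into those we cannot resolve and those we can.
        val (missing, found) = referenced.toList.partition(name => !available.contains(name))
        if (missing.isEmpty) Right(found.map(name => name -> available(name)).toMap)
        else Left(missing.map(name => s"Expression cannot be connected without the input $name"))
      }

      def main(args: Array[String]): Unit = {
        println(linkInputs(Set("x", "y"), Map("x" -> "a.out", "y" -> "b.out"))) // Right(...)
        println(linkInputs(Set("x", "z"), Map("x" -> "a.out")))                 // Left(...)
      }
    }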
diff --git a/wom/src/main/scala/wom/graph/expression/ExpressionNodeLike.scala b/wom/src/main/scala/wom/graph/expression/ExpressionNodeLike.scala
index f452513b45d..f0cb0d05616 100644
--- a/wom/src/main/scala/wom/graph/expression/ExpressionNodeLike.scala
+++ b/wom/src/main/scala/wom/graph/expression/ExpressionNodeLike.scala
@@ -11,5 +11,7 @@ import wom.values.WomValue
  * Trait for nodes that can be evaluated by the engine
  */
 trait ExpressionNodeLike extends GraphNode {
-  def evaluate(outputPortLookup: OutputPort => ErrorOr[WomValue], ioFunctionSet: IoFunctionSet): Checked[Map[OutputPort, WomValue]]
+  def evaluate(outputPortLookup: OutputPort => ErrorOr[WomValue],
+               ioFunctionSet: IoFunctionSet
+  ): Checked[Map[OutputPort, WomValue]]
 }
diff --git a/wom/src/main/scala/wom/package.scala b/wom/src/main/scala/wom/package.scala
index 3a8188e0060..d05d60df033 100644
--- a/wom/src/main/scala/wom/package.scala
+++ b/wom/src/main/scala/wom/package.scala
@@ -12,7 +12,10 @@ trait TsvSerializable {
 }
 
 class WomExpressionException(message: String = null, cause: Throwable = null) extends RuntimeException(message, cause)
-final case class OptionalNotSuppliedException(operationName: String) extends Exception(s"Sorry! Operation $operationName is not supported on empty optional values. You might resolve this using select_first([optional, default]) to guarantee that you have a filled value.")
+final case class OptionalNotSuppliedException(operationName: String)
+    extends Exception(
+      s"Sorry! Operation $operationName is not supported on empty optional values. You might resolve this using select_first([optional, default]) to guarantee that you have a filled value."
+    )
 
 case class JobOutput(womValue: WomValue)
 
@@ -23,9 +26,11 @@ package object values {
   type WomEvaluatedCallInputs = Map[InputDefinition, WomValue]
 
   implicit class EnhancedWomEvaluatedCallInputs(val inputs: WomEvaluatedCallInputs) extends AnyVal {
-    def prettyString = inputs.map({
-      case (inputDef, womValue) => s"${inputDef.name} -> ${womValue.valueString}"
-    }).mkString(", ")
+    def prettyString = inputs
+      .map { case (inputDef, womValue) =>
+        s"${inputDef.name} -> ${womValue.valueString}"
+      }
+      .mkString(", ")
   }
 
   implicit class HashableString(val value: String) extends AnyVal with Hashable {
@@ -84,7 +89,8 @@ final case class InstantiatedCommand(commandString: String,
                                      evaluatedStdoutOverride: Option[String] = None,
                                      evaluatedStderrOverride: Option[String] = None,
                                      preprocessedInputs: List[(LocalName, WomValue)] = List.empty,
-                                     valueMappedPreprocessedInputs: List[(LocalName, WomValue)] = List.empty)
+                                     valueMappedPreprocessedInputs: List[(LocalName, WomValue)] = List.empty
+)
 
 /**
  * File created as a side effect of instantiating the command.
diff --git a/wom/src/main/scala/wom/runtime/WomOutputRuntimeExtractor.scala b/wom/src/main/scala/wom/runtime/WomOutputRuntimeExtractor.scala
index ce568c6515b..30835df445d 100644
--- a/wom/src/main/scala/wom/runtime/WomOutputRuntimeExtractor.scala
+++ b/wom/src/main/scala/wom/runtime/WomOutputRuntimeExtractor.scala
@@ -7,8 +7,8 @@ import net.ceedubs.ficus.Ficus._
 
 object WomOutputRuntimeExtractor {
   def fromConfig(config: Config) = {
-    val dockerImage = validate { config.getAs[String]("docker-image") }
-    val command = validate { config.as[String]("command") }
+    val dockerImage = validate(config.getAs[String]("docker-image"))
+    val command = validate(config.as[String]("command"))
 
     (dockerImage, command).mapN(WomOutputRuntimeExtractor.apply)
   }
diff --git a/wom/src/main/scala/wom/transforms/WomCallNodeMaker.scala b/wom/src/main/scala/wom/transforms/WomCallNodeMaker.scala
index 03201c77e0c..6bae7aa2a3f 100644
--- a/wom/src/main/scala/wom/transforms/WomCallNodeMaker.scala
+++ b/wom/src/main/scala/wom/transforms/WomCallNodeMaker.scala
@@ -7,5 +7,10 @@ import simulacrum._
 
 @typeclass
 trait WomCallNodeMaker[A] {
-  def toWomCallNode(a: A, localLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean, inASubworkflow: Boolean): ErrorOr[CallNodeAndNewNodes]
+  def toWomCallNode(a: A,
+                    localLookup: Map[String, GraphNodePort.OutputPort],
+                    outerLookup: Map[String, GraphNodePort.OutputPort],
+                    preserveIndexForOuterLookups: Boolean,
+                    inASubworkflow: Boolean
+  ): ErrorOr[CallNodeAndNewNodes]
 }
diff --git a/wom/src/main/scala/wom/transforms/WomConditionalNodeMaker.scala b/wom/src/main/scala/wom/transforms/WomConditionalNodeMaker.scala
index 48a910cc2b4..44ecb63aa8b 100644
--- a/wom/src/main/scala/wom/transforms/WomConditionalNodeMaker.scala
+++ b/wom/src/main/scala/wom/transforms/WomConditionalNodeMaker.scala
@@ -8,5 +8,10 @@ import simulacrum._
 
 @typeclass
 trait WomConditionalNodeMaker[A] {
-  def toWomConditionalNode(a: A, localLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, OutputPort], preserveIndexForOuterLookups: Boolean, inASubworkflow: Boolean): ErrorOr[ConditionalNodeWithNewNodes]
+  def toWomConditionalNode(a: A,
+                           localLookup: Map[String, GraphNodePort.OutputPort],
+                           outerLookup: Map[String, OutputPort],
+                           preserveIndexForOuterLookups: Boolean,
+                           inASubworkflow: Boolean
+  ): ErrorOr[ConditionalNodeWithNewNodes]
 }
diff --git a/wom/src/main/scala/wom/transforms/WomExecutableMaker.scala b/wom/src/main/scala/wom/transforms/WomExecutableMaker.scala
index d1eed8efd9f..dfb6bcd4c1b 100644
--- a/wom/src/main/scala/wom/transforms/WomExecutableMaker.scala
+++ b/wom/src/main/scala/wom/transforms/WomExecutableMaker.scala
@@ -8,7 +8,11 @@ import wom.expression.IoFunctionSet
 
 @typeclass
 trait WomExecutableMaker[A] {
-  def toWomExecutable(a: A, inputs: Option[WorkflowJson], ioFunctions: IoFunctionSet, strictValidation: Boolean): Checked[Executable]
+  def toWomExecutable(a: A,
+                      inputs: Option[WorkflowJson],
+                      ioFunctions: IoFunctionSet,
+                      strictValidation: Boolean
+  ): Checked[Executable]
 }
 
 @typeclass
diff --git a/wom/src/main/scala/wom/transforms/WomGraphMaker.scala b/wom/src/main/scala/wom/transforms/WomGraphMaker.scala
index 7547c2e548f..f4b2c5edda1 100644
--- a/wom/src/main/scala/wom/transforms/WomGraphMaker.scala
+++ b/wom/src/main/scala/wom/transforms/WomGraphMaker.scala
@@ -6,5 +6,10 @@ import simulacrum._
 
 @typeclass
 trait WomGraphMaker[A] {
-  def toWomGraph(a: A, includeGraphNodes: Set[GraphNode], outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean, inASubworkflow: Boolean): ErrorOr[Graph]
+  def toWomGraph(a: A,
+                 includeGraphNodes: Set[GraphNode],
+                 outerLookup: Map[String, GraphNodePort.OutputPort],
+                 preserveIndexForOuterLookups: Boolean,
+                 inASubworkflow: Boolean
+  ): ErrorOr[Graph]
 }
diff --git a/wom/src/main/scala/wom/transforms/WomScatterNodeMaker.scala b/wom/src/main/scala/wom/transforms/WomScatterNodeMaker.scala
index 23bcac21176..3355ef0b37e 100644
--- a/wom/src/main/scala/wom/transforms/WomScatterNodeMaker.scala
+++ b/wom/src/main/scala/wom/transforms/WomScatterNodeMaker.scala
@@ -8,5 +8,10 @@ import simulacrum._
 
 @typeclass
 trait WomScatterNodeMaker[A] {
-  def toWomScatterNode(a: A, localLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, OutputPort], preserveIndexForOuterLookups: Boolean, inASubworkflow: Boolean): ErrorOr[ScatterNodeWithNewNodes]
+  def toWomScatterNode(a: A,
+                       localLookup: Map[String, GraphNodePort.OutputPort],
+                       outerLookup: Map[String, OutputPort],
+                       preserveIndexForOuterLookups: Boolean,
+                       inASubworkflow: Boolean
+  ): ErrorOr[ScatterNodeWithNewNodes]
 }
diff --git a/wom/src/main/scala/wom/types/WomAnyType.scala b/wom/src/main/scala/wom/types/WomAnyType.scala
index 025873e7d0c..6319364887a 100644
--- a/wom/src/main/scala/wom/types/WomAnyType.scala
+++ b/wom/src/main/scala/wom/types/WomAnyType.scala
@@ -44,12 +44,13 @@ case object WomAnyType extends WomType {
   override protected def coercion = {
     case womValue: WomValue => womValue
     case any: Any =>
-
-      def foldFun(acc: Option[WomValue], nextType: WomType): Option[WomValue] = acc.orElse(nextType.coerceRawValue(any).toOption)
+      def foldFun(acc: Option[WomValue], nextType: WomType): Option[WomValue] =
+        acc.orElse(nextType.coerceRawValue(any).toOption)
 
       /* This does throw an exception if it couldn't coerce (.get is intentional) */
-      WomType.womTypeCoercionOrder.foldLeft[Option[WomValue]](None)(foldFun).getOrElse(
-        throw new UnsupportedOperationException(s"Could not coerce $any into a WOM type"))
+      WomType.womTypeCoercionOrder
+        .foldLeft[Option[WomValue]](None)(foldFun)
+        .getOrElse(throw new UnsupportedOperationException(s"Could not coerce $any into a WOM type"))
 //
 //
 //      WomType.womTypeCoercionOrder.map(_.coerceRawValue(any)).find(_.isSuccess).getOrElse(
@@ -57,7 +58,7 @@ case object WomAnyType extends WomType {
 //      ).get
   }
 
-  override final def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean = true
+  final override def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean = true
 
   override def add(rhs: WomType): Try[WomType] = Success(WomAnyType)
   override def subtract(rhs: WomType): Try[WomType] = Success(WomAnyType)
diff --git a/wom/src/main/scala/wom/types/WomArrayType.scala b/wom/src/main/scala/wom/types/WomArrayType.scala
index 86343179ddd..41c3f0cb2db 100644
--- a/wom/src/main/scala/wom/types/WomArrayType.scala
+++ b/wom/src/main/scala/wom/types/WomArrayType.scala
@@ -14,8 +14,8 @@ sealed trait WomArrayType extends WomType {
   val guaranteedNonEmpty: Boolean
 
   private def coerceIterable(values: Seq[Any]): WomArray = values match {
-    case s:Seq[Any] if s.nonEmpty =>
-      val coerced = s.map {memberType.coerceRawValue(_).get}
+    case s: Seq[Any] if s.nonEmpty =>
+      val coerced = s.map(memberType.coerceRawValue(_).get)
       WomArray(this, coerced)
     case _ => WomArray(this, Seq())
   }
@@ -27,18 +27,23 @@ sealed trait WomArrayType extends WomType {
     case javaList: java.util.List[_] if allowEmpty || !javaList.isEmpty => coerceIterable(javaList.asScala.toList)
     case WomArray(WomMaybeEmptyArrayType.EmptyArrayType, _) => WomArray(this, Seq.empty)
     case womArray: WomArray
-      if (allowEmpty || womArray.nonEmpty)
-        && womArray.womType.memberType == WomStringType
-        && memberType == WomSingleFileType =>
+        if (allowEmpty || womArray.nonEmpty)
+          && womArray.womType.memberType == WomStringType
+          && memberType == WomSingleFileType =>
       WomArray(this, womArray.value.map(str => WomSingleFile(str.asInstanceOf[WomString].value)).toList)
-    case womArray: WomArray if (allowEmpty || womArray.nonEmpty) && womArray.womType.memberType == memberType => WomArray(this, womArray.value)
-    case womArray: WomArray if (allowEmpty || womArray.nonEmpty) && womArray.womType.memberType == WomAnyType => coerceIterable(womArray.value)
-    case womArray: WomArray if (allowEmpty || womArray.nonEmpty) && womArray.womType.memberType.isInstanceOf[WomArrayType] && memberType.isInstanceOf[WomArrayType] =>
+    case womArray: WomArray if (allowEmpty || womArray.nonEmpty) && womArray.womType.memberType == memberType =>
+      WomArray(this, womArray.value)
+    case womArray: WomArray if (allowEmpty || womArray.nonEmpty) && womArray.womType.memberType == WomAnyType =>
+      coerceIterable(womArray.value)
+    case womArray: WomArray
+        if (allowEmpty || womArray.nonEmpty) && womArray.womType.memberType.isInstanceOf[WomArrayType] && memberType
+          .isInstanceOf[WomArrayType] =>
       TryUtil.sequence(womArray.value.map(memberType.coerceRawValue)) match {
         case Success(values) => WomArray(this, values)
         case Failure(ex) => throw ex
       }
-    case womArray: WomArray if (allowEmpty || womArray.nonEmpty) && memberType.isCoerceableFrom(womArray.womType.memberType) =>
+    case womArray: WomArray
+        if (allowEmpty || womArray.nonEmpty) && memberType.isCoerceableFrom(womArray.womType.memberType) =>
       womArray.map(v => memberType.coerceRawValue(v).get) // .get because isCoerceableFrom should make it safe
     case WomArrayLike(womArray) if this.isCoerceableFrom(womArray.womType) => coercion.apply(womArray)
   }
@@ -81,11 +86,10 @@ object WomArrayType {
     if (guaranteedNonEmpty) WomNonEmptyArrayType(memberType)
     else WomMaybeEmptyArrayType(memberType)
 
-  def unapply(womType: WomType): Option[WomType] = {
+  def unapply(womType: WomType): Option[WomType] =
     womType match {
       case arrayType: WomArrayType => Option(arrayType.memberType)
       case mapType: WomMapType => Option(mapType.equivalentArrayType.memberType)
       case _ => None
     }
-  }
 }
diff --git a/wom/src/main/scala/wom/types/WomBooleanType.scala b/wom/src/main/scala/wom/types/WomBooleanType.scala
index 46cacb82f36..53d121b50c5 100644
--- a/wom/src/main/scala/wom/types/WomBooleanType.scala
+++ b/wom/src/main/scala/wom/types/WomBooleanType.scala
@@ -19,7 +19,7 @@ case object WomBooleanType extends WomPrimitiveType {
   }
 
   private def comparisonOperator(rhs: WomType, symbol: String): Try[WomType] = rhs match {
-    case wct:WomCoproductType => wct.typeExists(WomStringType)
+    case wct: WomCoproductType => wct.typeExists(WomStringType)
     case WomBooleanType => Success(WomBooleanType)
     case WomOptionalType(memberType) => comparisonOperator(memberType, symbol)
     case _ => invalid(s"$this $symbol $rhs")
diff --git a/wom/src/main/scala/wom/types/WomCompositeType.scala b/wom/src/main/scala/wom/types/WomCompositeType.scala
index b72d3ff6b0e..597b2623a2a 100644
--- a/wom/src/main/scala/wom/types/WomCompositeType.scala
+++ b/wom/src/main/scala/wom/types/WomCompositeType.scala
@@ -7,47 +7,52 @@ import common.validation.Validation._
 import spray.json.JsObject
 import wom.values.{WomMap, WomObject, WomObjectLike, WomValue}
 
-case class WomCompositeType(typeMap: Map[String, WomType], structName: Option[String] = None) extends WomObjectTypeLike {
+case class WomCompositeType(typeMap: Map[String, WomType], structName: Option[String] = None)
+    extends WomObjectTypeLike {
 
-  private def validateType(values: Map[String, Any])(key: String, expectedType: WomType): ErrorOr[(String, WomValue)] = {
+  private def validateType(values: Map[String, Any])(key: String, expectedType: WomType): ErrorOr[(String, WomValue)] =
     (values.get(key), expectedType) match {
       case (Some(value), _) => expectedType.coerceRawValue(value).toErrorOr.map(key -> _)
       case (None, coerceTo: WomOptionalType) => (key -> coerceTo.none).validNel
       case (None, _) =>
         s"No value for field '$key' with non optional type '${expectedType.stableName}' has been provided".invalidNel
     }
-  }
 
-  override def validateAndCoerceValues(values: Map[String, Any]): ErrorOr[Map[String, WomValue]] = {
+  override def validateAndCoerceValues(values: Map[String, Any]): ErrorOr[Map[String, WomValue]] =
     typeMap.toList.traverse(Function.tupled(validateType(values))).map(_.toMap)
-  }
 
   override protected def coercion = {
-    case composite: WomObjectLike if isCoerceableFrom(composite.womType) => WomObject.withTypeUnsafe(composite.values, this)
-    case map: WomMap if WomStringType.isCoerceableFrom(map.womType.keyType) => WomObject.withTypeUnsafe(map.value.map({ case (k, v) => k.valueString -> v }), this)
+    case composite: WomObjectLike if isCoerceableFrom(composite.womType) =>
+      WomObject.withTypeUnsafe(composite.values, this)
+    case map: WomMap if WomStringType.isCoerceableFrom(map.womType.keyType) =>
+      WomObject.withTypeUnsafe(map.value.map { case (k, v) => k.valueString -> v }, this)
     case jsObject: JsObject => WomObject.withTypeUnsafe(jsObject.fields, this)
   }
 
-  override def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean = {
+  override def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean =
     otherType match {
       // This is as good as we can do here without the values
       case mapType: WomMapType if WomStringType.isCoerceableFrom(mapType.keyType) => true
       // Same here, it might not be coerceable but without the values we can't tell
       case WomObjectType => true
-      case compositeType: WomCompositeType if (compositeType.typeMap.size == typeMap.size) &&
-        compositeType.typeMap.forall({ case (key, typeValue) => typeMap.get(key).exists(_.isCoerceableFrom(typeValue)) }) =>
+      case compositeType: WomCompositeType
+          if (compositeType.typeMap.size == typeMap.size) &&
+            compositeType.typeMap.forall { case (key, typeValue) =>
+              typeMap.get(key).exists(_.isCoerceableFrom(typeValue))
+            } =>
         true
       case _ => false
     }
-  }
 
   override val friendlyName: String = structName.getOrElse("Object")
 
   override val stableName = {
-    val fieldType = typeMap.map({
-      case (key, value) => s"$key -> ${value.stableName}"
-    }).mkString("\n")
+    val fieldType = typeMap
+      .map { case (key, value) =>
+        s"$key -> ${value.stableName}"
+      }
+      .mkString("\n")
 
     s"WomCompositeType {\n $fieldType \n}"
   }
diff --git a/wom/src/main/scala/wom/types/WomCoproductType.scala b/wom/src/main/scala/wom/types/WomCoproductType.scala
index 9b05ee92f9c..c83a75acd7a 100644
--- a/wom/src/main/scala/wom/types/WomCoproductType.scala
+++ b/wom/src/main/scala/wom/types/WomCoproductType.scala
@@ -19,35 +19,48 @@ case class WomCoproductType(types: NonEmptyList[WomType]) extends WomType {
    * the partial function is not defined are assumed to not be convertible to the target type.
    */
   override def coercion: PartialFunction[Any, WomValue] = {
-    case wct@WomCoproductValue(tpe, _) if (tpe.equalsType(this).isSuccess) => wct
+    case wct @ WomCoproductValue(tpe, _) if tpe.equalsType(this).isSuccess => wct
 
-    //If we can find this type exactly in our coproduct, use that type for the coercion
-    case womValue: WomValue if (types.toList.contains(womValue.womType)) =>
+    // If we can find this type exactly in our coproduct, use that type for the coercion
+    case womValue: WomValue if types.toList.contains(womValue.womType) =>
       val v: Try[WomValue] = womValue.womType.coerceRawValue(womValue)
       WomCoproductValue(this, v.get)
 
-    //If we don't have any information, try to coerce this value one by one, stopping at the first successful try
+    // If we don't have any information, try to coerce this value one by one, stopping at the first successful try
    case any =>
      val triedToCoerce: Try[WomValue] = types.map(_.coerceRawValue(any)).toList.reduce(_ orElse _)
-      triedToCoerce.getOrElse(throw new WomTypeException(s"unable to coerce $any to a member of the set of types ${types.toList.mkString(", ")}")) |> (WomCoproductValue(this, _))
+      triedToCoerce.getOrElse(
+        throw new WomTypeException(
+          s"unable to coerce $any to a member of the set of types ${types.toList.mkString(", ")}"
+        )
+      ) |> (WomCoproductValue(this, _))
   }
 
   def typeExists(tpe: WomType): Try[WomBooleanType.type] = types.exists(_.equals(tpe)) match {
     case true => Success(WomBooleanType)
-    case _ => Failure(new WomExpressionException(s"Type equality assertion failed because $tpe was not found in the coproduct of ${stableName}"))
+    case _ =>
+      Failure(
+        new WomExpressionException(
+          s"Type equality assertion failed because $tpe was not found in the coproduct of ${stableName}"
+        )
+      )
   }
 
   override def stableName: String =
-    types.map(_.stableName).toList.mkString("Coproduct[",", ", "]")
+    types.map(_.stableName).toList.mkString("Coproduct[", ", ", "]")
 
   override def equalsType(rhs: WomType): Try[WomType] =
     rhs match {
       case WomCoproductType(tpes) if types.equals(tpes) => Success(WomBooleanType)
-      case _ if types.exists(_.equals(rhs)) => Success(WomBooleanType)
-      case _ => Failure(new WomExpressionException(s"Type equality could not be asserted because $rhs was found in the coproduct of $stableName"))
+      case _ if types.exists(_.equals(rhs)) => Success(WomBooleanType)
+      case _ =>
+        Failure(
+          new WomExpressionException(
+            s"Type equality could not be asserted because $rhs was found in the coproduct of $stableName"
+          )
+        )
     }
-
 }
diff --git a/wom/src/main/scala/wom/types/WomEnumerationType.scala b/wom/src/main/scala/wom/types/WomEnumerationType.scala
index ed9d7ce746c..33a4df63dcc 100644
--- a/wom/src/main/scala/wom/types/WomEnumerationType.scala
+++ b/wom/src/main/scala/wom/types/WomEnumerationType.scala
@@ -4,18 +4,18 @@ import cats.data.NonEmptyList
 import spray.json.JsString
 import wom.values.{WomEnumerationValue, WomValue}
 
-
 /**
   * An enumeration of possible states a value can inhabit.
   */
 case class WomEnumerationType(values: NonEmptyList[String]) extends WomPrimitiveType {
 
   override def coercion: PartialFunction[Any, WomValue] = {
-    case womValue: WomValue if (values.toList.contains(womValue.valueString)) => WomEnumerationValue(this, womValue.valueString)
-    case name: String if (values.toList.contains(name)) => WomEnumerationValue(this, name)
-    case JsString(name) if (values.toList.contains(name)) => WomEnumerationValue(this, name)
+    case womValue: WomValue if values.toList.contains(womValue.valueString) =>
+      WomEnumerationValue(this, womValue.valueString)
+    case name: String if values.toList.contains(name) => WomEnumerationValue(this, name)
+    case JsString(name) if values.toList.contains(name) => WomEnumerationValue(this, name)
   }
 
   override def stableName: String =
-    values.toList.mkString("Enumeration[",", ", "]")
+    values.toList.mkString("Enumeration[", ", ", "]")
 }
diff --git a/wom/src/main/scala/wom/types/WomFileType.scala b/wom/src/main/scala/wom/types/WomFileType.scala
index ff82a448be8..a31a4e2f535 100644
--- a/wom/src/main/scala/wom/types/WomFileType.scala
+++ b/wom/src/main/scala/wom/types/WomFileType.scala
@@ -52,7 +52,7 @@ case object WomSingleFileType extends WomPrimitiveFileType {
   }
 
   override def equalsType(rhs: WomType): Try[WomType] = rhs match {
-    case wct:WomCoproductType => wct.typeExists(WomStringType)
+    case wct: WomCoproductType => wct.typeExists(WomStringType)
     case WomSingleFileType => Success(WomBooleanType)
     case WomStringType => Success(WomBooleanType)
     case WomOptionalType(memberType) => equalsType(memberType)
@@ -77,7 +77,7 @@ case object WomGlobFileType extends WomPrimitiveFileType {
   }
 
   override def equalsType(rhs: WomType): Try[WomType] = rhs match {
-    case wct:WomCoproductType => wct.typeExists(WomStringType)
+    case wct: WomCoproductType => wct.typeExists(WomStringType)
     case WomGlobFileType => Success(WomBooleanType)
     case WomStringType => Success(WomBooleanType)
     case WomOptionalType(memberType) => equalsType(memberType)
diff --git a/wom/src/main/scala/wom/types/WomFloatType.scala b/wom/src/main/scala/wom/types/WomFloatType.scala
index b7d2d6991b8..f877de38f31 100644
--- a/wom/src/main/scala/wom/types/WomFloatType.scala
+++ b/wom/src/main/scala/wom/types/WomFloatType.scala
@@ -29,7 +29,7 @@ case object WomFloatType extends WomPrimitiveType {
   }
 
   private def comparisonOperator(rhs: WomType, symbol: String): Try[WomType] = rhs match {
-    case wct:WomCoproductType => wct.typeExists(WomStringType)
+    case wct: WomCoproductType => wct.typeExists(WomStringType)
     case WomIntegerType => Success(WomBooleanType)
     case WomFloatType => Success(WomBooleanType)
     case WomOptionalType(memberType) => comparisonOperator(memberType, symbol)
diff --git a/wom/src/main/scala/wom/types/WomIntegerLike.scala b/wom/src/main/scala/wom/types/WomIntegerLike.scala
index 00dce7751bc..6432b9533f7 100644
--- a/wom/src/main/scala/wom/types/WomIntegerLike.scala
+++ b/wom/src/main/scala/wom/types/WomIntegerLike.scala
@@ -1,6 +1,5 @@
 package wom.types
 
-
 object WomIntegerLike {
   implicit class EnhancedLong(val long: Long) extends AnyVal {
     def inIntRange: Boolean = long >= Int.MinValue && long <= Int.MaxValue
diff --git a/wom/src/main/scala/wom/types/WomIntegerType.scala b/wom/src/main/scala/wom/types/WomIntegerType.scala
index a177dd5d924..e5f87c9dfdb 100644
--- a/wom/src/main/scala/wom/types/WomIntegerType.scala
+++ b/wom/src/main/scala/wom/types/WomIntegerType.scala
@@ -14,11 +14,12 @@ case object WomIntegerType extends WomPrimitiveType {
     case n: JsNumber if n.value.isValidInt => WomInteger(n.value.intValue)
     case i: WomInteger => i
     case WomLong(i) if i.inIntRange => WomInteger(i.toInt)
-    case WomLong(i) => throw new RuntimeException(
-      s"Tried to convert a Long value $i into an Int but it was outside the bounds of acceptable Ints, namely ${Int.MinValue} <-> ${Int.MaxValue}")
-    case s: WomString => {
-      WomInteger(s.value.toInt)
-    }
+    case WomLong(i) =>
+      throw new RuntimeException(
+        s"Tried to convert a Long value $i into an Int but it was outside the bounds of acceptable Ints, namely ${Int.MinValue} <-> ${Int.MaxValue}"
+      )
+    case s: WomString =>
+      WomInteger(s.value.toInt)
     case s: String =>
       val bigTry = Try(BigDecimal(s))
       if (bigTry.isSuccess)
@@ -36,14 +37,13 @@ case object WomIntegerType extends WomPrimitiveType {
   }
 
   private def comparisonOperator(rhs: WomType, symbol: String): Try[WomType] = rhs match {
-    case wct:WomCoproductType => wct.typeExists(WomStringType)
+    case wct: WomCoproductType => wct.typeExists(WomStringType)
     case WomIntegerType => Success(WomBooleanType)
     case WomFloatType => Success(WomBooleanType)
     case WomOptionalType(memberType) => comparisonOperator(memberType, symbol)
     case _ => invalid(s"$this $symbol $rhs")
   }
 
-
   override def add(rhs: WomType): Try[WomType] = rhs match {
     case WomStringType => Success(WomStringType)
     case WomOptionalType(memberType) => add(memberType)
diff --git a/wom/src/main/scala/wom/types/WomNothingType.scala b/wom/src/main/scala/wom/types/WomNothingType.scala
index 1c7ec161d77..20d9bcd76a7 100644
--- a/wom/src/main/scala/wom/types/WomNothingType.scala
+++ b/wom/src/main/scala/wom/types/WomNothingType.scala
@@ -7,8 +7,8 @@ import wom.values.{WomOptionalValue, WomValue}
   * (and yet at the same time, most) interesting of all the types, the WomNothingType!
   */
 case object WomNothingType extends WomType {
-  override protected def coercion: PartialFunction[Any, WomValue] = {
-    case WomOptionalValue(WomNothingType, None) => WomOptionalValue(WomNothingType, None)
+  override protected def coercion: PartialFunction[Any, WomValue] = { case WomOptionalValue(WomNothingType, None) =>
+    WomOptionalValue(WomNothingType, None)
   }
   override def stableName: String = "Nothing"
 }
diff --git a/wom/src/main/scala/wom/types/WomObjectType.scala b/wom/src/main/scala/wom/types/WomObjectType.scala
index e1a067eaed9..9d28c84b5b3 100644
--- a/wom/src/main/scala/wom/types/WomObjectType.scala
+++ b/wom/src/main/scala/wom/types/WomObjectType.scala
@@ -10,25 +10,27 @@ import wom.values._
 import scala.util.{Failure, Success, Try}
 
 trait WomObjectTypeLike extends WomType {
+
   /**
    * Validate the values against the WomObjectTypeLike and return a valid map of values if possible.
    * This is an indirection from the usual coercion path but it allows WomObject to validate values against the WomObjectTypeLike at
    * instantiation time and ensure the WomObject is only built if the values match the constraints of the type.
    * See apply method in WomObject.
   */
-  def validateAndCoerceValues(values: Map[String, Any]): ErrorOr[Map[String, WomValue]] = {
-    values.toList.traverse({
-      case (k, v) => WomAnyType.coerceRawValue(v).toErrorOr.map(k -> _)
-    }).map(_.toMap)
-  }
+  def validateAndCoerceValues(values: Map[String, Any]): ErrorOr[Map[String, WomValue]] =
+    values.toList
+      .traverse { case (k, v) =>
+        WomAnyType.coerceRawValue(v).toErrorOr.map(k -> _)
+      }
+      .map(_.toMap)
 }
 
 case object WomObjectType extends WomObjectTypeLike {
   val stableName: String = "Object"
 
   private def handleCoercionFailures(tries: Try[_]*) = {
-    val errorMessages = tries collect {
-      case Failure(f) => f.getMessage
+    val errorMessages = tries collect { case Failure(f) =>
+      f.getMessage
     } mkString ","
 
     throw new UnsupportedOperationException(s"Coercion failed: $errorMessages")
@@ -37,8 +39,8 @@ case object WomObjectType extends WomObjectTypeLike {
   override protected def coercion = {
     case o: WomObjectLike => WomObject(o.values)
     case m: WomMap if isMapCoercable(m) =>
-      val coercedMap = m.value map {
-        case (k, v) => toWomString(k) -> toWomString(v)
+      val coercedMap = m.value map { case (k, v) =>
+        toWomString(k) -> toWomString(v)
       } collect {
         case (Success(k), Success(v)) => k.value -> v
         case (k, v) => handleCoercionFailures(k, v)
@@ -46,7 +48,6 @@ case object WomObjectType extends WomObjectTypeLike {
       WomObject(coercedMap)
 
     case js: JsObject =>
-
       val mapToTry = js.fields map { case (key, value) => key -> WomAnyType.coerceRawValue(value) }
       val mapOfTry = mapToTry map { kvp => kvp._2 map { kvp._1 -> _ } }
       // The TryUtil exception is ignored, we only use it to tell whether it worked or not. We use handleCoercionFailures
@@ -65,6 +66,7 @@ case object WomObjectType extends WomObjectTypeLike {
     case _ => false
   }
 
-  def isMapTypeCoercable(t: WomMapType) = WomStringType.isCoerceableFrom(t.keyType) && WomStringType.isCoerceableFrom(t.valueType)
+  def isMapTypeCoercable(t: WomMapType) =
+    WomStringType.isCoerceableFrom(t.keyType) && WomStringType.isCoerceableFrom(t.valueType)
   def isMapCoercable(m: WomMap) = isMapTypeCoercable(m.womType)
 }
diff --git a/wom/src/main/scala/wom/types/WomOptionalType.scala b/wom/src/main/scala/wom/types/WomOptionalType.scala
index 37b8bcd491a..e1e4e1b5f83 100644
--- a/wom/src/main/scala/wom/types/WomOptionalType.scala
+++ b/wom/src/main/scala/wom/types/WomOptionalType.scala
@@ -11,6 +11,7 @@ case class WomOptionalType(memberType: WomType) extends WomType {
     case recursive: WomOptionalType => 1 + recursive.depth
     case _ => 1
   }
+
   /**
    * Method to be overridden by implementation classes defining a partial function
    * for the conversion of raw input values to specific implementation class value types.
@@ -25,16 +26,19 @@ case class WomOptionalType(memberType: WomType) extends WomType {
     case None => WomOptionalValue(memberType, None)
 
     // Coerce and adjust nesting level of equivalent nested conditionals:
-    case womOptional: WomOptionalValue if baseMemberType.isCoerceableFrom(womOptional.womType.baseMemberType) => womOptional.coerceAndSetNestingLevel(this).get
+    case womOptional: WomOptionalValue if baseMemberType.isCoerceableFrom(womOptional.womType.baseMemberType) =>
+      womOptional.coerceAndSetNestingLevel(this).get
 
     // It's safe to box up values implicitly:
-    case womValue: WomValue if baseMemberType.isCoerceableFrom(womValue.womType) => WomOptionalValue(womValue).coerceAndSetNestingLevel(this).get
+    case womValue: WomValue if baseMemberType.isCoerceableFrom(womValue.womType) =>
+      WomOptionalValue(womValue).coerceAndSetNestingLevel(this).get
 
     case WomOptionalValue(WomNothingType, None) => WomOptionalValue(memberType, None)
 
     case null => WomOptionalValue(memberType, None)
 
-    case coerceable: Any if baseMemberType.coercionDefined(coerceable) => WomOptionalValue(baseMemberType.coerceRawValue(coerceable).get).coerceAndSetNestingLevel(this).get
+    case coerceable: Any if baseMemberType.coercionDefined(coerceable) =>
+      WomOptionalValue(baseMemberType.coerceRawValue(coerceable).get).coerceAndSetNestingLevel(this).get
   }
 
   override def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean = otherType match {
@@ -46,7 +50,8 @@ case class WomOptionalType(memberType: WomType) extends WomType {
     case WomOptionalType(otherMemberType) if memberType.isCoerceableFrom(otherMemberType) => true
 
     // Check flattening:
-    case WomOptionalType(otherMemberType: WomOptionalType) => baseMemberType.isCoerceableFrom(otherMemberType.baseMemberType)
+    case WomOptionalType(otherMemberType: WomOptionalType) =>
+      baseMemberType.isCoerceableFrom(otherMemberType.baseMemberType)
 
     case _ => false
   }
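The depth and flattening rules in the WomOptionalType hunks above are easiest to see with a toy model: depth counts how many optional layers wrap a base type, and coercion may re-box a value to match a target nesting level. A minimal sketch with a tiny ADT in place of WomType (all names illustrative):

    sealed trait Toy
    case object Base extends Toy
    final case class Opt(inner: Toy) extends Toy

    object OptionalDepthSketch {
      // Mirrors the depth computation above: one level per optional wrapper.
      def depth(t: Toy): Int = t match {
        case Opt(inner) => 1 + depth(inner)
        case Base => 0
      }

      def main(args: Array[String]): Unit = {
        println(depth(Opt(Base)))      // 1, like Int?
        println(depth(Opt(Opt(Base)))) // 2, like Int??
      }
    }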
diff --git a/wom/src/main/scala/wom/types/WomPairType.scala b/wom/src/main/scala/wom/types/WomPairType.scala
index fd9bd3ffd3a..69347d291b6 100644
--- a/wom/src/main/scala/wom/types/WomPairType.scala
+++ b/wom/src/main/scala/wom/types/WomPairType.scala
@@ -8,7 +8,8 @@ import scala.util.{Failure, Success, Try}
 case class WomPairType(leftType: WomType, rightType: WomType) extends WomType {
 
   override def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean = otherType match {
-    case WomPairType(otherType1, otherType2) => leftType.isCoerceableFrom(otherType1) && rightType.isCoerceableFrom(otherType2)
+    case WomPairType(otherType1, otherType2) =>
+      leftType.isCoerceableFrom(otherType1) && rightType.isCoerceableFrom(otherType2)
     case _ => false
   }
 
@@ -27,25 +28,34 @@ case class WomPairType(leftType: WomType, rightType: WomType) extends WomType {
 
   def coercePair(m: Map[String, JsValue], womPairType: WomPairType): WomPair = {
 
-    val caseNormalizedMap = m map { case(k, v) => k.toLowerCase.capitalize -> v }
+    val caseNormalizedMap = m map { case (k, v) => k.toLowerCase.capitalize -> v }
 
-    def invalidPair(missingArgs: String*) = Seq(Failure(new IllegalArgumentException(s"Pair ${JsObject(m)} requires ${missingArgs.mkString("/")} value(s) to be defined.")))
+    def invalidPair(missingArgs: String*) = Seq(
+      Failure(
+        new IllegalArgumentException(
+          s"Pair ${JsObject(m)} requires ${missingArgs.mkString("/")} value(s) to be defined."
+        )
+      )
+    )
 
     val womPair: Seq[Try[WomValue]] = (caseNormalizedMap.get("Left"), caseNormalizedMap.get("Right")) match {
-      case (Some(leftVal), Some(rightVal)) => Seq(womPairType.leftType.coerceRawValue(leftVal), womPairType.rightType.coerceRawValue(rightVal))
+      case (Some(leftVal), Some(rightVal)) =>
+        Seq(womPairType.leftType.coerceRawValue(leftVal), womPairType.rightType.coerceRawValue(rightVal))
       case (Some(_), _) => invalidPair("Right")
       case (_, Some(_)) => invalidPair("Left")
      case _ => invalidPair("Right", "Left")
    }
 
-    val failures = womPair collect { case f:Failure[_] => f }
+    val failures = womPair collect { case f: Failure[_] => f }
 
    if (failures.isEmpty) {
      womPair match {
        case Seq(Success(left), Success(right)) => WomPair(left, right)
      }
    } else {
-      throw new UnsupportedOperationException(s"Failed to coerce one or more values for creating a ${womPairType.stableName}:\n${failures.toList.mkString("\n")}")
+      throw new UnsupportedOperationException(
+        s"Failed to coerce one or more values for creating a ${womPairType.stableName}:\n${failures.toList.mkString("\n")}"
+      )
    }
  }
diff --git a/wom/src/main/scala/wom/types/WomPrimitiveType.scala b/wom/src/main/scala/wom/types/WomPrimitiveType.scala
index 1fc6dabbd59..fe48758b9d6 100644
--- a/wom/src/main/scala/wom/types/WomPrimitiveType.scala
+++ b/wom/src/main/scala/wom/types/WomPrimitiveType.scala
@@ -14,10 +14,9 @@ trait WomPrimitiveType extends WomType {
     WomBooleanType -> Seq(WomStringType, WomBooleanType)
   )
 
-  override def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean = {
+  override def typeSpecificIsCoerceableFrom(otherType: WomType): Boolean =
     coercionMap.get(otherType) match {
       case Some(types) => types contains this
       case None => false
     }
-  }
 }
diff --git a/wom/src/main/scala/wom/types/WomStringType.scala b/wom/src/main/scala/wom/types/WomStringType.scala
index d9ce21007f5..c417ebbaa84 100644
--- a/wom/src/main/scala/wom/types/WomStringType.scala
+++ b/wom/src/main/scala/wom/types/WomStringType.scala
@@ -17,7 +17,7 @@ case object WomStringType extends WomPrimitiveType {
   }
 
   private def comparisonOperator(rhs: WomType, symbol: String): Try[WomType] = rhs match {
-    case wct:WomCoproductType => wct.typeExists(WomStringType)
+    case wct: WomCoproductType => wct.typeExists(WomStringType)
     case WomStringType => Success(WomBooleanType)
     case WomOptionalType(memberType) => comparisonOperator(memberType, symbol)
     case _ => invalid(s"$this $symbol $rhs")
diff --git a/wom/src/main/scala/wom/types/WomType.scala b/wom/src/main/scala/wom/types/WomType.scala
index 47d9f3dab80..71c770ea000 100644
--- a/wom/src/main/scala/wom/types/WomType.scala
+++ b/wom/src/main/scala/wom/types/WomType.scala
@@ -24,19 +24,26 @@ trait WomType {
    * Public interface for a `Try`-wrapped conversion of an input of type `Any` to
    * a `WomValue`.
    */
-  final def coerceRawValue(any: Any): Try[WomValue] = {
+  final def coerceRawValue(any: Any): Try[WomValue] =
     any match {
       case womValue: WomValue if womValue.womType == this => Success(womValue)
       case WomOptionalValue(_, Some(v)) => coerceRawValue(v)
-      case womValue: WomValue if !coercion.isDefinedAt(any) => Failure(new IllegalArgumentException(
-        s"No coercion defined from wom value(s) '${WomValue.takeMaxElements(womValue, 3).toWomString}' of type" +
-          s" '${womValue.womType.stableName}' to '$stableName'."))
-      case _ if !coercion.isDefinedAt(any) => Failure(new IllegalArgumentException(
-        s"No coercion defined from '${ScalaRunTime.stringOf(any, 3)}' of type" +
-          s" '${Option(any.getClass.getCanonicalName).getOrElse(any.getClass.getName)}' to '$stableName'."))
+      case womValue: WomValue if !coercion.isDefinedAt(any) =>
+        Failure(
+          new IllegalArgumentException(
+            s"No coercion defined from wom value(s) '${WomValue.takeMaxElements(womValue, 3).toWomString}' of type" +
+              s" '${womValue.womType.stableName}' to '$stableName'."
+          )
+        )
+      case _ if !coercion.isDefinedAt(any) =>
+        Failure(
+          new IllegalArgumentException(
+            s"No coercion defined from '${ScalaRunTime.stringOf(any, 3)}' of type" +
+              s" '${Option(any.getClass.getCanonicalName).getOrElse(any.getClass.getName)}' to '$stableName'."
+          )
+        )
       case _ => Try(coercion(any))
     }
-  }
 
   final def isCoerceableFrom(otherType: WomType): Boolean = otherType match {
     case WomAnyType => true
@@ -57,28 +64,30 @@ trait WomType {
    */
   def stableName: String
 
-  def invalid(operation: String) = Failure(new WomExpressionException(s"Type evaluation cannot determine type from expression: $operation"))
+  def invalid(operation: String) = Failure(
+    new WomExpressionException(s"Type evaluation cannot determine type from expression: $operation")
+  )
   def add(rhs: WomType): Try[WomType] = invalid(s"$this + $rhs")
   def subtract(rhs: WomType): Try[WomType] = invalid(s"$this - $rhs")
   def multiply(rhs: WomType): Try[WomType] = invalid(s"$this * $rhs")
   def divide(rhs: WomType): Try[WomType] = invalid(s"$this / $rhs")
   def mod(rhs: WomType): Try[WomType] = invalid(s"$this % $rhs")
   def equalsType(rhs: WomType): Try[WomType] =
-    if(this == rhs)
+    if (this == rhs)
       Success(WomBooleanType)
     else
       invalid(s"$this == $rhs")
-  def notEquals(rhs: WomType): Try[WomType] = equalsType(rhs) map { _ => WomBooleanType}
+  def notEquals(rhs: WomType): Try[WomType] = equalsType(rhs) map { _ => WomBooleanType }
   def lessThan(rhs: WomType): Try[WomType] = invalid(s"$this < $rhs")
   def lessThanOrEqual(rhs: WomType): Try[WomType] = (lessThan(rhs), equalsType(rhs)) match {
-    case (Success(b:WomType), _) if b == WomBooleanType => Success(WomBooleanType)
-    case (_, Success(b:WomType)) if b == WomBooleanType => Success(WomBooleanType)
+    case (Success(b: WomType), _) if b == WomBooleanType => Success(WomBooleanType)
+    case (_, Success(b: WomType)) if b == WomBooleanType => Success(WomBooleanType)
     case (_, _) => invalid(s"$this <= $rhs")
   }
   def greaterThan(rhs: WomType): Try[WomType] = invalid(s"$this > $rhs")
   def greaterThanOrEqual(rhs: WomType): Try[WomType] = (greaterThan(rhs), equalsType(rhs)) match {
-    case (Success(b:WomType), _) if b == WomBooleanType => Success(WomBooleanType)
-    case (_, Success(b:WomType)) if b == WomBooleanType => Success(WomBooleanType)
+    case (Success(b: WomType), _) if b == WomBooleanType => Success(WomBooleanType)
+    case (_, Success(b: WomType)) if b == WomBooleanType => Success(WomBooleanType)
     case (_, _) => invalid(s"$this >= $rhs")
   }
   def or(rhs: WomType): Try[WomType] = invalid(s"$this || $rhs")
@@ -91,8 +100,14 @@ trait WomType {
 object WomType {
   /* This is in the order of coercion from non-wom types */
   val womTypeCoercionOrder: Seq[WomType] = Seq(
-    WomStringType, WomIntegerType, WomFloatType, WomPairType(WomAnyType, WomAnyType), WomMapType(WomAnyType, WomAnyType),
-    WomArrayType(WomAnyType), WomBooleanType, WomObjectType,
+    WomStringType,
+    WomIntegerType,
+    WomFloatType,
+    WomPairType(WomAnyType, WomAnyType),
+    WomMapType(WomAnyType, WomAnyType),
+    WomArrayType(WomAnyType),
+    WomBooleanType,
+    WomObjectType,
     // Putting optional type last means we'll only coerce to it for JsNull.
     // That should be OK because every other type X can coerce into X? later if it needs to.
     WomOptionalType(WomAnyType)
@@ -101,13 +116,12 @@ object WomType {
   def homogeneousTypeFromValues(values: Iterable[WomValue]): WomType =
     homogeneousTypeFromTypes(values.map(_.womType))
 
-  def homogeneousTypeFromTypes(types: Iterable[WomType]): WomType = {
+  def homogeneousTypeFromTypes(types: Iterable[WomType]): WomType =
     types.toSet match {
       case s if s.isEmpty => WomNothingType
       case s if s.size == 1 => s.head
       case _ => lowestCommonSubtype(types)
     }
-  }
 
   def lowestCommonSubtype(types: Iterable[WomType]): WomType = types match {
     case e if e.isEmpty => WomNothingType
@@ -121,14 +135,19 @@ object WomType {
   }
 
   private object ListOfPrimitives {
-    def unapply(types: Iterable[WomType]): Option[WomType] = {
+    def unapply(types: Iterable[WomType]): Option[WomType] =
       if (types.forall(_.isInstanceOf[WomPrimitiveType])) {
         firstCommonPrimitive(types)
       } else None
-    }
 
     val coercePriority = List(
-      WomStringType, WomSingleFileType, WomUnlistedDirectoryType, WomFloatType, WomIntegerType, WomBooleanType, WomObjectType
+      WomStringType,
+      WomSingleFileType,
+      WomUnlistedDirectoryType,
+      WomFloatType,
+      WomIntegerType,
+      WomBooleanType,
+      WomObjectType
     )
 
     private def firstCommonPrimitive(types: Iterable[WomType]): Option[WomType] = {
@@ -138,20 +157,18 @@ object WomType {
       // A type not in the incoming list but which everything could coerce to nonetheless:
       lazy val unsuppliedOption: Option[WomType] = coercePriority.find(p => types.forall(p.isCoerceableFrom))
 
-      coercePriority.find { p => suppliedOptions.toList.contains(p) } orElse { unsuppliedOption }
+      coercePriority.find(p => suppliedOptions.toList.contains(p)) orElse unsuppliedOption
     }
   }
 
-
   private object ListOfPairs {
-    def unapply(types: Iterable[WomType]): Option[WomPairType] = {
+    def unapply(types: Iterable[WomType]): Option[WomPairType] =
       if (types.forall(_.isInstanceOf[WomPairType])) {
         val pairs = types.map(_.asInstanceOf[WomPairType])
         val leftType = lowestCommonSubtype(pairs.map(_.leftType))
         val rightType = lowestCommonSubtype(pairs.map(_.rightType))
         Some(WomPairType(leftType, rightType))
       } else None
-    }
   }
 
   private object ListOfOptionals {
@@ -218,7 +235,7 @@ object WomType {
        that cannot be coerced.
 
       Equivalent to logic found in typeSpecificIsCoerceableFrom on WomArrayType.
-     */
+       */
      val memberType = lowestCommonSubtype(arrs.map(_.memberType).filterNot(_.equals(WomNothingType)))
      Some(WomArrayType(memberType))
    } else None
  }
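firstCommonPrimitive above prefers a type that was actually supplied, in coercePriority order, and only falls back to an unsupplied type that everything can coerce to. A standalone sketch with strings for types and an invented toy coercion relation (none of this is Cromwell's API):

    object CommonTypeSketch {
      val priority = List("String", "File", "Float", "Int", "Boolean")

      // Toy relation for illustration only: everything coerces to String,
      // Int coerces to Float, and every type coerces to itself.
      def coerceable(to: String, from: String): Boolean =
        to == from || to == "String" || (to == "Float" && from == "Int")

      def firstCommon(types: List[String]): Option[String] = {
        val supplied = priority.filter(p => types.contains(p) && types.forall(t => coerceable(p, t)))
        lazy val unsupplied = priority.find(p => types.forall(t => coerceable(p, t)))
        supplied.headOption.orElse(unsupplied)
      }

      def main(args: Array[String]): Unit =
        println(firstCommon(List("Int", "Float"))) // Some(Float): a supplied type wins over String
    }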
diff --git a/wom/src/main/scala/wom/types/WomTypeJsonFormatter.scala b/wom/src/main/scala/wom/types/WomTypeJsonFormatter.scala
index a8eb4a2905e..28110cf61b7 100644
--- a/wom/src/main/scala/wom/types/WomTypeJsonFormatter.scala
+++ b/wom/src/main/scala/wom/types/WomTypeJsonFormatter.scala
@@ -11,13 +11,11 @@ object WomTypeJsonFormatter extends DefaultJsonProtocol {
   }
 
   implicit object WorkflowInputJsonFormat extends RootJsonFormat[Map[FullyQualifiedName, InputDefinition]] {
-    def write(inputs: Map[FullyQualifiedName, InputDefinition]) = {
+    def write(inputs: Map[FullyQualifiedName, InputDefinition]) =
       JsObject(inputs map { case (fqn, input) =>
         val optional = if (input.optional) "(optional) " else ""
         fqn -> JsString(s"$optional${input.womType.stableName}")
       })
-    }
     def read(value: JsValue) = throw new UnsupportedOperationException
   }
 }
-
diff --git a/wom/src/main/scala/wom/types/coercion/defaults/package.scala b/wom/src/main/scala/wom/types/coercion/defaults/package.scala
index c76417ef385..c792970c2f8 100644
--- a/wom/src/main/scala/wom/types/coercion/defaults/package.scala
+++ b/wom/src/main/scala/wom/types/coercion/defaults/package.scala
@@ -14,20 +14,24 @@ package object defaults {
   implicit val womIntegerCoercer: WomTypeCoercer[WomInteger] = defaultCoercionForType[WomInteger](WomIntegerType)
   implicit val womFloatCoercer: WomTypeCoercer[WomFloat] = defaultCoercionForType[WomFloat](WomFloatType)
   implicit val womStringCoercer: WomTypeCoercer[WomString] = defaultCoercionForType[WomString](WomStringType)
-  implicit val womSingleFileCoercer: WomTypeCoercer[WomSingleFile] = defaultCoercionForType[WomSingleFile](WomSingleFileType)
+  implicit val womSingleFileCoercer: WomTypeCoercer[WomSingleFile] =
+    defaultCoercionForType[WomSingleFile](WomSingleFileType)
   implicit val womObjectCoercer: WomTypeCoercer[WomObject] = defaultCoercionForType[WomObject](WomObjectType)
   implicit val womOptionalOfAnyCoercer = defaultCoercionForType[WomOptionalValue](WomOptionalType(WomAnyType))
   implicit val womArrayOfAnyCoercer = defaultCoercionForType[WomArray](WomArrayType(WomAnyType))
   implicit val womMapOfAnyCoercer = defaultCoercionForType[WomMap](WomMapType(WomAnyType, WomAnyType))
 
-  implicit def womArrayTypeCoercer(arrayType: WomArrayType): WomTypeCoercer[WomArray] = defaultCoercionForType[WomArray](arrayType)
+  implicit def womArrayTypeCoercer(arrayType: WomArrayType): WomTypeCoercer[WomArray] =
+    defaultCoercionForType[WomArray](arrayType)
 
-  private def defaultCoercionForType[A](typeObject: WomType)(implicit classTag: ClassTag[A]): WomTypeCoercer[A] = new WomTypeCoercer[A] {
-    override def coerceToType(any: Any): ErrorOr[A] = typeObject.coerceRawValue(any).toErrorOr flatMap {
-      case womValue: A => womValue.validNel
-      case other => s"Bad coercion in ${getClass.getSimpleName}! Coercion should have created ${typeObject.stableName} but instead created ${other.womType.stableName}".invalidNel
+  private def defaultCoercionForType[A](typeObject: WomType)(implicit classTag: ClassTag[A]): WomTypeCoercer[A] =
+    new WomTypeCoercer[A] {
+      override def coerceToType(any: Any): ErrorOr[A] = typeObject.coerceRawValue(any).toErrorOr flatMap {
+        case womValue: A => womValue.validNel
+        case other =>
+          s"Bad coercion in ${getClass.getSimpleName}! Coercion should have created ${typeObject.stableName} but instead created ${other.womType.stableName}".invalidNel
+      }
+      override def coercionDefined(any: Any): Boolean = typeObject.coercionDefined(any)
+      override def toDisplayString: String = typeObject.stableName
     }
-    override def coercionDefined(any: Any): Boolean = typeObject.coercionDefined(any)
-    override def toDisplayString: String = typeObject.stableName
-  }
 }
diff --git a/wom/src/main/scala/wom/util/YamlUtils.scala b/wom/src/main/scala/wom/util/YamlUtils.scala
index 50dffc0457d..6375006cd7f 100644
--- a/wom/src/main/scala/wom/util/YamlUtils.scala
+++ b/wom/src/main/scala/wom/util/YamlUtils.scala
@@ -36,7 +36,7 @@ object YamlUtils {
   def parse(yaml: String,
             maxNodes: Int Refined NonNegative = defaultMaxNodes,
             maxDepth: Int Refined NonNegative = defaultMaxDepth
-           ): Either[ParsingFailure, Json] = {
+  ): Either[ParsingFailure, Json] =
     try {
       val yamlConstructor = new SafeConstructor(loaderOptions)
       val yamlComposer = new MaxDepthComposer(yaml, maxDepth)
@@ -53,16 +53,14 @@ object YamlUtils {
       case exception: Exception =>
         Left(ParsingFailure(exception.getMessage, exception))
     }
-  }
 
-  private[util] implicit val refinedNonNegativeReader: ValueReader[Int Refined NonNegative] = {
-    (config: Config, path: String) => {
+  implicit private[util] val refinedNonNegativeReader: ValueReader[Int Refined NonNegative] = {
+    (config: Config, path: String) =>
       val int = config.getInt(path)
       refineV[NonNegative](int) match {
         case Left(error) => throw new BadValue(path, error)
         case Right(refinedInt) => refinedInt
       }
-    }
   }
 
   private val yamlConfig = ConfigFactory.load().getConfig("yaml")
@@ -82,11 +80,11 @@ object YamlUtils {
 
   /** Extends SnakeYaml's Composer checking for a maximum depth before a StackOverflowError occurs. */
   private class MaxDepthComposer(yaml: String, maxDepth: Int Refined NonNegative)
-    extends Composer(
-      new ParserImpl(new StreamReader(new StringReader(yaml)), loaderOptions),
-      new Resolver(),
-      loaderOptions
-    ) {
+      extends Composer(
+        new ParserImpl(new StreamReader(new StringReader(yaml)), loaderOptions),
+        new Resolver(),
+        loaderOptions
+      ) {
 
     private val depth = new Counter
 
@@ -99,29 +97,23 @@ object YamlUtils {
       result
     }
 
-    override def composeScalarNode(anchor: String, blockComments: util.List[CommentLine]): Node = {
+    override def composeScalarNode(anchor: String, blockComments: util.List[CommentLine]): Node =
       checkDepth(super.composeScalarNode(anchor, blockComments))
-    }
 
-    override def composeSequenceNode(anchor: String): Node = {
+    override def composeSequenceNode(anchor: String): Node =
       checkDepth(super.composeSequenceNode(anchor))
-    }
 
-    override def composeMappingNode(anchor: String): Node = {
+    override def composeMappingNode(anchor: String): Node =
      checkDepth(super.composeMappingNode(anchor))
-    }
 
-    override def composeMappingChildren(children: util.List[NodeTuple], node: MappingNode): Unit = {
+    override def composeMappingChildren(children: util.List[NodeTuple], node: MappingNode): Unit =
       checkDepth(super.composeMappingChildren(children, node))
-    }
 
-    override def composeKeyNode(node: MappingNode): Node = {
+    override def composeKeyNode(node: MappingNode): Node =
       checkDepth(super.composeKeyNode(node))
-    }
 
-    override def composeValueNode(node: MappingNode): Node = {
+    override def composeValueNode(node: MappingNode): Node =
       checkDepth(super.composeValueNode(node))
-    }
   }
 
   /** A "pointer" reference to a mutable count.
     */
@@ -147,7 +139,8 @@ object YamlUtils {
   private def searchForOversizedYaml(node: AnyRef,
                                      identitySet: java.util.Set[AnyRef],
                                      maxNodes: Int Refined NonNegative,
-                                     counter: Counter): Unit = {
+                                     counter: Counter
+  ): Unit = {
     if (!identitySet.add(node)) {
       throw new IllegalArgumentException("Loop detected")
     }
@@ -158,15 +151,14 @@ object YamlUtils {
     }
 
     node match {
-      case iterable: java.lang.Iterable[AnyRef]@unchecked =>
+      case iterable: java.lang.Iterable[AnyRef] @unchecked =>
         iterable.asScala foreach {
           searchForOversizedYaml(_, identitySet, maxNodes, counter)
         }
-      case map: java.util.Map[AnyRef, AnyRef]@unchecked =>
-        map.asScala foreach {
-          case (key, value) =>
-            searchForOversizedYaml(key, identitySet, maxNodes, counter)
-            searchForOversizedYaml(value, identitySet, maxNodes, counter)
+      case map: java.util.Map[AnyRef, AnyRef] @unchecked =>
+        map.asScala foreach { case (key, value) =>
+          searchForOversizedYaml(key, identitySet, maxNodes, counter)
+          searchForOversizedYaml(value, identitySet, maxNodes, counter)
         }
       case _ => /* ignore scalars, only loop through Yaml sequences and mappings: https://yaml.org/spec/1.1/#id861435 */
     }
WomArrayLike with TsvSerializable { +sealed abstract case class WomArray(womType: WomArrayType, value: Seq[WomValue]) + extends WomValue + with WomArrayLike + with TsvSerializable { val nonEmpty = value.nonEmpty override def toWomString: String = s"[${value.map(_.toWomString).mkString(", ")}]" override def toString = toWomString - def map[R <: WomValue](f: WomValue => R): WomArray = { - value.map{f} match { + def map[R <: WomValue](f: WomValue => R): WomArray = + value.map(f) match { case s: Seq[R] if s.nonEmpty => WomArray(WomArrayType(s.head.womType), s) case _ => this } - } - def traverse[R <: WomValue, G[_]](f: WomValue => G[R])(implicit applicative: Applicative[G]): G[WomArray] = { + def traverse[R <: WomValue, G[_]](f: WomValue => G[R])(implicit applicative: Applicative[G]): G[WomArray] = if (value.isEmpty) applicative.pure(this) else { applicative.map(value.toList.traverse(f)) { mapped => WomArray(WomArrayType(mapped.head.womType), mapped) } } - } - override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverse(_.initialize(ioFunctionSet)).widen + override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverse( + _.initialize(ioFunctionSet) + ).widen def size = value.size - def tsvSerialize: Try[String] = { + def tsvSerialize: Try[String] = womType.memberType match { case _: WomPrimitiveType => Success(value.map(_.valueString).mkString(start = "", sep = "\n", end = "\n")) case WomObjectType => WomObject.tsvSerializeArray(value map { _.asInstanceOf[WomObject] }) case WomArrayType(_: WomPrimitiveType) => - val tsvString = value.collect({ case a: WomArray => a }) map { a => - a.value.collect({ case p: WomPrimitive => p.valueString }).mkString(start = "", sep = "\t", end = "\n") + val tsvString = value.collect { case a: WomArray => a } map { a => + a.value.collect { case p: WomPrimitive => p.valueString }.mkString(start = "", sep = "\t", end = "\n") } mkString Success(tsvString) - case _ => Failure(new UnsupportedOperationException(s"Cannot TSV serialize a ${this.womType.stableName} (valid types are Array[Primitive], Array[Array[Primitive]], or Array[Object])")) + case _ => + Failure( + new UnsupportedOperationException( + s"Cannot TSV serialize a ${this.womType.stableName} (valid types are Array[Primitive], Array[Array[Primitive]], or Array[Object])" + ) + ) } - } - override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = { + override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = value flatMap { _.collectAsSeq(filterFn) } - } // For WomArrayLike: override val arrayType: WomArrayType = womType diff --git a/wom/src/main/scala/wom/values/WomBoolean.scala b/wom/src/main/scala/wom/values/WomBoolean.scala index 1474819436c..f45587ae16f 100644 --- a/wom/src/main/scala/wom/values/WomBoolean.scala +++ b/wom/src/main/scala/wom/values/WomBoolean.scala @@ -14,23 +14,23 @@ object WomBoolean { /** The constructor is private to force access through the companion * object `apply` which ensures the use of one of the canonical instances. 
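  * One observable consequence (a sketch, assuming `apply` hands back the cached
  * instances as described above):
  * {{{
  * WomBoolean(true) eq WomBoolean.True // expected: true
  * }}}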
*/ -class WomBoolean private(val value: Boolean) extends WomPrimitive { +class WomBoolean private (val value: Boolean) extends WomPrimitive { val womType = WomBooleanType override def equals(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomBoolean => Success(WomBoolean(value == r.value)) + case r: WomBoolean => Success(WomBoolean(value == r.value)) case r: WomOptionalValue => evaluateIfDefined("==", r, equals) case _ => invalid(s"$value || $rhs") } override def lessThan(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomBoolean => Success(WomBoolean(value < r.value)) + case r: WomBoolean => Success(WomBoolean(value < r.value)) case r: WomOptionalValue => evaluateIfDefined("<", r, lessThan) case _ => invalid(s"$value < $rhs") } override def greaterThan(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomBoolean => Success(WomBoolean(value > r.value)) + case r: WomBoolean => Success(WomBoolean(value > r.value)) case r: WomOptionalValue => evaluateIfDefined(">", r, greaterThan) case _ => invalid(s"$value > $rhs") } @@ -42,7 +42,7 @@ class WomBoolean private(val value: Boolean) extends WomPrimitive { } override def and(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomBoolean => Success(WomBoolean(value && r.value)) + case r: WomBoolean => Success(WomBoolean(value && r.value)) case r: WomOptionalValue => evaluateIfDefined("&&", r, and) case _ => invalid(s"$value && $rhs") } diff --git a/wom/src/main/scala/wom/values/WomCoproductValue.scala b/wom/src/main/scala/wom/values/WomCoproductValue.scala index 44274dcf5fa..1973a5e363a 100644 --- a/wom/src/main/scala/wom/values/WomCoproductValue.scala +++ b/wom/src/main/scala/wom/values/WomCoproductValue.scala @@ -4,7 +4,6 @@ import wom.types.WomCoproductType case class WomCoproductValue(womType: WomCoproductType, womValue: WomValue) extends WomValue { - override def toWomString: String = { + override def toWomString: String = womValue.toWomString - } } diff --git a/wom/src/main/scala/wom/values/WomFile.scala b/wom/src/main/scala/wom/values/WomFile.scala index 4638a712404..bcf7484ef33 100644 --- a/wom/src/main/scala/wom/values/WomFile.scala +++ b/wom/src/main/scala/wom/values/WomFile.scala @@ -88,7 +88,7 @@ sealed trait WomFile extends WomValue { * * WomPrimitiveFile instances return just the instance. */ - def flattenFiles: Seq[WomPrimitiveFile] = { + def flattenFiles: Seq[WomPrimitiveFile] = this match { case womMaybeListedDirectory: WomMaybeListedDirectory => womMaybeListedDirectory.listingOption.getOrElse(Nil).toList match { @@ -97,24 +97,21 @@ sealed trait WomFile extends WomValue { } case womMaybePopulatedFile: WomMaybePopulatedFile => val primaryFiles: Seq[WomPrimitiveFile] = womMaybePopulatedFile.valueOption.toList.map(WomSingleFile) - womMaybePopulatedFile.secondaryFiles.foldLeft(primaryFiles) { - (womFiles, secondaryFile) => - womFiles ++ secondaryFile.flattenFiles + womMaybePopulatedFile.secondaryFiles.foldLeft(primaryFiles) { (womFiles, secondaryFile) => + womFiles ++ secondaryFile.flattenFiles } case womPrimitiveFile: WomPrimitiveFile => List(womPrimitiveFile) } - } /** * If relevant, load the size of the file. 
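   *
   * The default below is a no-op returning `this`; file-backed subtypes override it to ask the
   * `IoFunctionSet` for a size. A sketch, with a hypothetical `ioFunctions` in scope:
   * {{{
   * womFile.withSize(ioFunctions).map(_.sizeOption) // IO[Option[Long]]
   * }}}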
*/ def withSize(ioFunctionSet: IoFunctionSet): IO[WomFile] = IO.pure(this) - + def sizeOption: Option[Long] = None - protected def recoverFileNotFound[A](fallback: A)(t: Throwable): IO[A] = { + protected def recoverFileNotFound[A](fallback: A)(t: Throwable): IO[A] = if (isFileNotFound(t)) IO.pure(fallback) else IO.raiseError(t) - } def isFileNotFound(t: Throwable): Boolean = t match { case _: NoSuchFileException | _: FileNotFoundException => true @@ -124,7 +121,7 @@ sealed trait WomFile extends WomValue { } object WomFile { - def apply(fileType: WomFileType, value: String) = { + def apply(fileType: WomFileType, value: String) = fileType match { case WomUnlistedDirectoryType => WomUnlistedDirectory(value) case WomSingleFileType => WomSingleFile(value) @@ -132,7 +129,6 @@ object WomFile { case WomMaybeListedDirectoryType => WomMaybeListedDirectory(value) case WomMaybePopulatedFileType => WomMaybePopulatedFile(value) } - } } sealed trait WomPrimitiveFile extends WomFile with WomPrimitive @@ -163,13 +159,11 @@ final case class WomUnlistedDirectory(value: String) extends WomPrimitiveFile { case _ => invalid(s"$value == $rhs") } - override def mapFile(f: String => String): WomUnlistedDirectory = { + override def mapFile(f: String => String): WomUnlistedDirectory = this.copy(value = f(value)) - } - override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = { + override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = f.applyOrElse[WomFile, WomFile](this, identity) - } override def mapWomFile(f: WomFile => String) = this.copy(value = f(this)) } @@ -197,21 +191,18 @@ final case class WomSingleFile(value: String) extends WomPrimitiveFile { case _ => invalid(s"$value == $rhs") } - override def mapFile(f: String => String): WomSingleFile = { + override def mapFile(f: String => String): WomSingleFile = this.copy(value = f(value)) - } - override def mapWomFile(f: WomFile => String) = { + override def mapWomFile(f: WomFile => String) = this.copy(value = f(this)) - } - override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = { + override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = f.applyOrElse[WomFile, WomFile](this, identity) - } override def withSize(ioFunctionSet: IoFunctionSet): IO[WomFile] = { implicit def cs = ioFunctionSet.cs - IO.fromFuture(IO { ioFunctionSet.size(value)}) + IO.fromFuture(IO(ioFunctionSet.size(value))) .map(Option.apply) .handleErrorWith(recoverFileNotFound(None)) .map(s => WomMaybePopulatedFile(valueOption = Option(value), sizeOption = s)) @@ -250,38 +241,37 @@ final case class WomGlobFile(value: String) extends WomPrimitiveFile { override def mapWomFile(f: WomFile => String) = this.copy(value = f(this)) - override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = { + override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = f.applyOrElse[WomFile, WomFile](this, identity) - } } - /** * A directory possibly with a listing of other files/directories. * * @param valueOption The location of the directory, possibly in the cloud. * @param listingOption An optional listing of files/directories, either supplied by a user or generated by the engine. 
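 *
 * A minimal construction with a hypothetical location; every other field keeps its default:
 * {{{
 * WomMaybeListedDirectory(valueOption = Option("gs://bucket/dir"))
 * }}}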
*/ -case class WomMaybeListedDirectory(valueOption: Option[String] = None, - listingOption: Option[Seq[WomFile]] = None, - basename: Option[String] = None, - initializeFunction: WomMaybeListedDirectory => IoFunctionSet => IOChecked[WomValue] = { dir => _ => IOChecked.pure(dir) }) extends WomFile { - override def value: String = { - valueOption.getOrElse(throw new UnsupportedOperationException(s"value is not available: $this")) +case class WomMaybeListedDirectory( + valueOption: Option[String] = None, + listingOption: Option[Seq[WomFile]] = None, + basename: Option[String] = None, + initializeFunction: WomMaybeListedDirectory => IoFunctionSet => IOChecked[WomValue] = { dir => _ => + IOChecked.pure(dir) } +) extends WomFile { + override def value: String = + valueOption.getOrElse(throw new UnsupportedOperationException(s"value is not available: $this")) override val womFileType: WomFileType = WomMaybeListedDirectoryType // TODO: WOM: WOMFILE: This isn't even close to a WDL representation (and maybe belongs in WDL?) of this class, but w/o it other areas of the code crash override def toWomString = s""""$value"""" - override def mapFile(f: String => String): WomMaybeListedDirectory = { + override def mapFile(f: String => String): WomMaybeListedDirectory = this.copy(valueOption = valueOption.map(f), listingOption.map(_.map(_.mapFile(f)))) - } - override def mapWomFile(f: WomFile => String) = { + override def mapWomFile(f: WomFile => String) = this.copy(valueOption = Option(f(this)), listingOption.map(_.map(_.mapWomFile(f)))) - } override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = { val copy = this.copy(listingOption = listingOption.map(_.map(_.collect(f)))) @@ -291,16 +281,17 @@ case class WomMaybeListedDirectory(valueOption: Option[String] = None, override def withSize(ioFunctionSet: IoFunctionSet): IO[WomFile] = { import ioFunctionSet.cs - listingOption.map({ - _.toList - .parTraverse[IO, WomFile](_.withSize(ioFunctionSet)) - .map(listingWithSize => this.copy(listingOption = Option(listingWithSize))) - }) + listingOption + .map { + _.toList + .parTraverse[IO, WomFile](_.withSize(ioFunctionSet)) + .map(listingWithSize => this.copy(listingOption = Option(listingWithSize))) + } .getOrElse(IO.pure(this)) } override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = initializeFunction(this)(ioFunctionSet) - + override def sizeOption = listingOption.map(_.flatMap(_.sizeOption).sum) } @@ -324,24 +315,23 @@ case class WomMaybePopulatedFile(valueOption: Option[String] = None, formatOption: Option[String] = None, contentsOption: Option[String] = None, secondaryFiles: Seq[WomFile] = Vector.empty, - initializeFunction: WomMaybePopulatedFile => IoFunctionSet => IOChecked[WomValue] = { file => _ => IOChecked.pure(file) } - ) extends WomFile { - override def value: String = { + initializeFunction: WomMaybePopulatedFile => IoFunctionSet => IOChecked[WomValue] = { + file => _ => IOChecked.pure(file) + } +) extends WomFile { + override def value: String = valueOption.getOrElse(throw new UnsupportedOperationException(s"value is not available: $this")) - } override val womFileType: WomFileType = WomMaybePopulatedFileType // TODO: WOM: WOMFILE: This isn't even close to a WDL representation (and maybe belongs in WDL?) 
of this class, but w/o it other areas of the code crash override def toWomString = s""""$value"""" - override def mapFile(f: String => String): WomMaybePopulatedFile = { + override def mapFile(f: String => String): WomMaybePopulatedFile = this.copy(valueOption = valueOption.map(f), secondaryFiles = secondaryFiles.map(_.mapFile(f))) - } - override def mapWomFile(f: WomFile => String): WomMaybePopulatedFile = { + override def mapWomFile(f: WomFile => String): WomMaybePopulatedFile = this.copy(valueOption = Option(f(this)), secondaryFiles = secondaryFiles.map(_.mapWomFile(f))) - } override def collect(f: PartialFunction[WomFile, WomFile]): WomFile = { val copy = this.copy(secondaryFiles = secondaryFiles.map(_.collect(f))) @@ -354,9 +344,12 @@ case class WomMaybePopulatedFile(valueOption: Option[String] = None, val ioSize: IO[Option[Long]] = (sizeOption, contentsOption) match { case (Some(s), _) => IO.pure(Option(s)) case (None, Some(contents)) => IO.pure(Option(contents.length.toLong)) - case _ => IO.fromFuture(IO { ioFunctionSet.size(value) }).map(Option.apply).handleErrorWith(recoverFileNotFound(sizeOption)) + case _ => + IO.fromFuture(IO(ioFunctionSet.size(value))) + .map(Option.apply) + .handleErrorWith(recoverFileNotFound(sizeOption)) } - + for { size <- ioSize secondaryFilesWithSize <- secondaryFiles.toList.parTraverse[IO, WomFile](_.withSize(ioFunctionSet)) diff --git a/wom/src/main/scala/wom/values/WomFloat.scala b/wom/src/main/scala/wom/values/WomFloat.scala index 08ce5ed912d..74f1dd82fb2 100644 --- a/wom/src/main/scala/wom/values/WomFloat.scala +++ b/wom/src/main/scala/wom/values/WomFloat.scala @@ -7,75 +7,67 @@ import scala.util.{Failure, Success, Try} case class WomFloat(value: Double) extends WomPrimitive { val womType = WomFloatType - override def add(rhs: WomValue): Try[WomValue] = { + override def add(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomFloat => Success(WomFloat(value + r.value)) - case r:WomInteger => Success(WomFloat(value + r.value)) - case r:WomString => Success(WomString(s"$value${r.value}")) + case r: WomFloat => Success(WomFloat(value + r.value)) + case r: WomInteger => Success(WomFloat(value + r.value)) + case r: WomString => Success(WomString(s"$value${r.value}")) case r: WomOptionalValue => evaluateIfDefined("+", r, add) case _ => invalid(s"$this + $rhs") } - } - override def subtract(rhs: WomValue): Try[WomValue] = { + override def subtract(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomFloat => Success(WomFloat(value - r.value)) - case r:WomInteger => Success(WomFloat(value - r.value)) + case r: WomFloat => Success(WomFloat(value - r.value)) + case r: WomInteger => Success(WomFloat(value - r.value)) case r: WomOptionalValue => evaluateIfDefined("-", r, subtract) case _ => invalid(s"$this - $rhs") } - } - override def multiply(rhs: WomValue): Try[WomValue] = { + override def multiply(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomFloat => Success(WomFloat(value * r.value)) - case r:WomInteger => Success(WomFloat(value * r.value)) + case r: WomFloat => Success(WomFloat(value * r.value)) + case r: WomInteger => Success(WomFloat(value * r.value)) case r: WomOptionalValue => evaluateIfDefined("*", r, multiply) case _ => invalid(s"$this * $rhs") } - } - override def divide(rhs: WomValue): Try[WomValue] = { + override def divide(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomFloat if r.value == 0.0 => Failure(new WomExpressionException("Divide by zero")) - case r:WomFloat => Success(WomFloat(value / r.value)) - case 
r:WomInteger if r.value == 0 => Failure(new WomExpressionException("Divide by zero")) - case r:WomInteger => Success(WomFloat(value / r.value)) + case r: WomFloat if r.value == 0.0 => Failure(new WomExpressionException("Divide by zero")) + case r: WomFloat => Success(WomFloat(value / r.value)) + case r: WomInteger if r.value == 0 => Failure(new WomExpressionException("Divide by zero")) + case r: WomInteger => Success(WomFloat(value / r.value)) case r: WomOptionalValue => evaluateIfDefined("/", r, divide) case _ => invalid(s"$this / $rhs") } - } - override def mod(rhs: WomValue): Try[WomValue] = { + override def mod(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomFloat if r.value == 0.0 => Failure(new WomExpressionException("Divide by zero")) - case r:WomFloat => Success(WomFloat(value % r.value)) - case r:WomInteger if r.value == 0 => Failure(new WomExpressionException("Divide by zero")) - case r:WomInteger => Success(WomFloat(value % r.value)) + case r: WomFloat if r.value == 0.0 => Failure(new WomExpressionException("Divide by zero")) + case r: WomFloat => Success(WomFloat(value % r.value)) + case r: WomInteger if r.value == 0 => Failure(new WomExpressionException("Divide by zero")) + case r: WomInteger => Success(WomFloat(value % r.value)) case r: WomOptionalValue => evaluateIfDefined("%", r, mod) case _ => invalid(s"$this % $rhs") } - } - override def equals(rhs: WomValue): Try[WomBoolean] = { + override def equals(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomFloat => Success(WomBoolean(value == r.value)) - case r:WomInteger => Success(WomBoolean(value == r.value)) + case r: WomFloat => Success(WomBoolean(value == r.value)) + case r: WomInteger => Success(WomBoolean(value == r.value)) case r: WomOptionalValue => evaluateIfDefined("==", r, equals) case _ => invalid(s"$this == $rhs") } - } - override def lessThan(rhs: WomValue): Try[WomBoolean] = { + override def lessThan(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomFloat => Success(WomBoolean(value < r.value)) - case r:WomInteger => Success(WomBoolean(value < r.value)) + case r: WomFloat => Success(WomBoolean(value < r.value)) + case r: WomInteger => Success(WomBoolean(value < r.value)) case r: WomOptionalValue => evaluateIfDefined("<", r, lessThan) case _ => invalid(s"$this < $rhs") } - } - override def greaterThan(rhs: WomValue): Try[WomBoolean] = { + override def greaterThan(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomFloat => Success(WomBoolean(value > r.value)) - case r:WomInteger => Success(WomBoolean(value > r.value)) + case r: WomFloat => Success(WomBoolean(value > r.value)) + case r: WomInteger => Success(WomBoolean(value > r.value)) case r: WomOptionalValue => evaluateIfDefined(">", r, greaterThan) case _ => invalid(s"$this > $rhs") } - } override def unaryPlus: Try[WomValue] = Success(WomFloat(math.abs(value))) override def unaryMinus: Try[WomValue] = Success(WomFloat(-value)) override def toWomString = value.toString diff --git a/wom/src/main/scala/wom/values/WomInteger.scala b/wom/src/main/scala/wom/values/WomInteger.scala index 1bcc2e117ac..68e99c93d80 100644 --- a/wom/src/main/scala/wom/values/WomInteger.scala +++ b/wom/src/main/scala/wom/values/WomInteger.scala @@ -9,62 +9,64 @@ case class WomInteger(value: Int) extends WomPrimitive { val womType = WomIntegerType override def add(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomInteger => Success(WomInteger(value + r.value)) - case r:WomString => Success(WomString(s"$value${r.value}")) - case r:WomFloat => 
Success(WomFloat(value + r.value)) + case r: WomInteger => Success(WomInteger(value + r.value)) + case r: WomString => Success(WomString(s"$value${r.value}")) + case r: WomFloat => Success(WomFloat(value + r.value)) case r: WomOptionalValue => evaluateIfDefined("+", r, add) case _ => invalid(s"$value + $rhs") } override def subtract(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomInteger => Success(WomInteger(value - r.value)) - case r:WomFloat => Success(WomFloat(value - r.value)) + case r: WomInteger => Success(WomInteger(value - r.value)) + case r: WomFloat => Success(WomFloat(value - r.value)) case r: WomOptionalValue => evaluateIfDefined("-", r, subtract) case _ => invalid(s"$value - $rhs") } override def multiply(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomInteger => Success(WomInteger(value * r.value)) - case r:WomFloat => Success(WomFloat(value * r.value)) + case r: WomInteger => Success(WomInteger(value * r.value)) + case r: WomFloat => Success(WomFloat(value * r.value)) case r: WomOptionalValue => evaluateIfDefined("*", r, multiply) case _ => invalid(s"$value * $rhs") } override def divide(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomInteger if r.value == 0 => Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) - case r:WomInteger => Success(WomInteger(value / r.value)) - case r:WomFloat if r.value == 0.toDouble => Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) - case r:WomFloat => Success(WomFloat(value / r.value)) + case r: WomInteger if r.value == 0 => Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) + case r: WomInteger => Success(WomInteger(value / r.value)) + case r: WomFloat if r.value == 0.toDouble => + Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) + case r: WomFloat => Success(WomFloat(value / r.value)) case r: WomOptionalValue => evaluateIfDefined("/", r, divide) case _ => invalid(s"$value / $rhs") } override def mod(rhs: WomValue): Try[WomValue] = rhs match { - case r:WomInteger if r.value == 0 => Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) - case r:WomInteger => Success(WomInteger(value % r.value)) - case r:WomFloat if r.value == 0.toDouble => Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) - case r:WomFloat => Success(WomFloat(value % r.value)) + case r: WomInteger if r.value == 0 => Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) + case r: WomInteger => Success(WomInteger(value % r.value)) + case r: WomFloat if r.value == 0.toDouble => + Failure(new WomExpressionException(s"Divide by zero error: $value / $rhs")) + case r: WomFloat => Success(WomFloat(value % r.value)) case r: WomOptionalValue => evaluateIfDefined("%", r, mod) case _ => invalid(s"$value % $rhs") } override def equals(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomInteger => Success(WomBoolean(value == r.value)) - case r:WomFloat => Success(WomBoolean(value == r.value)) + case r: WomInteger => Success(WomBoolean(value == r.value)) + case r: WomFloat => Success(WomBoolean(value == r.value)) case r: WomOptionalValue => evaluateIfDefined("==", r, equals) case _ => invalid(s"$value == $rhs") } override def lessThan(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomInteger => Success(WomBoolean(value < r.value)) - case r:WomFloat => Success(WomBoolean(value < r.value)) + case r: WomInteger => Success(WomBoolean(value < r.value)) + case r: WomFloat => 
Success(WomBoolean(value < r.value)) case r: WomOptionalValue => evaluateIfDefined("<", r, lessThan) case _ => invalid(s"$value < $rhs") } override def greaterThan(rhs: WomValue): Try[WomBoolean] = rhs match { - case r:WomInteger => Success(WomBoolean(value > r.value)) - case r:WomFloat => Success(WomBoolean(value > r.value)) + case r: WomInteger => Success(WomBoolean(value > r.value)) + case r: WomFloat => Success(WomBoolean(value > r.value)) case r: WomOptionalValue => evaluateIfDefined(">", r, greaterThan) case _ => invalid(s"$value > $rhs") } diff --git a/wom/src/main/scala/wom/values/WomLong.scala b/wom/src/main/scala/wom/values/WomLong.scala index b9f567a359f..3b8ef661b85 100644 --- a/wom/src/main/scala/wom/values/WomLong.scala +++ b/wom/src/main/scala/wom/values/WomLong.scala @@ -1,6 +1,6 @@ package wom.values -import wom.types.{WomLongType} +import wom.types.WomLongType case class WomLong(value: Long) extends WomPrimitive { val womType = WomLongType diff --git a/wom/src/main/scala/wom/values/WomMap.scala b/wom/src/main/scala/wom/values/WomMap.scala index 5d42b1d3364..97748c8d24a 100644 --- a/wom/src/main/scala/wom/values/WomMap.scala +++ b/wom/src/main/scala/wom/values/WomMap.scala @@ -16,11 +16,15 @@ import scala.util.{Failure, Success, Try} object WomMap { def coerceMap(m: Map[_, _], womMapType: WomMapType): WomMap = { - val coerced: Map[Try[WomValue], Try[WomValue]] = m map { case(k, v) => womMapType.keyType.coerceRawValue(k) -> womMapType.valueType.coerceRawValue(v) } - val failures = coerced flatMap { case(k,v) => Seq(k,v) } collect { case f:Failure[_] => f } + val coerced: Map[Try[WomValue], Try[WomValue]] = m map { case (k, v) => + womMapType.keyType.coerceRawValue(k) -> womMapType.valueType.coerceRawValue(v) + } + val failures = coerced flatMap { case (k, v) => Seq(k, v) } collect { case f: Failure[_] => f } failures match { case f: Iterable[Failure[_]] if f.nonEmpty => - throw new UnsupportedOperationException(s"Failed to coerce one or more keys or values for creating a ${womMapType.stableName}:\n${TryUtil.stringifyFailures(f)}}") + throw new UnsupportedOperationException( + s"Failed to coerce one or more keys or values for creating a ${womMapType.stableName}:\n${TryUtil.stringifyFailures(f)}}" + ) case _ => val mapCoerced = coerced map { case (k, v) => k.get -> v.get } @@ -31,14 +35,14 @@ object WomMap { } } - def fromTsv(tsv: String, womMapType: WomMapType = WomMapType(WomAnyType, WomAnyType)): Try[WomMap] = { + def fromTsv(tsv: String, womMapType: WomMapType = WomMapType(WomAnyType, WomAnyType)): Try[WomMap] = FileUtil.parseTsv(tsv) match { case Success(table) if table.isEmpty => Success(WomMap(womMapType, Map.empty[WomValue, WomValue])) - case Success(table) if table.head.length != 2 => Failure(new UnsupportedOperationException("TSV must be 2 columns to convert to a Map")) + case Success(table) if table.head.length != 2 => + Failure(new UnsupportedOperationException("TSV must be 2 columns to convert to a Map")) case Success(table) => Try(coerceMap(table.map(row => row(0) -> row(1)).toMap, womMapType)) case Failure(e) => Failure(e) } - } def apply(m: Map[WomValue, WomValue]): WomMap = { val keyType = WomType.lowestCommonSubtype(m.keys.map(_.womType)) @@ -47,27 +51,38 @@ object WomMap { } } -final case class WomMap private(womType: WomMapType, value: Map[WomValue, WomValue]) extends WomValue with WomArrayLike with TsvSerializable { +final case class WomMap private (womType: WomMapType, value: Map[WomValue, WomValue]) + extends WomValue + with WomArrayLike + with 
TsvSerializable { val typesUsedInKey = value.map { case (k, _) => k.womType }.toSet if (typesUsedInKey.size == 1 && typesUsedInKey.head != womType.keyType) - throw new UnsupportedOperationException(s"Could not construct a $womType with map keys of unexpected type: [${value.keys.mkString(", ")}]") + throw new UnsupportedOperationException( + s"Could not construct a $womType with map keys of unexpected type: [${value.keys.mkString(", ")}]" + ) if (typesUsedInKey.size > 1) - throw new UnsupportedOperationException(s"Cannot construct $womType with mixed types in map keys: [${value.keys.mkString(", ")}]") + throw new UnsupportedOperationException( + s"Cannot construct $womType with mixed types in map keys: [${value.keys.mkString(", ")}]" + ) val typesUsedInValue = value.map { case (_, v) => v.womType }.toSet if (typesUsedInValue.size == 1 && typesUsedInValue.head != womType.valueType) - throw new UnsupportedOperationException(s"Could not construct a $womType with map values of unexpected type: [${value.values.mkString(", ")}]") + throw new UnsupportedOperationException( + s"Could not construct a $womType with map values of unexpected type: [${value.values.mkString(", ")}]" + ) if (typesUsedInValue.size > 1) - throw new UnsupportedOperationException(s"Cannot construct $womType with mixed types in map values: [${value.values.mkString(", ")}]") + throw new UnsupportedOperationException( + s"Cannot construct $womType with mixed types in map values: [${value.values.mkString(", ")}]" + ) override def toWomString: String = - "{" + value.map {case (k,v) => s"${k.toWomString}: ${v.toWomString}"}.mkString(", ") + "}" + "{" + value.map { case (k, v) => s"${k.toWomString}: ${v.toWomString}" }.mkString(", ") + "}" - def tsvSerialize: Try[String] = { + def tsvSerialize: Try[String] = (womType.keyType, womType.valueType) match { case (_: WomPrimitiveType, _: WomPrimitiveType) if value.isEmpty => // WDL 1.1 spec on `write_map()` https://github.com/openwdl/wdl/blob/main/versions/1.1/SPEC.md#file-write_mapmapstring-string @@ -76,39 +91,42 @@ final case class WomMap private(womType: WomMapType, value: Map[WomValue, WomVal Success("") case (_: WomPrimitiveType, _: WomPrimitiveType) => // "All lines are terminated by the newline (\n) character." 
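          // A sketch of the serialized form for a hypothetical map {"a" -> "1", "b" -> "2"}:
          //   "a\t1\nb\t2\n"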
- Success(value.map({case (k, v) => s"${k.valueString}\t${v.valueString}"}).mkString(start="", sep="\n", end="\n")) + Success( + value + .map { case (k, v) => s"${k.valueString}\t${v.valueString}" } + .mkString(start = "", sep = "\n", end = "\n") + ) case _ => Failure(new UnsupportedOperationException("Can only TSV serialize a Map[Primitive, Primitive]")) } - } - def map(f: PartialFunction[(WomValue, WomValue), (WomValue, WomValue)]): WomMap = { + def map(f: PartialFunction[(WomValue, WomValue), (WomValue, WomValue)]): WomMap = value map f match { case m: Map[WomValue, WomValue] if m.nonEmpty => WomMap(WomMapType(m.head._1.womType, m.head._2.womType), m) case _ => this } - } - def traverseValues[R <: WomValue, G[_]](f: WomValue => G[R])(implicit applicative: Applicative[G]): G[WomMap] = { + def traverseValues[R <: WomValue, G[_]](f: WomValue => G[R])(implicit applicative: Applicative[G]): G[WomMap] = if (value.isEmpty) applicative.pure(this) else { - val traverseFunction: (WomValue, WomValue) => G[(WomValue, R)] = { - case (key, v) => applicative.map(f(v)) { key -> _ } + val traverseFunction: (WomValue, WomValue) => G[(WomValue, R)] = { case (key, v) => + applicative.map(f(v))(key -> _) } applicative.map(value.toList.traverse[G, (WomValue, R)](traverseFunction.tupled)) { mapped => WomMap(mapped.toMap) } } - } override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = { - val collected = value flatMap { - case (k, v) => Seq(k.collectAsSeq(filterFn), v.collectAsSeq(filterFn)) + val collected = value flatMap { case (k, v) => + Seq(k.collectAsSeq(filterFn), v.collectAsSeq(filterFn)) } collected.flatten.toSeq } - override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverseValues(_.initialize(ioFunctionSet)).widen + override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverseValues( + _.initialize(ioFunctionSet) + ).widen // For WomArrayLike: override lazy val arrayType: WomArrayType = WomArrayType(WomPairType(womType.keyType, womType.valueType)) diff --git a/wom/src/main/scala/wom/values/WomObject.scala b/wom/src/main/scala/wom/values/WomObject.scala index 5c4cc1b53e2..4793116a052 100644 --- a/wom/src/main/scala/wom/values/WomObject.scala +++ b/wom/src/main/scala/wom/values/WomObject.scala @@ -26,47 +26,51 @@ trait WomObjectLike extends WomValue { } def womObjectTypeLike: WomObjectTypeLike - override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = { + override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = values.values.toSeq flatMap { _.collectAsSeq(filterFn) } - } - def traverse[R <: WomValue, G[_]](f: WomValue => G[R])(implicit applicative: Applicative[G]): G[WomObjectLike] = { + def traverse[R <: WomValue, G[_]](f: WomValue => G[R])(implicit applicative: Applicative[G]): G[WomObjectLike] = if (values.isEmpty) applicative.pure(this) else { - val traverseFunction: (String, WomValue) => G[(String, R)] = { - case (key, value) => applicative.map(f(value)) { key -> _ } + val traverseFunction: (String, WomValue) => G[(String, R)] = { case (key, value) => + applicative.map(f(value))(key -> _) } applicative.map(values.toList.traverse[G, (String, R)](traverseFunction.tupled)) { mapped => copyWith(mapped.toMap) } } - } - override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverse(_.initialize(ioFunctionSet)).widen + override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverse( + 
_.initialize(ioFunctionSet) + ).widen } object WomObject { def coerceObject(m: Map[String, String]): WomObject = { - val coerced = WomMap.coerceMap(m, WomMapType(WomStringType, WomAnyType)).value map { - case (k, v) => k.valueString -> v + val coerced = WomMap.coerceMap(m, WomMapType(WomStringType, WomAnyType)).value map { case (k, v) => + k.valueString -> v } WomObject(coerced) } - def fromTsv(tsv: String): Try[Array[WomObject]] = { + def fromTsv(tsv: String): Try[Array[WomObject]] = FileUtil.parseTsv(tsv) match { - case Success(table) if table.isEmpty => Failure(new UnsupportedOperationException("TSV file was empty or could not be parsed.")) - case Success(table) if table.length < 2 => Failure(new UnsupportedOperationException("TSV must be 2 rows (or more) to convert to an Object (Array[Object])")) - case Success(table) => Try { - table.tail map { line => coerceObject((table.head zip line).toMap) } - } + case Success(table) if table.isEmpty => + Failure(new UnsupportedOperationException("TSV file was empty or could not be parsed.")) + case Success(table) if table.length < 2 => + Failure( + new UnsupportedOperationException("TSV must be 2 rows (or more) to convert to an Object (Array[Object])") + ) + case Success(table) => + Try { + table.tail map { line => coerceObject((table.head zip line).toMap) } + } case Failure(e) => Failure(e) } - } - //TODO: Try to stream this out to avoid memory overhead + // TODO: Try to stream this out to avoid memory overhead def tsvSerializeArray(input: Seq[WomObject]): Try[String] = { // Validates that all objects have the same attributes. @@ -86,10 +90,15 @@ object WomObject { val attributesLine = attributes.mkString("\t") val valuesLines = objects map { obj => attributes map { obj.values(_).valueString } mkString "\t" - } mkString(start = "", sep = "\n", end = "\n") + } mkString (start = "", sep = "\n", end = "\n") Success(s"$attributesLine\n$valuesLines") - case _ => Failure(new UnsupportedOperationException("Could not serialize array: Objects in the array have different attributes.")) + case _ => + Failure( + new UnsupportedOperationException( + "Could not serialize array: Objects in the array have different attributes." 
+ ) + ) } } @@ -100,17 +109,17 @@ object WomObject { withTypeErrorOr(values, objectTypeLike).toTry.get } - def withTypeErrorOr(values: Map[String, Any], objectTypeLike: WomObjectTypeLike): ErrorOr[WomObject] = { + def withTypeErrorOr(values: Map[String, Any], objectTypeLike: WomObjectTypeLike): ErrorOr[WomObject] = objectTypeLike.validateAndCoerceValues(values).map(new WomObject(_, objectTypeLike)) - } - def withTypeChecked(values: Map[String, Any], objectTypeLike: WomObjectTypeLike): Checked[WomObject] = { + def withTypeChecked(values: Map[String, Any], objectTypeLike: WomObjectTypeLike): Checked[WomObject] = withTypeErrorOr(values, objectTypeLike).toEither - } } -case class WomObject private[WomObject] (values: Map[String, WomValue], womType: WomObjectTypeLike) extends WomObjectLike with TsvSerializable { +case class WomObject private[WomObject] (values: Map[String, WomValue], womType: WomObjectTypeLike) + extends WomObjectLike + with TsvSerializable { lazy val orderedAttributes = values.keySet.toSeq lazy val orderedValues = orderedAttributes map { values(_) } lazy val womObjectTypeLike = womType diff --git a/wom/src/main/scala/wom/values/WomOptionalValue.scala b/wom/src/main/scala/wom/values/WomOptionalValue.scala index d130dda13fa..a8b00264656 100644 --- a/wom/src/main/scala/wom/values/WomOptionalValue.scala +++ b/wom/src/main/scala/wom/values/WomOptionalValue.scala @@ -83,9 +83,8 @@ final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) e case None => emptyValueFailure(">") } - override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = { + override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = value.toList flatMap { _.collectAsSeq(filterFn) } - } /** * Unpack a nested option down to a single layer of optionality @@ -93,7 +92,8 @@ final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) e */ @tailrec def flattenOptional: WomOptionalValue = this match { - case WomOptionalValue(_: WomOptionalType, Some(innerOptionalValue: WomOptionalValue)) => innerOptionalValue.flattenOptional + case WomOptionalValue(_: WomOptionalType, Some(innerOptionalValue: WomOptionalValue)) => + innerOptionalValue.flattenOptional case WomOptionalValue(innerType: WomOptionalType, None) => WomOptionalValue(innerType.baseMemberType, None) case _ => this } @@ -107,7 +107,8 @@ final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) e assert( targetType.baseMemberTypeIsCompatibleWith(womType.baseMemberType), - s"base member type ${targetType.baseMemberType} and womtype ${womType.baseMemberType} are not compatible") + s"base member type ${targetType.baseMemberType} and womtype ${womType.baseMemberType} are not compatible" + ) if (womType.depth.equals(targetType.depth)) { this } else { @@ -141,7 +142,9 @@ final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) e } } getOrElse applicative.pure(this) - override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverse(_.initialize(ioFunctionSet)).widen + override def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = traverse( + _.initialize(ioFunctionSet) + ).widen } object WomOptionalValue { diff --git a/wom/src/main/scala/wom/values/WomPair.scala b/wom/src/main/scala/wom/values/WomPair.scala index ba9e78def30..bb68e35ca13 100644 --- a/wom/src/main/scala/wom/values/WomPair.scala +++ b/wom/src/main/scala/wom/values/WomPair.scala @@ -7,7 +7,6 @@ case class WomPair(left: WomValue, 
right: WomValue) extends WomValue { override def toWomString = s"(${left.toWomString}, ${right.toWomString})" - override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = { + override def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = left.collectAsSeq(filterFn) ++ right.collectAsSeq(filterFn) - } } diff --git a/wom/src/main/scala/wom/values/WomValue.scala b/wom/src/main/scala/wom/values/WomValue.scala index 45aa88abe12..d1cb620287d 100644 --- a/wom/src/main/scala/wom/values/WomValue.scala +++ b/wom/src/main/scala/wom/values/WomValue.scala @@ -15,7 +15,10 @@ trait WomValue { def womType: WomType def invalid(operation: String) = Failure(new WomExpressionException(s"Cannot perform operation: $operation")) def emptyValueFailure(operationName: String) = Failure(OptionalNotSuppliedException(operationName)) - def evaluateIfDefined[A <: WomValue](operationName: String, optionalValue: WomOptionalValue, operation: WomValue => Try[A]): Try[A] = optionalValue match { + def evaluateIfDefined[A <: WomValue](operationName: String, + optionalValue: WomOptionalValue, + operation: WomValue => Try[A] + ): Try[A] = optionalValue match { case WomOptionalValue(_, Some(v)) => operation(v) case _ => emptyValueFailure(operationName) } @@ -26,13 +29,13 @@ trait WomValue { def divide(rhs: WomValue): Try[WomValue] = invalid(s"$this / $rhs") def mod(rhs: WomValue): Try[WomValue] = invalid(s"$this % $rhs") def equals(rhs: WomValue): Try[WomBoolean] = invalid(s"$this == $rhs") - def notEquals(rhs: WomValue): Try[WomBoolean] = equals(rhs).map{ x => WomBoolean(!x.value)} + def notEquals(rhs: WomValue): Try[WomBoolean] = equals(rhs).map(x => WomBoolean(!x.value)) def lessThan(rhs: WomValue): Try[WomBoolean] = invalid(s"$this < $rhs") def lessThanOrEqual(rhs: WomValue): Try[WomBoolean] = - Try(WomBoolean(Seq(lessThan _, equals _).exists{ p => p(rhs).get == WomBoolean.True })) + Try(WomBoolean(Seq(lessThan _, equals _).exists(p => p(rhs).get == WomBoolean.True))) def greaterThan(rhs: WomValue): Try[WomBoolean] = invalid(s"$this > $rhs") def greaterThanOrEqual(rhs: WomValue): Try[WomBoolean] = - Try(WomBoolean(Seq(greaterThan _, equals _).exists{ p => p(rhs).get == WomBoolean.True })) + Try(WomBoolean(Seq(greaterThan _, equals _).exists(p => p(rhs).get == WomBoolean.True))) def or(rhs: WomValue): Try[WomBoolean] = invalid(s"$this || $rhs") def and(rhs: WomValue): Try[WomBoolean] = invalid(s"$this && $rhs") def not: Try[WomValue] = invalid(s"!$this") @@ -52,9 +55,8 @@ trait WomValue { */ def valueString: String = toWomString - def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = { + def collectAsSeq[T <: WomValue](filterFn: PartialFunction[WomValue, T]): Seq[T] = if (filterFn.isDefinedAt(this)) Seq(filterFn(this)) else Nil - } private def symbolHash(hash: String) = SymbolHash((this.getClass.getCanonicalName + hash).md5Sum) @@ -64,7 +66,7 @@ trait WomValue { symbolHash(concatenatedMap) } - def computeHash(implicit hasher: FileHasher): SymbolHash = { + def computeHash(implicit hasher: FileHasher): SymbolHash = this match { case w: WomObject => symbolHash(w.values safeMapValues { _.computeHash(hasher) }) case w: WomMap => symbolHash(w.value map { case (k, v) => k.computeHash(hasher) -> v.computeHash(hasher) }) @@ -72,7 +74,6 @@ trait WomValue { case w: WomFile => hasher(w) case w => symbolHash(w.valueString) } - } def asWomExpression: ValueAsAnExpression = ValueAsAnExpression(this) @@ -87,6 +88,7 @@ trait WomValue { } object WomValue { + 
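  // A sketch of what takeMaxElements (documented just below) does, with hypothetical values:
  //   takeMaxElements(WomArray(Seq(WomInteger(1), WomInteger(2), WomInteger(3))), 2)
  //     == WomArray(Seq(WomInteger(1), WomInteger(2)))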
/** * Returns the womValue with all collections recursively limited to maximum length `maxElements`. * @@ -95,7 +97,7 @@ object WomValue { * @return The womValue with maximum maxElements per collection. */ def takeMaxElements(womValue: WomValue, maxElements: Int): WomValue = { - def takeMaxElements(recursiveWomValue: WomValue): WomValue = { + def takeMaxElements(recursiveWomValue: WomValue): WomValue = recursiveWomValue match { case WomArray(womType, values) => val subset = values.take(maxElements) @@ -104,16 +106,16 @@ object WomValue { val subset = values.take(maxElements) WomMap( womType, - subset map { - case (mapKey, mapValue) => takeMaxElements(mapKey) -> takeMaxElements(mapValue) + subset map { case (mapKey, mapValue) => + takeMaxElements(mapKey) -> takeMaxElements(mapValue) } ) case objectLike: WomObjectLike => // First take only a limited number of the top-level elements. val shallowSubset = objectLike.values.take(maxElements) // Then recursively take only a limited number of elements. - val deepSubset = shallowSubset map { - case (mapKey, mapValue) => mapKey -> takeMaxElements(mapValue) + val deepSubset = shallowSubset map { case (mapKey, mapValue) => + mapKey -> takeMaxElements(mapValue) } objectLike.copyWith(deepSubset) case WomOptionalValue(innerType, valueOption) => @@ -121,7 +123,6 @@ object WomValue { case WomPair(left, right) => WomPair(takeMaxElements(left), takeMaxElements(right)) case _ => recursiveWomValue } - } takeMaxElements(womValue) } diff --git a/wom/src/main/scala/wom/views/GraphPrint.scala b/wom/src/main/scala/wom/views/GraphPrint.scala index 6bf146d136a..80d53867174 100644 --- a/wom/src/main/scala/wom/views/GraphPrint.scala +++ b/wom/src/main/scala/wom/views/GraphPrint.scala @@ -17,7 +17,7 @@ final class GraphPrint(executableCallable: ExecutableCallable) { def dotString: String = WorkflowDigraph( workflowName = executableCallable.name, - digraph = listAllGraphNodes(executableCallable.graph, new AtomicInteger(0), Map.empty), + digraph = listAllGraphNodes(executableCallable.graph, new AtomicInteger(0), Map.empty) ).dotString // A "monoid" is just a fancy way of saying "thing you can add together". 
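  // For NodesAndLinks this is pairwise combination (a sketch, assuming set-union semantics):
  //   NodesAndLinks(n1, l1) |+| NodesAndLinks(n2, l2) == NodesAndLinks(n1 ++ n2, l1 ++ l2)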
@@ -32,20 +32,24 @@ final class GraphPrint(executableCallable: ExecutableCallable) { */ private def listAllGraphNodes(graph: Graph, clusterCounter: AtomicInteger, - availableScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode]): NodesAndLinks = { - + availableScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode] + ): NodesAndLinks = graph.nodes.toList.filter(worthDisplaying).foldMap { - case ccn: CommandCallNode => NodesAndLinks(Set(DotCallNode(ccn)), upstreamLinks(ccn, DotCallNode(ccn), availableScatterVariables)) - case scn: WorkflowCallNode => NodesAndLinks(Set(DotSubworkflowCallNode(scn)), upstreamLinks(scn, DotSubworkflowCallNode(scn), availableScatterVariables)) + case ccn: CommandCallNode => + NodesAndLinks(Set(DotCallNode(ccn)), upstreamLinks(ccn, DotCallNode(ccn), availableScatterVariables)) + case scn: WorkflowCallNode => + NodesAndLinks(Set(DotSubworkflowCallNode(scn)), + upstreamLinks(scn, DotSubworkflowCallNode(scn), availableScatterVariables) + ) case s: ScatterNode => handleScatter(s, clusterCounter, availableScatterVariables) case c: ConditionalNode => handleConditional(c, clusterCounter, availableScatterVariables) case _ => nodeAndLinkMonoid.empty } - } def handleScatter(scatterNode: ScatterNode, clusterCounter: AtomicInteger, - knownScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode]): NodesAndLinks = { + knownScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode] + ): NodesAndLinks = { val clusterNumber = clusterCounter.getAndIncrement() val id = s"cluster_$clusterNumber" @@ -65,7 +69,8 @@ final class GraphPrint(executableCallable: ExecutableCallable) { svn -> link.get }.toMap - val innerGraphNodesAndLinks = listAllGraphNodes(scatterNode.innerGraph, clusterCounter, knownScatterVariables ++ madeScatterVariableNodes) + val innerGraphNodesAndLinks = + listAllGraphNodes(scatterNode.innerGraph, clusterCounter, knownScatterVariables ++ madeScatterVariableNodes) NodesAndLinks( nodes = Set(DotScatterNode(id, innerGraphNodesAndLinks.nodes ++ scatterExpressionNodesAndLinks.nodes)), @@ -75,7 +80,8 @@ final class GraphPrint(executableCallable: ExecutableCallable) { def handleConditional(conditionalNode: ConditionalNode, clusterCounter: AtomicInteger, - knownScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode]): NodesAndLinks = { + knownScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode] + ): NodesAndLinks = { val clusterNumber = clusterCounter.getAndIncrement() val id = s"cluster_$clusterNumber" @@ -90,7 +96,7 @@ final class GraphPrint(executableCallable: ExecutableCallable) { val innerGraphNodesAndLinks = listAllGraphNodes(conditionalNode.innerGraph, clusterCounter, knownScatterVariables) NodesAndLinks( - nodes = Set(DotConditionalNode(id, innerGraphNodesAndLinks.nodes ++ conditionalExpressionNodesAndLinks.nodes)) , + nodes = Set(DotConditionalNode(id, innerGraphNodesAndLinks.nodes ++ conditionalExpressionNodesAndLinks.nodes)), links = innerGraphNodesAndLinks.links ++ conditionalExpressionNodesAndLinks.links ) } @@ -141,24 +147,27 @@ object GraphPrint { def dotString = s"""$id [shape="hexagon" label="scatter over ${womType.friendlyName} as $valueName"]""" } object DotScatterVariableNode { - def apply(svn: ScatterVariableNode, clusterNumber: Int): DotScatterVariableNode = DotScatterVariableNode(svn.womType, svn.identifier.localName.value, clusterNumber) + def apply(svn: ScatterVariableNode, clusterNumber: Int): DotScatterVariableNode = + DotScatterVariableNode(svn.womType, 
svn.identifier.localName.value, clusterNumber) } - final case class DotConditionalExpressionNode(womType: WomType, expressionString: String, clusterNumber: Int) extends DotNode { + final case class DotConditionalExpressionNode(womType: WomType, expressionString: String, clusterNumber: Int) + extends DotNode { override def id: String = s"CONDITIONAL_${clusterNumber}_EXPRESSION" def dotString = s"""$id [shape="hexagon" label="if ($expressionString)" style="dashed" ]""" } object DotConditionalExpressionNode { - def apply(en: ExpressionNode, clusterNumber: Int): DotConditionalExpressionNode = DotConditionalExpressionNode(en.womType, escapeQuotes(en.womExpression.sourceString), clusterNumber) + def apply(en: ExpressionNode, clusterNumber: Int): DotConditionalExpressionNode = + DotConditionalExpressionNode(en.womType, escapeQuotes(en.womExpression.sourceString), clusterNumber) } final case class DotScatterNode(id: String, nodes: Set[DotNode]) extends DotNode { override def dotString: String = s"""subgraph $id { - | style="filled,solid"; - | fillcolor=white; - | ${nodes.toList.flatMap(_.dotString.linesIterator).mkString(System.lineSeparator() + " ")} - |}""".stripMargin + | style="filled,solid"; + | fillcolor=white; + | ${nodes.toList.flatMap(_.dotString.linesIterator).mkString(System.lineSeparator() + " ")} + |}""".stripMargin } final case class DotConditionalNode(id: String, nodes: Set[DotNode]) extends DotNode { @@ -172,7 +181,8 @@ object GraphPrint { def upstreamLinks(originNode: GraphNode, origin: DotNode, - availableScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode]): Set[DotLink] = { + availableScatterVariables: Map[ScatterVariableNode, DotScatterVariableNode] + ): Set[DotLink] = { def relevantAsUpstream(nodeToLink: GraphNode): Set[DotNode] = nodeToLink match { case ccn: CommandCallNode => Set(DotCallNode(ccn)) case scn: WorkflowCallNode => Set(DotSubworkflowCallNode(scn)) @@ -185,7 +195,8 @@ object GraphPrint { def upstreamPortToRelevantNodes(p: OutputPort) = p match { case gatherPort: ScatterGathererPort => relevantAsUpstream(gatherPort.outputToGather.singleUpstreamNode) - case conditionalOutputPort: ConditionalOutputPort => relevantAsUpstream(conditionalOutputPort.outputToExpose.singleUpstreamNode) + case conditionalOutputPort: ConditionalOutputPort => + relevantAsUpstream(conditionalOutputPort.outputToExpose.singleUpstreamNode) case other => relevantAsUpstream(other.graphNode) } diff --git a/wom/src/test/scala/wom/WomMatchers.scala b/wom/src/test/scala/wom/WomMatchers.scala index ca292662148..863f4ff4c1c 100644 --- a/wom/src/test/scala/wom/WomMatchers.scala +++ b/wom/src/test/scala/wom/WomMatchers.scala @@ -4,7 +4,7 @@ import org.scalactic.Equality import wom.graph.GraphNode trait WomMatchers { - // This will take precedence when comparing graph nodes or collections of graph nodes + // This will take precedence when comparing graph nodes or collections of graph nodes implicit val graphNodeReferenceEquality = new Equality[GraphNode] { override def areEqual(left: GraphNode, right: Any): Boolean = right match { case node: GraphNode => left eq node diff --git a/wom/src/test/scala/wom/callable/CommandTaskDefinitionSpec.scala b/wom/src/test/scala/wom/callable/CommandTaskDefinitionSpec.scala index 29e6fb72cab..39b65bbe586 100644 --- a/wom/src/test/scala/wom/callable/CommandTaskDefinitionSpec.scala +++ b/wom/src/test/scala/wom/callable/CommandTaskDefinitionSpec.scala @@ -26,7 +26,9 @@ class CommandTaskDefinitionSpec extends AnyFlatSpec with CromwellTimeoutSpec wit 
executableOneInputTask match { case Valid(task) => task.graph.nodes.size should be(2) - (task.graph.nodes.toList.find(_.isInstanceOf[GraphInputNode]), task.graph.nodes.toList.find(_.isInstanceOf[CallNode])) match { + (task.graph.nodes.toList.find(_.isInstanceOf[GraphInputNode]), + task.graph.nodes.toList.find(_.isInstanceOf[CallNode]) + ) match { case (Some(inputNode), Some(callNode)) => callNode.inputPorts.size should be(1) callNode.inputPorts.head.upstream.graphNode should be(inputNode) @@ -41,7 +43,9 @@ class CommandTaskDefinitionSpec extends AnyFlatSpec with CromwellTimeoutSpec wit case Valid(task) => val graph = task.graph graph.nodes.size should be(2) - (graph.nodes.toList.find(_.isInstanceOf[PortBasedGraphOutputNode]), graph.nodes.toList.find(_.isInstanceOf[CallNode])) match { + (graph.nodes.toList.find(_.isInstanceOf[PortBasedGraphOutputNode]), + graph.nodes.toList.find(_.isInstanceOf[CallNode]) + ) match { case (Some(outputNode), Some(callNode)) => callNode.outputPorts.size should be(1) outputNode.inputPorts.size should be(1) @@ -74,7 +78,8 @@ object CommandTaskDefinitionSpec { inputs = List.empty, adHocFileCreation = Set.empty, environmentExpressions = Map.empty, - sourceLocation = None) + sourceLocation = None + ) val executableNoInputsOrOutputsTask = noInputsOrOutputsTask.toExecutable val oneInputTask = CallableTaskDefinition( @@ -87,7 +92,8 @@ object CommandTaskDefinitionSpec { inputs = List(Callable.RequiredInputDefinition(LocalName("bar"), WomIntegerType)), adHocFileCreation = Set.empty, environmentExpressions = Map.empty, - sourceLocation = None) + sourceLocation = None + ) val executableOneInputTask = oneInputTask.toExecutable val oneOutputTask = CallableTaskDefinition( @@ -100,7 +106,8 @@ object CommandTaskDefinitionSpec { inputs = List.empty, adHocFileCreation = Set.empty, environmentExpressions = Map.empty, - sourceLocation = None) + sourceLocation = None + ) val executableOneOutputTask = oneOutputTask.toExecutable val duplicateFqns = CallableTaskDefinition( @@ -110,7 +117,9 @@ object CommandTaskDefinitionSpec { meta = Map.empty, parameterMeta = Map.empty, outputs = List(Callable.OutputDefinition(LocalName("bar"), WomStringType, null)), - inputs = List(Callable.RequiredInputDefinition(LocalName("bar"), WomStringType), Callable.RequiredInputDefinition(LocalName("bar"), WomStringType)), + inputs = List(Callable.RequiredInputDefinition(LocalName("bar"), WomStringType), + Callable.RequiredInputDefinition(LocalName("bar"), WomStringType) + ), adHocFileCreation = Set.empty, environmentExpressions = Map.empty, sourceLocation = None diff --git a/wom/src/test/scala/wom/expression/PlaceholderWomExpression.scala b/wom/src/test/scala/wom/expression/PlaceholderWomExpression.scala index c3738e6c94a..9206383f1f6 100644 --- a/wom/src/test/scala/wom/expression/PlaceholderWomExpression.scala +++ b/wom/src/test/scala/wom/expression/PlaceholderWomExpression.scala @@ -13,15 +13,22 @@ import scala.concurrent.{ExecutionContext, Future} final case class PlaceholderWomExpression(inputs: Set[String], fixedWomType: WomType) extends WomExpression { override def sourceString: String = "placeholder" override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = - Invalid(NonEmptyList.one(s"couldn't evaluate value from inputs $inputs\tfixedWomType\t$fixedWomType\tinputValues\t$inputValues")) + Invalid( + NonEmptyList.one( + s"couldn't evaluate value from inputs $inputs\tfixedWomType\t$fixedWomType\tinputValues\t$inputValues" + ) + ) override def 
evaluateType(inputTypes: Map[String, WomType]): ErrorOr[WomType] = Valid(fixedWomType) - override def evaluateFiles(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = + override def evaluateFiles(inputValues: Map[String, WomValue], + ioFunctionSet: IoFunctionSet, + coerceTo: WomType + ): ErrorOr[Set[FileEvaluation]] = Valid(Set.empty) } case object DefaultSizeIoFunctionSet extends EmptyIoFunctionSet { val DefaultFileSize = -12345L - override implicit def ec: ExecutionContext = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(1)) + implicit override def ec: ExecutionContext = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(1)) override def size(path: String): Future[Long] = Future.successful(DefaultFileSize) } diff --git a/wom/src/test/scala/wom/format/MemorySizeSpec.scala b/wom/src/test/scala/wom/format/MemorySizeSpec.scala index bcd0ad7396d..af7980d5d33 100644 --- a/wom/src/test/scala/wom/format/MemorySizeSpec.scala +++ b/wom/src/test/scala/wom/format/MemorySizeSpec.scala @@ -5,14 +5,15 @@ import wdl4s.parser.MemoryUnit import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers - class MemorySizeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "MemorySize" - + it should "provide the rounded up multiple of a number" in { MemorySize(2, MemoryUnit.GB).to(MemoryUnit.MB).asRoundedUpMultipleOf(256) shouldBe MemorySize(2048, MemoryUnit.MB) MemorySize(2000, MemoryUnit.MB).asRoundedUpMultipleOf(256) shouldBe MemorySize(2048, MemoryUnit.MB) - MemorySize(3.75, MemoryUnit.GB).to(MemoryUnit.MB).asRoundedUpMultipleOf(256) shouldBe MemorySize(3840, MemoryUnit.MB) + MemorySize(3.75, MemoryUnit.GB).to(MemoryUnit.MB).asRoundedUpMultipleOf(256) shouldBe MemorySize(3840, + MemoryUnit.MB + ) assertThrows[IllegalArgumentException](MemorySize(2000, MemoryUnit.MB).asRoundedUpMultipleOf(0)) assertThrows[IllegalArgumentException](MemorySize(2000, MemoryUnit.MB).asRoundedUpMultipleOf(-5)) } diff --git a/wom/src/test/scala/wom/graph/ExpressionAsCallInputSpec.scala b/wom/src/test/scala/wom/graph/ExpressionAsCallInputSpec.scala index 0075df49bbe..59cfd98980c 100644 --- a/wom/src/test/scala/wom/graph/ExpressionAsCallInputSpec.scala +++ b/wom/src/test/scala/wom/graph/ExpressionAsCallInputSpec.scala @@ -37,8 +37,12 @@ class ExpressionAsCallInputSpec extends AnyFlatSpec with CromwellTimeoutSpec wit val ijExpression = PlaceholderWomExpression(Set("i", "j"), WomIntegerType) // Use that as an input to a one-input task: - val expressionNode = AnonymousExpressionNode.fromInputMapping( - WomIdentifier("bar"), ijExpression, Map("i" -> iInputNode.singleOutputPort, "j" -> jInputNode.singleOutputPort), TaskCallInputExpressionNode.apply) + val expressionNode = AnonymousExpressionNode + .fromInputMapping(WomIdentifier("bar"), + ijExpression, + Map("i" -> iInputNode.singleOutputPort, "j" -> jInputNode.singleOutputPort), + TaskCallInputExpressionNode.apply + ) .getOrElse(fail("Failed to build expression node")) val callNodeBuilder = new CallNodeBuilder() diff --git a/wom/src/test/scala/wom/graph/ExpressionNodeSpec.scala b/wom/src/test/scala/wom/graph/ExpressionNodeSpec.scala index 6c3a46864f0..39cd794a2d6 100644 --- a/wom/src/test/scala/wom/graph/ExpressionNodeSpec.scala +++ b/wom/src/test/scala/wom/graph/ExpressionNodeSpec.scala @@ -37,7 +37,12 @@ class ExpressionNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match // Declare the expression node using both i and j: import 
common.validation.ErrorOr.ShortCircuitingFlatMap val graph = for { - xDeclarationNode <- AnonymousExpressionNode.fromInputMapping(WomIdentifier("x"), ijExpression, Map("i" -> iInputNode.singleOutputPort, "j" -> jInputNode.singleOutputPort), PlainAnonymousExpressionNode.apply) + xDeclarationNode <- AnonymousExpressionNode.fromInputMapping( + WomIdentifier("x"), + ijExpression, + Map("i" -> iInputNode.singleOutputPort, "j" -> jInputNode.singleOutputPort), + PlainAnonymousExpressionNode.apply + ) xOutputNode = PortBasedGraphOutputNode(WomIdentifier("x_out"), WomIntegerType, xDeclarationNode.singleOutputPort) g <- Graph.validateAndConstruct(Set(iInputNode, jInputNode, xDeclarationNode, xOutputNode)) } yield g @@ -48,10 +53,12 @@ class ExpressionNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } def validate(graph: Graph) = { - val x_outGraphOutputNode = graph.nodes.find { - case g: GraphOutputNode => g.localName == "x_out" - case _ => false - }.getOrElse(fail("No 'x_out' GraphOutputNode in the graph")) + val x_outGraphOutputNode = graph.nodes + .find { + case g: GraphOutputNode => g.localName == "x_out" + case _ => false + } + .getOrElse(fail("No 'x_out' GraphOutputNode in the graph")) x_outGraphOutputNode.upstream.size should be(1) val xExpressionNode = x_outGraphOutputNode.upstream.head.asInstanceOf[ExpressionNode] diff --git a/wom/src/test/scala/wom/graph/GraphOutputNodeSpec.scala b/wom/src/test/scala/wom/graph/GraphOutputNodeSpec.scala index 5fbb36d4ea0..297ef7a8114 100644 --- a/wom/src/test/scala/wom/graph/GraphOutputNodeSpec.scala +++ b/wom/src/test/scala/wom/graph/GraphOutputNodeSpec.scala @@ -7,7 +7,6 @@ import org.scalatest.matchers.should.Matchers import wom.expression._ import wom.types.{WomIntegerType, WomStringType} - class GraphOutputNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "ExpressionBasedGraphOutputNode" @@ -24,12 +23,17 @@ class GraphOutputNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val ijExpression = PlaceholderWomExpression(Set("i", "j"), WomIntegerType) // Declare the expression output using both i and j: - val xOutputValidation = ExpressionBasedGraphOutputNode.fromInputMapping(WomIdentifier("x_out"), ijExpression, WomStringType, Map( - "i" -> iInputNode.singleOutputPort, - "j" -> jInputNode.singleOutputPort)) + val xOutputValidation = ExpressionBasedGraphOutputNode.fromInputMapping( + WomIdentifier("x_out"), + ijExpression, + WomStringType, + Map("i" -> iInputNode.singleOutputPort, "j" -> jInputNode.singleOutputPort) + ) import common.validation.ErrorOr.ShortCircuitingFlatMap - val graph = xOutputValidation flatMap { xOutput => Graph.validateAndConstruct(Set(iInputNode, jInputNode, jOutput, xOutput)) } + val graph = xOutputValidation flatMap { xOutput => + Graph.validateAndConstruct(Set(iInputNode, jInputNode, jOutput, xOutput)) + } graph match { case Valid(g) => validate(g) diff --git a/wom/src/test/scala/wom/graph/GraphSpec.scala b/wom/src/test/scala/wom/graph/GraphSpec.scala index 8cb71a985b0..738e6833ef5 100644 --- a/wom/src/test/scala/wom/graph/GraphSpec.scala +++ b/wom/src/test/scala/wom/graph/GraphSpec.scala @@ -12,7 +12,6 @@ import wom.graph.CallNode.{CallNodeAndNewNodes, CallNodeBuilder, InputDefinition import wom.graph.GraphNodePort.OutputPort import wom.types.{WomIntegerType, WomSingleFileType, WomStringType} - class GraphSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "Graph" @@ -64,8 +63,10 @@ class GraphSpec extends AnyFlatSpec with CromwellTimeoutSpec 
with Matchers { val psNodeBuilder = new CallNodeBuilder() - val CallNodeAndNewNodes(psCall, psGraphInputs, _, _) = psNodeBuilder.build(WomIdentifier("ps"), taskDefinition_ps, InputDefinitionFold(), Set.empty, None) - val ps_procsOutputPort = psCall.outputByName("ps.procs").getOrElse(fail("Unexpectedly unable to find 'ps.procs' output")) + val CallNodeAndNewNodes(psCall, psGraphInputs, _, _) = + psNodeBuilder.build(WomIdentifier("ps"), taskDefinition_ps, InputDefinitionFold(), Set.empty, None) + val ps_procsOutputPort = + psCall.outputByName("ps.procs").getOrElse(fail("Unexpectedly unable to find 'ps.procs' output")) val cgrepNodeBuilder = new CallNodeBuilder() val cgrepInputDefinitionFold = InputDefinitionFold( @@ -79,8 +80,10 @@ class GraphSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { ), Set(workflowInputNode) ) - val CallNodeAndNewNodes(cgrepCall, cgrepGraphInputs, _, _) = cgrepNodeBuilder.build(WomIdentifier("cgrep"), taskDefinition_cgrep, cgrepInputDefinitionFold, Set.empty, None) - val cgrep_countOutputPort = cgrepCall.outputByName("cgrep.count").getOrElse(fail("Unexpectedly unable to find 'cgrep.count' output")) + val CallNodeAndNewNodes(cgrepCall, cgrepGraphInputs, _, _) = + cgrepNodeBuilder.build(WomIdentifier("cgrep"), taskDefinition_cgrep, cgrepInputDefinitionFold, Set.empty, None) + val cgrep_countOutputPort = + cgrepCall.outputByName("cgrep.count").getOrElse(fail("Unexpectedly unable to find 'cgrep.count' output")) val wcNodeBuilder = new CallNodeBuilder() val wcInputDefinitionFold = InputDefinitionFold( @@ -93,11 +96,14 @@ class GraphSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { Set.empty ) - val CallNodeAndNewNodes(wcCall, wcGraphInputs, _, _) = wcNodeBuilder.build(WomIdentifier("wc"), taskDefinition_wc, wcInputDefinitionFold, Set.empty, None) - val wc_countOutputPort = wcCall.outputByName("wc.count").getOrElse(fail("Unexpectedly unable to find 'wc.count' output")) + val CallNodeAndNewNodes(wcCall, wcGraphInputs, _, _) = + wcNodeBuilder.build(WomIdentifier("wc"), taskDefinition_wc, wcInputDefinitionFold, Set.empty, None) + val wc_countOutputPort = + wcCall.outputByName("wc.count").getOrElse(fail("Unexpectedly unable to find 'wc.count' output")) val psProcsOutputNode = PortBasedGraphOutputNode(WomIdentifier("ps.procs"), WomSingleFileType, ps_procsOutputPort) - val cgrepCountOutputNode = PortBasedGraphOutputNode(WomIdentifier("cgrep.count"), WomIntegerType, cgrep_countOutputPort) + val cgrepCountOutputNode = + PortBasedGraphOutputNode(WomIdentifier("cgrep.count"), WomIntegerType, cgrep_countOutputPort) val wcCountOutputNode = PortBasedGraphOutputNode(WomIdentifier("wc.count"), WomIntegerType, wc_countOutputPort) val graphNodes: Set[GraphNode] = @@ -116,7 +122,9 @@ class GraphSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val workflowGraph = makeThreeStep workflowGraph.nodes collect { case gin: GraphInputNode => gin.localName } should be(Set("cgrep.pattern")) - workflowGraph.nodes collect { case gon: PortBasedGraphOutputNode => gon.localName } should be(Set("wc.count", "cgrep.count", "ps.procs")) + workflowGraph.nodes collect { case gon: PortBasedGraphOutputNode => gon.localName } should be( + Set("wc.count", "cgrep.count", "ps.procs") + ) workflowGraph.nodes collect { case cn: CallNode => cn.localName } should be(Set("wc", "cgrep", "ps")) } @@ -125,27 +133,54 @@ class GraphSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val threeStepWorkflow = WorkflowDefinition("three_step", threeStepGraph, Map.empty, 
Map.empty, None) val threeStepNodeBuilder = new CallNodeBuilder() - val workflowInputNode = RequiredGraphInputNode(WomIdentifier("three_step.cgrep.pattern"), WomStringType, "three_step.cgrep.pattern") + val workflowInputNode = + RequiredGraphInputNode(WomIdentifier("three_step.cgrep.pattern"), WomStringType, "three_step.cgrep.pattern") val inputDefinitionFold = InputDefinitionFold( mappings = List.empty, Set.empty, Set(workflowInputNode) ) - val CallNodeAndNewNodes(threeStepCall, threeStepInputs, _, _) = threeStepNodeBuilder.build(WomIdentifier("three_step"), threeStepWorkflow, inputDefinitionFold, Set.empty, None) + val CallNodeAndNewNodes(threeStepCall, threeStepInputs, _, _) = + threeStepNodeBuilder.build(WomIdentifier("three_step"), threeStepWorkflow, inputDefinitionFold, Set.empty, None) // This is painful manually, but it's not up to WOM to decide which subworkflow outputs are forwarded through: - val psProcsOutputNode = PortBasedGraphOutputNode(WomIdentifier("three_step.ps.procs"), WomSingleFileType, threeStepCall.outputByName("three_step.ps.procs").getOrElse(fail(s"Subworkflow didn't expose the ps.procs output in ${threeStepCall.outputPorts.map(_.name).mkString(", ")}"))) - val cgrepCountOutputNode = PortBasedGraphOutputNode(WomIdentifier("three_step.cgrep.count"), WomIntegerType, threeStepCall.outputByName("three_step.cgrep.count").getOrElse(fail("Subworkflow didn't expose the cgrep.count output"))) - val wcCountOutputNode = PortBasedGraphOutputNode(WomIdentifier("three_step.wc.count"), WomIntegerType, threeStepCall.outputByName("three_step.wc.count").getOrElse(fail("Subworkflow didn't expose the wc.count output"))) + val psProcsOutputNode = PortBasedGraphOutputNode( + WomIdentifier("three_step.ps.procs"), + WomSingleFileType, + threeStepCall + .outputByName("three_step.ps.procs") + .getOrElse( + fail( + s"Subworkflow didn't expose the ps.procs output in ${threeStepCall.outputPorts.map(_.name).mkString(", ")}" + ) + ) + ) + val cgrepCountOutputNode = PortBasedGraphOutputNode( + WomIdentifier("three_step.cgrep.count"), + WomIntegerType, + threeStepCall + .outputByName("three_step.cgrep.count") + .getOrElse(fail("Subworkflow didn't expose the cgrep.count output")) + ) + val wcCountOutputNode = PortBasedGraphOutputNode( + WomIdentifier("three_step.wc.count"), + WomIntegerType, + threeStepCall.outputByName("three_step.wc.count").getOrElse(fail("Subworkflow didn't expose the wc.count output")) + ) - val workflowGraph = Graph.validateAndConstruct(Set[GraphNode](threeStepCall, psProcsOutputNode, cgrepCountOutputNode, wcCountOutputNode).union(threeStepInputs.toSet[GraphNode])) match { + val workflowGraph = Graph.validateAndConstruct( + Set[GraphNode](threeStepCall, psProcsOutputNode, cgrepCountOutputNode, wcCountOutputNode) + .union(threeStepInputs.toSet[GraphNode]) + ) match { case Valid(wg) => wg case Invalid(errors) => fail(s"Unable to validate graph: ${errors.toList.mkString("\n", "\n", "\n")}") } workflowGraph.nodes collect { case gin: GraphInputNode => gin.localName } should be(Set("three_step.cgrep.pattern")) - workflowGraph.nodes collect { case gon: GraphOutputNode => gon.localName } should be(Set("three_step.wc.count", "three_step.cgrep.count", "three_step.ps.procs")) + workflowGraph.nodes collect { case gon: GraphOutputNode => gon.localName } should be( + Set("three_step.wc.count", "three_step.cgrep.count", "three_step.ps.procs") + ) workflowGraph.nodes collect { case cn: CallNode => cn.localName } should be(Set("three_step")) } @@ -157,10 +192,11 @@ class GraphSpec extends 
AnyFlatSpec with CromwellTimeoutSpec with Matchers { Graph.validateAndConstruct(Set(nodeA, nodeB, nodeC, nodeD)) match { case Valid(_) => fail("Graph should not validate") - case Invalid(errors) => errors.toList.toSet shouldBe Set( - "Two or more nodes have the same FullyQualifiedName: foo.baz", - "Two or more nodes have the same FullyQualifiedName: foo.bar" - ) + case Invalid(errors) => + errors.toList.toSet shouldBe Set( + "Two or more nodes have the same FullyQualifiedName: foo.baz", + "Two or more nodes have the same FullyQualifiedName: foo.bar" + ) } } } diff --git a/wom/src/test/scala/wom/graph/ScatterNodeSpec.scala b/wom/src/test/scala/wom/graph/ScatterNodeSpec.scala index 0ad865c1f4c..4d8d6455934 100644 --- a/wom/src/test/scala/wom/graph/ScatterNodeSpec.scala +++ b/wom/src/test/scala/wom/graph/ScatterNodeSpec.scala @@ -15,7 +15,6 @@ import wom.graph.GraphNodePort.OutputPort import wom.graph.expression.{AnonymousExpressionNode, PlainAnonymousExpressionNode} import wom.types.{WomArrayType, WomIntegerType, WomStringType} - class ScatterNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "ScatterNode" @@ -59,7 +58,11 @@ class ScatterNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val xsExpression = PlaceholderWomExpression(Set("xs"), WomArrayType(WomIntegerType)) val xsExpressionAsInput = AnonymousExpressionNode - .fromInputMapping(WomIdentifier("x"), xsExpression, Map("xs" -> xs_inputNode.singleOutputPort), PlainAnonymousExpressionNode.apply) + .fromInputMapping(WomIdentifier("x"), + xsExpression, + Map("xs" -> xs_inputNode.singleOutputPort), + PlainAnonymousExpressionNode.apply + ) .valueOr(failures => fail(s"Failed to create expression node: ${failures.toList.mkString(", ")}")) val x_inputNode = ScatterVariableNode(WomIdentifier("x"), xsExpressionAsInput, WomArrayType(WomIntegerType)) @@ -73,8 +76,13 @@ class ScatterNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers ), newGraphInputNodes = Set.empty ) - val CallNodeAndNewNodes(foo_callNode, _, _, _) = fooNodeBuilder.build(WomIdentifier("foo"), task_foo, fooInputFold, Set.empty, None) - val foo_call_outNode = PortBasedGraphOutputNode(WomIdentifier("foo.out"), WomStringType, foo_callNode.outputByName("foo.out").getOrElse(fail("foo CallNode didn't contain the expected 'out' output"))) + val CallNodeAndNewNodes(foo_callNode, _, _, _) = + fooNodeBuilder.build(WomIdentifier("foo"), task_foo, fooInputFold, Set.empty, None) + val foo_call_outNode = PortBasedGraphOutputNode( + WomIdentifier("foo.out"), + WomStringType, + foo_callNode.outputByName("foo.out").getOrElse(fail("foo CallNode didn't contain the expected 'out' output")) + ) val scatterGraph = Graph.validateAndConstruct(Set(foo_callNode, x_inputNode, foo_call_outNode)) match { case Valid(sg) => sg case Invalid(es) => fail("Failed to make scatter graph: " + es.toList.mkString(", ")) @@ -116,7 +124,10 @@ class ScatterNodeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers finalOutput.inputPorts.head.upstream.name should be("foo.out") // foo.out links back to the correct output in the inner graph: - val innerGraphFooOutNode = scatterNode.outputMapping.find(_.name == "foo.out").getOrElse(fail("Scatter couldn't link back the foo.out output.")).outputToGather + val innerGraphFooOutNode = scatterNode.outputMapping + .find(_.name == "foo.out") + .getOrElse(fail("Scatter couldn't link back the foo.out output.")) + .outputToGather innerGraphFooOutNode.womType should be(WomStringType) 
innerGraphFooOutNode.upstream.size should be(1) innerGraphFooOutNode.upstream.head match { diff --git a/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala b/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala index c11e2997e2d..3b0ba617c74 100644 --- a/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala @@ -9,8 +9,7 @@ import wom.values._ import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success} - -class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { +class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { val intArray = WomArray(WomArrayType(WomIntegerType), Seq(WomInteger(1), WomInteger(2), WomInteger(3))) "WomArray" should "stringify its value" in { intArray.toWomString shouldEqual "[1, 2, 3]" @@ -42,7 +41,7 @@ class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher } } "WomArrayType" should "coerce Seq(1,2,3) into a WomArray" in { - WomArrayType(WomIntegerType).coerceRawValue(Seq(1,2,3)) match { + WomArrayType(WomIntegerType).coerceRawValue(Seq(1, 2, 3)) match { case Success(array) => array shouldEqual intArray case Failure(f) => fail(s"exception while coercing array: $f") } @@ -54,7 +53,7 @@ class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher } } it should "coerce a Java List into a WomArray" in { - WomArrayType(WomIntegerType).coerceRawValue(List(1,2,3).asJava) match { + WomArrayType(WomIntegerType).coerceRawValue(List(1, 2, 3).asJava) match { case Success(array) => array shouldEqual intArray case Failure(f) => fail(s"exception while coercing Java List: $f") } @@ -62,7 +61,8 @@ class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher it should "not coerce single values into one-element arrays" in { WomArrayType(WomStringType).coerceRawValue(WomString("edamame is tasty")) match { case Success(_) => fail("Unexpected success coercing single value to array") - case Failure(f) => f.getMessage shouldEqual "No coercion defined from wom value(s) '\"edamame is tasty\"' of type 'String' to 'Array[String]'." + case Failure(f) => + f.getMessage shouldEqual "No coercion defined from wom value(s) '\"edamame is tasty\"' of type 'String' to 'Array[String]'." 
} } it should "stringify its type" in { @@ -96,7 +96,13 @@ class WomArrayTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher val map = WomMap(WomMapType(WomIntegerType, WomStringType), Map(WomInteger(1) -> WomString("one"))) val arrayOfPairsType = WomArrayType(WomPairType(WomIntegerType, WomStringType)) arrayOfPairsType.isCoerceableFrom(map.womType) should be(true) - arrayOfPairsType.coerceRawValue(map) should be(Success(WomArray(WomArrayType(WomPairType(WomIntegerType, WomStringType)), List(WomPair(WomInteger(1), WomString("one")))))) + arrayOfPairsType.coerceRawValue(map) should be( + Success( + WomArray(WomArrayType(WomPairType(WomIntegerType, WomStringType)), + List(WomPair(WomInteger(1), WomString("one"))) + ) + ) + ) } } diff --git a/wom/src/test/scala/wom/types/WomCoercionSpec.scala b/wom/src/test/scala/wom/types/WomCoercionSpec.scala index 184773ee52c..5011c850565 100644 --- a/wom/src/test/scala/wom/types/WomCoercionSpec.scala +++ b/wom/src/test/scala/wom/types/WomCoercionSpec.scala @@ -8,10 +8,12 @@ import wom.values.WomValue import scala.util.{Failure, Success} - abstract class WomCoercionSpec(val goodCoercionTable: TableFor2[_ <: Any, WomValue], val badCoercionTable: TableFor2[_ <: Any, WomType], - val behaviorOf: String) extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers { + val behaviorOf: String +) extends AnyFlatSpecLike + with CromwellTimeoutSpec + with Matchers { import TableDrivenPropertyChecks._ import WomCoercionSpec.StringableAny @@ -19,11 +21,10 @@ abstract class WomCoercionSpec(val goodCoercionTable: TableFor2[_ <: Any, WomVal behavior of behaviorOf forAll(goodCoercionTable) { (fromValue, toValue) => - it should s"Allow coercion from ${fromValue.displayString} (${fromValue.typeDisplayString}) to ${toValue.toWomString} (${toValue.womType.stableName})" in { toValue.womType.coercionDefined(fromValue) should be(true) fromValue match { - case wv: WomValue => + case wv: WomValue => toValue.womType.isCoerceableFrom(wv.womType) should be(true) case _ => // can't test isCoerceableFrom for this fromValue } @@ -35,7 +36,6 @@ abstract class WomCoercionSpec(val goodCoercionTable: TableFor2[_ <: Any, WomVal } forAll(badCoercionTable) { (fromValue, toType) => - it should s"Not allow coercion from ${fromValue.displayString} to ${toType.stableName}" in { toType.coercionDefined(fromValue) should be(false) fromValue match { diff --git a/wom/src/test/scala/wom/types/WomCompositeTypeSpec.scala b/wom/src/test/scala/wom/types/WomCompositeTypeSpec.scala index 639353ed7ec..d6ec388a83c 100644 --- a/wom/src/test/scala/wom/types/WomCompositeTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomCompositeTypeSpec.scala @@ -6,8 +6,10 @@ import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor2} import wom.types.WomCompositeTypeSpecDefs._ import wom.values._ - -class WomCompositeTypeSpec() extends WomCoercionSpec(goodCoercionTable, badCoercionTable, behaviorOf) with AnyFlatSpecLike with Matchers { +class WomCompositeTypeSpec() + extends WomCoercionSpec(goodCoercionTable, badCoercionTable, behaviorOf) + with AnyFlatSpecLike + with Matchers { "WomObject with composite type" should "fail to build invalid values" in { val wrongType = the[Exception] thrownBy { @@ -45,7 +47,9 @@ private object WomCompositeTypeSpecDefs { import spray.json._ val stringIntCompositeType = WomCompositeType(Map("a" -> WomStringType, "b" -> WomIntegerType)) val nestedStringIntCompositeType = WomCompositeType(Map("nested_a" -> WomStringType, "nested_b" -> WomIntegerType)) - val 
complexNestedCompositeType = WomCompositeType(Map("a" -> WomArrayType(WomStringType), "b" -> nestedStringIntCompositeType)) + val complexNestedCompositeType = WomCompositeType( + Map("a" -> WomArrayType(WomStringType), "b" -> nestedStringIntCompositeType) + ) val simpleCompositeValue = WomObject.withTypeUnsafe( Map("a" -> WomString("0"), "b" -> WomInteger(1)), stringIntCompositeType @@ -55,11 +59,13 @@ private object WomCompositeTypeSpecDefs { // Field "a" is an array of string "a" -> WomArray(WomArrayType(WomStringType), List(WomString("5"))), // Field "b" is a composite type itself - "b" -> WomObject.withTypeUnsafe(Map("nested_a" -> WomString("8"), "nested_b" -> WomInteger(2)), nestedStringIntCompositeType) + "b" -> WomObject.withTypeUnsafe(Map("nested_a" -> WomString("8"), "nested_b" -> WomInteger(2)), + nestedStringIntCompositeType + ) ), complexNestedCompositeType ) - + val simpleCompositeValueAsAMap = WomMap( WomMapType(WomStringType, WomIntegerType), Map( @@ -67,7 +73,7 @@ private object WomCompositeTypeSpecDefs { WomString("b") -> WomInteger(1) ) ) - + val goodCoercionTable = Table[Any, WomValue]( ("fromValue", "toValue"), // Identity coercion with simple field types @@ -78,22 +84,26 @@ private object WomCompositeTypeSpecDefs { (simpleCompositeValueAsAMap, simpleCompositeValue), // Coercion from untyped WomObject (WomObject( - Map( - "a" -> WomString("0"), - "b" -> WomInteger(1) - ) - ), simpleCompositeValue), + Map( + "a" -> WomString("0"), + "b" -> WomInteger(1) + ) + ), + simpleCompositeValue + ), (WomObject( - Map( - "a" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(5))), - "b" -> WomObject( - Map( - "nested_a" -> WomInteger(8), - "nested_b" -> WomInteger(2) - ) - ) - ) - ), complexCompositeValue), + Map( + "a" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(5))), + "b" -> WomObject( + Map( + "nested_a" -> WomInteger(8), + "nested_b" -> WomInteger(2) + ) + ) + ) + ), + complexCompositeValue + ), // Coercion from Json ("""{ "a": "0", "b": "1" }""".parseJson, simpleCompositeValue), ("""{ "a": ["5"], "b": { "nested_a": "8", "nested_b": 2 } }""".parseJson, complexCompositeValue), @@ -107,7 +117,8 @@ private object WomCompositeTypeSpecDefs { "b" -> WomMap( WomMapType(WomSingleFileType, WomIntegerType), Map( - WomSingleFile("nested_a") -> WomInteger(8) , WomSingleFile("nested_b") -> WomInteger(2) + WomSingleFile("nested_a") -> WomInteger(8), + WomSingleFile("nested_b") -> WomInteger(2) ) ) ), @@ -123,7 +134,9 @@ private object WomCompositeTypeSpecDefs { // Automatic optional none default ( WomObject.withTypeUnsafe(Map.empty, WomCompositeType(Map("a" -> WomOptionalType(WomStringType)))), - WomObject.withTypeUnsafe(Map("a" -> WomOptionalValue.none(WomStringType)), WomCompositeType(Map("a" -> WomOptionalType(WomStringType)))) + WomObject.withTypeUnsafe(Map("a" -> WomOptionalValue.none(WomStringType)), + WomCompositeType(Map("a" -> WomOptionalType(WomStringType))) + ) ) ) @@ -140,7 +153,8 @@ private object WomCompositeTypeSpecDefs { Map( "a" -> WomString("0"), "b" -> WomBoolean(false) - ), WomCompositeType(Map("a" -> WomStringType, "b" -> WomBooleanType)) + ), + WomCompositeType(Map("a" -> WomStringType, "b" -> WomBooleanType)) ), stringIntCompositeType ), diff --git a/wom/src/test/scala/wom/types/WomCoproductSpec.scala b/wom/src/test/scala/wom/types/WomCoproductSpec.scala index 99a87f85e04..8423c407fba 100644 --- a/wom/src/test/scala/wom/types/WomCoproductSpec.scala +++ b/wom/src/test/scala/wom/types/WomCoproductSpec.scala @@ -6,7 +6,6 @@ import 
org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import wom.values.{WomBoolean, WomCoproductValue, WomString} - class WomCoproductSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "Wom Coproduct Type" diff --git a/wom/src/test/scala/wom/types/WomFileTypeSpec.scala b/wom/src/test/scala/wom/types/WomFileTypeSpec.scala index c4a90fadb07..c75b6f4f9ec 100644 --- a/wom/src/test/scala/wom/types/WomFileTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomFileTypeSpec.scala @@ -10,51 +10,60 @@ import wom.values.{WomFloat, WomGlobFile, WomSingleFile, WomString, WomUnlistedD import scala.util.Success - class WomFileTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { behavior of "WomFileType" lazy val coercionTests = Table( ("description", "value", "expected"), - ("a string to a dir", "example/string", WomUnlistedDirectory("example/string")), ("a string to a file", "example/string", WomSingleFile("example/string")), ("a string to a glob", "example/string", WomGlobFile("example/string")), - ("a js string to a dir", JsString("example/js"), WomUnlistedDirectory("example/js")), ("a js string to a file", JsString("example/js"), WomSingleFile("example/js")), ("a js string to a glob", JsString("example/js"), WomGlobFile("example/js")), - ("a wom string to a dir", WomString("example/wom"), WomUnlistedDirectory("example/wom")), ("a wom string to a file", WomString("example/wom"), WomSingleFile("example/wom")), ("a wom string to a glob", WomString("example/wom"), WomGlobFile("example/wom")), - - ("a wom dir to a dir", WomUnlistedDirectory("example/dir"), - WomUnlistedDirectory("example/dir")), + ("a wom dir to a dir", WomUnlistedDirectory("example/dir"), WomUnlistedDirectory("example/dir")), ("a wom file to a file", WomSingleFile("example/dir"), WomSingleFile("example/dir")), ("a wom glob to a glob", WomGlobFile("example/glob*"), WomGlobFile("example/glob*")), - - ("a string with leading and trailing special symbols to a dir", " \n\r\t example/string \n\r\t ", - WomUnlistedDirectory("example/string")), - ("a string with leading and trailing special symbols to a file", " \n\r\t example/string \n\r\t ", - WomSingleFile("example/string")), - ("a string with leading and trailing special symbols to a glob", " \n\r\t example/string \n\r\t ", - WomGlobFile("example/string")), - - ("a js string with leading and trailing special symbols to a dir", JsString(" \n\r\t example/js \n\r\t "), - WomUnlistedDirectory("example/js")), - ("a js string with leading and trailing special symbols to a file", JsString(" \n\r\t example/js \n\r\t "), - WomSingleFile("example/js")), - ("a js string with leading and trailing special symbols to a glob", JsString(" \n\r\t example/js \n\r\t "), - WomGlobFile("example/js")), - - ("a wom string with leading and trailing special symbols to a dir", WomString(" \n\r\t example/wom \n\r\t "), - WomUnlistedDirectory("example/wom")), - ("a wom string with leading and trailing special symbols to a file", WomString(" \n\r\t example/wom \n\r\t "), - WomSingleFile("example/wom")), - ("a wom string with leading and trailing special symbols to a glob", WomString(" \n\r\t example/wom \n\r\t "), - WomGlobFile("example/wom")), + ("a string with leading and trailing special symbols to a dir", + " \n\r\t example/string \n\r\t ", + WomUnlistedDirectory("example/string") + ), + ("a string with leading and trailing special symbols to a file", + " \n\r\t example/string \n\r\t ", + 
WomSingleFile("example/string") + ), + ("a string with leading and trailing special symbols to a glob", + " \n\r\t example/string \n\r\t ", + WomGlobFile("example/string") + ), + ("a js string with leading and trailing special symbols to a dir", + JsString(" \n\r\t example/js \n\r\t "), + WomUnlistedDirectory("example/js") + ), + ("a js string with leading and trailing special symbols to a file", + JsString(" \n\r\t example/js \n\r\t "), + WomSingleFile("example/js") + ), + ("a js string with leading and trailing special symbols to a glob", + JsString(" \n\r\t example/js \n\r\t "), + WomGlobFile("example/js") + ), + ("a wom string with leading and trailing special symbols to a dir", + WomString(" \n\r\t example/wom \n\r\t "), + WomUnlistedDirectory("example/wom") + ), + ("a wom string with leading and trailing special symbols to a file", + WomString(" \n\r\t example/wom \n\r\t "), + WomSingleFile("example/wom") + ), + ("a wom string with leading and trailing special symbols to a glob", + WomString(" \n\r\t example/wom \n\r\t "), + WomGlobFile("example/wom") + ) ) forAll(coercionTests) { (description, value, expected) => @@ -67,42 +76,81 @@ class WomFileTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers lazy val failedCoercionTests = Table[String, WomType, Any, String]( ("description", "womFileType", "value", "expected"), - - ("a double to a dir", WomUnlistedDirectoryType, 6.28318, - "No coercion defined from '6.28318' of type 'java.lang.Double' to 'Directory'."), - ("a double to a file", WomSingleFileType, 6.28318, - "No coercion defined from '6.28318' of type 'java.lang.Double' to 'File'."), - ("a double to a glob", WomGlobFileType, 6.28318, - "No coercion defined from '6.28318' of type 'java.lang.Double' to 'Glob'."), - - ("a js number to a dir", WomUnlistedDirectoryType, JsNumber(6.28318), - "No coercion defined from '6.28318' of type 'spray.json.JsNumber' to 'Directory'."), - ("a js number to a file", WomSingleFileType, JsNumber(6.28318), - "No coercion defined from '6.28318' of type 'spray.json.JsNumber' to 'File'."), - ("a js number to a glob", WomGlobFileType, JsNumber(6.28318), - "No coercion defined from '6.28318' of type 'spray.json.JsNumber' to 'Glob'."), - - ("a wom float to a dir", WomUnlistedDirectoryType, WomFloat(6.28318), - "No coercion defined from wom value(s) '6.28318' of type 'Float' to 'Directory'."), - ("a wom float to a file", WomSingleFileType, WomFloat(6.28318), - "No coercion defined from wom value(s) '6.28318' of type 'Float' to 'File'."), - ("a wom float to a glob", WomGlobFileType, WomFloat(6.28318), - "No coercion defined from wom value(s) '6.28318' of type 'Float' to 'Glob'."), - - ("a wom file to a dir", WomUnlistedDirectoryType, WomSingleFile("example/file"), - """No coercion defined from wom value(s) '"example/file"' of type 'File' to 'Directory'."""), - ("a wom glob to a dir", WomUnlistedDirectoryType, WomGlobFile("example/glob*"), - """No coercion defined from wom value(s) 'glob("example/glob*")' of type 'Glob' to 'Directory'."""), - - ("a wom dir to a file", WomSingleFileType, WomUnlistedDirectory("example/dir"), - """No coercion defined from wom value(s) '"example/dir"' of type 'Directory' to 'File'."""), - ("a wom glob to a file", WomSingleFileType, WomGlobFile("example/glob*"), - """No coercion defined from wom value(s) 'glob("example/glob*")' of type 'Glob' to 'File'."""), - - ("a wom dir to a glob", WomGlobFileType, WomUnlistedDirectory("example/dir"), - """No coercion defined from wom value(s) '"example/dir"' of type 'Directory' 
to 'Glob'."""), - ("a wom file to a glob", WomGlobFileType, WomSingleFile("example/file"), - """No coercion defined from wom value(s) '"example/file"' of type 'File' to 'Glob'.""") + ("a double to a dir", + WomUnlistedDirectoryType, + 6.28318, + "No coercion defined from '6.28318' of type 'java.lang.Double' to 'Directory'." + ), + ("a double to a file", + WomSingleFileType, + 6.28318, + "No coercion defined from '6.28318' of type 'java.lang.Double' to 'File'." + ), + ("a double to a glob", + WomGlobFileType, + 6.28318, + "No coercion defined from '6.28318' of type 'java.lang.Double' to 'Glob'." + ), + ("a js number to a dir", + WomUnlistedDirectoryType, + JsNumber(6.28318), + "No coercion defined from '6.28318' of type 'spray.json.JsNumber' to 'Directory'." + ), + ("a js number to a file", + WomSingleFileType, + JsNumber(6.28318), + "No coercion defined from '6.28318' of type 'spray.json.JsNumber' to 'File'." + ), + ("a js number to a glob", + WomGlobFileType, + JsNumber(6.28318), + "No coercion defined from '6.28318' of type 'spray.json.JsNumber' to 'Glob'." + ), + ("a wom float to a dir", + WomUnlistedDirectoryType, + WomFloat(6.28318), + "No coercion defined from wom value(s) '6.28318' of type 'Float' to 'Directory'." + ), + ("a wom float to a file", + WomSingleFileType, + WomFloat(6.28318), + "No coercion defined from wom value(s) '6.28318' of type 'Float' to 'File'." + ), + ("a wom float to a glob", + WomGlobFileType, + WomFloat(6.28318), + "No coercion defined from wom value(s) '6.28318' of type 'Float' to 'Glob'." + ), + ("a wom file to a dir", + WomUnlistedDirectoryType, + WomSingleFile("example/file"), + """No coercion defined from wom value(s) '"example/file"' of type 'File' to 'Directory'.""" + ), + ("a wom glob to a dir", + WomUnlistedDirectoryType, + WomGlobFile("example/glob*"), + """No coercion defined from wom value(s) 'glob("example/glob*")' of type 'Glob' to 'Directory'.""" + ), + ("a wom dir to a file", + WomSingleFileType, + WomUnlistedDirectory("example/dir"), + """No coercion defined from wom value(s) '"example/dir"' of type 'Directory' to 'File'.""" + ), + ("a wom glob to a file", + WomSingleFileType, + WomGlobFile("example/glob*"), + """No coercion defined from wom value(s) 'glob("example/glob*")' of type 'Glob' to 'File'.""" + ), + ("a wom dir to a glob", + WomGlobFileType, + WomUnlistedDirectory("example/dir"), + """No coercion defined from wom value(s) '"example/dir"' of type 'Directory' to 'Glob'.""" + ), + ("a wom file to a glob", + WomGlobFileType, + WomSingleFile("example/file"), + """No coercion defined from wom value(s) '"example/file"' of type 'File' to 'Glob'.""" + ) ) forAll(failedCoercionTests) { (description, womFileType, value, expected) => diff --git a/wom/src/test/scala/wom/types/WomMapTypeSpec.scala b/wom/src/test/scala/wom/types/WomMapTypeSpec.scala index 214e150f873..1f7391295ba 100644 --- a/wom/src/test/scala/wom/types/WomMapTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomMapTypeSpec.scala @@ -8,23 +8,29 @@ import wom.values.{WomInteger, WomMap, WomObject, WomString} import scala.util.{Failure, Success} - -class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - val stringIntMap = WomMap(WomMapType(WomStringType, WomIntegerType), Map( - WomString("a") -> WomInteger(1), - WomString("b") -> WomInteger(2), - WomString("c") -> WomInteger(3) - )) - val coerceableObject = WomObject(Map( - "a" -> WomString("1"), - "b" -> WomString("2"), - "c" -> WomString("3") - )) - val coerceableTypedObject = WomObject(Map( - "a" -> 
WomString("1"), - "b" -> WomString("2"), - "c" -> WomString("3") - ), WomCompositeType(Map("a" -> WomStringType, "b" -> WomStringType, "c" -> WomStringType))) +class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { + val stringIntMap = WomMap(WomMapType(WomStringType, WomIntegerType), + Map( + WomString("a") -> WomInteger(1), + WomString("b") -> WomInteger(2), + WomString("c") -> WomInteger(3) + ) + ) + val coerceableObject = WomObject( + Map( + "a" -> WomString("1"), + "b" -> WomString("2"), + "c" -> WomString("3") + ) + ) + val coerceableTypedObject = WomObject( + Map( + "a" -> WomString("1"), + "b" -> WomString("2"), + "c" -> WomString("3") + ), + WomCompositeType(Map("a" -> WomStringType, "b" -> WomStringType, "c" -> WomStringType)) + ) "WomMap" should "stringify its value" in { stringIntMap.toWomString shouldEqual "{\"a\": 1, \"b\": 2, \"c\": 3}" @@ -36,7 +42,9 @@ class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } } it should "coerce a JsObject into a WomMap" in { - WomMapType(WomStringType, WomIntegerType).coerceRawValue(JsObject(Map("a" -> JsNumber(1), "b" -> JsNumber(2), "c" -> JsNumber(3)))) match { + WomMapType(WomStringType, WomIntegerType).coerceRawValue( + JsObject(Map("a" -> JsNumber(1), "b" -> JsNumber(2), "c" -> JsNumber(3))) + ) match { case Success(array) => array shouldEqual stringIntMap case Failure(f) => fail(s"exception while coercing JsObject: $f") } @@ -63,7 +71,9 @@ class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "detect invalid map construction if there are mixed keys" in { try { - WomMap(WomMapType(WomStringType, WomStringType), Map(WomInteger(0) -> WomString("foo"), WomString("x") -> WomString("y"))) + WomMap(WomMapType(WomStringType, WomStringType), + Map(WomInteger(0) -> WomString("foo"), WomString("x") -> WomString("y")) + ) fail("Map initialization should have failed") } catch { case e: UnsupportedOperationException => @@ -73,7 +83,9 @@ class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "detect invalid map construction if there are mixed values" in { try { - WomMap(WomMapType(WomStringType, WomStringType), Map(WomString("bar") -> WomString("foo"), WomString("x") -> WomInteger(2))) + WomMap(WomMapType(WomStringType, WomStringType), + Map(WomString("bar") -> WomString("foo"), WomString("x") -> WomInteger(2)) + ) fail("Map initialization should have failed") } catch { case e: UnsupportedOperationException => @@ -83,7 +95,9 @@ class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "detect invalid map construction if the keys are all the wrong type" in { try { - WomMap(WomMapType(WomStringType, WomStringType), Map(WomInteger(22) -> WomString("foo"), WomInteger(2222) -> WomString("y"))) + WomMap(WomMapType(WomStringType, WomStringType), + Map(WomInteger(22) -> WomString("foo"), WomInteger(2222) -> WomString("y")) + ) fail("Map initialization should have failed") } catch { case e: UnsupportedOperationException => @@ -93,7 +107,9 @@ class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "detect invalid map construction if the values are all the wrong type" in { try { - WomMap(WomMapType(WomStringType, WomStringType), Map(WomString("bar") -> WomInteger(44), WomString("x") -> WomInteger(4444))) + WomMap(WomMapType(WomStringType, WomStringType), + Map(WomString("bar") -> WomInteger(44), WomString("x") -> WomInteger(4444)) + ) fail("Map initialization should have 
failed") } catch { case e: UnsupportedOperationException => @@ -113,18 +129,22 @@ class WomMapTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "tsvSerialize non-empty maps correctly and with newlines after every line" in { stringIntMap.tsvSerialize shouldEqual Success("a\t1\nb\t2\nc\t3\n") - val intIntMap = WomMap(WomMapType(WomIntegerType, WomIntegerType), Map( - WomInteger(4) -> WomInteger(1), - WomInteger(5) -> WomInteger(2), - WomInteger(6) -> WomInteger(3), - )) + val intIntMap = WomMap(WomMapType(WomIntegerType, WomIntegerType), + Map( + WomInteger(4) -> WomInteger(1), + WomInteger(5) -> WomInteger(2), + WomInteger(6) -> WomInteger(3) + ) + ) intIntMap.tsvSerialize shouldEqual Success("4\t1\n5\t2\n6\t3\n") - val stringStringMap = WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("a") -> WomString("x"), - WomString("b") -> WomString("y"), - WomString("c") -> WomString("z") - )) + val stringStringMap = WomMap(WomMapType(WomStringType, WomStringType), + Map( + WomString("a") -> WomString("x"), + WomString("b") -> WomString("y"), + WomString("c") -> WomString("z") + ) + ) stringStringMap.tsvSerialize shouldEqual Success("a\tx\nb\ty\nc\tz\n") } diff --git a/wom/src/test/scala/wom/types/WomObjectTypeSpec.scala b/wom/src/test/scala/wom/types/WomObjectTypeSpec.scala index 7616356f945..376b49b27e8 100644 --- a/wom/src/test/scala/wom/types/WomObjectTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomObjectTypeSpec.scala @@ -7,37 +7,39 @@ import wom.values.{WomInteger, WomMap, WomObject, WomString} import scala.util.{Failure, Success} - class WomObjectTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { - val abcObject = WomObject(Map( - "a" -> WomString("one"), - "b" -> WomString("two"), - "c" -> WomString("three") - )) + val abcObject = WomObject( + Map( + "a" -> WomString("one"), + "b" -> WomString("two"), + "c" -> WomString("three") + ) + ) - val coerceableMap = WomMap(WomMapType(WomStringType, WomStringType), Map( - WomString("a") -> WomString("one"), - WomString("b") -> WomString("two"), - WomString("c") -> WomString("three")) + val coerceableMap = WomMap( + WomMapType(WomStringType, WomStringType), + Map(WomString("a") -> WomString("one"), WomString("b") -> WomString("two"), WomString("c") -> WomString("three")) ) - val abcMixedTypedObject = WomObject(Map( - "a" -> WomString("one"), - "b" -> WomInteger(2), - "c" -> WomString("three") - )) + val abcMixedTypedObject = WomObject( + Map( + "a" -> WomString("one"), + "b" -> WomInteger(2), + "c" -> WomString("three") + ) + ) - val coerceableTypedObject = WomObject.withTypeUnsafe(Map( - "a" -> WomString("one"), - "b" -> WomInteger(2), - "c" -> WomString("three")), + val coerceableTypedObject = WomObject.withTypeUnsafe( + Map("a" -> WomString("one"), "b" -> WomInteger(2), "c" -> WomString("three")), WomCompositeType(Map("a" -> WomStringType, "b" -> WomIntegerType, "c" -> WomStringType)) ) - val nonCoerceableMap = WomMap(WomMapType(WomStringType, WomObjectType), Map( - WomString("a") -> WomObject(Map.empty), - WomString("b") -> WomObject(Map.empty), - WomString("c") -> WomObject(Map.empty)) + val nonCoerceableMap = WomMap( + WomMapType(WomStringType, WomObjectType), + Map(WomString("a") -> WomObject(Map.empty), + WomString("b") -> WomObject(Map.empty), + WomString("c") -> WomObject(Map.empty) + ) ) "WomObject" should "stringify its value" in { diff --git a/wom/src/test/scala/wom/types/WomOptionalTypeSpec.scala b/wom/src/test/scala/wom/types/WomOptionalTypeSpec.scala index 
df81f5d63d2..c8c80e503f2 100644 --- a/wom/src/test/scala/wom/types/WomOptionalTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomOptionalTypeSpec.scala @@ -6,8 +6,10 @@ import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor2} import wom.types.WomOptionalTypeSpecDefs._ import wom.values._ - -class WomOptionalTypeSpec() extends WomCoercionSpec(goodCoercionTable, badCoercionTable, behaviorOf) with AnyFlatSpecLike with Matchers { +class WomOptionalTypeSpec() + extends WomCoercionSpec(goodCoercionTable, badCoercionTable, behaviorOf) + with AnyFlatSpecLike + with Matchers { import TableDrivenPropertyChecks._ @@ -15,7 +17,10 @@ class WomOptionalTypeSpec() extends WomCoercionSpec(goodCoercionTable, badCoerci ("optional type", "flat optional type", "base member type"), (WomOptionalType(WomIntegerType), WomOptionalType(WomIntegerType), WomIntegerType), (WomOptionalType(WomOptionalType(WomIntegerType)), WomOptionalType(WomIntegerType), WomIntegerType), - (WomOptionalType(WomOptionalType(WomOptionalType(WomArrayType(WomOptionalType(WomIntegerType))))), WomOptionalType(WomArrayType(WomOptionalType(WomIntegerType))), WomArrayType(WomOptionalType(WomIntegerType))) + (WomOptionalType(WomOptionalType(WomOptionalType(WomArrayType(WomOptionalType(WomIntegerType))))), + WomOptionalType(WomArrayType(WomOptionalType(WomIntegerType))), + WomArrayType(WomOptionalType(WomIntegerType)) + ) ) forAll(baseTypes) { (optType, flatOptType, baseType) => @@ -42,7 +47,9 @@ private object WomOptionalTypeSpecDefs { // Inner coercion defined: (WomOptionalValue(WomInteger(4)), WomOptionalValue(WomString("4"))), (WomOptionalValue(WomString("a.txt")), WomOptionalValue(WomSingleFile("a.txt"))), - (WomOptionalValue(WomOptionalValue(WomString("a.txt"))), WomOptionalValue(WomOptionalValue(WomSingleFile("a.txt")))), + (WomOptionalValue(WomOptionalValue(WomString("a.txt"))), + WomOptionalValue(WomOptionalValue(WomSingleFile("a.txt"))) + ), (WomOptionalValue(WomIntegerType, None), WomOptionalValue(WomStringType, None)), // auto-boxing optionals @@ -57,15 +64,31 @@ private object WomOptionalTypeSpecDefs { (WomOptionalValue(WomOptionalValue(WomIntegerType, None)), WomOptionalValue(WomIntegerType, None)), // flattening and boxing and coercion all at once: - (WomOptionalValue(WomOptionalValue(WomOptionalValue(WomInteger(3)))), WomOptionalValue(WomOptionalValue(WomString("3")))), - (WomOptionalValue(WomOptionalValue(WomInteger(3))), WomOptionalValue(WomOptionalValue(WomOptionalValue(WomString("3"))))), - (WomOptionalValue(WomOptionalValue(WomOptionalValue(WomIntegerType, None))), WomOptionalValue(WomOptionalType(WomStringType), None)), - (WomOptionalValue(WomOptionalValue(WomIntegerType, None)), WomOptionalValue(WomOptionalType(WomOptionalType(WomStringType)), None)), + (WomOptionalValue(WomOptionalValue(WomOptionalValue(WomInteger(3)))), + WomOptionalValue(WomOptionalValue(WomString("3"))) + ), + (WomOptionalValue(WomOptionalValue(WomInteger(3))), + WomOptionalValue(WomOptionalValue(WomOptionalValue(WomString("3")))) + ), + (WomOptionalValue(WomOptionalValue(WomOptionalValue(WomIntegerType, None))), + WomOptionalValue(WomOptionalType(WomStringType), None) + ), + (WomOptionalValue(WomOptionalValue(WomIntegerType, None)), + WomOptionalValue(WomOptionalType(WomOptionalType(WomStringType)), None) + ), // Javascript coercions: (JsNull, WomOptionalValue.none(WomOptionalType(WomIntegerType))), - ("[1, 2, null, 4]".parseJson, WomArray(WomArrayType(WomOptionalType(WomIntegerType)), - List(WomOptionalValue(WomInteger(1)), 
WomOptionalValue(WomInteger(2)), WomOptionalValue.none(WomIntegerType), WomOptionalValue(WomInteger(4))))), + ("[1, 2, null, 4]".parseJson, + WomArray( + WomArrayType(WomOptionalType(WomIntegerType)), + List(WomOptionalValue(WomInteger(1)), + WomOptionalValue(WomInteger(2)), + WomOptionalValue.none(WomIntegerType), + WomOptionalValue(WomInteger(4)) + ) + ) + ), (""" |{ | "one": 1, @@ -73,11 +96,14 @@ private object WomOptionalTypeSpecDefs { | "three": 3 |} """.stripMargin.parseJson, - WomMap(WomMapType(WomStringType, WomOptionalType(WomIntegerType)), Map( - WomString("one") -> WomOptionalValue(WomInteger(1)), - WomString("two") -> WomOptionalValue.none(WomIntegerType), - WomString("three") ->WomOptionalValue(WomInteger(3)) - )) + WomMap( + WomMapType(WomStringType, WomOptionalType(WomIntegerType)), + Map( + WomString("one") -> WomOptionalValue(WomInteger(1)), + WomString("two") -> WomOptionalValue.none(WomIntegerType), + WomString("three") -> WomOptionalValue(WomInteger(3)) + ) + ) ) ) diff --git a/wom/src/test/scala/wom/types/WomTypeSpec.scala b/wom/src/test/scala/wom/types/WomTypeSpec.scala index 4d3c7befeb5..3a93bf4695c 100644 --- a/wom/src/test/scala/wom/types/WomTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomTypeSpec.scala @@ -12,7 +12,6 @@ import scala.runtime.ScalaRunTime import scala.util.Random import scala.util.matching.Regex - class WomTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "WomType class" should "stringify WomBoolean to 'Boolean'" in { WomBooleanType.stableName shouldEqual "Boolean" @@ -73,12 +72,15 @@ class WomTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { """ 'wom.types.WomTypeSpec\$\$anon\$\1' to 'Boolean'.""").r ), ( - WomArray(WomArrayType(WomOptionalType(WomIntegerType)), Seq( - WomOptionalValue(WomInteger(0)), - WomOptionalValue(WomInteger(1)), - WomOptionalValue(WomInteger(2)), - WomOptionalValue(WomInteger(3)), - WomOptionalValue(WomInteger(4))) + WomArray( + WomArrayType(WomOptionalType(WomIntegerType)), + Seq( + WomOptionalValue(WomInteger(0)), + WomOptionalValue(WomInteger(1)), + WomOptionalValue(WomInteger(2)), + WomOptionalValue(WomInteger(3)), + WomOptionalValue(WomInteger(4)) + ) ), WomOptionalType(WomMaybeEmptyArrayType(WomIntegerType)), classOf[IllegalArgumentException], @@ -98,12 +100,11 @@ class WomTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { ) ) - private def describe(any: Any): String = { + private def describe(any: Any): String = any match { case womValue: WomValue => s"wom value ${womValue.toWomString} of type ${womValue.womType.stableName}" case _ => s"scala value ${ScalaRunTime.stringOf(any)}" } - } forAll(rawValuesCoercedToType) { (rawValue, womType, exceptionClass, exceptionMessage) => it should s"fail coercing ${womType.stableName} from ${describe(rawValue)}" in { @@ -117,25 +118,33 @@ class WomTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { WomBooleanType.coerceRawValue("true").get shouldEqual WomBoolean.True WomBooleanType.coerceRawValue("FALSE").get shouldEqual WomBoolean.False WomBooleanType.coerceRawValue(false).get shouldEqual WomBoolean.False - WomBooleanType.coerceRawValue(WomOptionalValue(WomBooleanType, Option(WomBoolean(true)))).get shouldEqual WomBoolean.True + WomBooleanType + .coerceRawValue(WomOptionalValue(WomBooleanType, Option(WomBoolean(true)))) + .get shouldEqual WomBoolean.True WomBooleanType.coerceRawValue("I like turtles").isFailure shouldBe true } "WomString" should "support expected coercions" in { 
WomStringType.coerceRawValue("foo").get shouldEqual WomString("foo") - WomStringType.coerceRawValue(WomOptionalValue(WomStringType, Option(WomString("foo")))).get shouldEqual WomString("foo") + WomStringType.coerceRawValue(WomOptionalValue(WomStringType, Option(WomString("foo")))).get shouldEqual WomString( + "foo" + ) WomStringType.coerceRawValue(-1).isFailure shouldBe true } "WomFile" should "support expected coercions" in { WomSingleFileType.coerceRawValue("/etc/passwd").get shouldEqual WomSingleFile("/etc/passwd") - WomSingleFileType.coerceRawValue(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("/etc/passwd")))).get shouldEqual WomSingleFile("/etc/passwd") + WomSingleFileType + .coerceRawValue(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("/etc/passwd")))) + .get shouldEqual WomSingleFile("/etc/passwd") WomSingleFileType.coerceRawValue(-1).isFailure shouldBe true } "WomInteger" should "support expected coercions" in { WomIntegerType.coerceRawValue(42).get shouldEqual WomInteger(42) - WomIntegerType.coerceRawValue(WomOptionalValue(WomIntegerType, Option(WomInteger(42)))).get shouldEqual WomInteger(42) + WomIntegerType.coerceRawValue(WomOptionalValue(WomIntegerType, Option(WomInteger(42)))).get shouldEqual WomInteger( + 42 + ) WomIntegerType.coerceRawValue("42").get shouldEqual WomInteger(42) WomIntegerType.coerceRawValue(JsString("42")).get shouldEqual WomInteger(42) WomIntegerType.coerceRawValue("FAIL").isFailure shouldBe true @@ -167,26 +176,49 @@ class WomTypeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { // Type A in, Type B in, expected results val lcsTestCases: List[(List[WomType], WomType)] = List( (List(WomIntegerType, WomStringType), WomStringType), - (List(WomPairType(WomStringType, WomIntegerType), WomPairType(WomIntegerType, WomStringType)), WomPairType(WomStringType, WomStringType)), + (List(WomPairType(WomStringType, WomIntegerType), WomPairType(WomIntegerType, WomStringType)), + WomPairType(WomStringType, WomStringType) + ), (List(WomOptionalType(WomIntegerType), WomOptionalType(WomStringType)), WomOptionalType(WomStringType)), (List(WomOptionalType(WomIntegerType), WomStringType), WomOptionalType(WomStringType)), - (List(WomArrayType(WomOptionalType(WomIntegerType)), WomArrayType(WomOptionalType(WomStringType))), WomArrayType(WomOptionalType(WomStringType))), - (List(WomOptionalType(WomMapType(WomIntegerType, WomStringType)), WomOptionalType(WomMapType(WomStringType, WomIntegerType))), WomOptionalType(WomMapType(WomStringType, WomStringType))), + (List(WomArrayType(WomOptionalType(WomIntegerType)), WomArrayType(WomOptionalType(WomStringType))), + WomArrayType(WomOptionalType(WomStringType)) + ), + (List(WomOptionalType(WomMapType(WomIntegerType, WomStringType)), + WomOptionalType(WomMapType(WomStringType, WomIntegerType)) + ), + WomOptionalType(WomMapType(WomStringType, WomStringType)) + ), (List( - WomCompositeType(Map( - "i" -> WomIntegerType, - "s" -> WomStringType - )), - WomCompositeType(Map( - "a" -> WomStringType, - "b" -> WomIntegerType - )) - ), WomObjectType), + WomCompositeType( + Map( + "i" -> WomIntegerType, + "s" -> WomStringType + ) + ), + WomCompositeType( + Map( + "a" -> WomStringType, + "b" -> WomIntegerType + ) + ) + ), + WomObjectType + ), (List(WomIntegerType, WomFloatType), WomFloatType), (List(WomIntegerType, WomBooleanType), WomStringType), - (List(WomOptionalType(WomMaybeEmptyArrayType(WomSingleFileType)), WomMaybeEmptyArrayType(WomNothingType)), WomOptionalType(WomMaybeEmptyArrayType(WomSingleFileType))), 
- (List(WomMaybeEmptyArrayType(WomSingleFileType), WomMaybeEmptyArrayType(WomNothingType)), WomMaybeEmptyArrayType(WomSingleFileType)), - (List(WomMaybeEmptyArrayType(WomStringType), WomMaybeEmptyArrayType(WomIntegerType), WomMaybeEmptyArrayType(WomNothingType)), WomMaybeEmptyArrayType(WomStringType)) + (List(WomOptionalType(WomMaybeEmptyArrayType(WomSingleFileType)), WomMaybeEmptyArrayType(WomNothingType)), + WomOptionalType(WomMaybeEmptyArrayType(WomSingleFileType)) + ), + (List(WomMaybeEmptyArrayType(WomSingleFileType), WomMaybeEmptyArrayType(WomNothingType)), + WomMaybeEmptyArrayType(WomSingleFileType) + ), + (List(WomMaybeEmptyArrayType(WomStringType), + WomMaybeEmptyArrayType(WomIntegerType), + WomMaybeEmptyArrayType(WomNothingType) + ), + WomMaybeEmptyArrayType(WomStringType) + ) ) lcsTestCases foreach { case (types, expectedLcs) => diff --git a/wom/src/test/scala/wom/util/YamlUtilsSpec.scala b/wom/src/test/scala/wom/util/YamlUtilsSpec.scala index 03534567f19..986201a2213 100644 --- a/wom/src/test/scala/wom/util/YamlUtilsSpec.scala +++ b/wom/src/test/scala/wom/util/YamlUtilsSpec.scala @@ -13,8 +13,12 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks - -class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks with EitherValues { +class YamlUtilsSpec + extends AnyFlatSpec + with CromwellTimeoutSpec + with Matchers + with TableDrivenPropertyChecks + with EitherValues { behavior of "YamlUtils" @@ -25,14 +29,14 @@ class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w "a stackoverflow yaml bomb", "{a: &b [*b]}", refineMV[NonNegative](10), - "Loop detected", + "Loop detected" ), ( // https://bitbucket.org/asomov/snakeyaml/wiki/Documentation#markdown-header-aliases "a recursive yaml example from the snakeyaml wiki", "&A [ *A ]", refineMV[NonNegative](10), - "Loop detected", + "Loop detected" ), ( "a recursive yaml based on mappings back to the root", @@ -40,7 +44,7 @@ class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w | c: *b |""".stripMargin, refineMV[NonNegative](10), - "Loop detected", + "Loop detected" ), ( // https://bitbucket.org/asomov/snakeyaml-engine/src/41b3845/src/test/resources/recursive/recursive-set-1.yaml @@ -54,7 +58,7 @@ class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w |? 
*key |""".stripMargin, refineMV[NonNegative](10), - "Loop detected", + "Loop detected" ), ( // https://en.wikipedia.org/w/index.php?title=Billion_laughs_attack&oldid=871224525#Variations @@ -70,25 +74,25 @@ class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w |i: &i [*h,*h,*h,*h,*h,*h,*h,*h,*h] |""".stripMargin, refineMV[NonNegative](10000), - "Loop detection halted at 10000 nodes", + "Loop detection halted at 10000 nodes" ), ( "a null yaml", null, refineMV[NonNegative](0), - null, + null ), ( "an empty yaml mapping when limited to zero nodes", "{}", refineMV[NonNegative](0), - "Loop detection halted at 0 nodes", + "Loop detection halted at 0 nodes" ), ( "an empty yaml sequence when limited to zero nodes", "[]", refineMV[NonNegative](0), - "Loop detection halted at 0 nodes", + "Loop detection halted at 0 nodes" ), ( "a yaml without a closing brace", @@ -102,8 +106,8 @@ class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w | in 'reader', line 1, column 2: | { | ^ - |""".stripMargin, - ), + |""".stripMargin + ) ) private val legalYamlTests = Table( @@ -112,19 +116,19 @@ class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w "an empty yaml mapping when limited to one node", "{}", refineMV[NonNegative](1), - Json.obj(), + Json.obj() ), ( "an empty yaml", "", refineMV[NonNegative](1), - Json.False, + Json.False ), ( "an empty yaml sequence when limited to one node", "[]", refineMV[NonNegative](1), - Json.arr(), + Json.arr() ), ( "a yaml with the same node for a key and value", @@ -132,14 +136,14 @@ class YamlUtilsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w |: *a |""".stripMargin, refineMV[NonNegative](3), - Json.obj("b" -> Json.fromString("b")), + Json.obj("b" -> Json.fromString("b")) ), ( "a yaml with the same nodes in a sequence", "[ &a b, *a ]", refineMV[NonNegative](3), - Json.arr(Json.fromString("b"), Json.fromString("b")), - ), + Json.arr(Json.fromString("b"), Json.fromString("b")) + ) ) forAll(illegalYamlTests) { (description, yaml, maxNodes, exceptionMessage) => diff --git a/wom/src/test/scala/wom/values/WomFileSpec.scala b/wom/src/test/scala/wom/values/WomFileSpec.scala index 3d4eff63c4c..10409de5730 100644 --- a/wom/src/test/scala/wom/values/WomFileSpec.scala +++ b/wom/src/test/scala/wom/values/WomFileSpec.scala @@ -9,7 +9,6 @@ import wom.types._ import scala.util.{Success, Try} - class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { behavior of "WomFile" @@ -39,16 +38,24 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit ) lazy val nestedFilesAndDirs = WomMaybePopulatedFile( valueOption = Option(singleFile.value), - secondaryFiles = List(WomMaybeListedDirectory( - valueOption = Option(listedDir1.value), - listingOption = Option(List(WomMaybePopulatedFile( - valueOption = Option(secondaryFile1.value), - secondaryFiles = List(WomMaybeListedDirectory( - valueOption = Option(listedDir2.value), - listingOption = Option(List(secondaryFile2)) - )) - ))) - )) + secondaryFiles = List( + WomMaybeListedDirectory( + valueOption = Option(listedDir1.value), + listingOption = Option( + List( + WomMaybePopulatedFile( + valueOption = Option(secondaryFile1.value), + secondaryFiles = List( + WomMaybeListedDirectory( + valueOption = Option(listedDir2.value), + listingOption = Option(List(secondaryFile2)) + ) + ) + ) + ) + ) + ) + ) ) val mapFileTests = Table( @@ -56,47 +63,62 @@ class WomFileSpec extends 
AnyFlatSpec with CromwellTimeoutSpec with Matchers wit ("a single directory", singleDir, WomUnlistedDirectory("prepend/single/dir")), ("a single file", singleFile, WomSingleFile("prepend/single/file")), ("a glob file", globFile, WomGlobFile("prepend/glob/*")), - ("a dir with listed dirs", dirWithListedDirs, - WomMaybeListedDirectory( - valueOption = Option("prepend/single/dir"), - listingOption = Option(List( - WomUnlistedDirectory("prepend/listed/dir1"), - WomUnlistedDirectory("prepend/listed/dir2") - )) - ) + ("a dir with listed dirs", + dirWithListedDirs, + WomMaybeListedDirectory( + valueOption = Option("prepend/single/dir"), + listingOption = Option( + List( + WomUnlistedDirectory("prepend/listed/dir1"), + WomUnlistedDirectory("prepend/listed/dir2") + ) + ) + ) ), - ("a dir with listed files", dirWithListedFiles, - WomMaybeListedDirectory( - valueOption = Option("prepend/single/dir"), - listingOption = Option(List(WomSingleFile("prepend/secondary/file1"), WomSingleFile("prepend/secondary/file2"))) - ) + ("a dir with listed files", + dirWithListedFiles, + WomMaybeListedDirectory( + valueOption = Option("prepend/single/dir"), + listingOption = Option(List(WomSingleFile("prepend/secondary/file1"), WomSingleFile("prepend/secondary/file2"))) + ) ), - ("a file with secondary files", fileWithSecondaryFiles, - WomMaybePopulatedFile( - valueOption = Option("prepend/single/file"), - secondaryFiles = List(WomSingleFile("prepend/secondary/file1"), WomSingleFile("prepend/secondary/file2")) - ) + ("a file with secondary files", + fileWithSecondaryFiles, + WomMaybePopulatedFile( + valueOption = Option("prepend/single/file"), + secondaryFiles = List(WomSingleFile("prepend/secondary/file1"), WomSingleFile("prepend/secondary/file2")) + ) ), - ("a file with secondary dirs", fileWithSecondaryDirs, - WomMaybePopulatedFile( - valueOption = Option("prepend/single/file"), - secondaryFiles = List(WomUnlistedDirectory("prepend/listed/dir1"), WomUnlistedDirectory("prepend/listed/dir2")) - ) + ("a file with secondary dirs", + fileWithSecondaryDirs, + WomMaybePopulatedFile( + valueOption = Option("prepend/single/file"), + secondaryFiles = List(WomUnlistedDirectory("prepend/listed/dir1"), WomUnlistedDirectory("prepend/listed/dir2")) + ) ), - ("a nested file/dir", nestedFilesAndDirs, - WomMaybePopulatedFile( - valueOption = Option("prepend/single/file"), - secondaryFiles = List(WomMaybeListedDirectory( - valueOption = Option("prepend/listed/dir1"), - listingOption = Option(List(WomMaybePopulatedFile( - valueOption = Option("prepend/secondary/file1"), - secondaryFiles = List(WomMaybeListedDirectory( - valueOption = Option("prepend/listed/dir2"), - listingOption = Option(List(WomSingleFile("prepend/secondary/file2"))) - )) - ))) - )) - ) + ("a nested file/dir", + nestedFilesAndDirs, + WomMaybePopulatedFile( + valueOption = Option("prepend/single/file"), + secondaryFiles = List( + WomMaybeListedDirectory( + valueOption = Option("prepend/listed/dir1"), + listingOption = Option( + List( + WomMaybePopulatedFile( + valueOption = Option("prepend/secondary/file1"), + secondaryFiles = List( + WomMaybeListedDirectory( + valueOption = Option("prepend/listed/dir2"), + listingOption = Option(List(WomSingleFile("prepend/secondary/file2"))) + ) + ) + ) + ) + ) + ) + ) + ) ) ) @@ -117,38 +139,43 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit ("a single directory", singleDir, List(WomUnlistedDirectory("single/dir"))), ("a single file", singleFile, List(WomSingleFile("single/file"))), ("a glob 
file", globFile, List(WomGlobFile("glob/*"))), - ("a dir with listed dirs", dirWithListedDirs, - List( - WomUnlistedDirectory("listed/dir1"), - WomUnlistedDirectory("listed/dir2") - ) + ("a dir with listed dirs", + dirWithListedDirs, + List( + WomUnlistedDirectory("listed/dir1"), + WomUnlistedDirectory("listed/dir2") + ) ), - ("a dir with listed files", dirWithListedFiles, - List( - WomSingleFile("secondary/file1"), - WomSingleFile("secondary/file2") - ) + ("a dir with listed files", + dirWithListedFiles, + List( + WomSingleFile("secondary/file1"), + WomSingleFile("secondary/file2") + ) ), - ("a file with secondary files", fileWithSecondaryFiles, - List( - WomSingleFile("single/file"), - WomSingleFile("secondary/file1"), - WomSingleFile("secondary/file2") - ) + ("a file with secondary files", + fileWithSecondaryFiles, + List( + WomSingleFile("single/file"), + WomSingleFile("secondary/file1"), + WomSingleFile("secondary/file2") + ) ), - ("a file with secondary dirs", fileWithSecondaryDirs, - List( - WomSingleFile("single/file"), - WomUnlistedDirectory("listed/dir1"), - WomUnlistedDirectory("listed/dir2") - ) + ("a file with secondary dirs", + fileWithSecondaryDirs, + List( + WomSingleFile("single/file"), + WomUnlistedDirectory("listed/dir1"), + WomUnlistedDirectory("listed/dir2") + ) ), - ("a nested file/dir", nestedFilesAndDirs, - List( - WomSingleFile("single/file"), - WomSingleFile("secondary/file1"), - WomSingleFile("secondary/file2") - ) + ("a nested file/dir", + nestedFilesAndDirs, + List( + WomSingleFile("single/file"), + WomSingleFile("secondary/file1"), + WomSingleFile("secondary/file2") + ) ) ) @@ -170,7 +197,6 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit globFile.toWomString should be("""glob("glob/*")""") } - val addTests = Table( ("description", "womFile", "expectedPrefix", "expectedSuffix"), ("a single directory", singleDir, WomString("prefix/single/dir"), WomUnlistedDirectory("single/dir/suffix")), @@ -235,17 +261,21 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit it should s"fail to add an optional string prefix to $description" in { a[WomExpressionException] should be thrownBy { - WomOptionalValue(WomStringType, Option(WomString("prefix/"))).add( - WomOptionalValue(womFile.womType, Option(womFile)) - ).get + WomOptionalValue(WomStringType, Option(WomString("prefix/"))) + .add( + WomOptionalValue(womFile.womType, Option(womFile)) + ) + .get } } it should s"fail to add an optional string suffix to $description" in { a[WomExpressionException] should be thrownBy { - WomOptionalValue(womFile.womType, Option(womFile)).add( - WomOptionalValue(WomStringType, Option(WomString("/suffix"))) - ).get + WomOptionalValue(womFile.womType, Option(womFile)) + .add( + WomOptionalValue(WomStringType, Option(WomString("/suffix"))) + ) + .get } } } @@ -263,9 +293,12 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit } it should s"fail to add an optional int to $description" in { - WomOptionalValue(womFile.womType, Option(womFile)).add( - WomOptionalValue(WomIntegerType, Option(WomInteger(42))) - ).failed.get should have message expected + WomOptionalValue(womFile.womType, Option(womFile)) + .add( + WomOptionalValue(WomIntegerType, Option(WomInteger(42))) + ) + .failed + .get should have message expected } } @@ -274,8 +307,11 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit ("a single directory matched to a similar directory", singleDir, 
WomUnlistedDirectory(singleDir.value), true), ("a single file matched to a similar file", singleFile, WomSingleFile(singleFile.value), true), ("a glob file matched to a similar glob", globFile, WomGlobFile(globFile.value), true), - ("a single directory matched to a dissimilar directory", singleDir, WomUnlistedDirectory("should/not/match"), - false), + ("a single directory matched to a dissimilar directory", + singleDir, + WomUnlistedDirectory("should/not/match"), + false + ), ("a single file matched to a dissimilar file", singleFile, WomSingleFile("should/not/match"), false), ("a glob file matched to a dissimilar glob", globFile, WomGlobFile("should/not/match"), false) ) @@ -311,8 +347,10 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit ("a single directory matched to a similar glob", singleDir, WomGlobFile(globFile.value)), ("a single file matched to a similar glob", singleFile, WomGlobFile(globFile.value)), ("a dir with listed dirs matched to a similar directory", dirWithListedDirs, WomUnlistedDirectory(singleDir.value)), - ("a dir with listed files matched to a similar directory", dirWithListedFiles, - WomUnlistedDirectory(singleDir.value)), + ("a dir with listed files matched to a similar directory", + dirWithListedFiles, + WomUnlistedDirectory(singleDir.value) + ), ("a file with secondary files matched to a similar file", fileWithSecondaryFiles, WomSingleFile(singleFile.value)), ("a file with secondary dirs matched to a similar file", fileWithSecondaryDirs, WomSingleFile(singleFile.value)), ("a nested file/dir matched to a similar file", nestedFilesAndDirs, WomSingleFile(singleFile.value)) @@ -333,17 +371,21 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit it should s"fail comparing (optionally) $description" in { a[WomExpressionException] should be thrownBy { - WomOptionalValue(womFileA.womType, Option(womFileA)).equals( - WomOptionalValue(womFileB.womType, Option(womFileB)) - ).get + WomOptionalValue(womFileA.womType, Option(womFileA)) + .equals( + WomOptionalValue(womFileB.womType, Option(womFileB)) + ) + .get } } it should s"fail symmetrically comparing (optionally) $description" in { a[WomExpressionException] should be thrownBy { - WomOptionalValue(womFileB.womType, Option(womFileB)).equals( - WomOptionalValue(womFileA.womType, Option(womFileA)) - ).get + WomOptionalValue(womFileB.womType, Option(womFileB)) + .equals( + WomOptionalValue(womFileA.womType, Option(womFileA)) + ) + .get } } } @@ -412,17 +454,21 @@ class WomFileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit it should s"expect $expected comparing $description as an optional string" in { a[WomExpressionException] should be thrownBy { - WomOptionalValue(womFile.womType, Option(womFile)).equals( - WomOptionalValue(WomStringType, Option(WomString(string))) - ).get + WomOptionalValue(womFile.womType, Option(womFile)) + .equals( + WomOptionalValue(WomStringType, Option(WomString(string))) + ) + .get } } it should s"expect $expected symmetrically comparing $description as an optional string" in { a[WomExpressionException] should be thrownBy { - WomOptionalValue(WomStringType, Option(WomString(string))).equals( - WomOptionalValue(womFile.womType, Option(womFile)) - ).get + WomOptionalValue(WomStringType, Option(WomString(string))) + .equals( + WomOptionalValue(womFile.womType, Option(womFile)) + ) + .get } } } diff --git a/wom/src/test/scala/wom/values/WomObjectSpec.scala b/wom/src/test/scala/wom/values/WomObjectSpec.scala index 
bed14c9ae46..aa74c1f4c0e 100644 --- a/wom/src/test/scala/wom/values/WomObjectSpec.scala +++ b/wom/src/test/scala/wom/values/WomObjectSpec.scala @@ -29,13 +29,13 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w val array: Array[WomObject] = parsed.success.value array should have size 1 - //Attributes + // Attributes array.head.values should contain key "one" array.head.values should contain key "two" array.head.values should contain key "three" array.head.values should contain key "four" - //Values + // Values array.head.values.get("one") shouldBe Some(WomString("one")) array.head.values.get("two") shouldBe Some(WomString("four")) array.head.values.get("three") shouldBe Some(WomString("nine")) @@ -73,7 +73,7 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w val array: Array[WomObject] = parsed.success.value array should have size 2 - //Attributes + // Attributes array foreach { a => a.values should contain key "one" a.values should contain key "two" @@ -81,7 +81,7 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w a.values should contain key "four" } - //Values + // Values array.head.values.get("one") shouldBe Some(WomString("one")) array.head.values.get("two") shouldBe Some(WomString("four")) array.head.values.get("three") shouldBe Some(WomString("nine")) diff --git a/wom/src/test/scala/wom/values/WomOptionalValueSpec.scala b/wom/src/test/scala/wom/values/WomOptionalValueSpec.scala index 22dc5afc902..174fc5331ac 100644 --- a/wom/src/test/scala/wom/values/WomOptionalValueSpec.scala +++ b/wom/src/test/scala/wom/values/WomOptionalValueSpec.scala @@ -5,7 +5,6 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import wom.types.{WomIntegerType, WomOptionalType} - class WomOptionalValueSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "WomOptionalValue flattening" diff --git a/womtool/src/main/scala/womtool/WomtoolMain.scala b/womtool/src/main/scala/womtool/WomtoolMain.scala index e1fba99d978..6fc1ccb43db 100644 --- a/womtool/src/main/scala/womtool/WomtoolMain.scala +++ b/womtool/src/main/scala/womtool/WomtoolMain.scala @@ -62,14 +62,12 @@ object WomtoolMain extends App with StrictLogging { case _ => BadUsageTermination(WomtoolCommandLineParser.instance.usage) } - def highlight(workflowSourcePath: String, mode: HighlightMode): Termination = { - def highlight(highlighter: SyntaxHighlighter) = { + def highlight(highlighter: SyntaxHighlighter) = loadWdl(workflowSourcePath) { namespace => SuccessfulTermination(new SyntaxFormatter(highlighter).format(namespace)) } - } mode match { case HtmlHighlighting => highlight(HtmlSyntaxHighlighter) @@ -78,9 +76,8 @@ object WomtoolMain extends App with StrictLogging { } } - def parse(workflowSourcePath: String): Termination = { + def parse(workflowSourcePath: String): Termination = SuccessfulTermination(AstTools.getAst(Paths.get(workflowSourcePath)).toPrettyString) - } def upgrade(workflowSourcePath: String): Termination = { import wdl.draft2.model.Import @@ -91,20 +88,21 @@ object WomtoolMain extends App with StrictLogging { WdlNamespace.loadUsingPath( Paths.get(workflowSourcePath), None, - Option(List( - WdlNamespace.directoryResolver(File(workflowSourcePath).parent), - WdlNamespace.fileResolver, - WdlDraft2LanguageFactory.httpResolver - )) + Option( + List( + WdlNamespace.directoryResolver(File(workflowSourcePath).parent), + WdlNamespace.fileResolver, + WdlDraft2LanguageFactory.httpResolver + 
) + ) ) - def upgradeImport(draft2Import: Import): ImportElement = { + def upgradeImport(draft2Import: Import): ImportElement = if (draft2Import.namespaceName.nonEmpty) // draft-2 does not have structs, so the source WDL will not have any for us to rename ImportElement(draft2Import.uri, Option(draft2Import.namespaceName), Map()) else ImportElement(draft2Import.uri, None, Map()) - } val maybeWdl: Try[Path] = DefaultPathBuilder.build(workflowSourcePath) @@ -116,13 +114,16 @@ object WomtoolMain extends App with StrictLogging { val maybeFileElement = womBundleToFileElement.run(womBundle) maybeFileElement match { case Right(fileElement) => - SuccessfulTermination( - fileElement.copy(imports = wdlNamespace.imports.map(upgradeImport)).toWdlV1) + SuccessfulTermination(fileElement.copy(imports = wdlNamespace.imports.map(upgradeImport)).toWdlV1) case Left(errors) => - UnsuccessfulTermination(s"WDL parsing succeeded but could not create WOM: ${errors.toList.mkString("[", ",", "]")}") + UnsuccessfulTermination( + s"WDL parsing succeeded but could not create WOM: ${errors.toList.mkString("[", ",", "]")}" + ) } case Left(errors) => - UnsuccessfulTermination(s"WDL parsing succeeded but could not create WOM: ${errors.toList.mkString("[", ",", "]")}") + UnsuccessfulTermination( + s"WDL parsing succeeded but could not create WOM: ${errors.toList.mkString("[", ",", "]")}" + ) } case (Failure(throwable), _) => UnsuccessfulTermination(s"Failed to load WDL source: ${throwable.getMessage}") @@ -131,31 +132,41 @@ object WomtoolMain extends App with StrictLogging { } } - def graph(workflowSourcePath: Path): Termination = { - WomGraphMaker.getBundle(workflowSourcePath).flatMap(_.toExecutableCallable).contextualizeErrors("create wom bundle") match { - case Right(executable) => SuccessfulTermination (new GraphPrint(executable).dotString) - case Left(errors) => UnsuccessfulTermination(errors.toList.mkString(System.lineSeparator, System.lineSeparator, System.lineSeparator)) + def graph(workflowSourcePath: Path): Termination = + WomGraphMaker + .getBundle(workflowSourcePath) + .flatMap(_.toExecutableCallable) + .contextualizeErrors("create wom bundle") match { + case Right(executable) => SuccessfulTermination(new GraphPrint(executable).dotString) + case Left(errors) => + UnsuccessfulTermination( + errors.toList.mkString(System.lineSeparator, System.lineSeparator, System.lineSeparator) + ) } - } - def womGraph(workflowSourcePath: Path): Termination = { - WomGraphMaker.fromFiles(mainFile = workflowSourcePath, inputs = None).contextualizeErrors("create wom Graph") match { - case Right(graphWithImports) => SuccessfulTermination (new WomGraph(graphName = "workflow", graphWithImports.graph).digraphDot) - case Left(errors) => UnsuccessfulTermination(errors.toList.mkString(System.lineSeparator, System.lineSeparator, System.lineSeparator)) + def womGraph(workflowSourcePath: Path): Termination = + WomGraphMaker + .fromFiles(mainFile = workflowSourcePath, inputs = None) + .contextualizeErrors("create wom Graph") match { + case Right(graphWithImports) => + SuccessfulTermination(new WomGraph(graphName = "workflow", graphWithImports.graph).digraphDot) + case Left(errors) => + UnsuccessfulTermination( + errors.toList.mkString(System.lineSeparator, System.lineSeparator, System.lineSeparator) + ) } - } - private[this] def loadWdl(path: String)(f: WdlNamespace => Termination): Termination = { + private[this] def loadWdl(path: String)(f: WdlNamespace => Termination): Termination = WdlNamespace.loadUsingPath(Paths.get(path), None, None) 
match { case Success(namespace) => f(namespace) case Failure(r: RuntimeException) => throw new RuntimeException("Unexpected failure mode", r) case Failure(t) => UnsuccessfulTermination(t.getMessage) } - } - def runWomtool(cmdLineArgs: Seq[String]): Termination = { - val parsedArgs = WomtoolCommandLineParser.instance.parse(cmdLineArgs, PartialWomtoolCommandLineArguments()) flatMap WomtoolCommandLineParser.validateCommandLine + val parsedArgs = WomtoolCommandLineParser.instance.parse(cmdLineArgs, + PartialWomtoolCommandLineArguments() + ) flatMap WomtoolCommandLineParser.validateCommandLine parsedArgs match { case Some(pa) => dispatchCommand(pa) diff --git a/womtool/src/main/scala/womtool/cmdline/PartialWomtoolCommandLineArguments.scala b/womtool/src/main/scala/womtool/cmdline/PartialWomtoolCommandLineArguments.scala index 5e516f7c3a5..af33485e173 100644 --- a/womtool/src/main/scala/womtool/cmdline/PartialWomtoolCommandLineArguments.scala +++ b/womtool/src/main/scala/womtool/cmdline/PartialWomtoolCommandLineArguments.scala @@ -8,15 +8,14 @@ final case class PartialWomtoolCommandLineArguments(command: Option[WomtoolComma displayOptionalInputs: Option[Boolean] = None, highlightMode: Option[HighlightMode] = None, listDependencies: Option[Boolean] = None - ) +) sealed trait ValidatedWomtoolCommandLine final case class ParseCommandLine(workflowSource: Path) extends ValidatedWomtoolCommandLine -final case class ValidateCommandLine(workflowSource: Path, - inputs: Option[Path], - listDependencies: Boolean) extends ValidatedWomtoolCommandLine -final case class HighlightCommandLine(workflowSource: Path, - highlightMode: HighlightMode) extends ValidatedWomtoolCommandLine +final case class ValidateCommandLine(workflowSource: Path, inputs: Option[Path], listDependencies: Boolean) + extends ValidatedWomtoolCommandLine +final case class HighlightCommandLine(workflowSource: Path, highlightMode: HighlightMode) + extends ValidatedWomtoolCommandLine final case class InputsCommandLine(workflowSource: Path, showOptionals: Boolean) extends ValidatedWomtoolCommandLine final case class OutputsCommandLine(workflowSource: Path) extends ValidatedWomtoolCommandLine final case class WomtoolGraphCommandLine(workflowSource: Path) extends ValidatedWomtoolCommandLine @@ -43,4 +42,3 @@ object HighlightMode { case object ConsoleHighlighting extends HighlightMode final case class UnrecognizedHighlightingMode(string: String) extends HighlightMode } - diff --git a/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala b/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala index 9bdc9ef11a1..8b5801d8a52 100644 --- a/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala +++ b/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala @@ -13,14 +13,22 @@ object WomtoolCommandLineParser { lazy val instance: scopt.OptionParser[PartialWomtoolCommandLineArguments] = new WomtoolCommandLineParser() def validateCommandLine(args: PartialWomtoolCommandLineArguments): Option[ValidatedWomtoolCommandLine] = args match { - case PartialWomtoolCommandLineArguments(Some(Validate), Some(mainFile), inputs, None, None, listDependencies) => Option(ValidateCommandLine(mainFile, inputs, listDependencies.getOrElse(false))) - case PartialWomtoolCommandLineArguments(Some(Inputs), Some(mainFile), None, showOptionals, None, None) => Option(InputsCommandLine(mainFile, !showOptionals.contains(false))) - case PartialWomtoolCommandLineArguments(Some(Outputs), Some(mainFile), None, None, None, None) => 
Option(OutputsCommandLine(mainFile)) - case PartialWomtoolCommandLineArguments(Some(Parse), Some(mainFile), None, None, None, None) => Option(ParseCommandLine(mainFile)) - case PartialWomtoolCommandLineArguments(Some(Highlight), Some(mainFile), None, None, Some(mode), None) => Option(HighlightCommandLine(mainFile, mode)) - case PartialWomtoolCommandLineArguments(Some(Graph), Some(mainFile), None, None, None, None) => Option(WomtoolGraphCommandLine(mainFile)) - case PartialWomtoolCommandLineArguments(Some(WomGraph), Some(mainFile), None, None, None, None) => Option(WomtoolWomGraphCommandLine(mainFile)) - case PartialWomtoolCommandLineArguments(Some(Upgrade), Some(mainFile), None, None, None, None) => Option(WomtoolWdlUpgradeCommandLine(mainFile)) + case PartialWomtoolCommandLineArguments(Some(Validate), Some(mainFile), inputs, None, None, listDependencies) => + Option(ValidateCommandLine(mainFile, inputs, listDependencies.getOrElse(false))) + case PartialWomtoolCommandLineArguments(Some(Inputs), Some(mainFile), None, showOptionals, None, None) => + Option(InputsCommandLine(mainFile, !showOptionals.contains(false))) + case PartialWomtoolCommandLineArguments(Some(Outputs), Some(mainFile), None, None, None, None) => + Option(OutputsCommandLine(mainFile)) + case PartialWomtoolCommandLineArguments(Some(Parse), Some(mainFile), None, None, None, None) => + Option(ParseCommandLine(mainFile)) + case PartialWomtoolCommandLineArguments(Some(Highlight), Some(mainFile), None, None, Some(mode), None) => + Option(HighlightCommandLine(mainFile, mode)) + case PartialWomtoolCommandLineArguments(Some(Graph), Some(mainFile), None, None, None, None) => + Option(WomtoolGraphCommandLine(mainFile)) + case PartialWomtoolCommandLineArguments(Some(WomGraph), Some(mainFile), None, None, None, None) => + Option(WomtoolWomGraphCommandLine(mainFile)) + case PartialWomtoolCommandLineArguments(Some(Upgrade), Some(mainFile), None, None, None, None) => + Option(WomtoolWdlUpgradeCommandLine(mainFile)) case _ => None } } @@ -40,14 +48,18 @@ class WomtoolCommandLineParser extends scopt.OptionParser[PartialWomtoolCommandL opt[String]('h', "highlight-mode") .text("Highlighting mode, one of 'html', 'console' (used only with 'highlight' command)") .optional() - .action((s, c) => s match { - case "html" => c.copy(highlightMode = Option(HtmlHighlighting)) - case "console" => c.copy(highlightMode = Option(ConsoleHighlighting)) - case other => c.copy(highlightMode = Option(UnrecognizedHighlightingMode(other))) - }) - - opt[Boolean]('o', name="optional-inputs") - .text("If set, optional inputs are also included in the inputs set. Default is 'true' (used only with the inputs command)") + .action((s, c) => + s match { + case "html" => c.copy(highlightMode = Option(HtmlHighlighting)) + case "console" => c.copy(highlightMode = Option(ConsoleHighlighting)) + case other => c.copy(highlightMode = Option(UnrecognizedHighlightingMode(other))) + } + ) + + opt[Boolean]('o', name = "optional-inputs") + .text( + "If set, optional inputs are also included in the inputs set. Default is 'true' (used only with the inputs command)" + ) .optional() .action((b, c) => c.copy(displayOptionalInputs = Some(b))) @@ -67,25 +79,29 @@ class WomtoolCommandLineParser extends scopt.OptionParser[PartialWomtoolCommandL cmd("validate") .action((_, c) => c.copy(command = Option(Validate))) - .text("Validate a workflow source file. If inputs are provided then 'validate' also checks that the inputs file is a valid set of inputs for the workflow." 
+ System.lineSeparator) + .text( + "Validate a workflow source file. If inputs are provided then 'validate' also checks that the inputs file is a valid set of inputs for the workflow." + System.lineSeparator + ) cmd("inputs") - .action((_, c) => - c.copy(command = Option(Inputs))) + .action((_, c) => c.copy(command = Option(Inputs))) .text("Generate and output a new inputs JSON for this workflow." + System.lineSeparator) cmd("outputs") - .action((_, c) => - c.copy(command = Option(Outputs))) + .action((_, c) => c.copy(command = Option(Outputs))) .text("Generate and output a list of output types in JSON for this workflow." + System.lineSeparator) cmd("parse") .action((_, c) => c.copy(command = Option(Parse))) - .text("(Deprecated; WDL draft 2 only) Print out the Hermes parser's abstract syntax tree for the source file." + System.lineSeparator) + .text( + "(Deprecated; WDL draft 2 only) Print out the Hermes parser's abstract syntax tree for the source file." + System.lineSeparator + ) cmd("highlight") .action((_, c) => c.copy(command = Option(Highlight))) - .text("(Deprecated; WDL draft 2 only) Print out the Hermes parser's abstract syntax tree for the source file. Requires at least one of 'html' or 'console'" + System.lineSeparator) + .text( + "(Deprecated; WDL draft 2 only) Print out the Hermes parser's abstract syntax tree for the source file. Requires at least one of 'html' or 'console'" + System.lineSeparator + ) cmd("graph") .action((_, c) => c.copy(command = Option(Graph))) @@ -97,6 +113,8 @@ class WomtoolCommandLineParser extends scopt.OptionParser[PartialWomtoolCommandL cmd("womgraph") .action((_, c) => c.copy(command = Option(WomGraph))) - .text("(Advanced) Generate and output a graph visualization of Cromwell's internal Workflow Object Model structure for this workflow in .dot format" + System.lineSeparator) + .text( + "(Advanced) Generate and output a graph visualization of Cromwell's internal Workflow Object Model structure for this workflow in .dot format" + System.lineSeparator + ) } diff --git a/womtool/src/main/scala/womtool/graph/WomGraph.scala b/womtool/src/main/scala/womtool/graph/WomGraph.scala index b1957a1545c..61c2ac0399b 100644 --- a/womtool/src/main/scala/womtool/graph/WomGraph.scala +++ b/womtool/src/main/scala/womtool/graph/WomGraph.scala @@ -29,13 +29,13 @@ import scala.jdk.CollectionConverters._ class WomGraph(graphName: String, graph: Graph) { def indent(s: String) = s.linesIterator.map(x => s" $x").mkString(System.lineSeparator) - def combine(ss: Iterable[String]) = ss.mkString(start="", sep=System.lineSeparator, end=System.lineSeparator) + def combine(ss: Iterable[String]) = ss.mkString(start = "", sep = System.lineSeparator, end = System.lineSeparator) def indentAndCombine(ss: Iterable[String]) = combine(ss.map(indent)) implicit val monoid = cats.derived.MkMonoid[NodesAndLinks] val digraphDot: String = { - val workflowDigraph = WorkflowDigraph (dotSafe(graphName), listAllGraphNodes(graph)) + val workflowDigraph = WorkflowDigraph(dotSafe(graphName), listAllGraphNodes(graph)) s"""|digraph ${workflowDigraph.workflowName} |{ @@ -49,12 +49,12 @@ class WomGraph(graphName: String, graph: Graph) { private lazy val clusterCount: AtomicInteger = new AtomicInteger(0) - private[graph] def listAllGraphNodes(graph: Graph): NodesAndLinks = { + private[graph] def listAllGraphNodes(graph: Graph): NodesAndLinks = graph.nodes.toList foldMap nodesAndLinks - } private def upstreamLinks(graphNode: GraphNode): Set[String] = graphNode match { - case ogin: OuterGraphInputNode => 
Set(s"${ogin.linkToOuterGraph.graphId} -> ${ogin.singleOutputPort.graphId} [style=dashed arrowhead=none]") + case ogin: OuterGraphInputNode => + Set(s"${ogin.linkToOuterGraph.graphId} -> ${ogin.singleOutputPort.graphId} [style=dashed arrowhead=none]") case _ => for { inputPort <- graphNode.inputPorts @@ -70,13 +70,13 @@ class WomGraph(graphName: String, graph: Graph) { // Don't include the scatter expression input port here since they're added later in `internalScatterNodesAndLinks` // Round up the gathered output ports so it's obvious they're being gathered. s""" - |${combine((s.inputPorts -- s.scatterCollectionExpressionNodes.flatMap(_.inputPorts)) map portLine)} - |subgraph $nextCluster { - | style=${s.graphStyle}; - | fillcolor=${s.graphFillColor} - | "${UUID.randomUUID}" [shape=plaintext label="gather ports"] - |${indentAndCombine(s.outputPorts map portLine)} - |} + |${combine((s.inputPorts -- s.scatterCollectionExpressionNodes.flatMap(_.inputPorts)) map portLine)} + |subgraph $nextCluster { + | style=${s.graphStyle}; + | fillcolor=${s.graphFillColor} + | "${UUID.randomUUID}" [shape=plaintext label="gather ports"] + |${indentAndCombine(s.outputPorts map portLine)} + |} """.stripMargin case _ => combine((graphNode.outputPorts ++ graphNode.inputPorts) map portLine) } @@ -121,7 +121,9 @@ class WomGraph(graphName: String, graph: Graph) { |""".stripMargin } - val outputLinks = scatter.outputMapping map { outputPort => s"${outputPort.outputToGather.singleInputPort.graphId} -> ${outputPort.graphId} [style=dashed arrowhead=none]" } + val outputLinks = scatter.outputMapping map { outputPort => + s"${outputPort.outputToGather.singleInputPort.graphId} -> ${outputPort.graphId} [style=dashed arrowhead=none]" + } innerGraph.withLinks(outputLinks) } @@ -136,12 +138,13 @@ class WomGraph(graphName: String, graph: Graph) { |""".stripMargin } - val outputLinks = conditional.conditionalOutputPorts map { outputPort => s"${outputPort.outputToExpose.singleInputPort.graphId} -> ${outputPort.graphId} [style=dashed arrowhead=none]" } + val outputLinks = conditional.conditionalOutputPorts map { outputPort => + s"${outputPort.outputToExpose.singleInputPort.graphId} -> ${outputPort.graphId} [style=dashed arrowhead=none]" + } innerGraph.withLinks(outputLinks) } - def internalSubworkflowNodesAndLinks(subworkflow: WorkflowCallNode): NodesAndLinks = { - + def internalSubworkflowNodesAndLinks(subworkflow: WorkflowCallNode): NodesAndLinks = listAllGraphNodes(subworkflow.callable.innerGraph) wrapNodes { n => s""" |subgraph $nextCluster { @@ -151,7 +154,6 @@ class WomGraph(graphName: String, graph: Graph) { |} |""".stripMargin } - } private def nextCluster: String = "cluster_" + clusterCount.getAndIncrement() } @@ -173,7 +175,8 @@ object WomGraph { new WomGraph("workflow", graph) } - private def readFile(filePath: String): String = Files.readAllLines(Paths.get(filePath)).asScala.mkString(System.lineSeparator()) + private def readFile(filePath: String): String = + Files.readAllLines(Paths.get(filePath)).asScala.mkString(System.lineSeparator()) private def womExecutableFromWdl(filePath: String): Graph = { val workflowFileString = readFile(filePath) @@ -183,18 +186,34 @@ object WomGraph { firstLine.startsWith("version 1.0") } val womBundle: Checked[WomBundle] = if (version1) { - val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map(FileElementToWomBundleInputs(_, "{}", convertNestedScatterToSubworkflow = true, List.empty, List.empty, 
workflowDefinitionElementToWomWorkflowDefinition, taskDefinitionElementToWomTaskDefinition)) andThen fileElementToWomBundle + val converter: CheckedAtoB[File, WomBundle] = fileToAst andThen wrapAst andThen astToFileElement.map( + FileElementToWomBundleInputs( + _, + "{}", + convertNestedScatterToSubworkflow = true, + List.empty, + List.empty, + workflowDefinitionElementToWomWorkflowDefinition, + taskDefinitionElementToWomTaskDefinition + ) + ) andThen fileElementToWomBundle converter.run(File(filePath)) } else { - WdlNamespaceWithWorkflow.load(readFile(filePath), Seq(WdlNamespace.fileResolver _)).toChecked.flatMap(_.toWomBundle) + WdlNamespaceWithWorkflow + .load(readFile(filePath), Seq(WdlNamespace.fileResolver _)) + .toChecked + .flatMap(_.toWomBundle) } womBundle match { - case Right(wom) if (wom.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).size == 1 => (wom.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).head.graph + case Right(wom) + if (wom.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).size == 1 => + (wom.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).head.graph case Right(_) => throw new Exception("Can only 'wom graph' a WDL with exactly one workflow") case Left(errors) => - val formattedErrors = errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) + val formattedErrors = + errors.toList.mkString(System.lineSeparator(), System.lineSeparator(), System.lineSeparator()) throw new Exception(s"Failed to create WOM: $formattedErrors") } } @@ -209,6 +228,7 @@ object WomGraph { case _: WomOptionalType => JsNull case WomMapType(_, valueType) => JsObject(Map("0" -> fakeInput(valueType))) case WomArrayType(innerType) => JsArray(Vector(fakeInput(innerType))) - case WomPairType(leftType, rightType) => JsObject(Map("left" -> fakeInput(leftType), "right" -> fakeInput(rightType))) + case WomPairType(leftType, rightType) => + JsObject(Map("left" -> fakeInput(leftType), "right" -> fakeInput(rightType))) } } diff --git a/womtool/src/main/scala/womtool/graph/package.scala b/womtool/src/main/scala/womtool/graph/package.scala index e2ae058369c..c20a975e7f3 100644 --- a/womtool/src/main/scala/womtool/graph/package.scala +++ b/womtool/src/main/scala/womtool/graph/package.scala @@ -8,7 +8,7 @@ package object graph { private[graph] def dotSafe(s: String) = s""""${s.replace("\"", "\\\"")}"""" - private[graph] implicit class GraphNodeGraphics(val graphNode: GraphNode) extends AnyVal { + implicit private[graph] class GraphNodeGraphics(val graphNode: GraphNode) extends AnyVal { def graphFillColor = graphNode match { case _: ConditionalNode | _: ScatterNode | _: WorkflowCallNode => "lightgray" case _: ExternalGraphInputNode => "lightskyblue1" @@ -41,7 +41,7 @@ package object graph { def graphId: String = dotSafe("NODE" + graphObjectUniqueId(graphNode)) } - private[graph] implicit class GraphNodePortGraphics(val graphNodePort: GraphNodePort) extends AnyVal { + implicit private[graph] class GraphNodePortGraphics(val graphNodePort: GraphNodePort) extends AnyVal { def graphShape = graphNodePort match { case _: InputPort => "oval" case _: OutputPort => "hexagon" diff --git a/womtool/src/main/scala/womtool/input/WomGraphMaker.scala b/womtool/src/main/scala/womtool/input/WomGraphMaker.scala index bd3ef65e09b..65e8d5a522a 100644 --- a/womtool/src/main/scala/womtool/input/WomGraphMaker.scala +++ 
b/womtool/src/main/scala/womtool/input/WomGraphMaker.scala @@ -32,7 +32,8 @@ object WomGraphMaker { List( new WdlDraft3LanguageFactory(ConfigFactory.empty()), new WdlBiscayneLanguageFactory(ConfigFactory.empty()), - new WdlCascadesLanguageFactory(ConfigFactory.empty())) + new WdlCascadesLanguageFactory(ConfigFactory.empty()) + ) .find(_.looksParsable(mainFileContents)) .getOrElse(new WdlDraft2LanguageFactory(ConfigFactory.empty())) @@ -42,7 +43,7 @@ object WomGraphMaker { } } - def fromFiles(mainFile: Path, inputs: Option[Path]): Checked[WomGraphWithResolvedImports] = { + def fromFiles(mainFile: Path, inputs: Option[Path]): Checked[WomGraphWithResolvedImports] = getBundleAndFactory(mainFile) flatMap { case (womBundle, languageFactory) => inputs match { case None => @@ -56,11 +57,11 @@ object WomGraphMaker { } yield WomGraphWithResolvedImports(validatedWomNamespace.executable.graph, womBundle.resolvedImportRecords) } } - } - private def readFile(filePath: String): Checked[String] = Try(Files.readAllLines(Paths.get(filePath)).asScala.mkString(System.lineSeparator())).toChecked + private def readFile(filePath: String): Checked[String] = Try( + Files.readAllLines(Paths.get(filePath)).asScala.mkString(System.lineSeparator()) + ).toChecked } - case class WomGraphWithResolvedImports(graph: Graph, resolvedImportRecords: Set[ResolvedImportRecord]) diff --git a/womtool/src/main/scala/womtool/inputs/Inputs.scala b/womtool/src/main/scala/womtool/inputs/Inputs.scala index a983cbd831b..36044358a1a 100644 --- a/womtool/src/main/scala/womtool/inputs/Inputs.scala +++ b/womtool/src/main/scala/womtool/inputs/Inputs.scala @@ -3,7 +3,12 @@ package womtool.inputs import cromwell.core.path.Path import womtool.WomtoolMain.{SuccessfulTermination, Termination, UnsuccessfulTermination} import womtool.input.WomGraphMaker -import wom.graph.{ExternalGraphInputNode, OptionalGraphInputNode, OptionalGraphInputNodeWithDefault, RequiredGraphInputNode} +import wom.graph.{ + ExternalGraphInputNode, + OptionalGraphInputNode, + OptionalGraphInputNodeWithDefault, + RequiredGraphInputNode +} import spray.json._ import spray.json.DefaultJsonProtocol._ import wom.expression.WomExpression @@ -12,8 +17,7 @@ import wom.types.{WomCompositeType, WomOptionalType, WomType} import scala.util.{Failure, Success, Try} object Inputs { - def inputsJson(main: Path, showOptionals: Boolean): Termination = { - + def inputsJson(main: Path, showOptionals: Boolean): Termination = WomGraphMaker.fromFiles(main, inputs = None) match { case Right(graphWithImports) => Try(graphWithImports.graph.externalInputNodes.toJson(inputNodeWriter(showOptionals)).prettyPrint) match { @@ -22,23 +26,25 @@ object Inputs { } case Left(errors) => UnsuccessfulTermination(errors.toList.mkString(System.lineSeparator)) } - } private def inputNodeWriter(showOptionals: Boolean): JsonWriter[Set[ExternalGraphInputNode]] = set => { val valueMap: Seq[(String, JsValue)] = set.toList collect { case RequiredGraphInputNode(_, womType, nameInInputSet, _) => nameInInputSet -> womTypeToJson(womType, None) - case OptionalGraphInputNode(_, womOptionalType, nameInInputSet, _) if showOptionals => nameInInputSet -> womTypeToJson(womOptionalType, None) - case OptionalGraphInputNodeWithDefault(_, womType, default, nameInInputSet, _) if showOptionals => nameInInputSet -> womTypeToJson(womType, Option(default)) + case OptionalGraphInputNode(_, womOptionalType, nameInInputSet, _) if showOptionals => + nameInInputSet -> womTypeToJson(womOptionalType, None) + case OptionalGraphInputNodeWithDefault(_, 
womType, default, nameInInputSet, _) if showOptionals => + nameInInputSet -> womTypeToJson(womType, Option(default)) } valueMap.toMap.toJson } private def womTypeToJson(womType: WomType, default: Option[WomExpression]): JsValue = (womType, default) match { - case (WomCompositeType(typeMap, _), _) => JsObject( - typeMap.map { case (name, wt) => name -> womTypeToJson(wt, None) } - ) + case (WomCompositeType(typeMap, _), _) => + JsObject( + typeMap.map { case (name, wt) => name -> womTypeToJson(wt, None) } + ) case (_, Some(d)) => JsString(s"${womType.stableName} (optional, default = ${d.sourceString})") case (_: WomOptionalType, _) => JsString(s"${womType.stableName} (optional)") case (_, _) => JsString(s"${womType.stableName}") diff --git a/womtool/src/main/scala/womtool/outputs/Outputs.scala b/womtool/src/main/scala/womtool/outputs/Outputs.scala index f08ebbc5d11..34ef4aa9e9d 100644 --- a/womtool/src/main/scala/womtool/outputs/Outputs.scala +++ b/womtool/src/main/scala/womtool/outputs/Outputs.scala @@ -11,8 +11,7 @@ import womtool.input.WomGraphMaker import scala.util.{Failure, Success, Try} object Outputs { - def outputsJson(main: Path): Termination = { - + def outputsJson(main: Path): Termination = WomGraphMaker.fromFiles(main, inputs = None) match { case Right(graphWithImports) => Try(graphWithImports.graph.outputNodes.asJson.printWith(sprayLikePrettyPrinter)) match { @@ -21,20 +20,22 @@ object Outputs { } case Left(errors) => UnsuccessfulTermination(errors.toList.mkString(System.lineSeparator)) } - } - private implicit val e: Encoder[Set[GraphOutputNode]] = (set: Set[GraphOutputNode]) => { - val valueMap = set.toList.map { - node: GraphOutputNode => node.fullyQualifiedName -> womTypeToJson(node.womType) + implicit private val e: Encoder[Set[GraphOutputNode]] = (set: Set[GraphOutputNode]) => { + val valueMap = set.toList.map { node: GraphOutputNode => + node.fullyQualifiedName -> womTypeToJson(node.womType) } valueMap.toMap.asJson } private def womTypeToJson(womType: WomType): Json = womType match { - case WomCompositeType(typeMap, _) => JsonObject.fromMap( - typeMap.map { case (name, wt) => name -> womTypeToJson(wt) } - ).asJson + case WomCompositeType(typeMap, _) => + JsonObject + .fromMap( + typeMap.map { case (name, wt) => name -> womTypeToJson(wt) } + ) + .asJson case _ => womType.stableName.asJson } diff --git a/womtool/src/main/scala/womtool/validate/Validate.scala b/womtool/src/main/scala/womtool/validate/Validate.scala index 79861cae527..b6dd288f396 100644 --- a/womtool/src/main/scala/womtool/validate/Validate.scala +++ b/womtool/src/main/scala/womtool/validate/Validate.scala @@ -11,7 +11,8 @@ object Validate { def workflowDependenciesMsg(workflowResolvedImports: Set[ResolvedImportRecord]) = { val msgPrefix = "\nList of Workflow dependencies is:\n" - val dependenciesList = if (workflowResolvedImports.nonEmpty) workflowResolvedImports.map(_.importPath).mkString("\n") else "None" + val dependenciesList = + if (workflowResolvedImports.nonEmpty) workflowResolvedImports.map(_.importPath).mkString("\n") else "None" msgPrefix + dependenciesList } diff --git a/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala b/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala index 6d7e499e2cd..07382a3ceef 100644 --- a/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala +++ b/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala @@ -30,19 +30,15 @@ object WomToWdlom { s"Value \'$text\' has no representation in the destination format (WDL)".invalidNelCheck def 
graphOutputNodeToWorkflowGraphElement: CheckedAtoB[GraphOutputNode, WorkflowGraphElement] = - CheckedAtoB.fromCheck { a: GraphOutputNode => a match { - case a: ExpressionBasedGraphOutputNode => - for { - typeElement <- womTypeToTypeElement(a.womType) - expression <- womExpressionToExpressionElement(a.womExpression) - } yield { - OutputDeclarationElement( - typeElement, - a.identifier.localName.value, - expression) - } - case _ => - invalidFromString(a.toString) + CheckedAtoB.fromCheck { a: GraphOutputNode => + a match { + case a: ExpressionBasedGraphOutputNode => + for { + typeElement <- womTypeToTypeElement(a.womType) + expression <- womExpressionToExpressionElement(a.womExpression) + } yield OutputDeclarationElement(typeElement, a.identifier.localName.value, expression) + case _ => + invalidFromString(a.toString) } } @@ -52,16 +48,20 @@ object WomToWdlom { val workflows: Iterable[WorkflowDefinition] = a.allCallables.values.filterByType[WorkflowDefinition] for { - workflows <- workflows.map(workflowDefinitionToWorkflowDefinitionElement(_)).toList.sequence[Checked, WorkflowDefinitionElement] - tasks <- tasks.map(callableTaskDefinitionToTaskDefinitionElement(_)).toList.sequence[Checked, TaskDefinitionElement] - } yield { - FileElement( - Seq.empty, // Imports do not exist in WOM and cannot sensibly be added at this point - Seq.empty, // Structs do not exist in draft-2 - workflows, - tasks - ) - } + workflows <- workflows + .map(workflowDefinitionToWorkflowDefinitionElement(_)) + .toList + .sequence[Checked, WorkflowDefinitionElement] + tasks <- tasks + .map(callableTaskDefinitionToTaskDefinitionElement(_)) + .toList + .sequence[Checked, TaskDefinitionElement] + } yield FileElement( + Seq.empty, // Imports do not exist in WOM and cannot sensibly be added at this point + Seq.empty, // Structs do not exist in draft-2 + workflows, + tasks + ) } def mapToMetaSectionElement: CheckedAtoB[Map[String, MetaValueElement], Option[MetaSectionElement]] = @@ -72,7 +72,8 @@ object WomToWdlom { None.validNelCheck } - def mapToParameterMetaSectionElement: CheckedAtoB[Map[String, MetaValueElement], Option[ParameterMetaSectionElement]] = + def mapToParameterMetaSectionElement + : CheckedAtoB[Map[String, MetaValueElement], Option[ParameterMetaSectionElement]] = CheckedAtoB.fromCheck { a: Map[String, MetaValueElement] => if (a.nonEmpty) Some(ParameterMetaSectionElement(a)).validNelCheck @@ -80,24 +81,21 @@ object WomToWdlom { None.validNelCheck } - def runtimeAttributesToRuntimeAttributesSectionElement: CheckedAtoB[RuntimeAttributes, Option[RuntimeAttributesSectionElement]] = + def runtimeAttributesToRuntimeAttributesSectionElement + : CheckedAtoB[RuntimeAttributes, Option[RuntimeAttributesSectionElement]] = CheckedAtoB.fromCheck { a: RuntimeAttributes => - def tupleToKvPair(tuple: (String, WomExpression)): Checked[ExpressionElement.KvPair] = { + def tupleToKvPair(tuple: (String, WomExpression)): Checked[ExpressionElement.KvPair] = for { expressionElement <- womExpressionToExpressionElement(tuple._2) - } yield { - ExpressionElement.KvPair(tuple._1, expressionElement) - } - } + } yield ExpressionElement.KvPair(tuple._1, expressionElement) for { kvPairs <- (a.attributes map tupleToKvPair).toList.sequence[Checked, ExpressionElement.KvPair] - } yield { + } yield if (kvPairs.nonEmpty) Some(RuntimeAttributesSectionElement(kvPairs.toVector)) else None - } } def outputDefinitionToOutputDeclarationElement: CheckedAtoB[OutputDefinition, OutputDeclarationElement] = @@ -105,9 +103,7 @@ object WomToWdlom { for { typeElement 
<- womTypeToTypeElement(a.womType) expression <- womExpressionToExpressionElement(a.expression) - } yield { - OutputDeclarationElement(typeElement, a.name, expression) - } + } yield OutputDeclarationElement(typeElement, a.name, expression) } def inputDefinitionToInputDeclarationElement: CheckedAtoB[InputDefinition, InputDeclarationElement] = @@ -131,76 +127,83 @@ object WomToWdlom { } def callableTaskDefinitionToTaskDefinitionElement: CheckedAtoB[CallableTaskDefinition, TaskDefinitionElement] = - CheckedAtoB.fromCheck { - a: CallableTaskDefinition => - val inputs: Checked[List[InputDeclarationElement]] = - a.inputs.map(inputDefinitionToInputDeclarationElement(_)).sequence[Checked, InputDeclarationElement] - val outputs: Checked[List[OutputDeclarationElement]] = - a.outputs.map(outputDefinitionToOutputDeclarationElement(_)).sequence[Checked, OutputDeclarationElement] + CheckedAtoB.fromCheck { a: CallableTaskDefinition => + val inputs: Checked[List[InputDeclarationElement]] = + a.inputs.map(inputDefinitionToInputDeclarationElement(_)).sequence[Checked, InputDeclarationElement] + val outputs: Checked[List[OutputDeclarationElement]] = + a.outputs.map(outputDefinitionToOutputDeclarationElement(_)).sequence[Checked, OutputDeclarationElement] - for { - runtime <- runtimeAttributesToRuntimeAttributesSectionElement(a.runtimeAttributes) - meta <- mapToMetaSectionElement(a.meta) - parameterMeta <- mapToParameterMetaSectionElement(a.parameterMeta) - inputs <- inputs - outputs <- outputs - commands <- a.commandTemplateBuilder(Map()).toEither - } yield { - val commandLine = CommandSectionLine(commands map { - case s: StringCommandPart => - StringCommandPartElement(s.literal) - case p: ParameterCommandPart => - val attrs = PlaceholderAttributeSet( - defaultAttribute = p.attributes.get("default"), - trueAttribute = p.attributes.get("true"), - falseAttribute = p.attributes.get("false"), - sepAttribute = p.attributes.get("sep") - ) - - PlaceholderCommandPartElement(ExpressionLiteralElement(p.expression.toWomString), attrs) - }) - - TaskDefinitionElement( - a.name, - if (inputs.nonEmpty) Some(InputsSectionElement(inputs)) else None, - Seq.empty, // No such thing in draft-2 - if (outputs.nonEmpty) Some(OutputsSectionElement(outputs)) else None, - CommandSectionElement(Seq(commandLine)), - runtime, - meta, - parameterMeta, - a.sourceLocation - ) - } + for { + runtime <- runtimeAttributesToRuntimeAttributesSectionElement(a.runtimeAttributes) + meta <- mapToMetaSectionElement(a.meta) + parameterMeta <- mapToParameterMetaSectionElement(a.parameterMeta) + inputs <- inputs + outputs <- outputs + commands <- a.commandTemplateBuilder(Map()).toEither + } yield { + val commandLine = CommandSectionLine(commands map { + case s: StringCommandPart => + StringCommandPartElement(s.literal) + case p: ParameterCommandPart => + val attrs = PlaceholderAttributeSet( + defaultAttribute = p.attributes.get("default"), + trueAttribute = p.attributes.get("true"), + falseAttribute = p.attributes.get("false"), + sepAttribute = p.attributes.get("sep") + ) + + PlaceholderCommandPartElement(ExpressionLiteralElement(p.expression.toWomString), attrs) + }) + + TaskDefinitionElement( + a.name, + if (inputs.nonEmpty) Some(InputsSectionElement(inputs)) else None, + Seq.empty, // No such thing in draft-2 + if (outputs.nonEmpty) Some(OutputsSectionElement(outputs)) else None, + CommandSectionElement(Seq(commandLine)), + runtime, + meta, + parameterMeta, + a.sourceLocation + ) } + } def workflowDefinitionToWorkflowDefinitionElement: 
CheckedAtoB[WorkflowDefinition, WorkflowDefinitionElement] = CheckedAtoB.fromCheck { a: WorkflowDefinition => - // This is a bit odd, so let's explain. "Real" inputs/outputs that are specified by the WDL's author - // cannot have periods in them - period. So any input/output that has a period in it - // is an artifact of WOMification and should be dropped - val inputs = - a.inputs.filter(!_.localName.value.contains(".")).map(inputDefinitionToInputDeclarationElement(_)).sequence[Checked, InputDeclarationElement] - val outputs = - a.outputs.filter(!_.localName.value.contains(".")).map(outputDefinitionToOutputDeclarationElement(_)).sequence[Checked, OutputDeclarationElement] + // This is a bit odd, so let's explain. "Real" inputs/outputs that are specified by the WDL's author + // cannot have periods in them - period. So any input/output that has a period in it + // is an artifact of WOMification and should be dropped + val inputs = + a.inputs + .filter(!_.localName.value.contains(".")) + .map(inputDefinitionToInputDeclarationElement(_)) + .sequence[Checked, InputDeclarationElement] + val outputs = + a.outputs + .filter(!_.localName.value.contains(".")) + .map(outputDefinitionToOutputDeclarationElement(_)) + .sequence[Checked, OutputDeclarationElement] - for { - meta <- mapToMetaSectionElement(a.meta) - parameterMeta <- mapToParameterMetaSectionElement(a.parameterMeta) - inputs <- inputs - outputs <- outputs - nodes <- selectWdlomRepresentableNodes(a.graph.nodes).map(graphNodeToWorkflowGraphElement(_)).toList.sequence[Checked, WorkflowGraphElement] - } yield { - WorkflowDefinitionElement( - a.name, - if (inputs.nonEmpty) Some(InputsSectionElement(inputs)) else None, - nodes.toSet, - if (outputs.nonEmpty) Some(OutputsSectionElement(outputs)) else None, - meta, - parameterMeta, - a.sourceLocation) - } - } + for { + meta <- mapToMetaSectionElement(a.meta) + parameterMeta <- mapToParameterMetaSectionElement(a.parameterMeta) + inputs <- inputs + outputs <- outputs + nodes <- selectWdlomRepresentableNodes(a.graph.nodes) + .map(graphNodeToWorkflowGraphElement(_)) + .toList + .sequence[Checked, WorkflowGraphElement] + } yield WorkflowDefinitionElement( + a.name, + if (inputs.nonEmpty) Some(InputsSectionElement(inputs)) else None, + nodes.toSet, + if (outputs.nonEmpty) Some(OutputsSectionElement(outputs)) else None, + meta, + parameterMeta, + a.sourceLocation + ) + } def expressionNodeLikeToWorkflowGraphElement: CheckedAtoB[ExpressionNodeLike, WorkflowGraphElement] = CheckedAtoB.fromCheck { @@ -208,12 +211,10 @@ object WomToWdlom { for { typeElement <- womTypeToTypeElement(a.womType) expression <- expressionNodeToExpressionElement(a) - } yield { - IntermediateValueDeclarationElement( - typeElement = typeElement, - name = a.identifier.localName.value, - expression = expression) - } + } yield IntermediateValueDeclarationElement(typeElement = typeElement, + name = a.identifier.localName.value, + expression = expression + ) case a: ExpressionCallNode => invalidFromString(a.toString) } @@ -236,13 +237,13 @@ object WomToWdlom { case a: ConditionalNode => for { condition <- expressionNodeToExpressionElement(a.conditionExpression) - nodes <- selectWdlomRepresentableNodes(a.innerGraph.nodes).toList.map(graphNodeToWorkflowGraphElement(_)).sequence[Checked, WorkflowGraphElement] - } yield { - IfElement( - conditionExpression = condition, - graphElements = nodes - ) - } + nodes <- selectWdlomRepresentableNodes(a.innerGraph.nodes).toList + .map(graphNodeToWorkflowGraphElement(_)) + .sequence[Checked, 
WorkflowGraphElement] + } yield IfElement( + conditionExpression = condition, + graphElements = nodes + ) case a: ExpressionNodeLike => expressionNodeLikeToWorkflowGraphElement(a) case a: GraphNodeWithSingleOutputPort => @@ -256,37 +257,30 @@ object WomToWdlom { else for { expression <- expressionNodeToExpressionElement(a.scatterCollectionExpressionNodes.head) - graph <- selectWdlomRepresentableNodes(a.innerGraph.nodes).toList.map(graphNodeToWorkflowGraphElement(_)).sequence[Checked, WorkflowGraphElement] - } yield { - ScatterElement( - scatterName = a.identifier.localName.value, - scatterExpression = expression, - scatterVariableName = a.inputPorts.toList.head.name, - graphElements = graph, - sourceLocation = None - ) - } - } + graph <- selectWdlomRepresentableNodes(a.innerGraph.nodes).toList + .map(graphNodeToWorkflowGraphElement(_)) + .sequence[Checked, WorkflowGraphElement] + } yield ScatterElement( + scatterName = a.identifier.localName.value, + scatterExpression = expression, + scatterVariableName = a.inputPorts.toList.head.name, + graphElements = graph, + sourceLocation = None + ) + } - def graphNodeWithSingleOutputPortToWorkflowGraphElement: CheckedAtoB[GraphNodeWithSingleOutputPort, WorkflowGraphElement] = + def graphNodeWithSingleOutputPortToWorkflowGraphElement + : CheckedAtoB[GraphNodeWithSingleOutputPort, WorkflowGraphElement] = CheckedAtoB.fromCheck { case a: GraphInputNode => womTypeToTypeElement(a.womType) map { typeElement => - InputDeclarationElement( - typeElement, - a.identifier.localName.value, - None) + InputDeclarationElement(typeElement, a.identifier.localName.value, None) } case a: ExpressionNode => for { typeElement <- womTypeToTypeElement(a.womType) expression <- womExpressionToExpressionElement(a.womExpression) - } yield { - IntermediateValueDeclarationElement( - typeElement, - a.identifier.localName.value, - expression) - } + } yield IntermediateValueDeclarationElement(typeElement, a.identifier.localName.value, expression) } def womTypeToTypeElement: CheckedAtoB[WomType, TypeElement] = @@ -304,9 +298,7 @@ object WomToWdlom { for { keyType <- womTypeToTypeElement(a.keyType) valueType <- womTypeToTypeElement(a.valueType) - } yield { - MapTypeElement(keyType, valueType) - } + } yield MapTypeElement(keyType, valueType) case _: WomNothingType.type => invalidFromString("WDL does not have the Nothing type - is this WOM from CWL?") case _: WomObjectType.type => ObjectTypeElement.validNelCheck case a: WomOptionalType => womOptionalTypeToOptionalTypeElement(a) @@ -314,9 +306,7 @@ object WomToWdlom { for { leftType <- womTypeToTypeElement(a.leftType) rightType <- womTypeToTypeElement(a.rightType) - } yield { - PairTypeElement(leftType, rightType) - } + } yield PairTypeElement(leftType, rightType) case a: WomPrimitiveType => womPrimitiveTypeToPrimitiveTypeElement(a) } @@ -333,8 +323,8 @@ object WomToWdlom { } def expressionNodeToExpressionElement: CheckedAtoB[ExpressionNode, ExpressionElement] = - CheckedAtoB.fromCheck { - a: ExpressionNode => womExpressionToExpressionElement(a.womExpression) + CheckedAtoB.fromCheck { a: ExpressionNode => + womExpressionToExpressionElement(a.womExpression) } def womExpressionToExpressionElement: CheckedAtoB[WomExpression, ExpressionElement] = @@ -346,21 +336,22 @@ object WomToWdlom { def inputDefinitionPointerToExpressionElement: CheckedAtoB[InputDefinitionPointer, Option[ExpressionElement]] = CheckedAtoB.fromCheck { // If the input definition is a node containing an expression, it's been declared explicitly - case Inl(a: 
GraphNodeOutputPort) => a.graphNode match { - case _: OptionalGraphInputNode => - None.validNelCheck - case _: OptionalGraphInputNodeWithDefault => - None.validNelCheck - case a: PlainAnonymousExpressionNode => - womExpressionToExpressionElement(a.womExpression) map { expressionElement => - Some(expressionElement) - } - case a: TaskCallInputExpressionNode => - womExpressionToExpressionElement(a.womExpression) map { expressionElement => - Some(expressionElement) - } - case _: RequiredGraphInputNode => None.validNelCheck - } + case Inl(a: GraphNodeOutputPort) => + a.graphNode match { + case _: OptionalGraphInputNode => + None.validNelCheck + case _: OptionalGraphInputNodeWithDefault => + None.validNelCheck + case a: PlainAnonymousExpressionNode => + womExpressionToExpressionElement(a.womExpression) map { expressionElement => + Some(expressionElement) + } + case a: TaskCallInputExpressionNode => + womExpressionToExpressionElement(a.womExpression) map { expressionElement => + Some(expressionElement) + } + case _: RequiredGraphInputNode => None.validNelCheck + } // Input definitions that directly contain expressions are the result of accepting a default input defined by the callable case Inr(Inl(_: WomExpression)) => None.validNelCheck case Inr(a) => @@ -371,22 +362,22 @@ object WomToWdlom { def callNodeToCallElement: CheckedAtoB[CallNode, CallElement] = CheckedAtoB.fromCheck { call: CallNode => - def tupleToKvPair(tuple: (InputDefinition, InputDefinitionPointer)): Option[ExpressionElement.KvPair] = { + def tupleToKvPair(tuple: (InputDefinition, InputDefinitionPointer)): Option[ExpressionElement.KvPair] = inputDefinitionPointerToExpressionElement(tuple._2) match { case Right(Some(value)) => Some(ExpressionElement.KvPair(tuple._1.name, value)) case _ => None } - } val inputs = (call.inputDefinitionMappings flatMap tupleToKvPair).toVector val callableName = call.callable.name val callAlias = call.identifier.localName.value // If no alias, this is just the name; we evaluate for that below - val maybeAlias = if (callableName != callAlias) - Some(callAlias) - else - None + val maybeAlias = + if (callableName != callAlias) + Some(callAlias) + else + None val afters = call.nonInputBasedPrerequisites.map(_.localName).toVector diff --git a/womtool/src/test/scala/womtool/SampleWdl.scala b/womtool/src/test/scala/womtool/SampleWdl.scala index 54df7953651..a3b00d9bee0 100644 --- a/womtool/src/test/scala/womtool/SampleWdl.scala +++ b/womtool/src/test/scala/womtool/SampleWdl.scala @@ -20,8 +20,8 @@ trait SampleWdl { def write(x: Any) = x match { case n: Int => JsNumber(n) case s: String => JsString(s) - case b: Boolean => if(b) JsTrue else JsFalse - case s: Seq[Any] => JsArray(s map {_.toJson} toVector) + case b: Boolean => if (b) JsTrue else JsFalse + case s: Seq[Any] => JsArray(s map { _.toJson } toVector) case a: WomArray => write(a.value) case s: WomString => JsString(s.value) case i: WomInteger => JsNumber(i.value) diff --git a/womtool/src/test/scala/womtool/WomtoolJsonCommandSpec.scala b/womtool/src/test/scala/womtool/WomtoolJsonCommandSpec.scala index be11acb7ec5..2fe95118094 100644 --- a/womtool/src/test/scala/womtool/WomtoolJsonCommandSpec.scala +++ b/womtool/src/test/scala/womtool/WomtoolJsonCommandSpec.scala @@ -41,10 +41,10 @@ abstract class WomtoolJsonCommandSpec extends AnyFlatSpec with CromwellTimeoutSp val caseName = validCase.name // The WDL file is expected to be valid: - val wdlFile = 
mustExist(versionDirectory.path.resolve(s"../../validate/$versionName/valid/$caseName/$caseName.wdl").toFile) + val wdlFile = + mustExist(versionDirectory.path.resolve(s"../../validate/$versionName/valid/$caseName/$caseName.wdl").toFile) testDefinitions foreach { definition => - it should s"validate '${definition.testName}' for $versionName workflow: '$caseName''" in { val expectation = expectedJson(versionDirectory, caseName, definition.expectationFilename) val fullCommandFormat = definition.commandFormat :+ wdlFile.getAbsolutePath @@ -57,7 +57,12 @@ abstract class WomtoolJsonCommandSpec extends AnyFlatSpec with CromwellTimeoutSp val unexpected = actualSet.diff(expectedSet) val ungenerated = expectedSet.diff(actualSet) - assert(actualSet == expectedSet, s"Received lines: $actualContent${System.lineSeparator}with unexpected values: ${unexpected.mkString("[", ",", "]")}${System.lineSeparator}and missing expected values: ${ungenerated.mkString("[", ",", "]")}") + assert( + actualSet == expectedSet, + s"Received lines: $actualContent${System.lineSeparator}with unexpected values: ${unexpected + .mkString("[", ",", "]")}${System.lineSeparator}and missing expected values: ${ungenerated + .mkString("[", ",", "]")}" + ) case other => fail(s"Expected successful termination but got $other") } @@ -67,9 +72,9 @@ abstract class WomtoolJsonCommandSpec extends AnyFlatSpec with CromwellTimeoutSp } } - private def expectedJson(versionDirectory: File, caseName: String, jsonName: String): String = { + private def expectedJson(versionDirectory: File, caseName: String, jsonName: String): String = File(mustExist(versionDirectory.path.resolve(caseName).resolve(jsonName).toFile).getAbsolutePath).contentAsString - } - private def mustExist(file: java.io.File): java.io.File = if (file.exists) file else fail(s"No such file: ${file.getAbsolutePath}") + private def mustExist(file: java.io.File): java.io.File = + if (file.exists) file else fail(s"No such file: ${file.getAbsolutePath}") } diff --git a/womtool/src/test/scala/womtool/WomtoolMainSpec.scala b/womtool/src/test/scala/womtool/WomtoolMainSpec.scala index 048c9408cb8..1a71db15cf3 100644 --- a/womtool/src/test/scala/womtool/WomtoolMainSpec.scala +++ b/womtool/src/test/scala/womtool/WomtoolMainSpec.scala @@ -8,7 +8,6 @@ import org.scalatest.matchers.should.Matchers import womtool.SampleWdl.{EmptyTask, EmptyWorkflow, ThreeStep} import womtool.WomtoolMainSpec._ - class WomtoolMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with BeforeAndAfterAll { import WomtoolMain._ @@ -18,8 +17,11 @@ class WomtoolMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers val threeStep = ThreeStep.wdlSource() it should "print usage" in { - WomtoolMain.runWomtool(Seq.empty[String]) match{ - case BadUsageTermination(msg) => msg should include("Usage: java -jar womtool.jar [validate|inputs|outputs|parse|highlight|graph|upgrade|womgraph] [options] workflow-source") + WomtoolMain.runWomtool(Seq.empty[String]) match { + case BadUsageTermination(msg) => + msg should include( + "Usage: java -jar womtool.jar [validate|inputs|outputs|parse|highlight|graph|upgrade|womgraph] [options] workflow-source" + ) case other => fail(s"Expected BadUsageTermination but got $other") } } @@ -59,12 +61,15 @@ class WomtoolMainSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "not return inputs when there is no workflow" in { testWdl(EmptyTask) { wdlAndInputs => val res = WomtoolMain.runWomtool(Seq("inputs", wdlAndInputs.wdl)) - res should 
be(UnsuccessfulTermination("Cannot convert WOM bundle to executable. No primary callable was available.")) + res should be( + UnsuccessfulTermination("Cannot convert WOM bundle to executable. No primary callable was available.") + ) } } } object WomtoolMainSpec { + /** * Tests running a sample wdl, providing the inputs, and cleaning up the temp files only if no exceptions occur. * @@ -168,7 +173,6 @@ object WomtoolMainSpec { def deleteTempFiles() = tempFiles.foreach(_.delete(swallowIOExceptions = true)) } - def swapExt(filePath: File, oldExt: String, newExt: String): File = { + def swapExt(filePath: File, oldExt: String, newExt: String): File = File(filePath.toString.stripSuffix(oldExt) + newExt) - } } diff --git a/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala b/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala index d0a68a57640..06c482544ce 100644 --- a/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala +++ b/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala @@ -12,7 +12,6 @@ import womtool.WomtoolMain.{SuccessfulTermination, UnsuccessfulTermination} import scala.jdk.CollectionConverters._ import scala.collection.immutable - class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { private val presentWorkingDirectoryName = DefaultPathBuilder.get(".").toAbsolutePath.name @@ -39,7 +38,8 @@ class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc List(validTestCases, invalidTestCases) foreach { path => it should s"be set up for testing $versionName in '${versionDirectory.relativize(path).toString}'" in { if (!path.toFile.exists) fail(s"Path doesn't exist: ${path.toAbsolutePath.toString}") - if (Option(path.toFile.list).toList.flatten.isEmpty) fail(s"No test cases found in: ${path.toAbsolutePath.toString}") + if (Option(path.toFile.list).toList.flatten.isEmpty) + fail(s"No test cases found in: ${path.toAbsolutePath.toString}") versionDirectory.list.nonEmpty shouldBe true } } @@ -59,7 +59,9 @@ class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc } it should s"successfully validate $versionName workflow: '$validCase'$withInputsAddition" in { - WomtoolMain.runWomtool(Seq("validate", wdl.getAbsolutePath) ++ inputsArgs) should be(SuccessfulTermination("Success!")) + WomtoolMain.runWomtool(Seq("validate", wdl.getAbsolutePath) ++ inputsArgs) should be( + SuccessfulTermination("Success!") + ) } if (!knownUngraphableTests.contains(validCase)) { @@ -70,21 +72,26 @@ class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc WomtoolMain.runWomtool(Seq("graph", wdl.getAbsolutePath)) match { case SuccessfulTermination(womtoolGraph) => - // Check that every call in the WDL is represented in the 'womtool graph' output, and vice versa: - val callsInWdl = Files.readAllLines(wdl.toPath).asScala.collect { - case WdlCallRegex(taskName, null, null, null, null) => taskName - case WdlCallRegex(_, _, taskName, null, null) => - taskName - case WdlCallRegex(_, _, _, _, callAlias) => callAlias - }.toSet + val callsInWdl = Files + .readAllLines(wdl.toPath) + .asScala + .collect { + case WdlCallRegex(taskName, null, null, null, null) => taskName + case WdlCallRegex(_, _, taskName, null, null) => + taskName + case WdlCallRegex(_, _, _, _, callAlias) => callAlias + } + .toSet - val callsInWomtoolGraph = womtoolGraph.linesIterator.collect { - case WomtoolGraphCallRegex(call) => call + val callsInWomtoolGraph = womtoolGraph.linesIterator.collect { case WomtoolGraphCallRegex(call) => + call 
}.toSet if (!callsInWomtoolGraph.exists(_.startsWith("ScatterAt"))) { - withClue(s"In WDL not in Graph: ${callsInWdl -- callsInWomtoolGraph}; In Graph not in WDL: ${callsInWomtoolGraph -- callsInWdl}") { + withClue( + s"In WDL not in Graph: ${callsInWdl -- callsInWomtoolGraph}; In Graph not in WDL: ${callsInWomtoolGraph -- callsInWdl}" + ) { callsInWdl should be(callsInWomtoolGraph) } } @@ -100,7 +107,9 @@ class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc it should s"fail to validate $versionName workflow: '$invalidCase'$withInputsAddition" in { val wdl = mustExist(invalidTestCases.resolve(invalidCase).resolve(invalidCase + ".wdl").toFile) - val errorFile = ifExists(invalidTestCases.resolve(invalidCase).resolve("error.txt").toFile).map(f => File(f.getAbsolutePath).contentAsString) + val errorFile = ifExists(invalidTestCases.resolve(invalidCase).resolve("error.txt").toFile).map(f => + File(f.getAbsolutePath).contentAsString + ) val inputsArgs = inputsFile match { case Some(path) => Seq("-i", path.getAbsolutePath) case None => Seq.empty[String] @@ -109,18 +118,20 @@ class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val expectedErrorMessage = errorFile map { ef => ef.trim.replace(s"$${PWD_NAME}", presentWorkingDirectoryName) } WomtoolMain.runWomtool(Seq("validate", wdl.getAbsolutePath) ++ inputsArgs) match { - case UnsuccessfulTermination(msg) => expectedErrorMessage match { - case Some(expectedError) => - msg should include(expectedError) - case None => succeed - } - case other => fail(s"Expected UnsuccessfulTermination but got $other. Expected error message: ${System.lineSeparator()}${expectedErrorMessage.getOrElse("<>")}") + case UnsuccessfulTermination(msg) => + expectedErrorMessage match { + case Some(expectedError) => + msg should include(expectedError) + case None => succeed + } + case other => + fail(s"Expected UnsuccessfulTermination but got $other. 
Expected error message: ${System + .lineSeparator()}${expectedErrorMessage.getOrElse("<>")}") } } } } - behavior of "womtool validate with --list-dependencies flag" val validationWithImportsTests: File = File("womtool/src/test/resources/validate-with-imports") @@ -141,12 +152,13 @@ class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc Option(versionDirectory.list).toList.flatten.filterNot(s => s.pathAsString.contains(".DS")) foreach { validCase => val caseName = validCase.name - val wdlFile = mustExist(versionDirectory.path.resolve(s"../../validate/$versionName/valid/$caseName/$caseName.wdl").toFile) - wdlFile.getAbsolutePath.split("cromwell/womtool")(0) + val wdlFile = + mustExist(versionDirectory.path.resolve(s"../../validate/$versionName/valid/$caseName/$caseName.wdl").toFile) + wdlFile.getAbsolutePath.split("cromwell/womtool")(0) it should s"successfully validate and print the workflow dependencies for $versionName workflow: '$caseName'" in { val rawOutput = expectedOutput(versionDirectory, caseName, "expected_imports.txt") - //noinspection RegExpRedundantEscape + // noinspection RegExpRedundantEscape val importsExpectation = rawOutput.replaceAll("\\{REPLACE_WITH_ROOT_PATH\\}", workingDirectory) val res = WomtoolMain.runWomtool(Seq("validate", "-l", wdlFile.getAbsolutePath)) @@ -157,14 +169,17 @@ class WomtoolValidateSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc } } - - private def mustExist(file: java.io.File): java.io.File = if (file.exists) file else fail(s"No such file: ${file.getAbsolutePath}") + private def mustExist(file: java.io.File): java.io.File = + if (file.exists) file else fail(s"No such file: ${file.getAbsolutePath}") private def ifExists(file: java.io.File): Option[java.io.File] = if (file.exists) Option(file) else None // The filterNot(_.contains(".DS")) stuff prevents Mac 'Desktop Services' hidden directories from accidentally being picked up: - private def listFilesAndFilterDSFile(path: Path): immutable.Seq[String] = Option(path.toFile.list).toList.flatten.filterNot(_.contains(".DS")) + private def listFilesAndFilterDSFile(path: Path): immutable.Seq[String] = + Option(path.toFile.list).toList.flatten.filterNot(_.contains(".DS")) - //noinspection SameParameterValue + // noinspection SameParameterValue private def expectedOutput(versionDirectory: File, caseName: String, outputTextFileName: String): String = - File(mustExist(versionDirectory.path.resolve(caseName).resolve(outputTextFileName).toFile).getAbsolutePath).contentAsString.trim + File( + mustExist(versionDirectory.path.resolve(caseName).resolve(outputTextFileName).toFile).getAbsolutePath + ).contentAsString.trim } diff --git a/womtool/src/test/scala/womtool/graph/ExpressionBasedGraphOutputNodeSpec.scala b/womtool/src/test/scala/womtool/graph/ExpressionBasedGraphOutputNodeSpec.scala index a34e7c73fa6..70b40cfaff3 100644 --- a/womtool/src/test/scala/womtool/graph/ExpressionBasedGraphOutputNodeSpec.scala +++ b/womtool/src/test/scala/womtool/graph/ExpressionBasedGraphOutputNodeSpec.scala @@ -20,12 +20,16 @@ class ExpressionBasedGraphOutputNodeSpec extends WomDotGraphTest { // Declare the expression output using both i and j: val xOutputValidation = ExpressionBasedGraphOutputNode.fromInputMapping( - WomIdentifier("x_out"), ijExpression, ijExpression.fixedWomType, Map( - "i" -> iInputNode.singleOutputPort, - "j" -> jInputNode.singleOutputPort)) + WomIdentifier("x_out"), + ijExpression, + ijExpression.fixedWomType, + Map("i" -> iInputNode.singleOutputPort, "j" -> 
jInputNode.singleOutputPort) + ) import common.validation.ErrorOr.ShortCircuitingFlatMap - val graph = xOutputValidation flatMap { xOutput => Graph.validateAndConstruct(Set(iInputNode, jInputNode, jOutput, xOutput)) } + val graph = xOutputValidation flatMap { xOutput => + Graph.validateAndConstruct(Set(iInputNode, jInputNode, jOutput, xOutput)) + } graph match { case Valid(g) => g @@ -69,6 +73,8 @@ class ExpressionBasedGraphOutputNodeSpec extends WomDotGraphTest { |} |""".stripMargin - override val cases = List(WomDotGraphTestCase("ExpressionBasedGraphOutputNodes", expressionOutputGraph, expressionOutputDot)) + override val cases = List( + WomDotGraphTestCase("ExpressionBasedGraphOutputNodes", expressionOutputGraph, expressionOutputDot) + ) tests() } diff --git a/womtool/src/test/scala/womtool/graph/ExpressionNodeSpec.scala b/womtool/src/test/scala/womtool/graph/ExpressionNodeSpec.scala index a54a8828fbe..f959385b171 100644 --- a/womtool/src/test/scala/womtool/graph/ExpressionNodeSpec.scala +++ b/womtool/src/test/scala/womtool/graph/ExpressionNodeSpec.scala @@ -20,7 +20,11 @@ class ExpressionNodeSpec extends WomDotGraphTest { import common.validation.ErrorOr.ShortCircuitingFlatMap val graph = for { xDeclarationNode <- ExposedExpressionNode.fromInputMapping( - WomIdentifier("x"), ijExpression, WomIntegerType, Map("i" -> iInputNode.singleOutputPort, "j" -> jInputNode.singleOutputPort)) + WomIdentifier("x"), + ijExpression, + WomIntegerType, + Map("i" -> iInputNode.singleOutputPort, "j" -> jInputNode.singleOutputPort) + ) xOutputNode = PortBasedGraphOutputNode(WomIdentifier("x_out"), WomIntegerType, xDeclarationNode.singleOutputPort) g <- Graph.validateAndConstruct(Set(iInputNode, jInputNode, xDeclarationNode, xOutputNode)) } yield g diff --git a/womtool/src/test/scala/womtool/graph/OutputNameCollisionSpec.scala b/womtool/src/test/scala/womtool/graph/OutputNameCollisionSpec.scala index 085740def7d..3f38c265584 100644 --- a/womtool/src/test/scala/womtool/graph/OutputNameCollisionSpec.scala +++ b/womtool/src/test/scala/womtool/graph/OutputNameCollisionSpec.scala @@ -33,7 +33,8 @@ class OutputNameCollisionSpec extends WomDotGraphTest { val namespace = WdlNamespaceWithWorkflow.load(wdl, Seq.empty).get namespace.toWomBundle match { - case Right(bundle) => (bundle.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).head.graph + case Right(bundle) => + (bundle.allCallables.values.toSet.filterByType[WorkflowDefinition]: Set[WorkflowDefinition]).head.graph case Left(errors) => throw new Exception(errors.toList.mkString(", ")) } } @@ -61,7 +62,9 @@ class OutputNameCollisionSpec extends WomDotGraphTest { |} |""".stripMargin - override val cases = List(WomDotGraphTestCase("non_colliding_output_names", outputCollisionWdlGraph, outputCollisionWdlDot)) + override val cases = List( + WomDotGraphTestCase("non_colliding_output_names", outputCollisionWdlGraph, outputCollisionWdlDot) + ) tests() } diff --git a/womtool/src/test/scala/womtool/graph/WomDotGraphTest.scala b/womtool/src/test/scala/womtool/graph/WomDotGraphTest.scala index c2f6523d10d..0fe7ddc48dc 100644 --- a/womtool/src/test/scala/womtool/graph/WomDotGraphTest.scala +++ b/womtool/src/test/scala/womtool/graph/WomDotGraphTest.scala @@ -7,14 +7,13 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import wom.graph.Graph - trait WomDotGraphTest extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { def cases: List[WomDotGraphTestCase] behavior of "womgraph" - def tests() = { 
+ def tests() = cases foreach { testCase => it should s"draw the right DOT graph for ${testCase.name}" in { val womGraph = new WomGraph(testCase.name, testCase.graph) @@ -25,15 +24,13 @@ trait WomDotGraphTest extends AnyFlatSpec with CromwellTimeoutSpec with Matchers standardResult should be(standardExpectation) } } - } - + def standardizifyResult(dot: String) = { val regex = "(NODE|PORT)-?[0-9]*".r val matches = regex.findAllIn(dot).toList.distinct.map(_.toString) val currentMatchId = new AtomicInteger(0) - def standardize(m: String) = { + def standardize(m: String) = m.substring(0, 4) + currentMatchId.getAndIncrement().toString - } def foldFunction(acc: String, m: String): String = acc.replaceAll(m, standardize(m)) matches.foldLeft(dot)(foldFunction).replaceAll("\n[\\s]*", "\n") @@ -41,5 +38,4 @@ trait WomDotGraphTest extends AnyFlatSpec with CromwellTimeoutSpec with Matchers } } - final case class WomDotGraphTestCase(name: String, graph: Graph, dotExpectation: String) From beb86bd997632a354be00abe757ad8cecb483f53 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Mon, 4 Dec 2023 20:59:38 -0500 Subject: [PATCH 20/87] WX-1351 Speed up `Centaur Horicromtal PapiV2 Beta` (#7329) --- core/src/main/resources/reference.conf | 3 +- src/ci/bin/test.inc.sh | 39 +++++++++++++++++++ .../bin/testCentaurHoricromtalPapiV2beta.sh | 4 +- src/ci/bin/testCentaurPapiV2beta.sh | 2 + .../papi_v2beta_horicromtal_application.conf | 6 +-- 5 files changed, 49 insertions(+), 5 deletions(-) diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 6ec05cf6025..3fa11b458ff 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -118,7 +118,8 @@ system { max-concurrent-workflows = 5000 # Cromwell will launch up to N submitted workflows at a time, regardless of how many open workflow slots exist - max-workflow-launch-count = 50 + # Deviating from 1 is not recommended for multi-runner setups due to possible deadlocks. [BW-962] + max-workflow-launch-count = 1 # Workflows will be grouped by the value of the specified field in their workflow options. 
  #
diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh
index eef3a9fab6b..7da8290b9a1 100755
--- a/src/ci/bin/test.inc.sh
+++ b/src/ci/bin/test.inc.sh
@@ -1607,6 +1607,45 @@ cromwell::build::generate_code_coverage() {
     fi
 }
 
+cromwell::build::print_workflow_statistics() {
+    echo "Total workflows"
+    mysql --host=127.0.0.1 --user=cromwell --password=test cromwell_test -e \
+        "SELECT COUNT(*) as total_workflows_run FROM WORKFLOW_METADATA_SUMMARY_ENTRY;"
+
+    echo "Late starters"
+    mysql --host=127.0.0.1 --user=cromwell --password=test cromwell_test -e \
+        "SELECT WORKFLOW_NAME as name,
+          TIMESTAMPDIFF(MINUTE, START_TIMESTAMP, END_TIMESTAMP) as runtime_minutes,
+          START_TIMESTAMP as start,
+          END_TIMESTAMP as end
+          FROM WORKFLOW_METADATA_SUMMARY_ENTRY
+          WHERE PARENT_WORKFLOW_EXECUTION_UUID IS NULL # exclude subworkflows
+          ORDER BY START_TIMESTAMP DESC
+          LIMIT 20;"
+
+    echo "Late finishers"
+    mysql --host=127.0.0.1 --user=cromwell --password=test cromwell_test -e \
+        "SELECT WORKFLOW_NAME as name,
+          TIMESTAMPDIFF(MINUTE, START_TIMESTAMP, END_TIMESTAMP) as runtime_minutes,
+          START_TIMESTAMP as start,
+          END_TIMESTAMP as end
+          FROM WORKFLOW_METADATA_SUMMARY_ENTRY
+          WHERE PARENT_WORKFLOW_EXECUTION_UUID IS NULL
+          ORDER BY END_TIMESTAMP DESC
+          LIMIT 20;"
+
+    echo "Long duration"
+    mysql --host=127.0.0.1 --user=cromwell --password=test cromwell_test -e \
+        "SELECT WORKFLOW_NAME as name,
+          TIMESTAMPDIFF(MINUTE, START_TIMESTAMP, END_TIMESTAMP) as runtime_minutes,
+          START_TIMESTAMP as start,
+          END_TIMESTAMP as end
+          FROM WORKFLOW_METADATA_SUMMARY_ENTRY
+          WHERE PARENT_WORKFLOW_EXECUTION_UUID IS NULL
+          ORDER BY runtime_minutes DESC
+          LIMIT 20;"
+}
+
 cromwell::build::exec_retry_function() {
     local retried_function
     local retry_count
diff --git a/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh b/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh
index 9dba2f6f8ce..36b215a2f52 100755
--- a/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh
+++ b/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh
@@ -19,7 +19,7 @@ cromwell::build::assemble_jars
 cromwell::build::build_cromwell_docker
 
 cromwell::build::run_centaur \
-    -p 100 \
+    -p 500 \
     -e localdockertest \
     -e relative_output_paths \
     -e relative_output_paths_colliding \
@@ -27,3 +27,5 @@ cromwell::build::run_centaur \
     -e papi_v2alpha1_gcsa \
 
 cromwell::build::generate_code_coverage
+
+cromwell::build::print_workflow_statistics
diff --git a/src/ci/bin/testCentaurPapiV2beta.sh b/src/ci/bin/testCentaurPapiV2beta.sh
index b3a1ef2a61e..43d5a7d62aa 100755
--- a/src/ci/bin/testCentaurPapiV2beta.sh
+++ b/src/ci/bin/testCentaurPapiV2beta.sh
@@ -25,3 +25,5 @@ cromwell::build::run_centaur \
     -e papi_v2alpha1_gcsa \
 
 cromwell::build::generate_code_coverage
+
+cromwell::build::print_workflow_statistics
diff --git a/src/ci/resources/papi_v2beta_horicromtal_application.conf b/src/ci/resources/papi_v2beta_horicromtal_application.conf
index c6bc9176c16..f0f2c45f381 100644
--- a/src/ci/resources/papi_v2beta_horicromtal_application.conf
+++ b/src/ci/resources/papi_v2beta_horicromtal_application.conf
@@ -1,8 +1,8 @@
 include "papi_v2beta_application.conf"
 
-system.max-workflow-launch-count=1
-system.new-workflow-poll-rate=10
-system.max-concurrent-workflows=30
+system.max-workflow-launch-count=10
+system.new-workflow-poll-rate=1
+system.max-concurrent-workflows=500
 system.cromwell_id_random_suffix=false
 
 # Turn off token logging to reduce log volume

From bb1e394ed6f537e7f83c4ee5a77e0ae29ea17cd7 Mon Sep 17 00:00:00 2001
From: Adam Nichols
Date: Mon, 4 Dec 2023 22:42:08 -0500
Subject: [PATCH 21/87]
WX-1351 CI CWL cleanup (#7327) --- src/ci/bin/test.inc.sh | 146 +----------------- src/ci/bin/testCentaurAws.sh | 1 - src/ci/bin/testCentaurTes.sh | 1 - src/ci/bin/testConformanceLocal.sh | 16 -- src/ci/bin/testConformancePapiV2alpha1.sh | 21 --- src/ci/bin/testConformancePapiV2beta.sh | 4 - src/ci/bin/testConformanceTesk.sh | 4 - src/ci/bin/test_gcpbatch.inc.sh | 4 - src/ci/bin/test_papi.inc.sh | 4 - .../centaur_cwl_runner_application.conf | 44 ------ src/ci/resources/cwl_conformance_test.wdl | 136 ---------------- .../ftp_centaur_cwl_runner.conf.ctmpl | 16 -- 12 files changed, 2 insertions(+), 395 deletions(-) delete mode 100755 src/ci/bin/testConformanceLocal.sh delete mode 100755 src/ci/bin/testConformancePapiV2alpha1.sh delete mode 100755 src/ci/bin/testConformancePapiV2beta.sh delete mode 100755 src/ci/bin/testConformanceTesk.sh delete mode 100644 src/ci/resources/centaur_cwl_runner_application.conf delete mode 100644 src/ci/resources/cwl_conformance_test.wdl delete mode 100644 src/ci/resources/ftp_centaur_cwl_runner.conf.ctmpl diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh index 7da8290b9a1..6f76ee8c05c 100755 --- a/src/ci/bin/test.inc.sh +++ b/src/ci/bin/test.inc.sh @@ -355,15 +355,11 @@ cromwell::private::create_build_variables() { backend_type="${backend_type#centaurWdlUpgrade}" backend_type="${backend_type#centaurHoricromtal}" backend_type="${backend_type#centaur}" - backend_type="${backend_type#conformance}" backend_type="$(echo "${backend_type}" | sed 's/\([A-Z]\)/_\1/g' | tr '[:upper:]' '[:lower:]' | cut -c 2-)" CROMWELL_BUILD_BACKEND_TYPE="${backend_type}" - if [[ "${CROMWELL_BUILD_TYPE}" == conformance* ]]; then - CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND="server/assembly centaurCwlRunner/assembly" - else - CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND="assembly" - fi + # It may be possible to trim this down to e.g. `server/assembly` for some jobs + CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND="assembly" if [[ "${CROMWELL_BUILD_GENERATE_COVERAGE}" == "true" ]]; then CROMWELL_BUILD_SBT_COVERAGE_COMMAND="coverage" @@ -747,45 +743,6 @@ cromwell::private::create_centaur_variables() { export CROMWELL_BUILD_DOCKER_TAG } -cromwell::private::create_conformance_variables() { - CROMWELL_BUILD_CWL_RUNNER_MODE="${CROMWELL_BUILD_BACKEND_TYPE}" - CROMWELL_BUILD_CWL_TOOL_VERSION="3.0.20200724003302" - CROMWELL_BUILD_CWL_TEST_VERSION="1.0.20190228134645" - CROMWELL_BUILD_CWL_TEST_COMMIT="1f501e38ff692a408e16b246ac7d64d32f0822c2" # use known git hash to avoid changes - CROMWELL_BUILD_CWL_TEST_RUNNER="${CROMWELL_BUILD_ROOT_DIRECTORY}/centaurCwlRunner/src/bin/centaur-cwl-runner.bash" - CROMWELL_BUILD_CWL_TEST_DIRECTORY="${CROMWELL_BUILD_ROOT_DIRECTORY}/common-workflow-language" - CROMWELL_BUILD_CWL_TEST_RESOURCES="${CROMWELL_BUILD_CWL_TEST_DIRECTORY}/v1.0/v1.0" - CROMWELL_BUILD_CWL_TEST_WDL="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/cwl_conformance_test.wdl" - CROMWELL_BUILD_CWL_TEST_INPUTS="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/cwl_conformance_test.inputs.json" - CROMWELL_BUILD_CWL_TEST_OUTPUT="${CROMWELL_BUILD_LOG_DIRECTORY}/cwl_conformance_test.out.txt" - - # Setting CROMWELL_BUILD_CWL_TEST_PARALLELISM too high will cause false negatives due to cromwell server timeouts. 
- case "${CROMWELL_BUILD_TYPE}" in - conformanceTesk) - # BA-6547: TESK is not currently tested in FC-Jenkins nor Travis - CROMWELL_BUILD_CWL_RUNNER_CONFIG="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/ftp_centaur_cwl_runner.conf" - CROMWELL_BUILD_CWL_TEST_PARALLELISM=8 - ;; - *) - CROMWELL_BUILD_CWL_RUNNER_CONFIG="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/centaur_cwl_runner_application.conf" - CROMWELL_BUILD_CWL_TEST_PARALLELISM=10 - ;; - esac - - export CROMWELL_BUILD_CWL_RUNNER_CONFIG - export CROMWELL_BUILD_CWL_RUNNER_MODE - export CROMWELL_BUILD_CWL_TOOL_VERSION - export CROMWELL_BUILD_CWL_TEST_VERSION - export CROMWELL_BUILD_CWL_TEST_COMMIT - export CROMWELL_BUILD_CWL_TEST_RUNNER - export CROMWELL_BUILD_CWL_TEST_DIRECTORY - export CROMWELL_BUILD_CWL_TEST_RESOURCES - export CROMWELL_BUILD_CWL_TEST_WDL - export CROMWELL_BUILD_CWL_TEST_INPUTS - export CROMWELL_BUILD_CWL_TEST_OUTPUT - export CROMWELL_BUILD_CWL_TEST_PARALLELISM -} - cromwell::private::verify_secure_build() { case "${CROMWELL_BUILD_PROVIDER}" in "${CROMWELL_BUILD_PROVIDER_TRAVIS}") @@ -1032,39 +989,6 @@ cromwell::private::start_docker_databases() { fi } -cromwell::private::install_cwltest() { - # TODO: No clue why these are needed for cwltool. If you know please update this comment. - sudo apt-get install procps || true - cromwell::private::pip_install cwltool=="${CROMWELL_BUILD_CWL_TOOL_VERSION}" --ignore-installed - cromwell::private::pip_install cwltest=="${CROMWELL_BUILD_CWL_TEST_VERSION}" -} - -cromwell::private::checkout_pinned_cwl() { - if [[ ! -d "${CROMWELL_BUILD_CWL_TEST_DIRECTORY}" ]]; then - git clone \ - https://github.com/common-workflow-language/common-workflow-language.git \ - "${CROMWELL_BUILD_CWL_TEST_DIRECTORY}" - ( - pushd "${CROMWELL_BUILD_CWL_TEST_DIRECTORY}" > /dev/null - git checkout "${CROMWELL_BUILD_CWL_TEST_COMMIT}" - popd > /dev/null - ) - fi -} - -cromwell::private::write_cwl_test_inputs() { - cat <"${CROMWELL_BUILD_CWL_TEST_INPUTS}" -{ - "cwl_conformance_test.cwl_dir": "${CROMWELL_BUILD_CWL_TEST_DIRECTORY}", - "cwl_conformance_test.test_result_output": "${CROMWELL_BUILD_CWL_TEST_OUTPUT}", - "cwl_conformance_test.centaur_cwl_runner": "${CROMWELL_BUILD_CWL_TEST_RUNNER}", - "cwl_conformance_test.conformance_expected_failures": - "${CROMWELL_BUILD_RESOURCES_DIRECTORY}/${CROMWELL_BUILD_BACKEND_TYPE}_conformance_expected_failures.txt", - "cwl_conformance_test.timeout": 2400 -} -JSON -} - cromwell::private::vault_run() { if cromwell::private::is_xtrace_enabled; then cromwell::private::exec_silent_function cromwell::private::vault_run "$@" @@ -1267,11 +1191,6 @@ cromwell::private::cat_centaur_log() { cat "${CROMWELL_BUILD_CENTAUR_LOG}" } -cromwell::private::cat_conformance_log() { - echo "CONFORMANCE LOG" - cat "${CROMWELL_BUILD_CWL_TEST_OUTPUT}" -} - cromwell::private::kill_build_heartbeat() { if [[ -n "${CROMWELL_BUILD_HEARTBEAT_PID:+set}" ]]; then cromwell::private::kill_tree "${CROMWELL_BUILD_HEARTBEAT_PID}" @@ -1337,47 +1256,6 @@ cromwell::private::kill_tree() { kill "${pid}" 2> /dev/null } -cromwell::private::start_conformance_cromwell() { - # Start the Cromwell server in the directory containing input files so it can access them via their relative path - pushd "${CROMWELL_BUILD_CWL_TEST_RESOURCES}" > /dev/null - - # Turn off call caching as hashing doesn't work since it sees local and not GCS paths. - # CWL conformance uses alpine images that do not have bash. 
- java \ - -Xmx2g \ - -Dconfig.file="${CROMWELL_BUILD_CROMWELL_CONFIG}" \ - -Dcall-caching.enabled=false \ - -Dsystem.job-shell=/bin/sh \ - -jar "${CROMWELL_BUILD_CROMWELL_JAR}" \ - server & - - CROMWELL_BUILD_CONFORMANCE_CROMWELL_PID=$! - - popd > /dev/null - - cromwell::private::add_exit_function cromwell::private::kill_conformance_cromwell -} - -cromwell::private::kill_conformance_cromwell() { - if [[ -n "${CROMWELL_BUILD_CONFORMANCE_CROMWELL_PID+set}" ]]; then - cromwell::build::kill_tree "${CROMWELL_BUILD_CONFORMANCE_CROMWELL_PID}" - fi -} - -cromwell::private::run_conformance_wdl() { - pushd "${CROMWELL_BUILD_CWL_TEST_RESOURCES}" > /dev/null - - CENTAUR_CWL_JAVA_ARGS="-Dconfig.file=${CROMWELL_BUILD_CWL_RUNNER_CONFIG}" \ - java \ - -Xmx6g \ - -Dbackend.providers.Local.config.concurrent-job-limit="${CROMWELL_BUILD_CWL_TEST_PARALLELISM}" \ - -jar "${CROMWELL_BUILD_CROMWELL_JAR}" \ - run "${CROMWELL_BUILD_CWL_TEST_WDL}" \ - -i "${CROMWELL_BUILD_CWL_TEST_INPUTS}" - - popd > /dev/null -} - cromwell::build::exec_test_script() { cromwell::private::create_build_variables if [[ "${CROMWELL_BUILD_RUN_TESTS}" == "false" ]]; then @@ -1458,17 +1336,6 @@ cromwell::build::setup_centaur_environment() { fi } -cromwell::build::setup_conformance_environment() { - cromwell::private::create_centaur_variables - cromwell::private::create_conformance_variables - if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]]; then - cromwell::private::install_cwltest - fi - cromwell::private::checkout_pinned_cwl - cromwell::private::write_cwl_test_inputs - cromwell::private::add_exit_function cromwell::private::cat_conformance_log -} - cromwell::private::find_or_assemble_cromwell_jar() { cromwell::private::find_cromwell_jar if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]] || ! cromwell::private::exists_cromwell_jar; then @@ -1592,15 +1459,6 @@ cromwell::build::run_centaur() { "$@" } -cromwell::build::run_conformance() { - cromwell::private::start_conformance_cromwell - - # Give cromwell time to start up - sleep 30 - - cromwell::private::run_conformance_wdl -} - cromwell::build::generate_code_coverage() { if [[ "${CROMWELL_BUILD_GENERATE_COVERAGE}" == "true" ]]; then cromwell::private::generate_code_coverage diff --git a/src/ci/bin/testCentaurAws.sh b/src/ci/bin/testCentaurAws.sh index 83804005322..f20ebfb9e59 100755 --- a/src/ci/bin/testCentaurAws.sh +++ b/src/ci/bin/testCentaurAws.sh @@ -50,7 +50,6 @@ cromwell::build::run_centaur \ # -e scatter \ # -e long_cmd \ # -e runtwiceexpectingcallcaching \ -# -e cwl_cache_within_workflow \ # -e cachewithinwf cromwell::build::generate_code_coverage diff --git a/src/ci/bin/testCentaurTes.sh b/src/ci/bin/testCentaurTes.sh index 7cad5b26832..3d3a350cdd6 100755 --- a/src/ci/bin/testCentaurTes.sh +++ b/src/ci/bin/testCentaurTes.sh @@ -72,7 +72,6 @@ cromwell::build::run_centaur \ -e non_root_specified_user \ -e write_lines_files \ -e draft3_read_write_functions_local \ - -e cwl_input_json \ -e directory_type_local \ cromwell::build::generate_code_coverage diff --git a/src/ci/bin/testConformanceLocal.sh b/src/ci/bin/testConformanceLocal.sh deleted file mode 100755 index 74e2cabc462..00000000000 --- a/src/ci/bin/testConformanceLocal.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - -set -o errexit -o nounset -o pipefail -# import in shellcheck / CI / IntelliJ compatible ways -# shellcheck source=/dev/null -source "${BASH_SOURCE%/*}/test.inc.sh" || source test.inc.sh - -cromwell::build::setup_common_environment - -cromwell::build::setup_conformance_environment - 
-cromwell::build::assemble_jars - -cromwell::build::run_conformance - -cromwell::build::generate_code_coverage diff --git a/src/ci/bin/testConformancePapiV2alpha1.sh b/src/ci/bin/testConformancePapiV2alpha1.sh deleted file mode 100755 index cb952694b4e..00000000000 --- a/src/ci/bin/testConformancePapiV2alpha1.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env bash - -set -o errexit -o nounset -o pipefail -export CROMWELL_BUILD_REQUIRES_SECURE=true -# import in shellcheck / CI / IntelliJ compatible ways -# shellcheck source=/dev/null -source "${BASH_SOURCE%/*}/test.inc.sh" || source test.inc.sh -# shellcheck source=/dev/null -source "${BASH_SOURCE%/*}/test_papi.inc.sh" || source test_papi.inc.sh - -cromwell::build::setup_common_environment - -cromwell::build::setup_conformance_environment - -cromwell::build::papi::setup_papi_conformance_environment - -cromwell::build::assemble_jars - -cromwell::build::run_conformance - -cromwell::build::generate_code_coverage diff --git a/src/ci/bin/testConformancePapiV2beta.sh b/src/ci/bin/testConformancePapiV2beta.sh deleted file mode 100755 index 7117a2d78ba..00000000000 --- a/src/ci/bin/testConformancePapiV2beta.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash - -echo "Test is temporarily disabled due to ongoing PAPI 503 flakiness." -exit 0 \ No newline at end of file diff --git a/src/ci/bin/testConformanceTesk.sh b/src/ci/bin/testConformanceTesk.sh deleted file mode 100755 index 6f4784888e1..00000000000 --- a/src/ci/bin/testConformanceTesk.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env bash - -echo "$(tput setab 1)$(tput blink)BA-6547: TESK is not currently tested in GOTC-Jenkins, FC-Jenkins, nor Travis... so this 'test' was quick!$(tput sgr 0)" -exit 0 diff --git a/src/ci/bin/test_gcpbatch.inc.sh b/src/ci/bin/test_gcpbatch.inc.sh index f75764b6006..1570bdb5f76 100644 --- a/src/ci/bin/test_gcpbatch.inc.sh +++ b/src/ci/bin/test_gcpbatch.inc.sh @@ -112,7 +112,3 @@ cromwell::build::batch::setup_batch_centaur_environment() { fi cromwell::private::batch::setup_batch_service_account } - -cromwell::build::batch::setup_batch_conformance_environment() { - cromwell::private::batch::setup_batch_service_account -} diff --git a/src/ci/bin/test_papi.inc.sh b/src/ci/bin/test_papi.inc.sh index e3c253d7455..98d77820435 100644 --- a/src/ci/bin/test_papi.inc.sh +++ b/src/ci/bin/test_papi.inc.sh @@ -122,7 +122,3 @@ cromwell::build::papi::setup_papi_centaur_environment() { cromwell::private::papi::setup_papi_service_account cromwell::private::papi::setup_papi_endpoint_url } - -cromwell::build::papi::setup_papi_conformance_environment() { - cromwell::private::papi::setup_papi_service_account -} diff --git a/src/ci/resources/centaur_cwl_runner_application.conf b/src/ci/resources/centaur_cwl_runner_application.conf deleted file mode 100644 index 69c63440901..00000000000 --- a/src/ci/resources/centaur_cwl_runner_application.conf +++ /dev/null @@ -1,44 +0,0 @@ -include required(classpath("application.conf")) - -centaur { - cwl-runner { - mode = "Error: BA-6546 The environment variable CROMWELL_BUILD_CWL_RUNNER_MODE must be set/export pointing to a valid mode such as 'local'" - mode = ${?CROMWELL_BUILD_CWL_RUNNER_MODE} - - papi { - default-input-gcs-prefix = "gs://centaur-cwl-conformance-1f501e3/cwl-inputs/" - } - tesk { - default-input-ftp-prefix = "ftp://ftp.hexdump.org/centaur-cwl-conformance/cwl-inputs/" - } - - google { - application-name = "centaur-cwl-runner" - genomics.location = "us-central1" - max-attempts = 3 - - auth = "Error: BA-6546 The environment variable 
CROMWELL_BUILD_PAPI_AUTH_MODE must be set/export pointing to a valid mode such as 'application-default'" - auth = ${?CROMWELL_BUILD_PAPI_AUTH_MODE} - json-dir = "Error: BA-6546 The environment variable CROMWELL_BUILD_RESOURCES_DIRECTORY must be set/export pointing to a valid mode such as 'target/ci/resources'" - json-dir = ${?CROMWELL_BUILD_RESOURCES_DIRECTORY} - - genomics { - endpoint-url = "Error: BA-6546 The environment variable CROMWELL_BUILD_PAPI_ENDPOINT_URL must be set/export pointing to a valid mode such as 'https://lifesciences.googleapis.com/'" - endpoint-url = ${?CROMWELL_BUILD_PAPI_ENDPOINT_URL} - } - - auths = [ - { - name = "application-default" - scheme = "application_default" - } - { - name = "service-account" - scheme = "service_account" - json-file = ${centaur.cwl-runner.google.json-dir}/cromwell-centaur-service-account.json - } - ] - - } - } -} diff --git a/src/ci/resources/cwl_conformance_test.wdl b/src/ci/resources/cwl_conformance_test.wdl deleted file mode 100644 index 75b544c9f53..00000000000 --- a/src/ci/resources/cwl_conformance_test.wdl +++ /dev/null @@ -1,136 +0,0 @@ -version 1.0 - -workflow cwl_conformance_test { - input { - String cwl_dir - String test_result_output - String centaur_cwl_runner - String conformance_expected_failures - Int timeout - } - - call get_test_count { - input: - cwl_dir = cwl_dir, - centaur_cwl_runner = centaur_cwl_runner - } - - scatter (test_index in range(get_test_count.test_count)) { - call run_test_index { - input: - cwl_dir = cwl_dir, - centaur_cwl_runner = centaur_cwl_runner, - test_index = test_index, - timeout = timeout - } - } - - call make_summary { - input: - test_count = get_test_count.test_count, - test_result_codes = run_test_index.test_result_code, - conformance_expected_failures = conformance_expected_failures, - test_result_outputs = run_test_index.out, - test_result_output = test_result_output - } - - output { - } -} - -task get_test_count { - input { - String cwl_dir - String centaur_cwl_runner - } - - command { - cd ~{cwl_dir} - ./run_test.sh RUNNER="~{centaur_cwl_runner}" -l | grep -c '^\[' - } - - output { - Int test_count = read_int(stdout()) - } -} - -task run_test_index { - input { - String cwl_dir - String centaur_cwl_runner - Int test_index - Int test_number = test_index + 1 - Int timeout - } - - # Weird -n/--timeout format is because ./run_test.sh doesn't pass through the timeout parameter to cwltest. - # Test 55 often runs over 10 minutes in Travis/PapiV2 and requires a longer timeout. - # So, we use the fact that run_test.sh conveniently doesn't sanitize -n to wire the --timeout _inside_ the -n arg. - command { - ( - cd ~{cwl_dir} - ./run_test.sh RUNNER="~{centaur_cwl_runner}" -n"~{test_number} --timeout=~{timeout}" 2>&1 - ) - echo $? > test_result_code - } - - output { - Int test_result_code = read_int("test_result_code") - File out = stdout() - } -} - -task make_summary { - input { - Int test_count - Array[String] test_result_outputs - Array[Int] test_result_codes - String test_result_output - String conformance_expected_failures - File test_result_output_lines = write_lines(test_result_outputs) - File test_result_code_lines = write_lines(test_result_codes) - } - - command <<< - TEST_PASSING=0 - touch unexpected_pass - touch unexpected_fail - - for TEST_NUMBER in $(seq ~{test_count}); do - # Check if test is supposed to fail - grep -q "^${TEST_NUMBER}$" "~{conformance_expected_failures}" - TEST_IN_EXPECTED_FAILED=$? 
- - # Get the test results - TEST_RESULT_OUTPUT="$(sed -n ${TEST_NUMBER}p ~{test_result_output_lines})" - TEST_RESULT_CODE="$(sed -n ${TEST_NUMBER}p ~{test_result_code_lines})" - - if [ "${TEST_RESULT_CODE}" -eq 0 ]; then - TEST_PASSING="$((${TEST_PASSING} + 1))" - fi - - # Check for unexpected results - if [ "${TEST_IN_EXPECTED_FAILED}" -eq 0 ] && [ "${TEST_RESULT_CODE}" -eq 0 ]; then - echo "${TEST_NUMBER}" >> unexpected_pass - elif [ "${TEST_IN_EXPECTED_FAILED}" -ne 0 ] && [ "${TEST_RESULT_CODE}" -ne 0 ]; then - echo "${TEST_NUMBER}" >> unexpected_fail - fi - - cat "$TEST_RESULT_OUTPUT" >> "~{test_result_output}" - echo "exited with code ${TEST_RESULT_CODE}" >> "~{test_result_output}" - done - - echo "---" >> "~{test_result_output}" - echo "Conformance percentage at $(( 100 * ${TEST_PASSING} / ~{test_count} ))%" >> "~{test_result_output}" - - if [ -s unexpected_pass ] || [ -s unexpected_fail ]; then - printf "Unexpected passing tests: (%s)\n" "$(paste -s -d ' ' unexpected_pass)" >> "~{test_result_output}" - printf "Unexpected failing tests: (%s)\n" "$(paste -s -d ' ' unexpected_fail)" >> "~{test_result_output}" - echo "Does ~{conformance_expected_failures} need to be updated?" >> "~{test_result_output}" - false - fi - >>> - - output { - } -} diff --git a/src/ci/resources/ftp_centaur_cwl_runner.conf.ctmpl b/src/ci/resources/ftp_centaur_cwl_runner.conf.ctmpl deleted file mode 100644 index 559b9e2f981..00000000000 --- a/src/ci/resources/ftp_centaur_cwl_runner.conf.ctmpl +++ /dev/null @@ -1,16 +0,0 @@ -include required(classpath("application.conf")) -include "centaur_cwl_runner_application.conf" - -{{with $cromwellFtp := secret (printf "secret/dsde/cromwell/common/cromwell-ftp")}} -centaur { - cwl-runner { - ftp { - auth { - username = {{$cromwellFtp.Data.username}} - password = {{$cromwellFtp.Data.password}} - } - connection-count-per-user = 1 - } - } -} -{{end}} From bb1e394ed6f537e7f83c4ee5a77e0ae29ea17cd7 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Tue, 5 Dec 2023 13:11:44 -0500 Subject: [PATCH 22/87] WX-1351 Remove slow/misbehaving localization test (#7330) --- .../localize_file_larger_than_disk_space.wdl | 27 ------------------- .../localize_file_larger_than_disk_space.test | 17 ------------ 2 files changed, 44 deletions(-) delete mode 100644 centaur/src/main/resources/standardTestCases/input_localization/localize_file_larger_than_disk_space.wdl delete mode 100644 centaur/src/main/resources/standardTestCases/localize_file_larger_than_disk_space.test diff --git a/centaur/src/main/resources/standardTestCases/input_localization/localize_file_larger_than_disk_space.wdl b/centaur/src/main/resources/standardTestCases/input_localization/localize_file_larger_than_disk_space.wdl deleted file mode 100644 index b90c63fe928..00000000000 --- a/centaur/src/main/resources/standardTestCases/input_localization/localize_file_larger_than_disk_space.wdl +++ /dev/null @@ -1,27 +0,0 @@ -version 1.0 - -task localize_file { - input { - File input_file - } - command { - cat "localizing file over 1 GB" - } - runtime { - docker: "ubuntu:latest" - disks: "local-disk 1 HDD" - } - output { - String out = read_string(stdout()) - } -} - -workflow localize_file_larger_than_disk_space { - File wf_input = "gs://cromwell_test_bucket/file_over_1_gb.txt" - - call localize_file { input: input_file = wf_input } - - output { - String content = localize_file.out - } -} diff --git a/centaur/src/main/resources/standardTestCases/localize_file_larger_than_disk_space.test 
b/centaur/src/main/resources/standardTestCases/localize_file_larger_than_disk_space.test deleted file mode 100644 index bee99677b01..00000000000 --- a/centaur/src/main/resources/standardTestCases/localize_file_larger_than_disk_space.test +++ /dev/null @@ -1,17 +0,0 @@ -name: localize_file_larger_than_disk_space -testFormat: workflowfailure -backends: [Papiv2] -workflowType: WDL -workflowTypeVersion: 1.0 -tags: ["wdl_1.0"] - -files { - workflow: input_localization/localize_file_larger_than_disk_space.wdl -} - -metadata { - workflowName: localize_file_larger_than_disk_space - status: Failed - "failures.0.message": "Workflow failed" - "failures.0.causedBy.0.message": "Task localize_file_larger_than_disk_space.localize_file:NA:1 failed. The job was stopped before the command finished. PAPI error code 9. Please check the log file for more details: gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/localize_file_larger_than_disk_space/<>/call-localize_file/localize_file.log." -} From fede8596e6f6b12d7ef5f46db125b996153803b6 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Wed, 6 Dec 2023 15:47:56 -0500 Subject: [PATCH 23/87] WX-1351 Split out restart tests (#7333) --- .github/workflows/integration_tests.yml | 3 +++ .../failures.restart_while_failing_jes.test | 1 + .../failures.restart_while_failing_local.test | 2 +- .../failures.restart_while_failing_tes.test | 2 +- .../bin/testCentaurHoricromtalPapiV2beta.sh | 1 + src/ci/bin/testCentaurPapiV2beta.sh | 1 + src/ci/bin/testCentaurPapiV2betaRestart.sh | 25 +++++++++++++++++++ .../papi_v2beta_restart_application.conf | 1 + 8 files changed, 34 insertions(+), 2 deletions(-) create mode 100755 src/ci/bin/testCentaurPapiV2betaRestart.sh create mode 100644 src/ci/resources/papi_v2beta_restart_application.conf diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml index ebafe51064c..61ed3c5af41 100644 --- a/.github/workflows/integration_tests.yml +++ b/.github/workflows/integration_tests.yml @@ -32,6 +32,9 @@ jobs: - build_type: centaurPapiV2beta build_mysql: 5.7 friendly_name: Centaur Papi V2 Beta with MySQL 5.7 + - build_type: centaurPapiV2betaRestart + build_mysql: 5.7 + friendly_name: Centaur Papi V2 Beta (restart) - build_type: dbms friendly_name: DBMS - build_type: centaurTes diff --git a/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_jes.test b/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_jes.test index 1ac12322f4f..182a833fbdb 100644 --- a/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_jes.test +++ b/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_jes.test @@ -2,6 +2,7 @@ name: failures.restart_while_failing_jes testFormat: WorkflowFailureRestartWithRecover callMark: restart_while_failing.B1 backends: [Papi] +tags: [restart] files { workflow: failures/restart_while_failing/restart_while_failing.wdl diff --git a/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_local.test b/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_local.test index f686b16c817..58abbf78245 100644 --- a/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_local.test +++ b/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_local.test @@ -3,7 +3,7 @@ testFormat: WorkflowFailureRestartWithRecover callMark: restart_while_failing.B1 backendsMode: "only" backends: [Local, LocalNoDocker] -tags: [localdockertest] +tags: 
[localdockertest, restart] files { workflow: failures/restart_while_failing/restart_while_failing.wdl diff --git a/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_tes.test b/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_tes.test index 64ea5fd8906..9bb9c1a55d6 100644 --- a/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_tes.test +++ b/centaur/src/main/resources/standardTestCases/failures.restart_while_failing_tes.test @@ -2,7 +2,7 @@ name: failures.restart_while_failing_tes testFormat: WorkflowFailureRestartWithoutRecover callMark: restart_while_failing.B1 backends: [TES] -tags: [localdockertest] +tags: [localdockertest, restart] files { workflow: failures/restart_while_failing/restart_while_failing.wdl diff --git a/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh b/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh index 36b215a2f52..c894909cd3f 100755 --- a/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh +++ b/src/ci/bin/testCentaurHoricromtalPapiV2beta.sh @@ -25,6 +25,7 @@ cromwell::build::run_centaur \ -e relative_output_paths_colliding \ -e standard_output_paths_colliding_prevented \ -e papi_v2alpha1_gcsa \ + -e restart \ cromwell::build::generate_code_coverage diff --git a/src/ci/bin/testCentaurPapiV2beta.sh b/src/ci/bin/testCentaurPapiV2beta.sh index 43d5a7d62aa..d66d2a14334 100755 --- a/src/ci/bin/testCentaurPapiV2beta.sh +++ b/src/ci/bin/testCentaurPapiV2beta.sh @@ -23,6 +23,7 @@ cromwell::build::run_centaur \ -e relative_output_paths_colliding \ -e standard_output_paths_colliding_prevented \ -e papi_v2alpha1_gcsa \ + -e restart \ cromwell::build::generate_code_coverage diff --git a/src/ci/bin/testCentaurPapiV2betaRestart.sh b/src/ci/bin/testCentaurPapiV2betaRestart.sh new file mode 100755 index 00000000000..e0d50bb18c6 --- /dev/null +++ b/src/ci/bin/testCentaurPapiV2betaRestart.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -o errexit -o nounset -o pipefail +export CROMWELL_BUILD_REQUIRES_SECURE=true +# import in shellcheck / CI / IntelliJ compatible ways +# shellcheck source=/dev/null +source "${BASH_SOURCE%/*}/test.inc.sh" || source test.inc.sh +# shellcheck source=/dev/null +source "${BASH_SOURCE%/*}/test_papi.inc.sh" || source test_papi.inc.sh + +cromwell::build::setup_common_environment + +cromwell::build::setup_centaur_environment + +cromwell::build::papi::setup_papi_centaur_environment + +cromwell::build::assemble_jars + +cromwell::build::run_centaur \ + -p 100 \ + -i restart \ + +cromwell::build::generate_code_coverage + +cromwell::build::print_workflow_statistics diff --git a/src/ci/resources/papi_v2beta_restart_application.conf b/src/ci/resources/papi_v2beta_restart_application.conf new file mode 100644 index 00000000000..2f0bb75860b --- /dev/null +++ b/src/ci/resources/papi_v2beta_restart_application.conf @@ -0,0 +1 @@ +include "papi_v2beta_application.conf" From f002a9644d7bb0c658151b4a2fe2451b839f6704 Mon Sep 17 00:00:00 2001 From: Tom Wiseman Date: Thu, 7 Dec 2023 10:24:59 -0500 Subject: [PATCH 24/87] [WX-1345] Automatic Token Acquisition for TES Config (#7256) --- .gitignore | 4 + project/ContinuousIntegration.scala | 16 +++- ...Repo template_ Cromwell server TES.run.xml | 20 +++++ ...emplate_ Cromwell server Terra TES.run.xml | 23 ++++++ src/ci/resources/acquire_b2c_token.sh | 42 ++++++++++ .../terra_tes_application.conf.ctmpl | 80 +++++++++++++++++++ src/ci/resources/tes_application.conf | 42 ++++------ 7 files changed, 197 insertions(+), 30 deletions(-) create mode 100644 
runConfigurations/Repo template_ Cromwell server Terra TES.run.xml create mode 100755 src/ci/resources/acquire_b2c_token.sh create mode 100644 src/ci/resources/terra_tes_application.conf.ctmpl diff --git a/.gitignore b/.gitignore index 250b6aa3c16..94accae9038 100644 --- a/.gitignore +++ b/.gitignore @@ -58,6 +58,10 @@ tesk_application.conf **/venv/ exome_germline_single_sample_v1.3/ **/*.pyc +src/ci/resources/*.temp # GHA credentials gha-creds-*.json + +# jenv +.java-version diff --git a/project/ContinuousIntegration.scala b/project/ContinuousIntegration.scala index d5dd3262f87..f0516150cf5 100644 --- a/project/ContinuousIntegration.scala +++ b/project/ContinuousIntegration.scala @@ -9,6 +9,8 @@ object ContinuousIntegration { lazy val ciSettings: Seq[Setting[_]] = List( srcCiResources := sourceDirectory.value / "ci" / "resources", targetCiResources := target.value / "ci" / "resources", + envFile := srcCiResources.value / "env.temp", //generated by resources/acquire_b2c_token.sh + vaultToken := userHome / ".vault-token", copyCiResources := { IO.copyDirectory(srcCiResources.value, targetCiResources.value) @@ -26,7 +28,12 @@ object ContinuousIntegration { if (vaultToken.value.isDirectory) { sys.error(s"""The vault token file "${vaultToken.value}" should not be a directory.""") } - val cmd = List( + + // Only include the local file argument if the file exists (local development w/ acquire_b2c_token.sh) + // Don't include it otherwise (CI/CD and other development) + val localEnvFileArgs = if(envFile.value.exists()) List("-e", s"ENV_FILE=${envFile.value}") else List() + + val cmd: List[String] = List.concat(List( "docker", "run", "--rm", @@ -35,7 +42,9 @@ object ContinuousIntegration { "-v", s"${srcCiResources.value}:${srcCiResources.value}", "-v", - s"${targetCiResources.value}:${targetCiResources.value}", + s"${targetCiResources.value}:${targetCiResources.value}"), + localEnvFileArgs, + List( "-e", "ENVIRONMENT=not_used", "-e", @@ -44,7 +53,7 @@ object ContinuousIntegration { s"OUT_PATH=${targetCiResources.value}", "broadinstitute/dsde-toolbox:dev", "render-templates.sh" - ) + )) val result = cmd ! log if (result != 0) { sys.error( @@ -70,6 +79,7 @@ object ContinuousIntegration { private val srcCiResources: SettingKey[File] = settingKey[File]("Source directory for CI resources") private val targetCiResources: SettingKey[File] = settingKey[File]("Target directory for CI resources") private val vaultToken: SettingKey[File] = settingKey[File]("File with the vault token") + private val envFile: SettingKey[File] = settingKey[File]("File with the environment variables needed to render CI resources.") /** * For "reasons" these projects are excluded from the root aggregation in build.sbt. diff --git a/runConfigurations/Repo template_ Cromwell server TES.run.xml b/runConfigurations/Repo template_ Cromwell server TES.run.xml index 32127027ecc..192b73735b8 100644 --- a/runConfigurations/Repo template_ Cromwell server TES.run.xml +++ b/runConfigurations/Repo template_ Cromwell server TES.run.xml @@ -19,4 +19,24 @@
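
Note on the conditional env-file plumbing in PATCH 24: the ContinuousIntegration.scala change above only threads ENV_FILE into the template-rendering container when the locally generated file exists, so CI/CD runs (which never call acquire_b2c_token.sh) are unaffected. Below is a minimal, self-contained sketch of that pattern, assuming only what the diff shows: the env.temp location, the volume mounts, and the broadinstitute/dsde-toolbox:dev render-templates.sh invocation. The object and method names here are illustrative, not part of the real build definition.

    import java.io.File

    object RenderTemplatesSketch {
      // Assemble the `docker run` argument list the way the renderCiResources task does.
      def dockerRunArgs(envFile: File, srcDir: File, outDir: File): List[String] = {
        // Pass ENV_FILE through only when acquire_b2c_token.sh has generated it locally;
        // when the file is absent (e.g. in CI/CD) the argument list is unchanged.
        val localEnvFileArgs =
          if (envFile.exists()) List("-e", s"ENV_FILE=${envFile.getAbsolutePath}") else List.empty[String]

        List(
          "docker", "run", "--rm",
          "-v", s"${srcDir.getAbsolutePath}:${srcDir.getAbsolutePath}",
          "-v", s"${outDir.getAbsolutePath}:${outDir.getAbsolutePath}"
        ) ++ localEnvFileArgs ++ List(
          "-e", "ENVIRONMENT=not_used",
          "-e", s"INPUT_PATH=${srcDir.getAbsolutePath}",
          "-e", s"OUT_PATH=${outDir.getAbsolutePath}",
          "broadinstitute/dsde-toolbox:dev",
          "render-templates.sh"
        )
      }

      def main(args: Array[String]): Unit =
        println(
          dockerRunArgs(
            new File("src/ci/resources/env.temp"),
            new File("src/ci/resources"),
            new File("target/ci/resources")
          ).mkString(" ")
        )
    }

The patch itself composes the same three segments with List.concat(List(...), localEnvFileArgs, List(...)); the ++ used in this sketch is equivalent.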