From bac9b46d71258481af513039c8608fd957c1d898 Mon Sep 17 00:00:00 2001 From: canton-network-da Date: Fri, 11 Oct 2024 05:15:39 -0400 Subject: [PATCH] Update Splice from CCI (#62) Signed-off-by: DA Automation Co-authored-by: DA Automation --- .../UpdateHistorySanityCheckPlugin.scala | 364 +++++++++++++----- ...SynchronizerMigrationIntegrationTest.scala | 3 - .../tests/ScanTimeBasedIntegrationTest.scala | 12 +- .../integration/tests/SpliceTests.scala | 4 - ...dsCollectionTimeBasedIntegrationTest.scala | 4 + ...0CredentialsPreflightIntegrationTest.scala | 37 -- .../tests/auth/PreflightAuthUtil.scala | 8 +- .../PreflightIntegrationTestUtil.scala | 18 - ...unbookSvPreflightIntegrationTestBase.scala | 6 +- .../ValidatorPreflightIntegrationTest.scala | 4 +- .../com/daml/network/util/Auth0Util.scala | 62 ++- .../daml/network/util/TriggerTestUtil.scala | 32 +- .../components/DateWithDurationDisplay.tsx | 37 ++ .../src/components/votes/ActionView.tsx | 41 +- .../src/components/votes/VoteModalContent.tsx | 5 +- build-tools/npm-install.sh | 2 +- build.sbt | 7 - .../validator/{token.py => get-token.py} | 2 +- cluster/helm/cn-docs/templates/docs.yaml | 8 +- .../splice-cometbft/templates/deployment.yaml | 4 +- .../helm/splice-cometbft/values.schema.json | 3 + .../helm/splice-domain/templates/domain.yaml | 4 +- .../templates/mediator.yaml | 4 +- .../templates/sequencer.yaml | 4 +- .../templates/load-tester.yaml | 4 +- .../templates/participant.yaml | 4 +- .../splice-participant/values.schema.json | 3 + .../helm/splice-postgres/values.schema.json | 3 + cluster/helm/splice-scan/templates/scan.yaml | 8 +- cluster/helm/splice-scan/values.schema.json | 3 + .../templates/splitwell.yaml | 4 +- .../templates/splitwell-web-ui.yaml | 4 +- .../values.schema.json | 3 + .../splice-sv-node/templates/sv-web-ui.yaml | 4 +- cluster/helm/splice-sv-node/templates/sv.yaml | 4 +- .../helm/splice-sv-node/values.schema.json | 3 + .../templates/ans-web-ui.yaml | 4 +- .../splice-validator/templates/validator.yaml | 4 +- .../templates/wallet-web-ui.yaml | 4 +- .../helm/splice-validator/values.schema.json | 3 + nix/canton-sources.json | 4 +- nix/shell.nix | 4 + 42 files changed, 505 insertions(+), 240 deletions(-) delete mode 100644 apps/app/src/test/scala/com/daml/network/integration/tests/auth/Auth0CredentialsPreflightIntegrationTest.scala create mode 100644 apps/common/frontend/src/components/DateWithDurationDisplay.tsx rename cluster/compose/validator/{token.py => get-token.py} (91%) diff --git a/apps/app/src/test/scala/com/daml/network/integration/plugins/UpdateHistorySanityCheckPlugin.scala b/apps/app/src/test/scala/com/daml/network/integration/plugins/UpdateHistorySanityCheckPlugin.scala index e690de5f..fa7b2ad4 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/plugins/UpdateHistorySanityCheckPlugin.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/plugins/UpdateHistorySanityCheckPlugin.scala @@ -1,20 +1,29 @@ package com.daml.network.integration.plugins +import cats.data.Chain import com.daml.ledger.javaapi.data.Identifier import com.daml.network.config.ConfigTransforms.updateAllScanAppConfigs_ import com.daml.network.config.SpliceConfig import com.daml.network.console.ScanAppBackendReference import com.daml.network.environment.EnvironmentImpl +import com.daml.network.http.v0.definitions.TreeEvent.members as treeEventMembers +import com.daml.network.http.v0.definitions.{ + AcsResponse, + TreeEvent, + UpdateHistoryItem, + UpdateHistoryReassignment, +} import 
com.daml.network.http.v0.definitions.UpdateHistoryItem.members import com.daml.network.http.v0.definitions.UpdateHistoryReassignment.Event.members as reassignmentMembers import com.daml.network.integration.tests.SpliceTests.SpliceTestConsoleEnvironment import com.daml.network.scan.automation.AcsSnapshotTrigger -import com.daml.network.util.QualifiedName +import com.daml.network.util.{QualifiedName, TriggerTestUtil} import com.digitalasset.canton.ScalaFuturesWithPatience +import com.digitalasset.canton.data.CantonTimestamp import com.digitalasset.canton.integration.EnvironmentSetupPlugin import com.digitalasset.canton.logging.NamedLoggerFactory import com.digitalasset.canton.tracing.TraceContext -import org.scalatest.Inspectors +import org.scalatest.{Inspectors, LoneElement} import org.scalatest.concurrent.Eventually import org.scalatest.matchers.should.Matchers import org.scalatest.time.{Millis, Span} @@ -32,7 +41,6 @@ import scala.util.control.NonFatal * which won't cause an error in the script. */ class UpdateHistorySanityCheckPlugin( - scanName: String, ignoredRootCreates: Seq[Identifier], ignoredRootExercises: Seq[(Identifier, String)], protected val loggerFactory: NamedLoggerFactory, @@ -40,7 +48,8 @@ class UpdateHistorySanityCheckPlugin( with Matchers with Eventually with Inspectors - with ScalaFuturesWithPatience { + with ScalaFuturesWithPatience + with LoneElement { override def beforeEnvironmentCreated(config: SpliceConfig): SpliceConfig = { updateAllScanAppConfigs_(config => config.copy(enableForcedAcsSnapshots = true))( @@ -52,115 +61,290 @@ class UpdateHistorySanityCheckPlugin( config: SpliceConfig, environment: SpliceTestConsoleEnvironment, ): Unit = { - // TODO(#14270): actually, we should be able to run this against any scan app, not just SV1 - // Only SV1 will work. - // Also, it might not be initialized if the test uses `manualStart` and it wasn't ever started. - environment.scans.local.find(scan => scan.name == scanName && scan.is_initialized).foreach { - scan => - // prevent races with the trigger when taking the forced manual snapshot - scan.automation.trigger[AcsSnapshotTrigger].pause().futureValue - - val snapshotRecordTime = scan.forceAcsSnapshotNow() - - paginateHistory(scan, None) - - val readLines = mutable.Buffer[String]() - val errorProcessor = ProcessLogger(line => readLines.append(line)) - try { - scala.sys.process - .Process( - Seq( - "python", - "scripts/scan-txlog/scan_txlog.py", - scan.httpClientConfig.url.toString(), - "--loglevel", - "DEBUG", - "--scan-balance-assertions", - "--stop-at-record-time", - snapshotRecordTime.toInstant.toString, - "--compare-acs-with-snapshot", - snapshotRecordTime.toInstant.toString, - ) ++ ignoredRootCreates.flatMap { templateId => - Seq("--ignore-root-create", QualifiedName(templateId).toString) - } ++ ignoredRootExercises.flatMap { case (templateId, choice) => - Seq("--ignore-root-exercise", s"${QualifiedName(templateId).toString}:$choice") - } - ) - .!(errorProcessor) - } catch { - case NonFatal(ex) => - logger.error("Failed to run scan_txlog.py. Dumping output.", ex)(TraceContext.empty) - readLines.foreach(logger.error(_)(TraceContext.empty)) - throw new RuntimeException("scan_txlog.py failed.", ex) - } + TraceContext.withNewTraceContext { implicit tc => + // A scan might not be initialized if the test uses `manualStart` and it wasn't ever started. 
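+      // Pause the ACS snapshot trigger on every initialized scan; when update-history backfilling
+      // is enabled, compare histories and ACS snapshots across scans before running scan_txlog.py.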
+ val initializedScans = environment.scans.local.filter(scan => scan.is_initialized) - readLines.filter { log => - log.contains("ERROR:") || log.contains("WARNING:") - } should be(empty) - forExactly(1, readLines) { line => - line should include("Reached end of stream") + TriggerTestUtil + .setTriggersWithin( + triggersToPauseAtStart = initializedScans.map(scan => + // prevent races with the trigger when taking the forced manual snapshot + scan.automation.trigger[AcsSnapshotTrigger] + ), + triggersToResumeAtStart = Seq(), + ) { + // This flag should have the same value on all scans + if (initializedScans.exists(_.config.updateHistoryBackfillEnabled)) { + initializedScans.foreach(waitUntilBackfillingComplete) + compareHistories(initializedScans) + compareSnapshots(initializedScans) + initializedScans.foreach(checkScanTxLogScript) + } else { + // Just call the /updates endpoint, make sure whatever happened in the test doesn't blow it up, + // and that pagination works as intended. + // Without backfilling, history only works on the founding SV + initializedScans.filter(_.config.isFirstSv).foreach(checkScanTxLogScript) + } } - - scan.automation.trigger[AcsSnapshotTrigger].resume() } } - // Just call the /updates endpoint, make sure whatever happened in the test doesn't blow it up, - // and that pagination works as intended. @tailrec private def paginateHistory( scan: ScanAppBackendReference, after: Option[(Long, String)], - ): Unit = { - val result = scan.getUpdateHistory(10, after, false) + acc: Chain[UpdateHistoryItem], + ): Chain[UpdateHistoryItem] = { + val result = scan.getUpdateHistory(10, after, lossless = false) + val newAcc = acc ++ Chain.fromSeq(result) result.lastOption match { - case None => () // done + case None => acc // done case Some(members.UpdateHistoryTransaction(last)) => - paginateHistory(scan, Some((last.migrationId, last.recordTime))) + paginateHistory( + scan, + Some((last.migrationId, last.recordTime)), + newAcc, + ) case Some(members.UpdateHistoryReassignment(last)) => last.event match { case reassignmentMembers.UpdateHistoryAssignment(event) => - paginateHistory(scan, Some((event.migrationId, last.recordTime))) + paginateHistory( + scan, + Some((event.migrationId, last.recordTime)), + newAcc, + ) case reassignmentMembers.UpdateHistoryUnassignment(event) => - paginateHistory(scan, Some((event.migrationId, last.recordTime))) + paginateHistory( + scan, + Some((event.migrationId, last.recordTime)), + newAcc, + ) } } } - // TODO(#14270): use this before running the scan_txlog.py script against a joining SV - def waitUntilBackfillingComplete( - scan: ScanAppBackendReference + private def compareHistories( + scans: Seq[ScanAppBackendReference] ): Unit = { - // Backfilling is initialized by ScanHistoryBackfillingTrigger, which should take 1-2 trigger invocations - // to complete. - // Most integration tests use config transforms to reduce the polling interval to 1sec, - // but some tests might not use the transforms and end up with the default long polling interval. - val estimatedTimeUntilBackfillingComplete = - 2 * scan.config.automation.pollingInterval.underlying + 5.seconds - - if (estimatedTimeUntilBackfillingComplete > 30.seconds) { - logger.warn( - s"Scan ${scan.name} has a long polling interval of ${scan.config.automation.pollingInterval.underlying}. " + - "Please disable UpdateHistorySanityCheckPlugin for this test or reduce the polling interval to avoid long waits." 
- )(TraceContext.empty) + val (founders, others) = scans.partition(_.config.isFirstSv) + val founder = founders.loneElement + val founderHistory = paginateHistory(founder, None, Chain.empty).toVector + forAll(others) { otherScan => + val otherScanHistory = paginateHistory(otherScan, None, Chain.empty).toVector + // One of them might be more advanced than the other. + // That's fine, we mostly want to check that backfilling works as expected. + val minSize = Math.min(founderHistory.size, otherScanHistory.size) + val otherComparable = otherScanHistory + .take(minSize) + .map(toComparableUpdateHistoryItem) + val founderComparable = founderHistory + .take(minSize) + .map(toComparableUpdateHistoryItem) + val different = otherComparable.zipWithIndex.collect { + case (otherItem, idx) if founderComparable(idx) != otherItem => + otherItem -> founderComparable(idx) + } + different should be(empty) + } + } + + // TODO (#14270): this is the same, or at least similar, to what we'll need to do for BFT reads. Adjust and DRY. + private def toComparableUpdateHistoryItem(item: UpdateHistoryItem): UpdateHistoryItem = + item match { + case members.UpdateHistoryTransaction(tx) => + // makes it deterministically by traversing the tree + def makeEventIdToNumber( + pending: List[TreeEvent], + acc: Map[String, Int], + currentN: Int, + ): Map[String, Int] = { + pending match { + case Nil => + acc + case tree :: tail => + tree match { + case treeEventMembers.CreatedEvent(value) => + makeEventIdToNumber(tail, acc + (value.eventId -> currentN), currentN + 1) + case treeEventMembers.ExercisedEvent(value) => + makeEventIdToNumber( + tail ++ value.childEventIds.map(tx.eventsById), + acc + (value.eventId -> currentN), + currentN + 1, + ) + } + } + } + val eventIdsMapping = makeEventIdToNumber( + tx.rootEventIds.map(tx.eventsById).toList, + Map.empty, + 0, + ) + members.UpdateHistoryTransaction( + tx.copy( + offset = "different across nodes", + rootEventIds = tx.rootEventIds.map(eventIdsMapping(_).toString), + eventsById = tx.eventsById.map { case (eventId, tree) => + eventIdsMapping(eventId).toString -> (tree match { + case treeEventMembers.CreatedEvent(value) => + treeEventMembers.CreatedEvent( + value.copy(eventId = eventIdsMapping(value.eventId).toString) + ) + case treeEventMembers.ExercisedEvent(value) => + treeEventMembers.ExercisedEvent( + value.copy( + eventId = eventIdsMapping(value.eventId).toString, + childEventIds = value.childEventIds.map(eventIdsMapping(_).toString), + ) + ) + }) + }, + ) + ) + case members.UpdateHistoryReassignment(assignment) => + val newEvent: UpdateHistoryReassignment.Event = assignment.event match { + case reassignmentMembers.UpdateHistoryAssignment(value) => + reassignmentMembers.UpdateHistoryAssignment( + value.copy(createdEvent = value.createdEvent.copy(eventId = "different across nodes")) + ) + case unassignment: reassignmentMembers.UpdateHistoryUnassignment => + unassignment + } + members.UpdateHistoryReassignment( + assignment.copy(offset = "different across nodes", event = newEvent) + ) + } + + private def checkScanTxLogScript(scan: ScanAppBackendReference)(implicit tc: TraceContext) = { + val snapshotRecordTime = scan.forceAcsSnapshotNow() + + val readLines = mutable.Buffer[String]() + val errorProcessor = ProcessLogger(line => readLines.append(line)) + try { + scala.sys.process + .Process( + Seq( + "python", + "scripts/scan-txlog/scan_txlog.py", + scan.httpClientConfig.url.toString(), + "--loglevel", + "DEBUG", + "--scan-balance-assertions", + "--stop-at-record-time", + 
snapshotRecordTime.toInstant.toString, + "--compare-acs-with-snapshot", + snapshotRecordTime.toInstant.toString, + ) ++ ignoredRootCreates.flatMap { templateId => + Seq("--ignore-root-create", QualifiedName(templateId).toString) + } ++ ignoredRootExercises.flatMap { case (templateId, choice) => + Seq("--ignore-root-exercise", s"${QualifiedName(templateId).toString}:$choice") + } + ) + .!(errorProcessor) + } catch { + case NonFatal(ex) => + logger.error("Failed to run scan_txlog.py. Dumping output.", ex) + readLines.foreach(logger.error(_)) + throw new RuntimeException("scan_txlog.py failed.", ex) } - withClue(s"Waiting for backfilling to complete on ${scan.name}") { - val patienceConfigForBackfillingInit: PatienceConfig = - PatienceConfig( - timeout = estimatedTimeUntilBackfillingComplete, - interval = Span(100, Millis), + readLines.filter { log => + log.contains("ERROR:") || log.contains("WARNING:") + } should be(empty) + forExactly(1, readLines) { line => + line should include("Reached end of stream") + } + } + + private def waitUntilBackfillingComplete( + scan: ScanAppBackendReference + )(implicit tc: TraceContext): Unit = { + if (scan.config.updateHistoryBackfillEnabled) { + // Backfilling is initialized by ScanHistoryBackfillingTrigger, which should take 1-2 trigger invocations + // to complete. + // Most integration tests use config transforms to reduce the polling interval to 1sec, + // but some tests might not use the transforms and end up with the default long polling interval. + val estimatedTimeUntilBackfillingComplete = + 2 * scan.config.automation.pollingInterval.underlying + 5.seconds + + if (estimatedTimeUntilBackfillingComplete > 30.seconds) { + logger.warn( + s"Scan ${scan.name} has a long polling interval of ${scan.config.automation.pollingInterval.underlying}. " + + "Please disable UpdateHistorySanityCheckPlugin for this test or reduce the polling interval to avoid long waits." + ) + } + + withClue(s"Waiting for backfilling to complete on ${scan.name}") { + val patienceConfigForBackfillingInit: PatienceConfig = + PatienceConfig( + timeout = estimatedTimeUntilBackfillingComplete, + interval = Span(100, Millis), + ) + eventually { + scan.automation.store.updateHistory + .getBackfillingState() + .futureValue + .exists(_.complete) should be(true) + }( + patienceConfigForBackfillingInit, + implicitly[org.scalatest.enablers.Retrying[org.scalatest.Assertion]], + implicitly[org.scalactic.source.Position], + ) + } + } else { + logger.debug("Backfilling is disabled, skipping wait.") + } + } + + private def compareSnapshots(scans: Seq[ScanAppBackendReference]) = { + val (founders, others) = scans.partition(_.config.isFirstSv) + val founder = founders.loneElement + val founderSnapshots = getAllSnapshots(founder, CantonTimestamp.MaxValue, Nil) + forAll(others) { otherScan => + val otherScanSnapshots = getAllSnapshots(otherScan, CantonTimestamp.MaxValue, Nil) + // One of them might have more snapshots than the other. 
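+        // Compare only the overlapping prefix; event IDs are normalized since they differ across nodes.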
+ val minSize = Math.min(founderSnapshots.size, otherScanSnapshots.size) + val otherComparable = otherScanSnapshots.take(minSize).map(toComparableSnapshot) + val founderComparable = founderSnapshots.take(minSize).map(toComparableSnapshot) + val different = otherComparable.zipWithIndex.collect { + case (otherItem, idx) if founderComparable(idx) != otherItem => + otherItem -> founderComparable(idx) + } + different should be(empty) + } + } + + private def toComparableSnapshot(acsResponse: AcsResponse) = { + acsResponse.copy(createdEvents = + acsResponse.createdEvents.map(_.copy(eventId = "different across nodes")) + ) + } + + private def getAllSnapshots( + scan: ScanAppBackendReference, + before: CantonTimestamp, + acc: List[AcsResponse], + ): List[AcsResponse] = { + val acsSnapshotPeriodHours = scan.config.acsSnapshotPeriodHours + val migrationId = scan.config.domainMigrationId + scan.getDateOfMostRecentSnapshotBefore(before, migrationId) match { + case Some(snapshotDate) => + val snapshot = scan + .getAcsSnapshotAt( + CantonTimestamp.assertFromInstant(snapshotDate.toInstant), + migrationId, + pageSize = 1000, + ) + .getOrElse(throw new IllegalStateException("Snapshot must exist by this point")) + getAllSnapshots( + scan, + CantonTimestamp.assertFromInstant( + // +1 second because it's < date, instead of <= date + snapshotDate.minusHours(acsSnapshotPeriodHours.toLong).plusSeconds(1L).toInstant + ), + snapshot :: acc, ) - eventually { - scan.automation.store.updateHistory - .getBackfillingState()(TraceContext.empty) - .futureValue - .exists(_.complete) should be(true) - }( - patienceConfigForBackfillingInit, - implicitly[org.scalatest.enablers.Retrying[org.scalatest.Assertion]], - implicitly[org.scalactic.source.Position], - ) + case None => + acc } } } diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/DecentralizedSynchronizerMigrationIntegrationTest.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/DecentralizedSynchronizerMigrationIntegrationTest.scala index 8fe14c9a..34a7b123 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/DecentralizedSynchronizerMigrationIntegrationTest.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/DecentralizedSynchronizerMigrationIntegrationTest.scala @@ -117,9 +117,6 @@ class DecentralizedSynchronizerMigrationIntegrationTest private val splitwellDarPath = "daml/splitwell/.daml/dist/splitwell-current.dar" - // We want the scan instance after the migration which contains both old and new data. 
- override def updateHistoryScanName = "sv1ScanLocal" - override def environmentDefinition : BaseEnvironmentDefinition[EnvironmentImpl, SpliceTestConsoleEnvironment] = EnvironmentDefinition diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/ScanTimeBasedIntegrationTest.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/ScanTimeBasedIntegrationTest.scala index 195e40a6..65855dca 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/ScanTimeBasedIntegrationTest.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/ScanTimeBasedIntegrationTest.scala @@ -442,12 +442,14 @@ class ScanTimeBasedIntegrationTest .ofHours(sv1ScanBackend.config.acsSnapshotPeriodHours.toLong) .plusSeconds(1L) ) - val snapshotAfter = sv1ScanBackend.getDateOfMostRecentSnapshotBefore( - getLedgerTime, - migrationId, - ) - eventually() { + + val snapshotAfter = eventually() { + val snapshotAfter = sv1ScanBackend.getDateOfMostRecentSnapshotBefore( + getLedgerTime, + migrationId, + ) snapshotBefore should not(be(snapshotAfter)) + snapshotAfter } val snapshotAfterData = sv1ScanBackend.getAcsSnapshotAt( diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/SpliceTests.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/SpliceTests.scala index f95de61d..2582a01b 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/SpliceTests.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/SpliceTests.scala @@ -114,14 +114,12 @@ object SpliceTests extends LazyLogging { protected lazy val resetRequiredTopologyState: Boolean = true protected def runUpdateHistorySanityCheck: Boolean = true - protected def updateHistoryScanName: String = "sv1Scan" protected lazy val updateHistoryIgnoredRootCreates: Seq[Identifier] = Seq.empty protected lazy val updateHistoryIgnoredRootExercises: Seq[(Identifier, String)] = Seq.empty if (runUpdateHistorySanityCheck) { registerPlugin( new UpdateHistorySanityCheckPlugin( - updateHistoryScanName, updateHistoryIgnoredRootCreates, updateHistoryIgnoredRootExercises, loggerFactory, @@ -150,14 +148,12 @@ object SpliceTests extends LazyLogging { with LedgerApiExtensions { protected def runUpdateHistorySanityCheck: Boolean = true - protected def updateHistoryScanName: String = "sv1Scan" protected lazy val updateHistoryIgnoredRootCreates: Seq[Identifier] = Seq.empty protected lazy val updateHistoryIgnoredRootExercises: Seq[(Identifier, String)] = Seq.empty if (runUpdateHistorySanityCheck) { registerPlugin( new UpdateHistorySanityCheckPlugin( - updateHistoryScanName, updateHistoryIgnoredRootCreates, updateHistoryIgnoredRootExercises, loggerFactory, diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/SvExpiredRewardsCollectionTimeBasedIntegrationTest.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/SvExpiredRewardsCollectionTimeBasedIntegrationTest.scala index 7e70c3c2..c6d4ef51 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/SvExpiredRewardsCollectionTimeBasedIntegrationTest.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/SvExpiredRewardsCollectionTimeBasedIntegrationTest.scala @@ -78,5 +78,9 @@ class SvExpiredRewardsCollectionTimeBasedIntegrationTest ) }, ) + + // it seems that without this, the round-party-totals aggregations cannot be computed for SV-2, + // and the scan-txlog script fails because it expects those to be there. 
+ advanceRoundsByOneTick } } diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/auth/Auth0CredentialsPreflightIntegrationTest.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/auth/Auth0CredentialsPreflightIntegrationTest.scala deleted file mode 100644 index 2d1a7961..00000000 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/auth/Auth0CredentialsPreflightIntegrationTest.scala +++ /dev/null @@ -1,37 +0,0 @@ -package com.daml.network.integration.tests.auth - -import com.daml.network.environment.EnvironmentImpl -import com.daml.network.integration.EnvironmentDefinition -import com.daml.network.integration.tests.SpliceTests.{ - IntegrationTestWithSharedEnvironment, - SpliceTestConsoleEnvironment, -} -import com.daml.network.integration.tests.runbook.PreflightIntegrationTestUtil -import com.digitalasset.canton.integration.BaseEnvironmentDefinition - -class Auth0CredentialsPreflightIntegrationTest - extends IntegrationTestWithSharedEnvironment - with PreflightIntegrationTestUtil - with PreflightAuthUtil { - - override lazy val resetRequiredTopologyState: Boolean = false - - override def environmentDefinition - : BaseEnvironmentDefinition[EnvironmentImpl, SpliceTestConsoleEnvironment] = - EnvironmentDefinition.preflightTopology( - this.getClass.getSimpleName - ) - - // Refreshes the auth0-preflight-token-cache secret on k8s if needed. This secret holds auth0 tokens for backends - // as needed for preflight tests. Note that currently, this function refreshes only tokens for those clients - // listed in PreflightAuthUtil.clientIds, while other preflight tests also create/refresh tokens for other - // clients. - // TODO(#10352) consider merging this secret with the FIXED_TOKENS cache created in Pulumi. - "Refresh auth0 credentials" in { _ => - clientIds.foreach { case (_, id) => - getAuth0ClientCredential(id, sys.env("OIDC_AUTHORITY_VALIDATOR_AUDIENCE"), auth0)( - noTracingLogger - ) - } - } -} diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/auth/PreflightAuthUtil.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/auth/PreflightAuthUtil.scala index 82d53f7a..6237d70c 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/auth/PreflightAuthUtil.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/auth/PreflightAuthUtil.scala @@ -2,8 +2,9 @@ package com.daml.network.integration.tests.auth import com.daml.network.console.SvAppClientReference import com.daml.network.environment.EnvironmentImpl -import com.daml.network.integration.tests.SpliceTests.{TestCommon, SpliceTestConsoleEnvironment} +import com.daml.network.integration.tests.SpliceTests.{SpliceTestConsoleEnvironment, TestCommon} import com.daml.network.integration.tests.runbook.PreflightIntegrationTestUtil +import com.daml.network.util.Auth0Util import com.digitalasset.canton.integration.BaseIntegrationTest trait PreflightAuthUtil extends PreflightIntegrationTestUtil { @@ -24,16 +25,15 @@ trait PreflightAuthUtil extends PreflightIntegrationTestUtil { "splitwell_validator" -> sys.env("SPLICE_OAUTH_DEV_CLIENT_ID_SPLITWELL_VALIDATOR"), ) - lazy val auth0 = auth0UtilFromEnvVars("dev") protected def svClientWithToken( name: String )(implicit env: SpliceTestConsoleEnvironment): SvAppClientReference = { val clientId = clientIds.get(name).value val token = eventuallySucceeds() { - getAuth0ClientCredential( + Auth0Util.getAuth0ClientCredential( clientId, sys.env("OIDC_AUTHORITY_SV_AUDIENCE"), - auth0, + 
sys.env("SPLICE_OAUTH_DEV_AUTHORITY"), )(noTracingLogger) } diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/PreflightIntegrationTestUtil.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/PreflightIntegrationTestUtil.scala index dd10b3a7..8d4f3c79 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/PreflightIntegrationTestUtil.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/PreflightIntegrationTestUtil.scala @@ -1,9 +1,6 @@ package com.daml.network.integration.tests.runbook -import com.daml.network.util.{Auth0Util, K8sUtil} import com.daml.network.integration.tests.SpliceTests.TestCommon -import com.typesafe.scalalogging.Logger - import scala.concurrent.duration.* trait PreflightIntegrationTestUtil extends TestCommon { @@ -34,19 +31,4 @@ trait PreflightIntegrationTestUtil extends TestCommon { )(check: String, checkFun: T => U): (T, U) = super.actAndCheck(timeUntilSuccess, maxPollInterval)(action, actionExpr)(check, checkFun) - protected def getAuth0ClientCredential( - clientId: String, - audience: String, - auth0: Auth0Util, - )(implicit logger: Logger): String = { - // lookup token from a cached k8s secret, or request a new one from auth0 if not found or expired - val cachedToken = - K8sUtil.PreflightTokenAccessor.getPreflightToken(clientId) - - cachedToken.getOrElse { - val token = auth0.getToken(clientId, audience) - K8sUtil.PreflightTokenAccessor.savePreflightToken(clientId, token) - token - }.accessToken - } } diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/RunbookSvPreflightIntegrationTestBase.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/RunbookSvPreflightIntegrationTestBase.scala index 9b77ffb8..c13186c1 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/RunbookSvPreflightIntegrationTestBase.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/RunbookSvPreflightIntegrationTestBase.scala @@ -6,6 +6,7 @@ import com.daml.network.integration.tests.SpliceTests.SpliceTestConsoleEnvironme import com.daml.network.integration.tests.FrontendIntegrationTestWithSharedEnvironment import com.daml.network.util.{ AnsFrontendTestUtil, + Auth0Util, FrontendLoginUtil, SvTestUtil, WalletFrontendTestUtil, @@ -281,12 +282,11 @@ abstract class RunbookSvPreflightIntegrationTestBase } "Key API endpoints are reachable and functional" in { implicit env => - val auth0 = auth0UtilFromEnvVars("sv") val token = eventuallySucceeds() { - getAuth0ClientCredential( + Auth0Util.getAuth0ClientCredential( sys.env("SPLICE_OAUTH_SV_TEST_CLIENT_ID_VALIDATOR"), "https://validator.example.com/api", - auth0, + sys.env("SPLICE_OAUTH_SV_TEST_AUTHORITY"), )(noTracingLogger) } val svValidatorClient = vc("svTestValidator").copy(token = Some(token)) diff --git a/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/ValidatorPreflightIntegrationTest.scala b/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/ValidatorPreflightIntegrationTest.scala index 9ee132d4..c4a02b9e 100644 --- a/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/ValidatorPreflightIntegrationTest.scala +++ b/apps/app/src/test/scala/com/daml/network/integration/tests/runbook/ValidatorPreflightIntegrationTest.scala @@ -84,10 +84,10 @@ abstract class ValidatorPreflightIntegrationTestBase val env = provideEnvironment("NotUsed") // retry on e.g. 
network errors and rate limits val token = eventuallySucceeds() { - getAuth0ClientCredential( + Auth0Util.getAuth0ClientCredential( validatorAuth0Secret, validatorAuth0Audience, - auth0, + auth0.domain, )(noTracingLogger) } diff --git a/apps/app/src/test/scala/com/daml/network/util/Auth0Util.scala b/apps/app/src/test/scala/com/daml/network/util/Auth0Util.scala index 05b20882..9b5f0d45 100644 --- a/apps/app/src/test/scala/com/daml/network/util/Auth0Util.scala +++ b/apps/app/src/test/scala/com/daml/network/util/Auth0Util.scala @@ -6,12 +6,14 @@ import com.auth0.client.mgmt.filter.UserFilter import com.auth0.json.mgmt.users.User import scala.jdk.CollectionConverters.* -import com.daml.network.auth.OAuthApi.TokenResponse -import com.daml.network.auth.AuthToken import com.digitalasset.canton.concurrent.Threading import com.digitalasset.canton.logging.{NamedLoggerFactory, NamedLogging} import com.digitalasset.canton.tracing.TraceContext +import com.typesafe.scalalogging.Logger +import scala.collection.mutable +import scala.util.control.NonFatal + class Auth0User(val id: String, val email: String, val password: String, val auth0: Auth0Util) extends AutoCloseable { override def close(): Unit = auth0.deleteUser(id) @@ -23,7 +25,7 @@ case class Auth0UserPage( ) class Auth0Util( - domain: String, + val domain: String, managementApiClientId: String, managementApiClientSecret: String, override val loggerFactory: NamedLoggerFactory, @@ -59,15 +61,6 @@ class Auth0Util( ) } - def getToken(clientId: String, audience: String): AuthToken = { - val client = executeManagementApiRequest(api.clients().get(clientId)) - val clientSecret = client.getClientSecret() - val appApi = new AuthAPI(domain, clientId, clientSecret) - val response = appApi.requestToken(audience).execute() - - AuthToken(TokenResponse(response.getAccessToken(), response.getExpiresIn())) - } - private def requestManagementAPIToken(): String = { auth.requestToken(s"${domain}/api/v2/").execute().getAccessToken() } @@ -80,4 +73,49 @@ class Auth0Util( Threading.sleep(500) req.execute() } + +} + +object Auth0Util { + def getAuth0ClientCredential( + clientId: String, + audience: String, + auth0Domain: String, + )(implicit logger: Logger): String = { + + val outLines = mutable.Buffer[String]() + val errLines = mutable.Buffer[String]() + val outProcessor = + scala.sys.process.ProcessLogger(line => outLines.append(line), line => errLines.append(line)) + try { + val ret = scala.sys.process + .Process( + Seq( + "build-tools/get-auth0-token.py", + "--client-id", + clientId, + "--audience", + audience, + "--auth0-domain", + auth0Domain, + ) + ) + .!(outProcessor) + if (ret != 0) { + // Stderr contains all actual output, stdout should consist of only the token if successful + logger.error("Failed to run get-auth0-token.py. Dumping output.") + errLines.foreach(logger.error(_)) + throw new RuntimeException("get-auth0-token.py failed") + } + } catch { + case NonFatal(ex) => + // Stderr contains all actual output, stdout should consist of only the token if successful + logger.error("Failed to run get-auth0-token.py. 
Dumping output.", ex) + errLines.foreach(logger.error(_)) + throw new RuntimeException("get-auth0-token.py failed.", ex) + } + + outLines.head + } + } diff --git a/apps/app/src/test/scala/com/daml/network/util/TriggerTestUtil.scala b/apps/app/src/test/scala/com/daml/network/util/TriggerTestUtil.scala index 4deeefc4..651181cf 100644 --- a/apps/app/src/test/scala/com/daml/network/util/TriggerTestUtil.scala +++ b/apps/app/src/test/scala/com/daml/network/util/TriggerTestUtil.scala @@ -4,7 +4,7 @@ import com.daml.network.automation.Trigger import com.daml.network.integration.EnvironmentDefinition.sv1Backend import com.daml.network.integration.tests.SpliceTests.SpliceTestConsoleEnvironment import com.daml.network.sv.automation.delegatebased.AdvanceOpenMiningRoundTrigger -import com.digitalasset.canton.BaseTest +import com.digitalasset.canton.{BaseTest, ScalaFuturesWithPatience} import scala.concurrent.duration.FiniteDuration @@ -15,15 +15,8 @@ trait TriggerTestUtil { self: BaseTest => def setTriggersWithin[T]( triggersToPauseAtStart: Seq[Trigger] = Seq.empty, triggersToResumeAtStart: Seq[Trigger] = Seq.empty, - )(codeBlock: => T) = { - try { - triggersToPauseAtStart.foreach(_.pause().futureValue) - triggersToResumeAtStart.foreach(_.resume()) - codeBlock - } finally { - triggersToPauseAtStart.foreach(_.resume()) - triggersToResumeAtStart.foreach(_.pause().futureValue) - } + )(codeBlock: => T): T = { + TriggerTestUtil.setTriggersWithin(triggersToPauseAtStart, triggersToResumeAtStart)(codeBlock) } // The trigger that advances rounds, running in the sv app @@ -40,3 +33,22 @@ trait TriggerTestUtil { self: BaseTest => } } } + +object TriggerTestUtil extends ScalaFuturesWithPatience { + + /** Enable/Disable triggers before executing a code block + */ + def setTriggersWithin[T]( + triggersToPauseAtStart: Seq[Trigger] = Seq.empty, + triggersToResumeAtStart: Seq[Trigger] = Seq.empty, + )(codeBlock: => T): T = { + try { + triggersToPauseAtStart.foreach(_.pause().futureValue) + triggersToResumeAtStart.foreach(_.resume()) + codeBlock + } finally { + triggersToPauseAtStart.foreach(_.resume()) + triggersToResumeAtStart.foreach(_.pause().futureValue) + } + } +} diff --git a/apps/common/frontend/src/components/DateWithDurationDisplay.tsx b/apps/common/frontend/src/components/DateWithDurationDisplay.tsx new file mode 100644 index 00000000..e4c79109 --- /dev/null +++ b/apps/common/frontend/src/components/DateWithDurationDisplay.tsx @@ -0,0 +1,37 @@ +// Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 +import { format } from 'date-fns'; +import dayjs from 'dayjs'; +import relativeTime from 'dayjs/plugin/relativeTime'; +import React from 'react'; + +dayjs.extend(relativeTime); + +interface DateWithDurationDisplayProps { + datetime: string | Date | undefined; + format?: string; + enableDuration?: boolean; +} + +const DateWithDurationDisplay: React.FC = ( + props: DateWithDurationDisplayProps +) => { + const { datetime, format: customFormat } = props; + + if (!datetime || datetime === 'initial') { + return <>initial; + } + + const f = customFormat || 'PPp O'; // Use custom format if provided, or default to 'PPp O' + const dateObj = typeof datetime === 'string' ? new Date(datetime) : datetime; + + const expireDuration = props.enableDuration ? 
`(${dayjs(dateObj).fromNow()})` : ''; + + return ( + <> + {format(dateObj, f)} {expireDuration} + + ); +}; + +export default DateWithDurationDisplay; diff --git a/apps/common/frontend/src/components/votes/ActionView.tsx b/apps/common/frontend/src/components/votes/ActionView.tsx index 052b5c8d..b27ced77 100644 --- a/apps/common/frontend/src/components/votes/ActionView.tsx +++ b/apps/common/frontend/src/components/votes/ActionView.tsx @@ -3,7 +3,6 @@ import { QueryObserverSuccessResult } from '@tanstack/react-query'; import { BaseVotesHooks, - DateDisplay, getAmuletConfigurationAsOfNow, Loading, PartyId, @@ -40,6 +39,7 @@ import { import { Time } from '@daml/types'; import AccordionList, { AccordionListProps } from '../AccordionList'; +import DateWithDurationDisplay from '../DateWithDurationDisplay'; import { DsoInfo } from '../Dso'; import { PrettyJsonDiff } from '../PrettyJsonDiff'; import { getAction } from './ListVoteRequests'; @@ -436,13 +436,16 @@ const AddFutureConfigValueTable: React.FC<{ actionName={amuletRulesAction.tag} valuesMap={{ 'Effective Time': ( - + ), }} accordionList={{ unfoldedAccordions: [ { - title: , + title: , content: ( ({ - title: , + title: , content: ( , + Time: , 'Comparing against config from': ( - + ), ScheduleItem: ( , + 'Effective Time': ( + + ), }} accordionList={{ unfoldedAccordions: [ { - title: , + title: , content: ( ({ - title: , + title: , content: ( { - if (props.datetime && (props.datetime instanceof Date || props.datetime !== 'initial')) { - return ; - } else { - return <>initial; - } -}; - const SetConfigValueTable: React.FC<{ votesHooks: BaseVotesHooks; dsoInfosQuery: QueryObserverSuccessResult; @@ -672,13 +672,10 @@ const SetConfigValueTable: React.FC<{ , - }} accordionList={{ unfoldedAccordions: [ { - title: , + title: , content: ( ({ - title: , + title: , content: , })), }} diff --git a/apps/common/frontend/src/components/votes/VoteModalContent.tsx b/apps/common/frontend/src/components/votes/VoteModalContent.tsx index 1b9a8de2..07c7d5a9 100644 --- a/apps/common/frontend/src/components/votes/VoteModalContent.tsx +++ b/apps/common/frontend/src/components/votes/VoteModalContent.tsx @@ -1,7 +1,7 @@ // Copyright (c) 2024 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. 
// SPDX-License-Identifier: Apache-2.0 import { useVotesHooks } from 'common-frontend'; -import { CopyableTypography, DateDisplay, PartyId, SvVote } from 'common-frontend'; +import { CopyableTypography, PartyId, SvVote } from 'common-frontend'; import React, { ReactElement, useCallback } from 'react'; import CheckIcon from '@mui/icons-material/Check'; @@ -26,6 +26,7 @@ import { import { ContractId, Party } from '@daml/types'; import { Reason } from '../../models'; +import DateWithDurationDisplay from '../DateWithDurationDisplay'; import ActionView from './ActionView'; import { VoteRequestResultTableType } from './VoteResultsFilterTable'; @@ -128,7 +129,7 @@ const VoteModalContent: React.FC = ({ Expires At - + diff --git a/build-tools/npm-install.sh b/build-tools/npm-install.sh index 64ee2471..b625885a 100755 --- a/build-tools/npm-install.sh +++ b/build-tools/npm-install.sh @@ -7,5 +7,5 @@ if [ -z "${CI}" ]; then npm install else # shellcheck disable=SC2015 - for _ in {1..5}; do npm ci && break || sleep 15; done + for _ in {1..5}; do npm ci && break || npm cache verify && sleep 15; done fi diff --git a/build.sbt b/build.sbt index 41a249d4..5f32ef03 100644 --- a/build.sbt +++ b/build.sbt @@ -1424,8 +1424,6 @@ printTests := { name ) && name.contains("SvReOnboard") def isDamlCiupgradeVote(name: String): Boolean = name contains "DamlCIUpgradeVote" - def isAuth0CredentialsPreflightIntegrationTest(name: String): Boolean = - isPreflightIntegrationTest(name) && name.contains("Auth0Credentials") def isDockerComposeValidatorPreflightIntegrationTest(name: String): Boolean = isPreflightIntegrationTest(name) && name.contains("DockerComposeValidator") @@ -1471,11 +1469,6 @@ printTests := { "test-full-class-names-re-onboard-sv-runbook-preflight.log", (t: String) => isSvReOnboardPreflightIntegrationTest(t), ), - ( - "Fetch UI credentials from Auth0 and store them in a k8s secret", - "test-full-class-names-auth0-credentials-preflight.log", - (t: String) => isAuth0CredentialsPreflightIntegrationTest(t), - ), ( "Docker Compose validator preflight test", "test-full-class-names-docker-compose-validator-preflight.log", diff --git a/cluster/compose/validator/token.py b/cluster/compose/validator/get-token.py similarity index 91% rename from cluster/compose/validator/token.py rename to cluster/compose/validator/get-token.py index 7c21cb5d..f1028eb0 100755 --- a/cluster/compose/validator/token.py +++ b/cluster/compose/validator/get-token.py @@ -9,7 +9,7 @@ import time if (len(sys.argv) != 2): - print("Usage: token.py ") + print("Usage: get-token.py ") sys.exit(1) username = sys.argv[1] diff --git a/cluster/helm/cn-docs/templates/docs.yaml b/cluster/helm/cn-docs/templates/docs.yaml index 77c00276..aae21803 100644 --- a/cluster/helm/cn-docs/templates/docs.yaml +++ b/cluster/helm/cn-docs/templates/docs.yaml @@ -24,7 +24,9 @@ spec: containers: - name: docs image: "{{ .Values.imageRepo }}/docs:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).docs) }}" - imagePullPolicy: "Always" + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} ports: - name: http containerPort: 80 @@ -87,7 +89,9 @@ spec: containers: - name: gcs-proxy image: "{{ .Values.imageRepo }}/gcs-proxy:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).gcs_proxy) }}" - imagePullPolicy: "Always" + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . 
}} + {{- end }} ports: - name: http containerPort: 8080 diff --git a/cluster/helm/splice-cometbft/templates/deployment.yaml b/cluster/helm/splice-cometbft/templates/deployment.yaml index 4289c570..1a2bbcb4 100644 --- a/cluster/helm/splice-cometbft/templates/deployment.yaml +++ b/cluster/helm/splice-cometbft/templates/deployment.yaml @@ -45,7 +45,9 @@ spec: containers: - name: "cometbft" image: {{ $.Values.imageRepo }}/cometbft:{{ $.Chart.AppVersion }}{{ (($.Values.imageDigests).cometbft) }} - imagePullPolicy: Always + {{- with $.Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} ports: - name: p2p containerPort: {{ .p2pPort }} diff --git a/cluster/helm/splice-cometbft/values.schema.json b/cluster/helm/splice-cometbft/values.schema.json index 4fd1c6f4..7279844a 100644 --- a/cluster/helm/splice-cometbft/values.schema.json +++ b/cluster/helm/splice-cometbft/values.schema.json @@ -8,6 +8,9 @@ "chainId" ], "properties": { + "imagePullPolicy": { + "type": "string" + }, "chainId": { "type": "string" }, diff --git a/cluster/helm/splice-domain/templates/domain.yaml b/cluster/helm/splice-domain/templates/domain.yaml index 76d6e617..0cdf32e2 100644 --- a/cluster/helm/splice-domain/templates/domain.yaml +++ b/cluster/helm/splice-domain/templates/domain.yaml @@ -43,7 +43,9 @@ spec: - name: CANTON_MEDIATOR_POSTGRES_DB value: {{ .Values.mediator.persistence.databaseName }} {{- end }} - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} ports: - containerPort: 5008 name: cd-pub-api diff --git a/cluster/helm/splice-global-domain/templates/mediator.yaml b/cluster/helm/splice-global-domain/templates/mediator.yaml index 0d2ef3c6..b50423f7 100644 --- a/cluster/helm/splice-global-domain/templates/mediator.yaml +++ b/cluster/helm/splice-global-domain/templates/mediator.yaml @@ -48,7 +48,9 @@ spec: {{- end }} {{- include "splice-util-lib.additional-env-vars" .Values.additionalEnvVars | indent 8}} {{- include "splice-util-lib.log-level" .Values | indent 8}} - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} ports: - containerPort: 5007 name: cm-adm-api diff --git a/cluster/helm/splice-global-domain/templates/sequencer.yaml b/cluster/helm/splice-global-domain/templates/sequencer.yaml index 80f5ef53..961cff9d 100644 --- a/cluster/helm/splice-global-domain/templates/sequencer.yaml +++ b/cluster/helm/splice-global-domain/templates/sequencer.yaml @@ -83,7 +83,9 @@ spec: {{- end }} {{- include "splice-util-lib.additional-env-vars" .Values.additionalEnvVars | indent 12}} {{- include "splice-util-lib.log-level" .Values | indent 12}} - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} ports: - containerPort: 5008 name: cs-pub-api diff --git a/cluster/helm/splice-load-tester/templates/load-tester.yaml b/cluster/helm/splice-load-tester/templates/load-tester.yaml index 975fcc43..d3245427 100644 --- a/cluster/helm/splice-load-tester/templates/load-tester.yaml +++ b/cluster/helm/splice-load-tester/templates/load-tester.yaml @@ -23,7 +23,9 @@ spec: containers: - name: load-tester image: "{{ .Values.imageRepo }}/load-tester:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).load_tester) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . 
}} + {{- end }} env: - name: EXTERNAL_CONFIG value: {{ .Values.config | quote }} diff --git a/cluster/helm/splice-participant/templates/participant.yaml b/cluster/helm/splice-participant/templates/participant.yaml index 059bfad8..ae22477a 100644 --- a/cluster/helm/splice-participant/templates/participant.yaml +++ b/cluster/helm/splice-participant/templates/participant.yaml @@ -29,7 +29,9 @@ spec: containers: - name: participant image: "{{ .Values.imageRepo }}/canton-participant:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).canton_participant) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} env: - name: JAVA_TOOL_OPTIONS value: {{ .Values.defaultJvmOptions }} {{ .Values.additionalJvmOptions }} diff --git a/cluster/helm/splice-participant/values.schema.json b/cluster/helm/splice-participant/values.schema.json index 8668c85c..db18720f 100644 --- a/cluster/helm/splice-participant/values.schema.json +++ b/cluster/helm/splice-participant/values.schema.json @@ -15,6 +15,9 @@ "imageDigests": { "type": "object" }, + "imagePullPolicy": { + "type": "string" + }, "defaultJvmOptions": { "type": "string" }, diff --git a/cluster/helm/splice-postgres/values.schema.json b/cluster/helm/splice-postgres/values.schema.json index 11d7d3d1..fe76b7c3 100644 --- a/cluster/helm/splice-postgres/values.schema.json +++ b/cluster/helm/splice-postgres/values.schema.json @@ -6,6 +6,9 @@ "imageRepo": { "type": "string" }, + "imagePullPolicy": { + "type": "string" + }, "resources": { "type": "object", "properties": { diff --git a/cluster/helm/splice-scan/templates/scan.yaml b/cluster/helm/splice-scan/templates/scan.yaml index 362c766e..4cb4c347 100644 --- a/cluster/helm/splice-scan/templates/scan.yaml +++ b/cluster/helm/splice-scan/templates/scan.yaml @@ -86,7 +86,9 @@ spec: value: {{ .Values.spliceInstanceNames.nameServiceNameAcronym | quote }} {{- include "splice-util-lib.additional-env-vars" .Values.additionalEnvVars | indent 8}} {{- include "splice-util-lib.log-level" .Values | indent 8}} - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} ports: - containerPort: 5012 name: scan-api @@ -202,7 +204,9 @@ spec: - name: SPLICE_APP_UI_NAME_SERVICE_NAME_ACRONYM value: {{ .Values.spliceInstanceNames.nameServiceNameAcronym | quote }} image: "{{ .Values.imageRepo }}/scan-web-ui:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).scan_web_ui) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} name: scan-web-ui ports: - containerPort: 80 diff --git a/cluster/helm/splice-scan/values.schema.json b/cluster/helm/splice-scan/values.schema.json index 331b1fcb..573fae29 100644 --- a/cluster/helm/splice-scan/values.schema.json +++ b/cluster/helm/splice-scan/values.schema.json @@ -9,6 +9,9 @@ "imageDigests": { "type": "object" }, + "imagePullPolicy": { + "type": "string" + }, "defaultJvmOptions": { "type": "string" }, diff --git a/cluster/helm/splice-splitwell-app/templates/splitwell.yaml b/cluster/helm/splice-splitwell-app/templates/splitwell.yaml index d2f1bf0d..e88b7733 100644 --- a/cluster/helm/splice-splitwell-app/templates/splitwell.yaml +++ b/cluster/helm/splice-splitwell-app/templates/splitwell.yaml @@ -24,7 +24,9 @@ spec: containers: - name: splitwell-app image: "{{ .Values.imageRepo }}/splitwell-app:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).splitwell_app) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . 
}} + {{- end }} env: - name: JAVA_TOOL_OPTIONS value: {{ .Values.defaultJvmOptions }} {{ .Values.additionalJvmOptions }} diff --git a/cluster/helm/splice-splitwell-web-ui/templates/splitwell-web-ui.yaml b/cluster/helm/splice-splitwell-web-ui/templates/splitwell-web-ui.yaml index ccf766be..3da630cd 100644 --- a/cluster/helm/splice-splitwell-web-ui/templates/splitwell-web-ui.yaml +++ b/cluster/helm/splice-splitwell-web-ui/templates/splitwell-web-ui.yaml @@ -23,7 +23,9 @@ spec: containers: - name: splitwell-web-ui image: "{{ .Values.imageRepo }}/splitwell-web-ui:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).splitwell_web_ui) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} env: - name: SPLICE_APP_UI_AUTH_CLIENT_ID valueFrom: diff --git a/cluster/helm/splice-splitwell-web-ui/values.schema.json b/cluster/helm/splice-splitwell-web-ui/values.schema.json index 9f6d9500..eb4e537b 100644 --- a/cluster/helm/splice-splitwell-web-ui/values.schema.json +++ b/cluster/helm/splice-splitwell-web-ui/values.schema.json @@ -15,6 +15,9 @@ { "required": ["spliceInstanceNames", "auth"], "properties": { + "imagePullPolicy": { + "type": "string" + }, "auth": { "type": "object", "properties": { diff --git a/cluster/helm/splice-sv-node/templates/sv-web-ui.yaml b/cluster/helm/splice-sv-node/templates/sv-web-ui.yaml index ebd273c4..eb388b8d 100644 --- a/cluster/helm/splice-sv-node/templates/sv-web-ui.yaml +++ b/cluster/helm/splice-sv-node/templates/sv-web-ui.yaml @@ -54,7 +54,9 @@ spec: - name: SPLICE_APP_UI_NAME_SERVICE_NAME_ACRONYM value: {{ .Values.spliceInstanceNames.nameServiceNameAcronym | quote }} image: "{{ .Values.imageRepo }}/sv-web-ui:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).sv_web_ui) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} name: sv-web-ui ports: - containerPort: 80 diff --git a/cluster/helm/splice-sv-node/templates/sv.yaml b/cluster/helm/splice-sv-node/templates/sv.yaml index d054026b..c34acfe0 100644 --- a/cluster/helm/splice-sv-node/templates/sv.yaml +++ b/cluster/helm/splice-sv-node/templates/sv.yaml @@ -29,7 +29,9 @@ spec: containers: - name: sv-app image: "{{ .Values.imageRepo }}/sv-app:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).sv_app) }}" - imagePullPolicy: "Always" + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} env: - name: JAVA_TOOL_OPTIONS value: {{ .Values.defaultJvmOptions }} {{ .Values.additionalJvmOptions }} diff --git a/cluster/helm/splice-sv-node/values.schema.json b/cluster/helm/splice-sv-node/values.schema.json index da5c16e2..c6bed108 100644 --- a/cluster/helm/splice-sv-node/values.schema.json +++ b/cluster/helm/splice-sv-node/values.schema.json @@ -15,6 +15,9 @@ { "required": ["auth", "spliceInstanceNames"], "properties": { + "imagePullPolicy": { + "type": "string" + }, "auth": { "type": "object", "properties": { diff --git a/cluster/helm/splice-validator/templates/ans-web-ui.yaml b/cluster/helm/splice-validator/templates/ans-web-ui.yaml index 5f628267..c54213db 100644 --- a/cluster/helm/splice-validator/templates/ans-web-ui.yaml +++ b/cluster/helm/splice-validator/templates/ans-web-ui.yaml @@ -28,7 +28,9 @@ spec: containers: - name: ans-web-ui image: "{{ .Values.imageRepo }}/ans-web-ui:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).ans_web_ui) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . 
}} + {{- end }} env: - name: SPLICE_APP_UI_AUTH_CLIENT_ID valueFrom: diff --git a/cluster/helm/splice-validator/templates/validator.yaml b/cluster/helm/splice-validator/templates/validator.yaml index c03042ee..bdef27dd 100644 --- a/cluster/helm/splice-validator/templates/validator.yaml +++ b/cluster/helm/splice-validator/templates/validator.yaml @@ -29,7 +29,9 @@ spec: containers: - name: validator-app image: "{{ .Values.imageRepo }}/validator-app:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).validator_app) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} env: - name: JAVA_TOOL_OPTIONS value: {{ .Values.defaultJvmOptions }} {{ .Values.additionalJvmOptions }} diff --git a/cluster/helm/splice-validator/templates/wallet-web-ui.yaml b/cluster/helm/splice-validator/templates/wallet-web-ui.yaml index 4c2d08fb..b7a1efda 100644 --- a/cluster/helm/splice-validator/templates/wallet-web-ui.yaml +++ b/cluster/helm/splice-validator/templates/wallet-web-ui.yaml @@ -54,7 +54,9 @@ spec: - name: SPLICE_APP_UI_NAME_SERVICE_NAME_ACRONYM value: {{ .Values.spliceInstanceNames.nameServiceNameAcronym | quote }} image: "{{ .Values.imageRepo }}/wallet-web-ui:{{ .Chart.AppVersion }}{{ ((.Values.imageDigests).wallet_web_ui) }}" - imagePullPolicy: Always + {{- with .Values.imagePullPolicy }} + imagePullPolicy: {{ . }} + {{- end }} name: wallet-web-ui ports: - containerPort: 80 diff --git a/cluster/helm/splice-validator/values.schema.json b/cluster/helm/splice-validator/values.schema.json index 4ae2330d..b287a338 100644 --- a/cluster/helm/splice-validator/values.schema.json +++ b/cluster/helm/splice-validator/values.schema.json @@ -2,6 +2,9 @@ "$schema": "http://json-schema.org/schema#", "type": "object", "properties": { + "imagePullPolicy": { + "type": "string" + }, "auth": { "type": "object", "description": "The authentication configuration for the application" diff --git a/nix/canton-sources.json b/nix/canton-sources.json index f8e7eb60..e47b4bdf 100644 --- a/nix/canton-sources.json +++ b/nix/canton-sources.json @@ -1,5 +1,5 @@ { - "version": "3.1.0-snapshot.20241003.13782.0.v74d2c1e2", + "version": "3.1.0-snapshot.20241008.13784.0.v91ce1f64", "tooling_sdk_version": "3.1.0-snapshot.20240717.13187.0.va47ab77f", - "sha256": "sha256:12rri5b1rg29nrdkwwb4v48bckr3hlml2y10bfx3mmca3b6lrlfs" + "sha256": "sha256:0aq0aljq2w751v8lqqhx84w7m5h8za56687sqkpnbf898p9h5dzp" } diff --git a/nix/shell.nix b/nix/shell.nix index db05f7d6..357267e9 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -17,6 +17,7 @@ in pkgs.mkShell { istioctl ammonite auth0-cli + bc cabal2nix canton circleci-cli @@ -54,6 +55,7 @@ in pkgs.mkShell { pulumi-bin python3 python3Packages.aiohttp + python3Packages.auth0-python python3Packages.colorlog (python3Packages.datadog.overrideAttrs (old: { doCheck = false; @@ -61,6 +63,8 @@ in pkgs.mkShell { })) python3Packages.GitPython python3Packages.gql + python3Packages.jsonpickle + python3Packages.kubernetes python3Packages.marshmallow-dataclass python3Packages.polib python3Packages.pyjwt