
Commit abbfa2e

Merge pull request #2664 from hyperledger-labs/martinflorian-da/hls-2074-merge-main-take-2
3.4: Bump Canton binary+fork and sync up with main
2 parents 6447ce5 + f18cc19 commit abbfa2e


2,354 files changed (+132974 / −112156 lines)


.envrc.vars

Lines changed: 1 addition & 4 deletions
@@ -16,7 +16,7 @@ export PUBLIC_CONFIGS_PATH=${SPLICE_ROOT}/cluster/configs/configs
 
 # Inrease code heap sizes to avoid issues
 # Defaults NonNMethodCodeHeapSize=7M,NonProfiledCodeHeapSize=122M,ProfiledCodeHeapSize=122M
-export SBT_OPTS="-Xmx6G -Xms2G -Xss2M -XX:+UseG1GC -XX:NonNMethodCodeHeapSize=32M -XX:NonProfiledCodeHeapSize=256M -XX:ProfiledCodeHeapSize=256M -XX:ReservedCodeCacheSize=544M"
+export SBT_OPTS="-Xmx8G -Xms2G -Xss2M -XX:+UseG1GC -XX:NonNMethodCodeHeapSize=32M -XX:NonProfiledCodeHeapSize=256M -XX:ProfiledCodeHeapSize=256M -XX:ReservedCodeCacheSize=544M"
 
 # Provide a simple way to get the path to `sbt-launch.jar` for IntelliJ setup
 export SBT_LAUNCH_PATH="$(dirname "$(dirname "$(which sbt)")")/share/sbt/bin/sbt-launch.jar"
@@ -31,7 +31,6 @@ export POSTGRES_HOST="localhost"
 export POSTGRES_USER=postgres
 export POSTGRES_PASSWORD=postgres
 
-
 # ** Docker&Helm registry configs
 
 export GHCR=ghcr.io
@@ -47,7 +46,6 @@ export RELEASE_HELM_REGISTRY=$RELEASE_REGISTRY/helm
 export OCI_RELEASE_HELM_REGISTRY=oci://$RELEASE_HELM_REGISTRY
 export RELEASE_DOCKER_REGISTRY=$RELEASE_REGISTRY/docker
 
-
 # ** Cluster deployment configs
 
 export CLOUDSDK_COMPUTE_REGION="us-central1"
@@ -102,7 +100,6 @@ export SPLICE_OAUTH_DEV_CLIENT_ID_SPLITWELL_VALIDATOR=hqpZ6TP0wGyG2yYwhH6NLpuo0M
 export SPLICE_OAUTH_SV_TEST_AUTHORITY=canton-network-sv-test.us.auth0.com
 export SPLICE_OAUTH_SV_TEST_CLIENT_ID_VALIDATOR=bUfFRpl2tEfZBB7wzIo9iRNGTj8wMeIn
 
-
 # Force auth through gke-gcloud-auth-plugin
 # See https://cloud.google.com/blog/products/containers-kubernetes/kubectl-auth-changes-in-gke
 export USE_GKE_GCLOUD_AUTH_PLUGIN=true
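
The only functional change here is the sbt heap bump from -Xmx6G to -Xmx8G, which takes effect once direnv reloads the file. If you want to confirm the flags actually reached the sbt JVM, one quick check from any Scala console started by sbt is to list the JVM input arguments; a minimal sketch, not part of the commit:

// Paste into an sbt-started Scala console to verify the SBT_OPTS flags were applied.
import scala.jdk.CollectionConverters._
import java.lang.management.ManagementFactory

val jvmArgs = ManagementFactory.getRuntimeMXBean.getInputArguments.asScala
jvmArgs
  .filter(a => a.startsWith("-Xmx") || a.contains("CodeHeapSize") || a.contains("CodeCacheSize"))
  .foreach(println) // expect -Xmx8G plus the enlarged code heap / code cache sizes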

.github/actions/tests/skip_on_static/action.yml

Lines changed: 1 addition & 0 deletions
@@ -26,6 +26,7 @@ runs:
         # is _before_ the approval already e.g. when an external contributor
         # created the PR and not when the maintainer approved it after adding the static label.
         pr_labels=$(curl -sSL --fail-with-body -H "Authorization: Bearer ${{ inputs.gh_token }}" \
+          --retry 10 --retry-delay 10 --retry-all-errors \
           -H "Accept: application/vnd.github.v3+json" \
           "${{ github.event.pull_request.url }}" | jq '.labels')
         echo "Pull request labels: $pr_labels"
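
The added flags make the label lookup retry transient GitHub API failures: up to 10 attempts, 10 seconds apart, on any error. For illustration only, the same retry-with-fixed-delay behaviour expressed as a small Scala sketch (the fetchLabels call named in the comment is hypothetical):

import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}

@tailrec
def retry[A](attemptsLeft: Int, delayMillis: Long)(op: => A): A =
  Try(op) match {
    case Success(value) => value
    case Failure(_) if attemptsLeft > 1 =>
      Thread.sleep(delayMillis) // fixed pause between attempts, like --retry-delay 10
      retry(attemptsLeft - 1, delayMillis)(op)
    case Failure(err) => throw err
  }

// e.g. retry(attemptsLeft = 10, delayMillis = 10000)(fetchLabels())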
Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+name: Auto-assign issues from external contributors
+
+on:
+  issues:
+    types: [opened]
+
+jobs:
+  assign:
+    runs-on: ubuntu-24.04
+    steps:
+      - name: Assign issue from external contributors
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        with:
+          script: |
+            const issue = context.payload.issue;
+            const author = issue.user.login.toLowerCase();
+            const isInternalContributor = author.endsWith('-da') || author === 'cocreature';
+            if (issue.assignees.length === 0 && !isInternalContributor) {
+              console.log('Assigning issue to the triage team...');
+              await github.rest.issues.addAssignees({
+                issue_number: issue.number,
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                assignees: ['isegall-da', 'martinflorian-da', 'ray-roestenburg-da'],
+              });
+            }
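
The workflow only assigns the triage team when the issue has no assignees yet and the author does not look internal. The same classification rule restated as a Scala sketch, purely for clarity:

// Illustrative restatement of the workflow's author check.
def isInternalContributor(login: String): Boolean = {
  val author = login.toLowerCase
  author.endsWith("-da") || author == "cocreature"
}

assert(isInternalContributor("MartinFlorian-DA")) // case-insensitive match on the -da suffix
assert(!isInternalContributor("some-external-user"))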

LATEST_RELEASE

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-0.4.18
+0.4.20

MAINTENANCE.md

Lines changed: 14 additions & 11 deletions
@@ -14,16 +14,17 @@
 ## Bumping Canton
 
 1. Generate a patch file of the JSON API v2 OpenAPI definition by running `diff-openapi.sh` in `token-standard/dependencies/canton-json-api-v2/openapi/`.
-2. Update the Canton Enterprise `version` in `nix/canton-sources.json`. The currently published versions on
+2. Choose the Canton version you wish to upgrade to. The currently published versions on
    Artifactory can be found [here](https://digitalasset.jfrog.io/ui/repos/tree/General/canton-enterprise).
-3. Update the `sha256` hash in the same file by first running `direnv reload` to make the hash validation fail
-   and using the 'got' hash printed by nix. This is usually easier and more accurate than copying the sha256 hash
-   displayed for the release version in Artifactory.
-4. In case you have also made configuration changes to Canton in `simple-topology-canton.conf`, remember
+3. Compute the hashes of the corresponding enterprise and oss versions by running:
+   `nix store prefetch-file --json --hash-type sha256 https://digitalasset.jfrog.io/artifactory/canton-enterprise/canton-enterprise-<version>.tar.gz | jq -r '.hash'` and
+   `nix store prefetch-file --json --hash-type sha256 https://www.canton.io/releases/canton-open-source-<version>.tar.gz | jq -r '.hash'`
+4. Update the Canton version and hashes of the oss and enterprise versions in `nix/canton-sources.json`.
+5. In case you have also made configuration changes to Canton in `simple-topology-canton.conf`, remember
    to also make the corresponding changes for our cluster deployments. It is recommended to test any configuration
    changes on scratchnet first.
-5. Update the OpenAPI definitions from step 1 by running `update-openapi.sh` in `token-standard/dependencies/canton-json-api-v2/openapi/`.
-6. Cleanup the `openapi.patch` file.
+6. Update the OpenAPI definitions from step 1 by running `update-openapi.sh` in `token-standard/dependencies/canton-json-api-v2/openapi/`.
+7. Cleanup the `openapi.patch` file.
    Check `token-standard/dependencies/canton-json-api-v2/openapi/CHANGES.md` and apply any changes manually if CI breaks due to
    token standard CLI issues that look caused by bad OpenAPI definitions.
 
@@ -46,7 +47,7 @@ Initial setup:
 1. Check out the [Canton **Open Source** repo](https://github.com/digital-asset/canton)
 2. Define the environment variable used in the commands below using `export PATH_TO_CANTON_OSS=<your-canton-oss-repo-path>`. This can be added to your private env vars.
 
-Current Canton commit: `0467621f75718cedee33887a535fab598954b639`
+Current Canton commit: `79e645eb60ba378536a6d62cabbeab78d1be6c61`
 
 1. Checkout the **current Canton commit listed above** in the Canton open source repo from above, so we can diff our current fork against this checkout.
 2. Change to your checkout of the Splice repo and execute the following steps:
@@ -66,9 +67,11 @@ Current Canton commit: `0467621f75718cedee33887a535fab598954b639`
    1. The current Canton commit in this `README.md`
   2. If we're also updating the sdk version (this can lead to dar changes so we might skip it)
      1. Set `version` in `CantonDependencies.scala` to the SDK version from Step 3.1
-     2. Set `sdk_version` in `nix/canton-sources.json` to the SDK release version from Step 3.1.
-     3. Bump the sdk version in our own `daml.yaml` and `*.nix` files via `./set-sdk.sh $sdkversion` to the same Daml SDK version.
-     4. Change the hashes for both the linux and macos releases in `daml2js.nix`. To do so change a character of the `sha256` digest (e.g. "ef..." -> "0f...") in `daml2js.nix`,
+     2. Set `tooling_sdk_version` in `nix/canton-sources.json` to the SDK release version from Step 3.1.
+     3. Find in [Daml releases](https://github.com/digital-asset/daml/releases) the daml release that is "based on SDK" with the SDK from Step 3.1.
+        Set `daml_release` in `nix/cantno-sources.json` to that release.
+     4. Bump the sdk version in our own `daml.yaml` and `*.nix` files via `./set-sdk.sh $sdkversion` to the same Daml SDK version.
+     5. Change the hashes for both the linux and macos releases in `daml2js.nix`. To do so change a character of the `sha256` digest (e.g. "ef..." -> "0f...") in `daml2js.nix`,
         and then call `direnv reload` to make the hash validation fail. Adjust the `sha256` digest by copying back the new hash when Nix throws an error during validation.
         Note that nix may print the hash in base64, when you specified it in base16, or vice versa. Just copying the 'got' hash should work in either case.
 6. Create another commit, `git add -A && git reset '*.rej' && git commit -s -m"Bump Canton commit and Canton/SDK versions" --no-verify`
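
The revised steps 3–4 replace the old direnv-reload trick with explicit hash prefetching for both tarballs. If you prefer to script the two lookups together, a hedged sketch is shown below; it assumes nix and jq are on PATH, and the helper names are invented rather than taken from the repo:

// Hypothetical helper, not part of the repo: runs the same `nix store prefetch-file`
// pipeline as step 3 and returns the hash reported for one tarball URL.
import scala.sys.process._

def prefetchSha256(url: String): String =
  Seq("bash", "-c", s"nix store prefetch-file --json --hash-type sha256 '$url' | jq -r '.hash'").!!.trim

def cantonHashes(version: String): (String, String) = {
  val enterprise = prefetchSha256(
    s"https://digitalasset.jfrog.io/artifactory/canton-enterprise/canton-enterprise-$version.tar.gz")
  val openSource = prefetchSha256(
    s"https://www.canton.io/releases/canton-open-source-$version.tar.gz")
  (enterprise, openSource) // paste both into nix/canton-sources.json (step 4)
}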

Makefile

Lines changed: 4 additions & 4 deletions
@@ -22,7 +22,7 @@ wallet-payments-dar := ${SPLICE_ROOT}/daml/splice-wallet-payments/.daml/dist/spl
 build: $(app-bundle) $(load-tester) cluster/build ## Build the Splice app bundle and ensure cluster scripts are ready to run.
 
 $(app-bundle): $(canton-amulet-dar) $(wallet-payments-dar)
-	sbt --batch bundle
+	sbt --client --batch bundle
 
 $(canton-amulet-dar) $(wallet-payments-dar) &:
 	sbt --batch 'splice-amulet-daml'/damlBuild 'splice-wallet-payments-daml'/damlBuild
@@ -31,7 +31,7 @@ $(load-tester):
 	cd "${SPLICE_ROOT}/load-tester" && npm ci && npm run build
 
 $(party-allocator):
-	sbt --batch 'party-allocator/npmBuild'
+	sbt --client --batch 'party-allocator/npmBuild'
 
 .PHONY: update-expected
 update-expected: cluster/pulumi/update-expected
@@ -52,13 +52,13 @@ clean: cluster/clean
 
 .PHONY: clean-all
 clean-all: clean ## Completely clean all local build state, including model codegen.
-	sbt --batch clean-splice
+	sbt --client --batch clean-splice
 	find . -type d -name ".daml" -exec rm -rf {} +
 	find . -type d -name "target" -exec rm -rf {} +
 
 .PHONY: format
 format: cluster/format ## Automatically reformat and apply scalaFix to source code
-	sbt --batch formatFix
+	sbt --client --batch formatFix
 
 .PHONY: help
 help: ## Show list of available make targets

apps/app/src/main/scala/org/lfdecentralizedtrust/splice/console/SpliceInstanceReference.scala

Lines changed: 20 additions & 2 deletions
@@ -16,6 +16,7 @@ import org.lfdecentralizedtrust.splice.environment.{
 }
 import org.lfdecentralizedtrust.splice.util.HasHealth
 import com.daml.scalautil.Statement.discard
+import com.digitalasset.canton.LfPackageId
 import com.digitalasset.canton.admin.api.client.commands.GrpcAdminCommand
 import com.digitalasset.canton.admin.api.client.data.NodeStatus
 import com.digitalasset.canton.config.NonNegativeDuration
@@ -40,6 +41,8 @@ import com.digitalasset.canton.logging.NamedLoggerFactory
 import com.digitalasset.canton.participant.config.RemoteParticipantConfig
 import com.digitalasset.canton.synchronizer.sequencer.config.RemoteSequencerConfig
 import com.digitalasset.canton.topology.NodeIdentity
+import com.digitalasset.canton.topology.admin.grpc.TopologyStoreId
+import com.digitalasset.canton.topology.transaction.VettedPackage
 
 import java.io.File
 import scala.concurrent.ExecutionContext
@@ -250,10 +253,25 @@ class ParticipantClientReference(
   def upload_dar_unless_exists(
       path: String
   ): Unit = {
-    val hash = DarParser.assertReadArchiveFromFile(new File(path)).main.getHash
+    val dar = DarParser.assertReadArchiveFromFile(new File(path))
+    val hash = dar.main.getHash
     val pkgs = this.ledger_api.packages.list()
     if (!pkgs.map(_.packageId).contains(hash)) {
-      discard[String](this.dars.upload(path))
+      discard[String](this.dars.upload(path, vetAllPackages = false))
+      val connected = this.synchronizers.list_connected()
+      if (connected.isEmpty) {
+        logger.error(s"Trying to vet $path on ${this.id} but not connected to any synchronizer")
+      }
+      connected.foreach { sync =>
+        this.topology.vetted_packages.propose_delta(
+          this.id,
+          adds = dar.all
+            .map(p => LfPackageId.assertFromString(p.getHash))
+            .distinct
+            .map(VettedPackage(_, None, None)),
+          store = TopologyStoreId.Synchronizer(sync.synchronizerId),
+        )
+      }
     }
   }
 }
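
With this change, upload_dar_unless_exists uploads the DAR with vetAllPackages = false and then proposes a VettedPackage entry (with no validity bounds) for every package in the DAR on each connected synchronizer, logging an error when the participant is not connected anywhere. A minimal usage sketch from a Splice console script; the participant reference name and DAR path are placeholders, not names taken from this commit:

// Run inside a Splice console script; `aliceParticipant` and the path are illustrative.
val darPath = "daml/splice-amulet/.daml/dist/splice-amulet-current.dar"

// Skips the upload if the DAR's main package id is already present; otherwise uploads
// unvetted and proposes vetting deltas on every connected synchronizer.
aliceParticipant.upload_dar_unless_exists(darPath)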

apps/app/src/main/scala/org/lfdecentralizedtrust/splice/console/SvAppReference.scala

Lines changed: 2 additions & 2 deletions
@@ -343,10 +343,10 @@ class SvAppBackendReference(
   }
 
   @Help.Summary("Prepare a validator onboarding and return an onboarding secret (via admin API)")
-  def prepareValidatorOnboarding(expiresIn: FiniteDuration): String =
+  def prepareValidatorOnboarding(expiresIn: FiniteDuration, partyHint: Option[String]): String =
     consoleEnvironment.run {
       httpCommand(
-        HttpSvAdminAppClient.PrepareValidatorOnboarding(expiresIn)
+        HttpSvAdminAppClient.PrepareValidatorOnboarding(expiresIn, partyHint)
       )
     }
 
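
prepareValidatorOnboarding now threads an optional partyHint through to the admin API. A usage sketch follows; the backend reference name and hint value are placeholders, and reading the hint as the onboarded validator's party hint is an assumption based only on the parameter name:

import scala.concurrent.duration._

// `sv1Backend` is a placeholder SV backend reference from a console script.
val secret: String =
  sv1Backend.prepareValidatorOnboarding(
    expiresIn = 1.hour,
    partyHint = Some("my-validator-1"), // assumed: pre-selects the onboarded validator's party hint
  )
// Pass None to keep the previous behaviour of not supplying a hint.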

apps/app/src/main/scala/org/lfdecentralizedtrust/splice/environment/ScanApps.scala

Lines changed: 0 additions & 3 deletions
@@ -10,12 +10,10 @@ import com.digitalasset.canton.concurrent.ExecutionContextIdlenessExecutorServic
 import com.digitalasset.canton.config.ProcessingTimeout
 import com.digitalasset.canton.environment.ManagedNodes
 import com.digitalasset.canton.logging.NamedLoggerFactory
-import com.digitalasset.canton.resource.DbMigrationsFactory
 
 /** Scan app instances. */
 class ScanApps(
     create: (String, ScanAppBackendConfig) => ScanAppBootstrap,
-    migrationsFactory: DbMigrationsFactory,
     _timeouts: ProcessingTimeout,
     configs: Map[String, ScanAppBackendConfig],
     parametersFor: String => SharedSpliceAppParameters,
@@ -30,7 +28,6 @@
       ScanAppBootstrap,
     ](
       create,
-      migrationsFactory,
       _timeouts,
       configs,
       parametersFor,

apps/app/src/main/scala/org/lfdecentralizedtrust/splice/environment/SpliceConsoleEnvironment.scala

Lines changed: 20 additions & 9 deletions
@@ -11,6 +11,8 @@ import com.digitalasset.canton.console.{
   NodeReferences,
   StandardConsoleOutput,
 }
+import com.digitalasset.daml.lf.data.Ref.PackageId
+import com.digitalasset.daml.lf.typesig.PackageSignature
 import org.apache.pekko.actor.ActorSystem
 import org.lfdecentralizedtrust.splice.config.SpliceConfig
 import org.lfdecentralizedtrust.splice.console.*
@@ -31,16 +33,11 @@ class SpliceConsoleEnvironment(
 
   override type Config = SpliceConfig
 
-  val packageSignatures = ResourceTemplateDecoder.loadPackageSignaturesFromResources(
-    DarResources.TokenStandard.allPackageResources.flatMap(_.all) ++
-      DarResources.splitwell.all ++
-      DarResources.validatorLifecycle.all ++
-      DarResources.wallet.all ++
-      DarResources.amulet.all ++
-      DarResources.dsoGovernance.all
-  )
   implicit val actorSystem: ActorSystem = environment.actorSystem
-  val templateDecoder = new ResourceTemplateDecoder(packageSignatures, environment.loggerFactory)
+  private lazy val templateDecoder = new ResourceTemplateDecoder(
+    SpliceConsoleEnvironment.packageSignatures,
+    environment.loggerFactory,
+  )
 
   lazy val httpCommandRunner: ConsoleHttpCommandRunner = new ConsoleHttpCommandRunner(
     environment,
@@ -341,3 +338,17 @@ class SpliceConsoleEnvironment(
     case _ => 5
   }
 }
+
+object SpliceConsoleEnvironment {
+
+  private lazy val packageSignatures: Map[PackageId, PackageSignature] =
+    ResourceTemplateDecoder.loadPackageSignaturesFromResources(
+      DarResources.TokenStandard.allPackageResources.flatMap(_.all) ++
+        DarResources.splitwell.all ++
+        DarResources.validatorLifecycle.all ++
+        DarResources.wallet.all ++
+        DarResources.amulet.all ++
+        DarResources.dsoGovernance.all
+    )
+
+}
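
The package signatures move from an eagerly-built instance val into a private lazy val on the new companion object, so the signatures are decoded at most once per JVM, shared by every SpliceConsoleEnvironment, and not loaded until a decoder is first needed. A generic sketch of that pattern, with illustrative names only:

// Illustrative only: expensive immutable data in a lazily-initialised companion val,
// shared across all instances and not computed until first use.
final class SignatureCache private (val signatures: Map[String, Int])

object SignatureCache {
  // computed at most once per JVM, on first access
  private lazy val shared: Map[String, Int] = {
    println("decoding signatures once per JVM")
    Map("pkg-a" -> 1, "pkg-b" -> 2)
  }

  def apply(): SignatureCache = new SignatureCache(shared)
}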
